Oct 09 13:25:23 crc systemd[1]: Starting Kubernetes Kubelet...
Oct 09 13:25:23 crc restorecon[4725]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as customized by admin to system_u:object_r:container_file_t:s0:c442,c857
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Oct 09 13:25:23 crc restorecon[4725]:
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: 
/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 09 13:25:23 crc 
restorecon[4725]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 09 13:25:23 crc restorecon[4725]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 09 13:25:23 crc restorecon[4725]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 09 13:25:23 crc 
restorecon[4725]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset
as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 09 13:25:23 crc restorecon[4725]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c5,c6 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 
13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 13:25:23 crc 
restorecon[4725]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 09 13:25:23 crc restorecon[4725]: 
/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917 Oct 09 13:25:23 crc restorecon[4725]: 
/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 09 13:25:23 crc restorecon[4725]: 
/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c37,c572 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 09 13:25:23 crc restorecon[4725]: 
/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 
13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]:
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:23 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 09 13:25:24 crc restorecon[4725]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 09 13:25:24 crc restorecon[4725]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 09 13:25:24 crc restorecon[4725]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0 Oct 09 13:25:24 crc kubenswrapper[4762]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Oct 09 13:25:24 crc kubenswrapper[4762]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version. Oct 09 13:25:24 crc kubenswrapper[4762]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Oct 09 13:25:24 crc kubenswrapper[4762]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. 
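[Annotation, not part of the captured log] The Flag deprecation warnings around this point all defer to the file passed via the kubelet's --config flag. As a minimal sketch of what generating such a config file could look like, the Go program below emits a KubeletConfiguration-shaped document; the struct is a hand-rolled stand-in for the kubelet.config.k8s.io/v1beta1 schema, and the field names and sample values are assumptions to verify against the kubelet version in use, not values taken from this log.

package main

import (
	"encoding/json"
	"fmt"
)

// Taint is a local stand-in for the taint shape used by registerWithTaints.
type Taint struct {
	Key    string `json:"key"`
	Effect string `json:"effect"`
}

// KubeletConfiguration mirrors only the handful of fields relevant to the
// deprecated flags warned about above; it is not the real
// k8s.io/kubelet/config/v1beta1 type.
type KubeletConfiguration struct {
	Kind       string `json:"kind"`
	APIVersion string `json:"apiVersion"`
	// Assumed config-file equivalent of --container-runtime-endpoint.
	ContainerRuntimeEndpoint string `json:"containerRuntimeEndpoint,omitempty"`
	// Assumed equivalent of --volume-plugin-dir.
	VolumePluginDir string `json:"volumePluginDir,omitempty"`
	// Assumed equivalent of --register-with-taints.
	RegisterWithTaints []Taint `json:"registerWithTaints,omitempty"`
	// Assumed equivalent of --system-reserved.
	SystemReserved map[string]string `json:"systemReserved,omitempty"`
	// The --minimum-container-ttl-duration warning points at eviction
	// settings instead; evictionHard is one such setting.
	EvictionHard map[string]string `json:"evictionHard,omitempty"`
}

func main() {
	cfg := KubeletConfiguration{
		Kind:                     "KubeletConfiguration",
		APIVersion:               "kubelet.config.k8s.io/v1beta1",
		ContainerRuntimeEndpoint: "unix:///var/run/crio/crio.sock", // illustrative endpoint
		VolumePluginDir:          "/etc/kubernetes/kubelet-plugins/volume/exec",
		RegisterWithTaints:       []Taint{{Key: "example.com/taint", Effect: "NoSchedule"}},
		SystemReserved:           map[string]string{"cpu": "500m", "memory": "1Gi"},
		EvictionHard:             map[string]string{"memory.available": "100Mi"},
	}
	out, err := json.MarshalIndent(cfg, "", "  ")
	if err != nil {
		panic(err)
	}
	fmt.Println(string(out)) // write this to the path given to --config
}

Emitting JSON assumes the kubelet's config loader accepts JSON as well as YAML; that detail, like the field names, should be checked against the running kubelet. [End annotation]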
Oct 09 13:25:24 crc kubenswrapper[4762]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI.
Oct 09 13:25:24 crc kubenswrapper[4762]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.688585 4762 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime"
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.694961 4762 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.694995 4762 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.695008 4762 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.695020 4762 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.695029 4762 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.695039 4762 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.695048 4762 feature_gate.go:330] unrecognized feature gate: NewOLM
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.695057 4762 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.695066 4762 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.695074 4762 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.695082 4762 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.695091 4762 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.695099 4762 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.695108 4762 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.695116 4762 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.695139 4762 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.695175 4762 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.695187 4762 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.695197 4762 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.695206 4762 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.695215 4762 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.695223 4762 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.695232 4762 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.695241 4762 feature_gate.go:330] unrecognized feature gate: OVNObservability
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.695249 4762 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.695260 4762 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.695271 4762 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.695280 4762 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.695288 4762 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.695298 4762 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.695306 4762 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.695315 4762 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.695323 4762 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.695332 4762 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.695340 4762 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.695348 4762 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.695357 4762 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.695365 4762 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.695373 4762 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.695382 4762 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.695394 4762 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.695404 4762 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.695413 4762 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.695421 4762 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.695430 4762 feature_gate.go:330] unrecognized feature gate: Example
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.695438 4762 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.695446 4762 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.695454 4762 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.695463 4762 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.695472 4762 feature_gate.go:330] unrecognized feature gate: PinnedImages
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.695482 4762 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.695491 4762 feature_gate.go:330] unrecognized feature gate: SignatureStores
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.695500 4762 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.695508 4762 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.695517 4762 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.695528 4762 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.695536 4762 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.695546 4762 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.695555 4762 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.695563 4762 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.695572 4762 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.695580 4762 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.695588 4762 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.695597 4762 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.695605 4762 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.695614 4762 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.695622 4762 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.695659 4762 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.695670 4762 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.695679 4762 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.695690 4762 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.695855 4762 flags.go:64] FLAG: --address="0.0.0.0"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.695872 4762 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.695889 4762 flags.go:64] FLAG: --anonymous-auth="true"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.695902 4762 flags.go:64] FLAG: --application-metrics-count-limit="100"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.695914 4762 flags.go:64] FLAG: --authentication-token-webhook="false"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.695925 4762 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.695937 4762 flags.go:64] FLAG: --authorization-mode="AlwaysAllow"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.695949 4762 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.695960 4762 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.695970 4762 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.695982 4762 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.695992 4762 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.696002 4762 flags.go:64] FLAG: --cgroup-driver="cgroupfs"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.696012 4762 flags.go:64] FLAG: --cgroup-root=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.696021 4762 flags.go:64] FLAG: --cgroups-per-qos="true"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.696031 4762 flags.go:64] FLAG: --client-ca-file=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.696040 4762 flags.go:64] FLAG: --cloud-config=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.696050 4762 flags.go:64] FLAG: --cloud-provider=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.696060 4762 flags.go:64] FLAG: --cluster-dns="[]"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.696071 4762 flags.go:64] FLAG: --cluster-domain=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.696081 4762 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.696091 4762 flags.go:64] FLAG: --config-dir=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.696101 4762 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.696112 4762 flags.go:64] FLAG: --container-log-max-files="5"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.696123 4762 flags.go:64] FLAG: --container-log-max-size="10Mi"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.696133 4762 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.696144 4762 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.696154 4762 flags.go:64] FLAG: --containerd-namespace="k8s.io"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.696163 4762 flags.go:64] FLAG: --contention-profiling="false"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.696173 4762 flags.go:64] FLAG: --cpu-cfs-quota="true"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.696182 4762 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.696193 4762 flags.go:64] FLAG: --cpu-manager-policy="none"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.696204 4762 flags.go:64] FLAG: --cpu-manager-policy-options=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.696215 4762 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.696225 4762 flags.go:64] FLAG: --enable-controller-attach-detach="true"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.696235 4762 flags.go:64] FLAG: --enable-debugging-handlers="true"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.696244 4762 flags.go:64] FLAG: --enable-load-reader="false"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.696254 4762 flags.go:64] FLAG: --enable-server="true"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.696264 4762 flags.go:64] FLAG: --enforce-node-allocatable="[pods]"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.696276 4762 flags.go:64] FLAG: --event-burst="100"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.696286 4762 flags.go:64] FLAG: --event-qps="50"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.696297 4762 flags.go:64] FLAG: --event-storage-age-limit="default=0"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.696307 4762 flags.go:64] FLAG: --event-storage-event-limit="default=0"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.696316 4762 flags.go:64] FLAG: --eviction-hard=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.696328 4762 flags.go:64] FLAG: --eviction-max-pod-grace-period="0"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.696338 4762 flags.go:64] FLAG: --eviction-minimum-reclaim=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.696348 4762 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.696358 4762 flags.go:64] FLAG: --eviction-soft=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.696367 4762 flags.go:64] FLAG: --eviction-soft-grace-period=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.696376 4762 flags.go:64] FLAG: --exit-on-lock-contention="false"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.696388 4762 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.696397 4762 flags.go:64] FLAG: --experimental-mounter-path=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.696407 4762 flags.go:64] FLAG: --fail-cgroupv1="false"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.696417 4762 flags.go:64] FLAG: --fail-swap-on="true"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.696427 4762 flags.go:64] FLAG: --feature-gates=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.696439 4762 flags.go:64] FLAG: --file-check-frequency="20s"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.696449 4762 flags.go:64] FLAG: --global-housekeeping-interval="1m0s"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.696459 4762 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.696469 4762 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.696479 4762 flags.go:64] FLAG: --healthz-port="10248"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.696489 4762 flags.go:64] FLAG: --help="false"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.696498 4762 flags.go:64] FLAG: --hostname-override=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.696509 4762 flags.go:64] FLAG: --housekeeping-interval="10s"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.696519 4762 flags.go:64] FLAG: --http-check-frequency="20s"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.696529 4762 flags.go:64] FLAG: --image-credential-provider-bin-dir=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.696538 4762 flags.go:64] FLAG: --image-credential-provider-config=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.696548 4762 flags.go:64] FLAG: --image-gc-high-threshold="85"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.696558 4762 flags.go:64] FLAG: --image-gc-low-threshold="80"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.696569 4762 flags.go:64] FLAG: --image-service-endpoint=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.696578 4762 flags.go:64] FLAG: --kernel-memcg-notification="false"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.696588 4762 flags.go:64] FLAG: --kube-api-burst="100"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.696598 4762 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.696608 4762 flags.go:64] FLAG: --kube-api-qps="50"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.696618 4762 flags.go:64] FLAG: --kube-reserved=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.696627 4762 flags.go:64] FLAG: --kube-reserved-cgroup=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.696665 4762 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.696675 4762 flags.go:64] FLAG: --kubelet-cgroups=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.696685 4762 flags.go:64] FLAG: --local-storage-capacity-isolation="true"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.696695 4762 flags.go:64] FLAG: --lock-file=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.696704 4762 flags.go:64] FLAG: --log-cadvisor-usage="false"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.696714 4762 flags.go:64] FLAG: --log-flush-frequency="5s"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.696723 4762 flags.go:64] FLAG: --log-json-info-buffer-size="0"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.696738 4762 flags.go:64] FLAG: --log-json-split-stream="false"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.696748 4762 flags.go:64] FLAG: --log-text-info-buffer-size="0"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.696758 4762 flags.go:64] FLAG: --log-text-split-stream="false"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.696768 4762 flags.go:64] FLAG: --logging-format="text"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.696778 4762 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.696788 4762 flags.go:64] FLAG: --make-iptables-util-chains="true"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.696798 4762 flags.go:64] FLAG: --manifest-url=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.696808 4762 flags.go:64] FLAG: --manifest-url-header=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.696820 4762 flags.go:64] FLAG: --max-housekeeping-interval="15s"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.696830 4762 flags.go:64] FLAG: --max-open-files="1000000"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.696841 4762 flags.go:64] FLAG: --max-pods="110"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.696852 4762 flags.go:64] FLAG: --maximum-dead-containers="-1"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.696861 4762 flags.go:64] FLAG: --maximum-dead-containers-per-container="1"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.696871 4762 flags.go:64] FLAG: --memory-manager-policy="None"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.696881 4762 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.696891 4762 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.696901 4762 flags.go:64] FLAG: --node-ip="192.168.126.11"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.696911 4762 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.696931 4762 flags.go:64] FLAG: --node-status-max-images="50"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.696941 4762 flags.go:64] FLAG: --node-status-update-frequency="10s"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.696951 4762 flags.go:64] FLAG: --oom-score-adj="-999"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.696960 4762 flags.go:64] FLAG: --pod-cidr=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.696972 4762 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.696985 4762 flags.go:64] FLAG: --pod-manifest-path=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.696994 4762 flags.go:64] FLAG: --pod-max-pids="-1"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.697004 4762 flags.go:64] FLAG: --pods-per-core="0"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.697014 4762 flags.go:64] FLAG: --port="10250"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.697024 4762 flags.go:64] FLAG: --protect-kernel-defaults="false"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.697033 4762 flags.go:64] FLAG: --provider-id=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.697043 4762 flags.go:64] FLAG: --qos-reserved=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.697052 4762 flags.go:64] FLAG: --read-only-port="10255"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.697062 4762 flags.go:64] FLAG: --register-node="true"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.697072 4762 flags.go:64] FLAG: --register-schedulable="true"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.697082 4762 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.697097 4762 flags.go:64] FLAG: --registry-burst="10"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.697107 4762 flags.go:64] FLAG: --registry-qps="5"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.697116 4762 flags.go:64] FLAG: --reserved-cpus=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.697126 4762 flags.go:64] FLAG: --reserved-memory=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.697138 4762 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.697147 4762 flags.go:64] FLAG: --root-dir="/var/lib/kubelet"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.697157 4762 flags.go:64] FLAG: --rotate-certificates="false"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.697167 4762 flags.go:64] FLAG: --rotate-server-certificates="false"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.697177 4762 flags.go:64] FLAG: --runonce="false"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.697187 4762 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.697197 4762 flags.go:64] FLAG: --runtime-request-timeout="2m0s"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.697207 4762 flags.go:64] FLAG: --seccomp-default="false"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.697216 4762 flags.go:64] FLAG: --serialize-image-pulls="true"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.697226 4762 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.697236 4762 flags.go:64] FLAG: --storage-driver-db="cadvisor"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.697246 4762 flags.go:64] FLAG: --storage-driver-host="localhost:8086"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.697256 4762 flags.go:64] FLAG: --storage-driver-password="root"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.697266 4762 flags.go:64] FLAG: --storage-driver-secure="false"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.697275 4762 flags.go:64] FLAG: --storage-driver-table="stats"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.697285 4762 flags.go:64] FLAG: --storage-driver-user="root"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.697294 4762 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.697304 4762 flags.go:64] FLAG: --sync-frequency="1m0s"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.697314 4762 flags.go:64] FLAG: --system-cgroups=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.697323 4762 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.697340 4762 flags.go:64] FLAG: --system-reserved-cgroup=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.697349 4762 flags.go:64] FLAG: --tls-cert-file=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.697359 4762 flags.go:64] FLAG: --tls-cipher-suites="[]"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.697370 4762 flags.go:64] FLAG: --tls-min-version=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.697380 4762 flags.go:64] FLAG: --tls-private-key-file=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.697390 4762 flags.go:64] FLAG: --topology-manager-policy="none"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.697400 4762 flags.go:64] FLAG: --topology-manager-policy-options=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.697410 4762 flags.go:64] FLAG: --topology-manager-scope="container"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.697419 4762 flags.go:64] FLAG: --v="2"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.697432 4762 flags.go:64] FLAG: --version="false"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.697445 4762 flags.go:64] FLAG: --vmodule=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.697456 4762 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.697466 4762 flags.go:64] FLAG: --volume-stats-agg-period="1m0s"
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.697727 4762 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.697739 4762 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.697750 4762 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.697761 4762 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.697771 4762 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.697781 4762 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.697790 4762 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.697799 4762 feature_gate.go:330] unrecognized feature gate: OVNObservability
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.697809 4762 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.697819 4762 feature_gate.go:330] unrecognized feature gate: SignatureStores
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.697829 4762 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.697839 4762 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.697847 4762 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.697859 4762 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.697871 4762 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.697880 4762 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.697889 4762 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.697898 4762 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.697907 4762 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.697918 4762 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.697929 4762 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.697938 4762 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.697947 4762 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.697958 4762 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.697976 4762 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.697986 4762 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.697995 4762 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.698003 4762 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.698011 4762 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.698023 4762 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.698032 4762 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.698040 4762 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.698048 4762 feature_gate.go:330] unrecognized feature gate: Example
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.698057 4762 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.698065 4762 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.698074 4762 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.698083 4762 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.698091 4762 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.698100 4762 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.698109 4762 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.698130 4762 feature_gate.go:330] unrecognized feature gate: NewOLM
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.698140 4762 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.698148 4762 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.698156 4762 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.698164 4762 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.698173 4762 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.698181 4762 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.698192 4762 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.698203 4762 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.698212 4762 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.698221 4762 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.698230 4762 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.698240 4762 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.698249 4762 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.698258 4762 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.698267 4762 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.698279 4762 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.698288 4762 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.698296 4762 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.698317 4762 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.698326 4762 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.698339 4762 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.698348 4762 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.698358 4762 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.698367 4762 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.698375 4762 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.698386 4762 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.698394 4762 feature_gate.go:330] unrecognized feature gate: PinnedImages
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.698406 4762 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.698416 4762 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.698424 4762 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.699313 4762 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.709323 4762 server.go:491] "Kubelet version" kubeletVersion="v1.31.5"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.709364 4762 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.709469 4762 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.709483 4762 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.709496 4762 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.709504 4762 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.709511 4762 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.709519 4762 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.709526 4762 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.709533 4762 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.709540 4762 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.709547 4762 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.709553 4762 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.709560 4762 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.709567 4762 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.709574 4762 feature_gate.go:330] unrecognized feature gate: PinnedImages
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.709581 4762 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.709588 4762 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.709594 4762 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.709601 4762 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.709607 4762 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.709614 4762 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.709620 4762 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.709627 4762 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.709661 4762 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.709672 4762 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.709679 4762 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.709687 4762 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.709694 4762 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.709702 4762 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.709709 4762 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.709717 4762 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.709725 4762 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.709735 4762 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.709742 4762 feature_gate.go:330] unrecognized feature gate: SignatureStores
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.709751 4762 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.709761 4762 feature_gate.go:330] unrecognized feature gate: NewOLM
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.709769 4762 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.709778 4762 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.709785 4762 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.709792 4762 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.709800 4762 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.709807 4762 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.709814 4762 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.709820 4762 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.709828 4762 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.709834 4762 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.709841 4762 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.709847 4762 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.709854 4762 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.709860 4762 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.709867 4762 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.709875 4762 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.709884 4762 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.709914 4762 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.709922 4762 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.709928 4762 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.709935 4762 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.709941 4762 feature_gate.go:330] unrecognized feature gate: Example
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.709948 4762 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.709954 4762 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.709961 4762 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.709968 4762 feature_gate.go:330] unrecognized feature gate: OVNObservability
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.709974 4762 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.709982 4762 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.709989 4762 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.709995 4762 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.710002 4762 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.710009 4762 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.710015 4762 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.710022 4762 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.710029 4762 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.710036 4762 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.710049 4762 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.710234 4762 feature_gate.go:330] unrecognized feature gate: PinnedImages
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.710247 4762 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.710255 4762 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.710264 4762 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.710271 4762 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.710278 4762 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.710285 4762 feature_gate.go:330] unrecognized feature gate: NewOLM
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.710291 4762 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.710298 4762 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.710307 4762 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.710316 4762 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.710323 4762 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.710330 4762 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.710338 4762 feature_gate.go:330] unrecognized feature gate: Example
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.710346 4762 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.710353 4762 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.710360 4762 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.710366 4762 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.710373 4762 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.710379 4762 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.710386 4762 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.710392 4762 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.710400 4762 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.710408 4762 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.710417 4762 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.710423 4762 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.710430 4762 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.710437 4762 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.710443 4762 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.710450 4762 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.710457 4762 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.710463 4762 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.710470 4762 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.710476 4762 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.710484 4762 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.710491 4762 feature_gate.go:330] unrecognized feature gate: OVNObservability
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.710498 4762 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.710504 4762 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.710511 4762 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.710518 4762 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.710526 4762 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.710533 4762 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.710540 4762 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.710547 4762 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.710554 4762 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.710561 4762 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.710567 4762 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.710574 4762 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.710580 4762 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.710587 4762 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.710593 4762 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.710600 4762 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.710606 4762 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.710612 4762 feature_gate.go:330] unrecognized feature gate: SignatureStores
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.710620 4762 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.710627 4762 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.710692 4762 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.710700 4762 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.710708 4762 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.710714 4762 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.710723 4762 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.710731 4762 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.710739 4762 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.710746 4762 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.710754 4762 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.710762 4762 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.710770 4762 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.710777 4762 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.710784 4762 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.710791 4762 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.710801 4762 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.710813 4762 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.711937 4762 server.go:940] "Client rotation is on, will bootstrap in background"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.716575 4762 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.716730 4762 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem".
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.718574 4762 server.go:997] "Starting client certificate rotation"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.718608 4762 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.718778 4762 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2026-01-17 07:38:25.153682627 +0000 UTC
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.718860 4762 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 2394h13m0.434825006s for next certificate rotation
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.757886 4762 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.760700 4762 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.779332 4762 log.go:25] "Validated CRI v1 runtime API"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.822900 4762 log.go:25] "Validated CRI v1 image API"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.825327 4762 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.831000 4762 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2025-10-09-13-20-22-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3]
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.831069 4762 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:41 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:43 fsType:tmpfs blockSize:0}]
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.858860 4762 manager.go:217] Machine: {Timestamp:2025-10-09 13:25:24.854830457 +0000 UTC m=+0.628621576 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2799998 MemoryCapacity:33654124544 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:cb0479c9-186e-453b-880a-de1db201ede6 BootID:9a48ddb1-1645-4cf1-ba92-96ea5fd03a1b Filesystems:[{Device:/run/user/1000 DeviceMajor:0 DeviceMinor:41 Capacity:3365408768 Type:vfs Inodes:821633 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:43 Capacity:1073741824 Type:vfs Inodes:4108169 HasInodes:true} {Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827060224 Type:vfs Inodes:4108169 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:6730825728 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827064320 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:f4:a9:6e Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:f4:a9:6e Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:5d:6e:16 Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:8c:fb:b8 Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:53:ed:70 Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:e4:e5:d5 Speed:-1 Mtu:1496} {Name:ens7.23 MacAddress:52:54:00:d2:1e:5a Speed:-1 Mtu:1496} {Name:eth10 MacAddress:1a:99:3c:81:51:ec Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:26:ed:96:72:4d:7d Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654124544 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:32768 Type:Data Level:1} {Id:10 Size:32768 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:32768 Type:Data Level:1} {Id:11 Size:32768 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:32768 Type:Data Level:1} {Id:8 Size:32768 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:32768 Type:Data Level:1} {Id:9 Size:32768 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None}
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.859258 4762 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available.
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.859554 4762 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:}
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.861808 4762 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.862135 4762 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[]
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.862225 4762 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2}
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.862532 4762 topology_manager.go:138] "Creating topology manager with none policy"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.862550 4762 container_manager_linux.go:303] "Creating device plugin manager"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.863034 4762 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.863967 4762 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.864193 4762 state_mem.go:36] "Initialized new in-memory state store"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.864627 4762 server.go:1245] "Using root directory" path="/var/lib/kubelet"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.868099 4762 kubelet.go:418] "Attempting to sync node with API server"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.868135 4762 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.868224 4762 file.go:69] "Watching path" path="/etc/kubernetes/manifests"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.868257 4762 kubelet.go:324] "Adding apiserver pod source"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.868281 4762 apiserver.go:42] "Waiting for node sync before watching apiserver pods"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.872788 4762 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.874409 4762 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem".
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.876695 4762 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.151:6443: connect: connection refused
Oct 09 13:25:24 crc kubenswrapper[4762]: E1009 13:25:24.876918 4762 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.151:6443: connect: connection refused" logger="UnhandledError"
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.876711 4762 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.151:6443: connect: connection refused
Oct 09 13:25:24 crc kubenswrapper[4762]: E1009 13:25:24.877150 4762 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.151:6443: connect: connection refused" logger="UnhandledError"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.877487 4762 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.879688 4762 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.879731 4762 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.879746 4762 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.879761 4762 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.879782 4762 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.879795 4762 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.879808 4762 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.879829 4762 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.879845 4762 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.879859 4762 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.879877 4762 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.879890 4762 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.884574 4762 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.885585 4762 server.go:1280] "Started kubelet"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.887129 4762 server.go:163] "Starting to listen" address="0.0.0.0" port=10250
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.887166 4762 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10
Oct 09 13:25:24 crc systemd[1]: Started Kubernetes Kubelet.
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.888371 4762 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.151:6443: connect: connection refused
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.888568 4762 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.895010 4762 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.895079 4762 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer"
Oct 09 13:25:24 crc kubenswrapper[4762]: E1009 13:25:24.901151 4762 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.151:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.186cd585c3b104a0 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-10-09 13:25:24.885546144 +0000 UTC m=+0.659337223,LastTimestamp:2025-10-09 13:25:24.885546144 +0000 UTC m=+0.659337223,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.902903 4762 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-28 16:41:21.54165271 +0000 UTC
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.903027 4762 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 1923h15m56.638642303s for next certificate rotation
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.903821 4762 volume_manager.go:287] "The desired_state_of_world populator starts"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.903871 4762 volume_manager.go:289] "Starting Kubelet Volume Manager"
Oct 09 13:25:24 crc kubenswrapper[4762]: E1009 13:25:24.904215 4762 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.904318 4762 server.go:460] "Adding debug handlers to kubelet server"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.904444 4762 desired_state_of_world_populator.go:146] "Desired state populator starts to run"
Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.907336 4762 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.151:6443: connect: connection refused
Oct 09 13:25:24 crc kubenswrapper[4762]: E1009 13:25:24.907611 4762 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.151:6443: connect: connection refused" logger="UnhandledError"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.907778 4762 factory.go:55] Registering systemd factory
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.908093 4762 factory.go:221] Registration of the systemd container factory successfully
Oct 09 13:25:24 crc kubenswrapper[4762]: E1009 13:25:24.908761 4762 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.151:6443: connect: connection refused" interval="200ms"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.909135 4762 factory.go:153] Registering CRI-O factory
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.909175 4762 factory.go:221] Registration of the crio container factory successfully
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.909298 4762 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.909354 4762 factory.go:103] Registering Raw factory
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.909380 4762 manager.go:1196] Started watching for new ooms in manager
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.910412 4762 manager.go:319] Starting recovery of all containers
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.917532 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.917619 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.917664 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.917683 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.917702 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.917739 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.917757 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.917775 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.917794 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.917810 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.917827 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.917845 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.917862 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.917905 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.917923 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.924845 4762 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount"
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.924900 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.924919 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.924978 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.924999 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.925021 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.925038 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.925063 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.925082 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.925094 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.925110 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.925124 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.925140 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.925155 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.925170 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.925186 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.925200 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.925211 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.925225 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.925237 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.925247 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.925271 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.925286 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.925298 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.925314 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.925326 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.925339 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.925378 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.925398 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.925411 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.925447 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.925462 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.925475 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.925488 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.925500 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.925514 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.925529 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.925542 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.925559 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.925573 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.925594 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.925608 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.925622 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.925660 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.925673 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.925703 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.925716 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.925728 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.925742 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.925754 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.925766 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.925777 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.925789 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.925801 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.925815 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.925830 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.925842 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.925854 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.925867 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.925880 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.925901 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.925916 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.925930 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.925950 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.925967 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.925980 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.925992 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.926005 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.926019 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.926033 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.926044 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.926058 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.926071 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.926082 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.926094 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.926106 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.926119 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.926132 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.926151 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.926164 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.926177 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.926191 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.926207 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.926220 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.926234 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.926246 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.926260 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.926273 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.926287 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.926300 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.926317 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.926331 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.926355 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.926373 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.926399 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.926420 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.926433 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.926446 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.926465 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.926477 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.926489 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.926501 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.926517 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.926532 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.926544 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.926590 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.926605 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.926624 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.926660 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.926673 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.926686 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.926698 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.926710 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.926727 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.926738 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.926751 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.926769 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.926782 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.926793 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.926805 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.926819 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.926832 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.926846 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.926858 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.926881 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.926893 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.926910 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.926923 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext=""
Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.926935 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd"
volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext="" Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.926947 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext="" Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.926960 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext="" Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.926979 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext="" Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.926997 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext="" Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.927012 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext="" Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.927029 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.927046 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext="" Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.927063 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext="" Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.927074 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext="" Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.927873 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext="" Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.927898 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" 
volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext="" Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.927933 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext="" Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.927946 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext="" Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.927966 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext="" Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.928019 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext="" Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.928040 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext="" Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.928060 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext="" Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.928074 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext="" Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.928093 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext="" Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.928117 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext="" Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.928132 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext="" Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.928158 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" 
volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext="" Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.928215 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext="" Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.928231 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext="" Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.929843 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext="" Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.929884 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext="" Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.929959 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext="" Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.929982 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext="" Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.929995 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext="" Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.930035 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext="" Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.930060 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext="" Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.930075 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext="" Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.930092 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" 
volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext="" Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.930152 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext="" Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.930205 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext="" Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.930227 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext="" Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.930299 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext="" Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.930328 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext="" Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.930343 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext="" Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.930400 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext="" Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.930485 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext="" Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.930503 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext="" Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.930558 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext="" Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.930574 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" 
volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext="" Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.930659 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext="" Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.930683 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext="" Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.930750 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.930774 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext="" Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.930789 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext="" Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.930879 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext="" Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.930933 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext="" Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.930973 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext="" Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.931006 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext="" Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.931358 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext="" Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.931375 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" 
volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext="" Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.933081 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext="" Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.933967 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext="" Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.934023 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext="" Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.934070 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext="" Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.934161 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext="" Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.934309 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext="" Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.937940 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext="" Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.938052 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext="" Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.938107 4762 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext="" Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.938129 4762 reconstruct.go:97] "Volume reconstruction finished" Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.938151 4762 reconciler.go:26] "Reconciler: start to sync state" Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.947275 4762 manager.go:324] Recovery completed Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.960943 4762 kubelet_network_linux.go:50] "Initialized iptables rules." 
protocol="IPv4" Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.962305 4762 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.963773 4762 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv6" Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.963857 4762 status_manager.go:217] "Starting to sync pod status with apiserver" Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.963908 4762 kubelet.go:2335] "Starting kubelet main sync loop" Oct 09 13:25:24 crc kubenswrapper[4762]: E1009 13:25:24.964020 4762 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.965209 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.965263 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.965281 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:24 crc kubenswrapper[4762]: W1009 13:25:24.966072 4762 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.151:6443: connect: connection refused Oct 09 13:25:24 crc kubenswrapper[4762]: E1009 13:25:24.966229 4762 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.151:6443: connect: connection refused" logger="UnhandledError" Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.966491 4762 cpu_manager.go:225] "Starting CPU manager" policy="none" Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.966531 4762 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.966561 4762 state_mem.go:36] "Initialized new in-memory state store" Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.982167 4762 policy_none.go:49] "None policy: Start" Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.983179 4762 memory_manager.go:170] "Starting memorymanager" policy="None" Oct 09 13:25:24 crc kubenswrapper[4762]: I1009 13:25:24.983212 4762 state_mem.go:35] "Initializing new in-memory state store" Oct 09 13:25:25 crc kubenswrapper[4762]: E1009 13:25:25.005296 4762 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.040906 4762 manager.go:334] "Starting Device Plugin manager" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.041013 4762 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.041070 4762 server.go:79] "Starting device plugin registration server" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.041761 4762 eviction_manager.go:189] "Eviction manager: starting control loop" Oct 09 13:25:25 
crc kubenswrapper[4762]: I1009 13:25:25.041813 4762 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.042067 4762 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.042207 4762 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.042220 4762 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Oct 09 13:25:25 crc kubenswrapper[4762]: E1009 13:25:25.049792 4762 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.064512 4762 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc","openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc"] Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.064802 4762 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.066342 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.066381 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.066395 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.066567 4762 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.066807 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.066880 4762 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.068008 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.068054 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.068080 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.068354 4762 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.068787 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.068874 4762 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.068946 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.069016 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.069047 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.069914 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.069947 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.069969 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.070130 4762 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.070336 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.070395 4762 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.070855 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.070925 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.070992 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.071252 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.071285 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.071330 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.071450 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.071488 4762 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.071497 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.071529 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:25 crc 
kubenswrapper[4762]: I1009 13:25:25.071604 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.071668 4762 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.072787 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.072808 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.072817 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.072964 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.072985 4762 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.073757 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.073775 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.073783 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.074221 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.074263 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.074280 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:25 crc kubenswrapper[4762]: E1009 13:25:25.110215 4762 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.151:6443: connect: connection refused" interval="400ms" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.140358 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.140422 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.140456 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: 
\"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.140745 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.140786 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.140937 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.141026 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.141087 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.141136 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.141172 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.141224 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.141262 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: 
\"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.141312 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.141357 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.141402 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.141956 4762 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.143930 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.143979 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.143989 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.144017 4762 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 09 13:25:25 crc kubenswrapper[4762]: E1009 13:25:25.144479 4762 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.151:6443: connect: connection refused" node="crc" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.243053 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.243119 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.243140 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.243161 4762 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.243184 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.243199 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.243221 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.243242 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.243260 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.243274 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.243288 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.243301 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.243313 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 
13:25:25.243335 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.243348 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.243735 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.243804 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.243829 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.243854 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.243878 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.243900 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.243922 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.243967 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " 
pod="openshift-etcd/etcd-crc" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.243991 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.244015 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.244017 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.244055 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.244095 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.244039 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.244152 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.344663 4762 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.346294 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.346352 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.346371 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.346407 4762 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 09 13:25:25 crc kubenswrapper[4762]: E1009 13:25:25.347120 4762 kubelet_node_status.go:99] "Unable to register node with API server" err="Post 
\"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.151:6443: connect: connection refused" node="crc" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.393925 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.400204 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.419064 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.440746 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.447741 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Oct 09 13:25:25 crc kubenswrapper[4762]: W1009 13:25:25.456547 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-014dcceb71b62c4fee4c9d742ac54c48b1c881b0d5504b02f4d596db4125b7a8 WatchSource:0}: Error finding container 014dcceb71b62c4fee4c9d742ac54c48b1c881b0d5504b02f4d596db4125b7a8: Status 404 returned error can't find the container with id 014dcceb71b62c4fee4c9d742ac54c48b1c881b0d5504b02f4d596db4125b7a8 Oct 09 13:25:25 crc kubenswrapper[4762]: W1009 13:25:25.457155 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-1af93c9f53af57a80cc675874ab8d906e704ff7cad4dd0ae0aa3130c06b025a3 WatchSource:0}: Error finding container 1af93c9f53af57a80cc675874ab8d906e704ff7cad4dd0ae0aa3130c06b025a3: Status 404 returned error can't find the container with id 1af93c9f53af57a80cc675874ab8d906e704ff7cad4dd0ae0aa3130c06b025a3 Oct 09 13:25:25 crc kubenswrapper[4762]: W1009 13:25:25.463261 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-3068345fd70ddb6ee2948f6e5a8da00f8015f706d62165dee4ecf115cfe18088 WatchSource:0}: Error finding container 3068345fd70ddb6ee2948f6e5a8da00f8015f706d62165dee4ecf115cfe18088: Status 404 returned error can't find the container with id 3068345fd70ddb6ee2948f6e5a8da00f8015f706d62165dee4ecf115cfe18088 Oct 09 13:25:25 crc kubenswrapper[4762]: W1009 13:25:25.472031 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-b5ce8598935a2b5eb47977d354987dcb833543947edb072df8a56baaf7b87dfa WatchSource:0}: Error finding container b5ce8598935a2b5eb47977d354987dcb833543947edb072df8a56baaf7b87dfa: Status 404 returned error can't find the container with id b5ce8598935a2b5eb47977d354987dcb833543947edb072df8a56baaf7b87dfa Oct 09 13:25:25 crc kubenswrapper[4762]: W1009 13:25:25.476210 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-adfae494da64ca96ad12607c3f10a4a7838e5dba63a6821de694f27ce9b5d8b3 WatchSource:0}: Error 
finding container adfae494da64ca96ad12607c3f10a4a7838e5dba63a6821de694f27ce9b5d8b3: Status 404 returned error can't find the container with id adfae494da64ca96ad12607c3f10a4a7838e5dba63a6821de694f27ce9b5d8b3 Oct 09 13:25:25 crc kubenswrapper[4762]: E1009 13:25:25.511683 4762 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.151:6443: connect: connection refused" interval="800ms" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.747401 4762 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.748728 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.748773 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.748782 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.748809 4762 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 09 13:25:25 crc kubenswrapper[4762]: E1009 13:25:25.749336 4762 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.151:6443: connect: connection refused" node="crc" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.889459 4762 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.151:6443: connect: connection refused Oct 09 13:25:25 crc kubenswrapper[4762]: W1009 13:25:25.928904 4762 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.151:6443: connect: connection refused Oct 09 13:25:25 crc kubenswrapper[4762]: E1009 13:25:25.928994 4762 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.151:6443: connect: connection refused" logger="UnhandledError" Oct 09 13:25:25 crc kubenswrapper[4762]: W1009 13:25:25.965450 4762 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.151:6443: connect: connection refused Oct 09 13:25:25 crc kubenswrapper[4762]: E1009 13:25:25.965528 4762 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.151:6443: connect: connection refused" logger="UnhandledError" Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.968580 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" 
event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"3068345fd70ddb6ee2948f6e5a8da00f8015f706d62165dee4ecf115cfe18088"} Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.969442 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"014dcceb71b62c4fee4c9d742ac54c48b1c881b0d5504b02f4d596db4125b7a8"} Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.970088 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"1af93c9f53af57a80cc675874ab8d906e704ff7cad4dd0ae0aa3130c06b025a3"} Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.971901 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"b5ce8598935a2b5eb47977d354987dcb833543947edb072df8a56baaf7b87dfa"} Oct 09 13:25:25 crc kubenswrapper[4762]: I1009 13:25:25.973822 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"adfae494da64ca96ad12607c3f10a4a7838e5dba63a6821de694f27ce9b5d8b3"} Oct 09 13:25:26 crc kubenswrapper[4762]: W1009 13:25:26.163011 4762 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.151:6443: connect: connection refused Oct 09 13:25:26 crc kubenswrapper[4762]: E1009 13:25:26.163342 4762 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.151:6443: connect: connection refused" logger="UnhandledError" Oct 09 13:25:26 crc kubenswrapper[4762]: W1009 13:25:26.284515 4762 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.151:6443: connect: connection refused Oct 09 13:25:26 crc kubenswrapper[4762]: E1009 13:25:26.284674 4762 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.151:6443: connect: connection refused" logger="UnhandledError" Oct 09 13:25:26 crc kubenswrapper[4762]: E1009 13:25:26.313282 4762 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.151:6443: connect: connection refused" interval="1.6s" Oct 09 13:25:26 crc kubenswrapper[4762]: I1009 13:25:26.549918 4762 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 13:25:26 crc kubenswrapper[4762]: I1009 13:25:26.552032 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 09 13:25:26 crc kubenswrapper[4762]: I1009 13:25:26.552075 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:26 crc kubenswrapper[4762]: I1009 13:25:26.552087 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:26 crc kubenswrapper[4762]: I1009 13:25:26.552116 4762 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 09 13:25:26 crc kubenswrapper[4762]: E1009 13:25:26.552547 4762 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.151:6443: connect: connection refused" node="crc" Oct 09 13:25:26 crc kubenswrapper[4762]: I1009 13:25:26.889701 4762 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.151:6443: connect: connection refused Oct 09 13:25:26 crc kubenswrapper[4762]: I1009 13:25:26.983083 4762 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="6b0cc5e4351c64f1a4f07f8ec87ea48ddab393d4ac64228e8fbf20d3259fb630" exitCode=0 Oct 09 13:25:26 crc kubenswrapper[4762]: I1009 13:25:26.983164 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"6b0cc5e4351c64f1a4f07f8ec87ea48ddab393d4ac64228e8fbf20d3259fb630"} Oct 09 13:25:26 crc kubenswrapper[4762]: I1009 13:25:26.983295 4762 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 13:25:26 crc kubenswrapper[4762]: I1009 13:25:26.984731 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:26 crc kubenswrapper[4762]: I1009 13:25:26.984776 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:26 crc kubenswrapper[4762]: I1009 13:25:26.984799 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:26 crc kubenswrapper[4762]: I1009 13:25:26.987102 4762 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 13:25:26 crc kubenswrapper[4762]: I1009 13:25:26.988347 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:26 crc kubenswrapper[4762]: I1009 13:25:26.988389 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:26 crc kubenswrapper[4762]: I1009 13:25:26.988393 4762 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="9be8d76182b45f0dad1e30345a25c5134d579273a7a2d7fdf757051ad650e5dd" exitCode=0 Oct 09 13:25:26 crc kubenswrapper[4762]: I1009 13:25:26.988479 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"9be8d76182b45f0dad1e30345a25c5134d579273a7a2d7fdf757051ad650e5dd"} Oct 09 13:25:26 crc kubenswrapper[4762]: I1009 13:25:26.988409 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" 
Oct 09 13:25:26 crc kubenswrapper[4762]: I1009 13:25:26.988561 4762 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 13:25:26 crc kubenswrapper[4762]: I1009 13:25:26.989935 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:26 crc kubenswrapper[4762]: I1009 13:25:26.989971 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:26 crc kubenswrapper[4762]: I1009 13:25:26.989989 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:26 crc kubenswrapper[4762]: I1009 13:25:26.991464 4762 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="21b9ead59eaf920882c1503023924b621376395c684e0c7490490b77abfc18aa" exitCode=0 Oct 09 13:25:26 crc kubenswrapper[4762]: I1009 13:25:26.991575 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"21b9ead59eaf920882c1503023924b621376395c684e0c7490490b77abfc18aa"} Oct 09 13:25:26 crc kubenswrapper[4762]: I1009 13:25:26.991591 4762 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 13:25:26 crc kubenswrapper[4762]: I1009 13:25:26.992614 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:26 crc kubenswrapper[4762]: I1009 13:25:26.992707 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:26 crc kubenswrapper[4762]: I1009 13:25:26.992730 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:26 crc kubenswrapper[4762]: I1009 13:25:26.995004 4762 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="a492a7a956d54533afbd3869796642235d0c8ca621c550c14eeab5e988fc4364" exitCode=0 Oct 09 13:25:26 crc kubenswrapper[4762]: I1009 13:25:26.995076 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"a492a7a956d54533afbd3869796642235d0c8ca621c550c14eeab5e988fc4364"} Oct 09 13:25:26 crc kubenswrapper[4762]: I1009 13:25:26.995107 4762 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 13:25:26 crc kubenswrapper[4762]: I1009 13:25:26.996617 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:26 crc kubenswrapper[4762]: I1009 13:25:26.996710 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:26 crc kubenswrapper[4762]: I1009 13:25:26.996734 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:26 crc kubenswrapper[4762]: I1009 13:25:26.999464 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"dd9c69d0be5e859f86da1745bdf82f003681f064e2580bfd454e6ba875bdcb61"} Oct 09 13:25:26 crc 
kubenswrapper[4762]: I1009 13:25:26.999511 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"1706f7f512083d1da015da3c7cd09c6aa4d497b83f8dfcd4ce0e8e966aa00b37"} Oct 09 13:25:26 crc kubenswrapper[4762]: I1009 13:25:26.999533 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"78a7e23eb6d5024d626963a06cf5790fcd6c7c17c82c823b2650c55273e427fa"} Oct 09 13:25:26 crc kubenswrapper[4762]: I1009 13:25:26.999553 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"f807722a8b6059afed30f7f1fd32bcc168b8bf9d5eee02d74a42ab70ae5ff048"} Oct 09 13:25:26 crc kubenswrapper[4762]: I1009 13:25:26.999605 4762 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 13:25:27 crc kubenswrapper[4762]: I1009 13:25:27.000940 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:27 crc kubenswrapper[4762]: I1009 13:25:27.001000 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:27 crc kubenswrapper[4762]: I1009 13:25:27.001025 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:27 crc kubenswrapper[4762]: W1009 13:25:27.760042 4762 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.151:6443: connect: connection refused Oct 09 13:25:27 crc kubenswrapper[4762]: E1009 13:25:27.760156 4762 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.151:6443: connect: connection refused" logger="UnhandledError" Oct 09 13:25:27 crc kubenswrapper[4762]: I1009 13:25:27.888843 4762 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.151:6443: connect: connection refused Oct 09 13:25:27 crc kubenswrapper[4762]: E1009 13:25:27.915070 4762 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.151:6443: connect: connection refused" interval="3.2s" Oct 09 13:25:28 crc kubenswrapper[4762]: I1009 13:25:28.004921 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"281873dc615f940d39a13cb0a18a2eb34eb7de3f9773d8845183edeb89d430f5"} Oct 09 13:25:28 crc kubenswrapper[4762]: I1009 13:25:28.004968 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" 
event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"eb2071dd369674ca2de7de56dd1250c763b8733d72889b60eff864774dc3d81b"} Oct 09 13:25:28 crc kubenswrapper[4762]: I1009 13:25:28.004977 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"e44b6f2021a1a4ccd714f86443c7cc235b9d77cd455e68f7e042281ff0917569"} Oct 09 13:25:28 crc kubenswrapper[4762]: I1009 13:25:28.004985 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"e9ff1efe69d256b491a039e5f35442c087ce3b52fc7abf98b338e24c3e020b99"} Oct 09 13:25:28 crc kubenswrapper[4762]: I1009 13:25:28.007191 4762 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="004fd5e1204b81d04c962a262ed8496bb94c8d3ac524afc6089b019afddd710f" exitCode=0 Oct 09 13:25:28 crc kubenswrapper[4762]: I1009 13:25:28.007241 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"004fd5e1204b81d04c962a262ed8496bb94c8d3ac524afc6089b019afddd710f"} Oct 09 13:25:28 crc kubenswrapper[4762]: I1009 13:25:28.007343 4762 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 13:25:28 crc kubenswrapper[4762]: I1009 13:25:28.010802 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:28 crc kubenswrapper[4762]: I1009 13:25:28.010845 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:28 crc kubenswrapper[4762]: I1009 13:25:28.010853 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:28 crc kubenswrapper[4762]: I1009 13:25:28.018760 4762 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 13:25:28 crc kubenswrapper[4762]: I1009 13:25:28.018784 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"d5e61326d740880d50b889eb69f101fcc0e40a557ec6d8e76ce770ec7d456fff"} Oct 09 13:25:28 crc kubenswrapper[4762]: I1009 13:25:28.021555 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:28 crc kubenswrapper[4762]: I1009 13:25:28.021592 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:28 crc kubenswrapper[4762]: I1009 13:25:28.021604 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:28 crc kubenswrapper[4762]: I1009 13:25:28.026556 4762 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 13:25:28 crc kubenswrapper[4762]: I1009 13:25:28.026623 4762 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 13:25:28 crc kubenswrapper[4762]: I1009 13:25:28.029290 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" 
event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"4d3cc3e07b5fc9069f7faa521fa94efde90d9c79940876a93849c44a14327e56"} Oct 09 13:25:28 crc kubenswrapper[4762]: I1009 13:25:28.029426 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"7ad35bed43586dfec502056ecb5226049a7fb25461c2774fb5377102fd2ce85d"} Oct 09 13:25:28 crc kubenswrapper[4762]: I1009 13:25:28.029446 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"fb1fed96810d93e59d24d69fb8a2eb9974e8a5e524465daea3ff2cca16f7226c"} Oct 09 13:25:28 crc kubenswrapper[4762]: I1009 13:25:28.030624 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:28 crc kubenswrapper[4762]: I1009 13:25:28.030695 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:28 crc kubenswrapper[4762]: I1009 13:25:28.030709 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:28 crc kubenswrapper[4762]: I1009 13:25:28.030749 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:28 crc kubenswrapper[4762]: I1009 13:25:28.030814 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:28 crc kubenswrapper[4762]: I1009 13:25:28.030834 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:28 crc kubenswrapper[4762]: I1009 13:25:28.153317 4762 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 13:25:28 crc kubenswrapper[4762]: I1009 13:25:28.154251 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:28 crc kubenswrapper[4762]: I1009 13:25:28.154280 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:28 crc kubenswrapper[4762]: I1009 13:25:28.154289 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:28 crc kubenswrapper[4762]: I1009 13:25:28.154310 4762 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 09 13:25:28 crc kubenswrapper[4762]: E1009 13:25:28.154719 4762 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.151:6443: connect: connection refused" node="crc" Oct 09 13:25:28 crc kubenswrapper[4762]: W1009 13:25:28.161381 4762 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.151:6443: connect: connection refused Oct 09 13:25:28 crc kubenswrapper[4762]: E1009 13:25:28.161558 4762 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get 
\"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.151:6443: connect: connection refused" logger="UnhandledError" Oct 09 13:25:29 crc kubenswrapper[4762]: I1009 13:25:29.034626 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"c9179f90a1a9a3c70467429b0471320ccf51b67f27c4d28d22ebc477cedab17d"} Oct 09 13:25:29 crc kubenswrapper[4762]: I1009 13:25:29.034784 4762 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 13:25:29 crc kubenswrapper[4762]: I1009 13:25:29.036190 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:29 crc kubenswrapper[4762]: I1009 13:25:29.036245 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:29 crc kubenswrapper[4762]: I1009 13:25:29.036266 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:29 crc kubenswrapper[4762]: I1009 13:25:29.039857 4762 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="7078aa7267a6d6436d943691ce8993f4427412e4adbf7d769309410f0e3bf71d" exitCode=0 Oct 09 13:25:29 crc kubenswrapper[4762]: I1009 13:25:29.039915 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"7078aa7267a6d6436d943691ce8993f4427412e4adbf7d769309410f0e3bf71d"} Oct 09 13:25:29 crc kubenswrapper[4762]: I1009 13:25:29.040048 4762 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 13:25:29 crc kubenswrapper[4762]: I1009 13:25:29.040048 4762 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 09 13:25:29 crc kubenswrapper[4762]: I1009 13:25:29.040223 4762 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 13:25:29 crc kubenswrapper[4762]: I1009 13:25:29.040049 4762 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 13:25:29 crc kubenswrapper[4762]: I1009 13:25:29.041723 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:29 crc kubenswrapper[4762]: I1009 13:25:29.041776 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:29 crc kubenswrapper[4762]: I1009 13:25:29.041794 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:29 crc kubenswrapper[4762]: I1009 13:25:29.041903 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:29 crc kubenswrapper[4762]: I1009 13:25:29.041946 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:29 crc kubenswrapper[4762]: I1009 13:25:29.041964 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:29 crc kubenswrapper[4762]: I1009 13:25:29.042585 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 09 13:25:29 crc kubenswrapper[4762]: I1009 13:25:29.042626 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:29 crc kubenswrapper[4762]: I1009 13:25:29.042688 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:29 crc kubenswrapper[4762]: I1009 13:25:29.811969 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 09 13:25:30 crc kubenswrapper[4762]: I1009 13:25:30.045383 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"232dc8813083fa0b44df5f73c0ab49aa56d62eda083f31ebec6c543227e48394"} Oct 09 13:25:30 crc kubenswrapper[4762]: I1009 13:25:30.045435 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"5f970257c84754803de27650f6dcdba8432098673157e828a41dfb4659af2454"} Oct 09 13:25:30 crc kubenswrapper[4762]: I1009 13:25:30.045450 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"025917894eab9a67ab97ec3d33e3e2b9e1313925a893b0e18026e1e0f155e8cb"} Oct 09 13:25:30 crc kubenswrapper[4762]: I1009 13:25:30.045461 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"516131bb20004a365fbe66980711fd290d4eb46c4834e139efcda05b39195ffa"} Oct 09 13:25:30 crc kubenswrapper[4762]: I1009 13:25:30.045472 4762 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 13:25:30 crc kubenswrapper[4762]: I1009 13:25:30.045557 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 09 13:25:30 crc kubenswrapper[4762]: I1009 13:25:30.046391 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:30 crc kubenswrapper[4762]: I1009 13:25:30.046415 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:30 crc kubenswrapper[4762]: I1009 13:25:30.046424 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:31 crc kubenswrapper[4762]: I1009 13:25:31.058061 4762 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 13:25:31 crc kubenswrapper[4762]: I1009 13:25:31.058415 4762 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 13:25:31 crc kubenswrapper[4762]: I1009 13:25:31.059064 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"92eac49b615ef93fb3fa9544df792c0a1f6b175cc385469b69d9e4a176fdca06"} Oct 09 13:25:31 crc kubenswrapper[4762]: I1009 13:25:31.059450 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:31 crc kubenswrapper[4762]: I1009 13:25:31.059482 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 09 13:25:31 crc kubenswrapper[4762]: I1009 13:25:31.059492 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:31 crc kubenswrapper[4762]: I1009 13:25:31.059987 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:31 crc kubenswrapper[4762]: I1009 13:25:31.060023 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:31 crc kubenswrapper[4762]: I1009 13:25:31.060036 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:31 crc kubenswrapper[4762]: I1009 13:25:31.355613 4762 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 13:25:31 crc kubenswrapper[4762]: I1009 13:25:31.356875 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:31 crc kubenswrapper[4762]: I1009 13:25:31.356920 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:31 crc kubenswrapper[4762]: I1009 13:25:31.356931 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:31 crc kubenswrapper[4762]: I1009 13:25:31.356960 4762 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 09 13:25:32 crc kubenswrapper[4762]: I1009 13:25:32.061015 4762 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 13:25:32 crc kubenswrapper[4762]: I1009 13:25:32.062319 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:32 crc kubenswrapper[4762]: I1009 13:25:32.062363 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:32 crc kubenswrapper[4762]: I1009 13:25:32.062380 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:32 crc kubenswrapper[4762]: I1009 13:25:32.119991 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 09 13:25:32 crc kubenswrapper[4762]: I1009 13:25:32.120145 4762 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 13:25:32 crc kubenswrapper[4762]: I1009 13:25:32.121573 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:32 crc kubenswrapper[4762]: I1009 13:25:32.121611 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:32 crc kubenswrapper[4762]: I1009 13:25:32.121626 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:32 crc kubenswrapper[4762]: I1009 13:25:32.684410 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 09 13:25:32 crc kubenswrapper[4762]: I1009 13:25:32.684720 4762 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 13:25:32 crc kubenswrapper[4762]: I1009 13:25:32.686347 4762 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:32 crc kubenswrapper[4762]: I1009 13:25:32.686430 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:32 crc kubenswrapper[4762]: I1009 13:25:32.686465 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:33 crc kubenswrapper[4762]: I1009 13:25:33.635809 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 09 13:25:33 crc kubenswrapper[4762]: I1009 13:25:33.636057 4762 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 13:25:33 crc kubenswrapper[4762]: I1009 13:25:33.637567 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:33 crc kubenswrapper[4762]: I1009 13:25:33.637629 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:33 crc kubenswrapper[4762]: I1009 13:25:33.637693 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:33 crc kubenswrapper[4762]: I1009 13:25:33.796048 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 09 13:25:33 crc kubenswrapper[4762]: I1009 13:25:33.796347 4762 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 13:25:33 crc kubenswrapper[4762]: I1009 13:25:33.797887 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:33 crc kubenswrapper[4762]: I1009 13:25:33.798012 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:33 crc kubenswrapper[4762]: I1009 13:25:33.798088 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:33 crc kubenswrapper[4762]: I1009 13:25:33.802554 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 09 13:25:34 crc kubenswrapper[4762]: I1009 13:25:34.066569 4762 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 13:25:34 crc kubenswrapper[4762]: I1009 13:25:34.066715 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 09 13:25:34 crc kubenswrapper[4762]: I1009 13:25:34.067849 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:34 crc kubenswrapper[4762]: I1009 13:25:34.067896 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:34 crc kubenswrapper[4762]: I1009 13:25:34.067915 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:34 crc kubenswrapper[4762]: I1009 13:25:34.184295 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 09 13:25:34 crc kubenswrapper[4762]: I1009 13:25:34.599259 4762 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc" Oct 09 13:25:34 crc kubenswrapper[4762]: I1009 13:25:34.599556 4762 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 13:25:34 crc kubenswrapper[4762]: I1009 13:25:34.601491 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:34 crc kubenswrapper[4762]: I1009 13:25:34.601574 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:34 crc kubenswrapper[4762]: I1009 13:25:34.601586 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:35 crc kubenswrapper[4762]: E1009 13:25:35.049943 4762 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Oct 09 13:25:35 crc kubenswrapper[4762]: I1009 13:25:35.069468 4762 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 13:25:35 crc kubenswrapper[4762]: I1009 13:25:35.070732 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:35 crc kubenswrapper[4762]: I1009 13:25:35.070810 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:35 crc kubenswrapper[4762]: I1009 13:25:35.070848 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:35 crc kubenswrapper[4762]: I1009 13:25:35.685087 4762 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body= Oct 09 13:25:35 crc kubenswrapper[4762]: I1009 13:25:35.685190 4762 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 09 13:25:36 crc kubenswrapper[4762]: I1009 13:25:36.071385 4762 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 13:25:36 crc kubenswrapper[4762]: I1009 13:25:36.073088 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:36 crc kubenswrapper[4762]: I1009 13:25:36.073120 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:36 crc kubenswrapper[4762]: I1009 13:25:36.073131 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:36 crc kubenswrapper[4762]: I1009 13:25:36.075590 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 09 13:25:37 crc kubenswrapper[4762]: I1009 13:25:37.073936 4762 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 13:25:37 crc kubenswrapper[4762]: 
I1009 13:25:37.074679 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:37 crc kubenswrapper[4762]: I1009 13:25:37.074714 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:37 crc kubenswrapper[4762]: I1009 13:25:37.074727 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:38 crc kubenswrapper[4762]: I1009 13:25:38.384080 4762 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Oct 09 13:25:38 crc kubenswrapper[4762]: I1009 13:25:38.384138 4762 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Oct 09 13:25:38 crc kubenswrapper[4762]: W1009 13:25:38.438336 4762 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": net/http: TLS handshake timeout Oct 09 13:25:38 crc kubenswrapper[4762]: I1009 13:25:38.438462 4762 trace.go:236] Trace[1058234016]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (09-Oct-2025 13:25:28.436) (total time: 10001ms): Oct 09 13:25:38 crc kubenswrapper[4762]: Trace[1058234016]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (13:25:38.438) Oct 09 13:25:38 crc kubenswrapper[4762]: Trace[1058234016]: [10.001492097s] [10.001492097s] END Oct 09 13:25:38 crc kubenswrapper[4762]: E1009 13:25:38.438493 4762 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Oct 09 13:25:38 crc kubenswrapper[4762]: I1009 13:25:38.889891 4762 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": net/http: TLS handshake timeout Oct 09 13:25:38 crc kubenswrapper[4762]: W1009 13:25:38.978235 4762 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": net/http: TLS handshake timeout Oct 09 13:25:38 crc kubenswrapper[4762]: I1009 13:25:38.978332 4762 trace.go:236] Trace[257178820]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (09-Oct-2025 13:25:28.977) (total time: 10000ms): Oct 09 13:25:38 crc kubenswrapper[4762]: Trace[257178820]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": net/http: TLS handshake timeout 10000ms (13:25:38.978) Oct 09 
13:25:38 crc kubenswrapper[4762]: Trace[257178820]: [10.000989643s] [10.000989643s] END Oct 09 13:25:38 crc kubenswrapper[4762]: E1009 13:25:38.978355 4762 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Oct 09 13:25:39 crc kubenswrapper[4762]: I1009 13:25:39.043872 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc" Oct 09 13:25:39 crc kubenswrapper[4762]: I1009 13:25:39.044222 4762 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 13:25:39 crc kubenswrapper[4762]: I1009 13:25:39.045878 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:39 crc kubenswrapper[4762]: I1009 13:25:39.045925 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:39 crc kubenswrapper[4762]: I1009 13:25:39.045940 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:39 crc kubenswrapper[4762]: I1009 13:25:39.087829 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Oct 09 13:25:39 crc kubenswrapper[4762]: I1009 13:25:39.087955 4762 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 13:25:39 crc kubenswrapper[4762]: I1009 13:25:39.088798 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:39 crc kubenswrapper[4762]: I1009 13:25:39.088825 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:39 crc kubenswrapper[4762]: I1009 13:25:39.088834 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:39 crc kubenswrapper[4762]: I1009 13:25:39.101009 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Oct 09 13:25:39 crc kubenswrapper[4762]: I1009 13:25:39.389829 4762 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Oct 09 13:25:39 crc kubenswrapper[4762]: I1009 13:25:39.389908 4762 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Oct 09 13:25:39 crc kubenswrapper[4762]: I1009 13:25:39.395506 4762 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path 
\"/livez\"","reason":"Forbidden","details":{},"code":403} Oct 09 13:25:39 crc kubenswrapper[4762]: I1009 13:25:39.395624 4762 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Oct 09 13:25:39 crc kubenswrapper[4762]: I1009 13:25:39.817514 4762 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok Oct 09 13:25:39 crc kubenswrapper[4762]: [+]log ok Oct 09 13:25:39 crc kubenswrapper[4762]: [+]etcd ok Oct 09 13:25:39 crc kubenswrapper[4762]: [+]poststarthook/openshift.io-startkubeinformers ok Oct 09 13:25:39 crc kubenswrapper[4762]: [+]poststarthook/openshift.io-openshift-apiserver-reachable ok Oct 09 13:25:39 crc kubenswrapper[4762]: [+]poststarthook/openshift.io-oauth-apiserver-reachable ok Oct 09 13:25:39 crc kubenswrapper[4762]: [+]poststarthook/start-apiserver-admission-initializer ok Oct 09 13:25:39 crc kubenswrapper[4762]: [+]poststarthook/quota.openshift.io-clusterquotamapping ok Oct 09 13:25:39 crc kubenswrapper[4762]: [+]poststarthook/openshift.io-api-request-count-filter ok Oct 09 13:25:39 crc kubenswrapper[4762]: [+]poststarthook/generic-apiserver-start-informers ok Oct 09 13:25:39 crc kubenswrapper[4762]: [+]poststarthook/priority-and-fairness-config-consumer ok Oct 09 13:25:39 crc kubenswrapper[4762]: [+]poststarthook/priority-and-fairness-filter ok Oct 09 13:25:39 crc kubenswrapper[4762]: [+]poststarthook/storage-object-count-tracker-hook ok Oct 09 13:25:39 crc kubenswrapper[4762]: [+]poststarthook/start-apiextensions-informers ok Oct 09 13:25:39 crc kubenswrapper[4762]: [+]poststarthook/start-apiextensions-controllers ok Oct 09 13:25:39 crc kubenswrapper[4762]: [+]poststarthook/crd-informer-synced ok Oct 09 13:25:39 crc kubenswrapper[4762]: [+]poststarthook/start-system-namespaces-controller ok Oct 09 13:25:39 crc kubenswrapper[4762]: [+]poststarthook/start-cluster-authentication-info-controller ok Oct 09 13:25:39 crc kubenswrapper[4762]: [+]poststarthook/start-kube-apiserver-identity-lease-controller ok Oct 09 13:25:39 crc kubenswrapper[4762]: [+]poststarthook/start-kube-apiserver-identity-lease-garbage-collector ok Oct 09 13:25:39 crc kubenswrapper[4762]: [+]poststarthook/start-legacy-token-tracking-controller ok Oct 09 13:25:39 crc kubenswrapper[4762]: [+]poststarthook/start-service-ip-repair-controllers ok Oct 09 13:25:39 crc kubenswrapper[4762]: [-]poststarthook/rbac/bootstrap-roles failed: reason withheld Oct 09 13:25:39 crc kubenswrapper[4762]: [-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld Oct 09 13:25:39 crc kubenswrapper[4762]: [+]poststarthook/priority-and-fairness-config-producer ok Oct 09 13:25:39 crc kubenswrapper[4762]: [+]poststarthook/bootstrap-controller ok Oct 09 13:25:39 crc kubenswrapper[4762]: [+]poststarthook/aggregator-reload-proxy-client-cert ok Oct 09 13:25:39 crc kubenswrapper[4762]: [+]poststarthook/start-kube-aggregator-informers ok Oct 09 13:25:39 crc kubenswrapper[4762]: [+]poststarthook/apiservice-status-local-available-controller ok Oct 09 13:25:39 crc kubenswrapper[4762]: [+]poststarthook/apiservice-status-remote-available-controller ok Oct 09 13:25:39 crc kubenswrapper[4762]: [+]poststarthook/apiservice-registration-controller ok Oct 09 13:25:39 crc 
kubenswrapper[4762]: [+]poststarthook/apiservice-wait-for-first-sync ok Oct 09 13:25:39 crc kubenswrapper[4762]: [+]poststarthook/apiservice-discovery-controller ok Oct 09 13:25:39 crc kubenswrapper[4762]: [+]poststarthook/kube-apiserver-autoregistration ok Oct 09 13:25:39 crc kubenswrapper[4762]: [+]autoregister-completion ok Oct 09 13:25:39 crc kubenswrapper[4762]: [+]poststarthook/apiservice-openapi-controller ok Oct 09 13:25:39 crc kubenswrapper[4762]: [+]poststarthook/apiservice-openapiv3-controller ok Oct 09 13:25:39 crc kubenswrapper[4762]: livez check failed Oct 09 13:25:39 crc kubenswrapper[4762]: I1009 13:25:39.817600 4762 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 09 13:25:40 crc kubenswrapper[4762]: I1009 13:25:40.082282 4762 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 13:25:40 crc kubenswrapper[4762]: I1009 13:25:40.083555 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:40 crc kubenswrapper[4762]: I1009 13:25:40.083620 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:40 crc kubenswrapper[4762]: I1009 13:25:40.083663 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:42 crc kubenswrapper[4762]: I1009 13:25:42.398444 4762 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Oct 09 13:25:43 crc kubenswrapper[4762]: I1009 13:25:43.608377 4762 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Oct 09 13:25:44 crc kubenswrapper[4762]: E1009 13:25:44.385859 4762 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": context deadline exceeded" interval="6.4s" Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.387611 4762 trace.go:236] Trace[1235129951]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (09-Oct-2025 13:25:31.348) (total time: 13038ms): Oct 09 13:25:44 crc kubenswrapper[4762]: Trace[1235129951]: ---"Objects listed" error: 13038ms (13:25:44.387) Oct 09 13:25:44 crc kubenswrapper[4762]: Trace[1235129951]: [13.038650186s] [13.038650186s] END Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.387673 4762 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Oct 09 13:25:44 crc kubenswrapper[4762]: E1009 13:25:44.388845 4762 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc" Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.391084 4762 trace.go:236] Trace[1668902973]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (09-Oct-2025 13:25:33.611) (total time: 10779ms): Oct 09 13:25:44 crc kubenswrapper[4762]: Trace[1668902973]: ---"Objects listed" error: 10779ms (13:25:44.390) Oct 09 13:25:44 crc kubenswrapper[4762]: Trace[1668902973]: [10.779348351s] [10.779348351s] END Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.391111 4762 
Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.391111 4762 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160
Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.391519 4762 reconstruct.go:205] "DevicePaths of reconstructed volumes updated"
Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.443108 4762 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Liveness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:40404->192.168.126.11:17697: read: connection reset by peer" start-of-body=
Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.443188 4762 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:40404->192.168.126.11:17697: read: connection reset by peer"
Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.563898 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.572416 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.816563 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc"
Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.817178 4762 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body=
Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.817269 4762 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused"
Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.822838 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc"
Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.881956 4762 apiserver.go:52] "Watching apiserver"
Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.885968 4762 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66
Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.886409 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c","openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-network-operator/iptables-alerter-4ln5h","openshift-dns/node-resolver-2vkbh","openshift-kube-apiserver/kube-apiserver-crc","openshift-machine-config-operator/machine-config-daemon-5v6hv","openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-node-identity/network-node-identity-vrzqb"]
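The "Failed to ensure lease exists, will retry" error a few entries above is the kubelet timing out while renewing its node Lease in the kube-node-lease namespace, which is also why node registration is still being retried. A small client-go sketch (illustrative, not from this log) that inspects that Lease directly is below; the kubeconfig path is an assumption, while the namespace and node name "crc" come from the logged URL:

```go
// Inspect the node Lease the kubelet above fails to renew. The Get call
// mirrors the request in the logged URL:
//   .../apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc
package main

import (
	"context"
	"fmt"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	// Assumption: a kubeconfig with cluster-reader access at this path.
	cfg, err := clientcmd.BuildConfigFromFlags("", "/var/lib/kubelet/kubeconfig")
	if err != nil {
		panic(err)
	}
	cs, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		panic(err)
	}
	lease, err := cs.CoordinationV1().Leases("kube-node-lease").Get(
		context.TODO(), "crc", metav1.GetOptions{})
	if err != nil {
		// e.g. the same "context deadline exceeded" seen in the log.
		fmt.Println("lease lookup failed:", err)
		return
	}
	fmt.Println("holder:", *lease.Spec.HolderIdentity, "renewed:", lease.Spec.RenewTime)
}
```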
Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.886757 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.886853 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.886888 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 09 13:25:44 crc kubenswrapper[4762]: E1009 13:25:44.886907 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 09 13:25:44 crc kubenswrapper[4762]: E1009 13:25:44.886919 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.886965 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb"
Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.887018 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h"
Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.887078 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 09 13:25:44 crc kubenswrapper[4762]: E1009 13:25:44.887144 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.887273 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv"
Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.887300 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-2vkbh"
Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.891809 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-9wtqb"]
Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.892204 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides"
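The NetworkPluginNotReady errors above all trace back to the same condition: no CNI configuration file has been written to /etc/kubernetes/cni/net.d/ yet, so every pod that needs a network sandbox is skipped until the network operator (Multus/OVN on this cluster) drops its config there. A stdlib-only sketch (illustrative, not from the log) that checks for those files:

```go
// Check whether any CNI network config exists yet in the directory the
// kubelet error message names. An empty result matches the logged
// condition: NetworkReady=false reason:NetworkPluginNotReady.
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	const dir = "/etc/kubernetes/cni/net.d" // path taken from the log message
	entries, err := os.ReadDir(dir)
	if err != nil {
		fmt.Println("cannot read CNI conf dir:", err)
		return
	}
	found := 0
	for _, e := range entries {
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json":
			fmt.Println("CNI config present:", e.Name())
			found++
		}
	}
	if found == 0 {
		fmt.Println("no CNI configuration files; network plugin not ready")
	}
}
```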
Need to start a new one" pod="openshift-multus/multus-9wtqb" Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.892319 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.893080 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.893439 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.893469 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.893508 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.893518 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.893441 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.893934 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-n6lnd"] Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.895753 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.896083 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-n6lnd" Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.896656 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.897009 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.900570 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.901193 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.902808 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.903458 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.897094 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.903985 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.897281 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.900119 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.900283 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.905202 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.905351 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.906413 4762 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.906417 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.906667 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.922184 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.943105 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.955699 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.968676 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.977345 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.989881 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dd0d2d4c-667f-43da-8074-b6e14823b755\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78a7e23eb6d5024d626963a06cf5790fcd6c7c17c82c823b2650c55273e427fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f807722a8b6059afed30f7f1fd32bcc168b8bf9d5eee02d74a42ab70ae5ff048\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"v
olumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1706f7f512083d1da015da3c7cd09c6aa4d497b83f8dfcd4ce0e8e966aa00b37\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd9c69d0be5e859f86da1745bdf82f003681f064e2580bfd454e6ba875bdcb61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:25Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.996851 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.996889 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.996908 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod 
\"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.996926 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.996944 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.996984 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.997002 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.997032 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.997049 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.997065 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.997083 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.997117 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.997135 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.997150 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.997164 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.997206 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.997222 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.997238 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.997254 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.997269 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.997268 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.997282 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.997291 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.997324 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.997342 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.997358 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.997373 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.997389 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.997425 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.997439 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.997472 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod 
\"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.997488 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.997455 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.997522 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.997534 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.997544 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.997579 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.997602 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.997650 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.997676 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.997702 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.997726 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.997747 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.997771 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.997765 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.997800 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.997825 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.997850 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.997872 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.997895 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.997926 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.997950 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.997973 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.997997 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.998022 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: 
\"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.998047 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.998076 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.998099 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.998121 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.998144 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.998169 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.998192 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.998214 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.998237 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.998261 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.998283 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.998304 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.998328 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.998349 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.998395 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.998419 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.998441 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.998467 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.998491 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.998513 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.998534 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.998558 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.998581 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.998604 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.998925 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.998957 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 13:25:44 crc kubenswrapper[4762]: I1009 13:25:44.998983 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:44.999007 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:44.999030 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:44.999056 4762 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:44.999077 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:44.999100 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:44.999122 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:44.999148 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:44.999174 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:44.999201 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:44.999225 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:44.997825 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:44.998000 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). 
InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:44.998141 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:44.998170 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:44.998384 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:44.998431 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:44.998448 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:44.998571 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:44.998875 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:44.998932 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:44.998996 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:44.999073 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:44.999116 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:44.999147 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:44.999228 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:44.999257 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:44.999266 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). 
InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:44.999414 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:44.999411 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:44.999438 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:44.999587 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:44.999922 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:44.999921 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:44.999968 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.000017 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.000167 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.000292 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:44.999254 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.000562 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.000598 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.000603 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.000971 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.001419 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.001457 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.001511 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.001533 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.001556 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.001574 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.001590 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.001607 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: 
\"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.001660 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.001697 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.001722 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.001740 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.001761 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.001778 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.001795 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.001814 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.001831 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.001853 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started 
for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.001876 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.001898 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.001921 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.001944 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.001962 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.001981 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.001998 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.002021 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.002044 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Oct 09 13:25:45 crc 
kubenswrapper[4762]: I1009 13:25:45.002064 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.002091 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.002115 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.002139 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.002162 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.002187 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.002206 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.002223 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.002241 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.002259 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Oct 09 13:25:45 
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.002276 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") "
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.002296 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") "
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.002312 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") "
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.002330 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") "
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.002353 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.002374 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") "
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.002399 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") "
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.002425 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") "
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.002453 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.002475 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") "
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.002499 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") "
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.002525 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") "
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.002550 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.002572 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") "
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.002595 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") "
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.002619 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") "
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.002663 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") "
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.002689 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.002715 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") "
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.002738 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.002764 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") "
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.002791 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") "
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.002817 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") "
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.002839 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") "
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.002863 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") "
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.002891 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.002914 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") "
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.002951 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") "
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.002979 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") "
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.003005 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") "
\"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.003033 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.003058 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.003079 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.003101 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.003122 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.003143 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.003163 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.003183 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.003226 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.003245 4762 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.003262 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.003281 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.003299 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.003322 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.003341 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.003358 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.003375 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.003392 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.003409 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.003427 4762 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.003451 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.003476 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.003499 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.003522 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.003547 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.003570 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.003598 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.003622 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.003666 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.003692 4762 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.003721 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.003810 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.003838 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.003867 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.003894 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.003919 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.003946 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.003970 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.004035 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/263d57f9-b10b-4ce1-adad-774600b977d8-hosts-file\") pod \"node-resolver-2vkbh\" (UID: \"263d57f9-b10b-4ce1-adad-774600b977d8\") " 
pod="openshift-dns/node-resolver-2vkbh" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.004067 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/cf8b8ba7-96cd-4cdd-9925-94dd98242050-cni-binary-copy\") pod \"multus-additional-cni-plugins-n6lnd\" (UID: \"cf8b8ba7-96cd-4cdd-9925-94dd98242050\") " pod="openshift-multus/multus-additional-cni-plugins-n6lnd" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.004096 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/cf8b8ba7-96cd-4cdd-9925-94dd98242050-system-cni-dir\") pod \"multus-additional-cni-plugins-n6lnd\" (UID: \"cf8b8ba7-96cd-4cdd-9925-94dd98242050\") " pod="openshift-multus/multus-additional-cni-plugins-n6lnd" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.004123 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/c847aae6-277a-45dc-86d0-9b175f7e8177-host-run-k8s-cni-cncf-io\") pod \"multus-9wtqb\" (UID: \"c847aae6-277a-45dc-86d0-9b175f7e8177\") " pod="openshift-multus/multus-9wtqb" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.004150 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/c847aae6-277a-45dc-86d0-9b175f7e8177-host-run-multus-certs\") pod \"multus-9wtqb\" (UID: \"c847aae6-277a-45dc-86d0-9b175f7e8177\") " pod="openshift-multus/multus-9wtqb" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.004183 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.004210 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/366049a3-acf6-488c-9f93-4557528d6d14-proxy-tls\") pod \"machine-config-daemon-5v6hv\" (UID: \"366049a3-acf6-488c-9f93-4557528d6d14\") " pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.004235 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/c847aae6-277a-45dc-86d0-9b175f7e8177-host-var-lib-cni-multus\") pod \"multus-9wtqb\" (UID: \"c847aae6-277a-45dc-86d0-9b175f7e8177\") " pod="openshift-multus/multus-9wtqb" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.004264 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2kljt\" (UniqueName: \"kubernetes.io/projected/c847aae6-277a-45dc-86d0-9b175f7e8177-kube-api-access-2kljt\") pod \"multus-9wtqb\" (UID: \"c847aae6-277a-45dc-86d0-9b175f7e8177\") " pod="openshift-multus/multus-9wtqb" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.004294 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: 
\"kubernetes.io/host-path/c847aae6-277a-45dc-86d0-9b175f7e8177-host-run-netns\") pod \"multus-9wtqb\" (UID: \"c847aae6-277a-45dc-86d0-9b175f7e8177\") " pod="openshift-multus/multus-9wtqb" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.004316 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/c847aae6-277a-45dc-86d0-9b175f7e8177-hostroot\") pod \"multus-9wtqb\" (UID: \"c847aae6-277a-45dc-86d0-9b175f7e8177\") " pod="openshift-multus/multus-9wtqb" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.004342 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/c847aae6-277a-45dc-86d0-9b175f7e8177-cnibin\") pod \"multus-9wtqb\" (UID: \"c847aae6-277a-45dc-86d0-9b175f7e8177\") " pod="openshift-multus/multus-9wtqb" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.004364 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/c847aae6-277a-45dc-86d0-9b175f7e8177-cni-binary-copy\") pod \"multus-9wtqb\" (UID: \"c847aae6-277a-45dc-86d0-9b175f7e8177\") " pod="openshift-multus/multus-9wtqb" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.004388 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/cf8b8ba7-96cd-4cdd-9925-94dd98242050-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-n6lnd\" (UID: \"cf8b8ba7-96cd-4cdd-9925-94dd98242050\") " pod="openshift-multus/multus-additional-cni-plugins-n6lnd" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.004410 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/c847aae6-277a-45dc-86d0-9b175f7e8177-etc-kubernetes\") pod \"multus-9wtqb\" (UID: \"c847aae6-277a-45dc-86d0-9b175f7e8177\") " pod="openshift-multus/multus-9wtqb" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.004437 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.004462 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.004489 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.004514 4762 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/c847aae6-277a-45dc-86d0-9b175f7e8177-multus-daemon-config\") pod \"multus-9wtqb\" (UID: \"c847aae6-277a-45dc-86d0-9b175f7e8177\") " pod="openshift-multus/multus-9wtqb" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.004537 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/c847aae6-277a-45dc-86d0-9b175f7e8177-multus-socket-dir-parent\") pod \"multus-9wtqb\" (UID: \"c847aae6-277a-45dc-86d0-9b175f7e8177\") " pod="openshift-multus/multus-9wtqb" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.004556 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/c847aae6-277a-45dc-86d0-9b175f7e8177-host-var-lib-cni-bin\") pod \"multus-9wtqb\" (UID: \"c847aae6-277a-45dc-86d0-9b175f7e8177\") " pod="openshift-multus/multus-9wtqb" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.004581 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.004605 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/366049a3-acf6-488c-9f93-4557528d6d14-rootfs\") pod \"machine-config-daemon-5v6hv\" (UID: \"366049a3-acf6-488c-9f93-4557528d6d14\") " pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.004701 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.004728 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.004746 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.004764 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/c847aae6-277a-45dc-86d0-9b175f7e8177-host-var-lib-kubelet\") pod \"multus-9wtqb\" (UID: \"c847aae6-277a-45dc-86d0-9b175f7e8177\") " pod="openshift-multus/multus-9wtqb" 
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.004785 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.000627 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.004812 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/366049a3-acf6-488c-9f93-4557528d6d14-mcd-auth-proxy-config\") pod \"machine-config-daemon-5v6hv\" (UID: \"366049a3-acf6-488c-9f93-4557528d6d14\") " pod="openshift-machine-config-operator/machine-config-daemon-5v6hv"
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.004841 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/cf8b8ba7-96cd-4cdd-9925-94dd98242050-cnibin\") pod \"multus-additional-cni-plugins-n6lnd\" (UID: \"cf8b8ba7-96cd-4cdd-9925-94dd98242050\") " pod="openshift-multus/multus-additional-cni-plugins-n6lnd"
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.004863 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/c847aae6-277a-45dc-86d0-9b175f7e8177-system-cni-dir\") pod \"multus-9wtqb\" (UID: \"c847aae6-277a-45dc-86d0-9b175f7e8177\") " pod="openshift-multus/multus-9wtqb"
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.004900 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.004921 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/cf8b8ba7-96cd-4cdd-9925-94dd98242050-os-release\") pod \"multus-additional-cni-plugins-n6lnd\" (UID: \"cf8b8ba7-96cd-4cdd-9925-94dd98242050\") " pod="openshift-multus/multus-additional-cni-plugins-n6lnd"
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.004939 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h"
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.004954 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/c847aae6-277a-45dc-86d0-9b175f7e8177-os-release\") pod \"multus-9wtqb\" (UID: \"c847aae6-277a-45dc-86d0-9b175f7e8177\") " pod="openshift-multus/multus-9wtqb"
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.004972 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/cf8b8ba7-96cd-4cdd-9925-94dd98242050-tuning-conf-dir\") pod \"multus-additional-cni-plugins-n6lnd\" (UID: \"cf8b8ba7-96cd-4cdd-9925-94dd98242050\") " pod="openshift-multus/multus-additional-cni-plugins-n6lnd"
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.004989 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2gmcr\" (UniqueName: \"kubernetes.io/projected/263d57f9-b10b-4ce1-adad-774600b977d8-kube-api-access-2gmcr\") pod \"node-resolver-2vkbh\" (UID: \"263d57f9-b10b-4ce1-adad-774600b977d8\") " pod="openshift-dns/node-resolver-2vkbh"
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.005006 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/c847aae6-277a-45dc-86d0-9b175f7e8177-multus-cni-dir\") pod \"multus-9wtqb\" (UID: \"c847aae6-277a-45dc-86d0-9b175f7e8177\") " pod="openshift-multus/multus-9wtqb"
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.005027 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.005046 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/c847aae6-277a-45dc-86d0-9b175f7e8177-multus-conf-dir\") pod \"multus-9wtqb\" (UID: \"c847aae6-277a-45dc-86d0-9b175f7e8177\") " pod="openshift-multus/multus-9wtqb"
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.005064 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb"
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.005083 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb"
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.005104 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8n248\" (UniqueName: \"kubernetes.io/projected/cf8b8ba7-96cd-4cdd-9925-94dd98242050-kube-api-access-8n248\") pod \"multus-additional-cni-plugins-n6lnd\" (UID: \"cf8b8ba7-96cd-4cdd-9925-94dd98242050\") " pod="openshift-multus/multus-additional-cni-plugins-n6lnd"
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.005122 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4hhqx\" (UniqueName: \"kubernetes.io/projected/366049a3-acf6-488c-9f93-4557528d6d14-kube-api-access-4hhqx\") pod \"machine-config-daemon-5v6hv\" (UID: \"366049a3-acf6-488c-9f93-4557528d6d14\") " pod="openshift-machine-config-operator/machine-config-daemon-5v6hv"
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.005172 4762 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.005184 4762 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.005195 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.005207 4762 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.005202 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.008199 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.005217 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.005521 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.000984 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.000970 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.001092 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.001173 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.001279 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.001609 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.001670 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.001698 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.001832 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.001955 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.002059 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.002263 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.002301 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.002924 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.002956 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.002981 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.003124 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.003155 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.003269 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.003302 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.003424 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.003436 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.003554 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.003657 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.003826 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.004079 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.004105 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.004144 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.004102 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.004249 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.004562 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.004747 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.005619 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.005885 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.005917 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.000830 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.005929 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.005613 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.006432 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.006452 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.007067 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.007360 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.007360 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.007410 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 09 13:25:45 crc kubenswrapper[4762]: E1009 13:25:45.007485 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 13:25:45.507464779 +0000 UTC m=+21.281255818 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.008553 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.009021 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.009090 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.009098 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.007767 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.008010 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.008176 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.008179 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.008238 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.009336 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.009491 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.007496 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.009571 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.009601 4762 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.009651 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.009680 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.009701 4762 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.009720 4762 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.009738 4762 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.009758 4762 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.009775 4762 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.009793 4762 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.009821 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.009827 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.009841 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.009860 4762 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.009880 4762 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.009898 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.009918 4762 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.009940 4762 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.009960 4762 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.009980 4762 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.009999 4762 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.010016 4762 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.010009 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.010034 4762 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.010110 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.010141 4762 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.010162 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.010179 4762 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.010198 4762 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.010215 4762 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.010233 4762 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.010251 4762 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.010269 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.010877 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.011120 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.011157 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.011276 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.011295 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.011514 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.011690 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.011954 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.012058 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.012065 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.012359 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.012534 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.012840 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.012888 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.013121 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.013287 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.013397 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.013425 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.013520 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.016141 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.013505 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.013994 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.014006 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.014208 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx".
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.014339 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.015496 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.015516 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.015858 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.015840 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.014446 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.015896 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.016392 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.016411 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.016809 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.016827 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.017123 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.017119 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.017260 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.017137 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.017870 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.018630 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.018791 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.018851 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.018872 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.018909 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.019182 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.019464 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.019679 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.019713 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.019796 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.019876 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.019892 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.019921 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.019920 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.020259 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.020290 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.020390 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.020506 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.020586 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.020729 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.020822 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.020857 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.020906 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.020996 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.021011 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.021625 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.022723 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.020983 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.023083 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.023707 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.024323 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.024340 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.024393 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.024346 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.025518 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:25:45 crc kubenswrapper[4762]: E1009 13:25:45.025830 4762 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 09 13:25:45 crc kubenswrapper[4762]: E1009 13:25:45.025931 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-09 13:25:45.525903153 +0000 UTC m=+21.299694192 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.025967 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.026014 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.026049 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.026220 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:25:45 crc kubenswrapper[4762]: E1009 13:25:45.026532 4762 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 09 13:25:45 crc kubenswrapper[4762]: E1009 13:25:45.026602 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-09 13:25:45.52658027 +0000 UTC m=+21.300371309 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.026845 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.027018 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.027061 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.027215 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.028002 4762 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.028014 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.028016 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.028462 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.028941 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.028994 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.030009 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.030649 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.031200 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.033321 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.034787 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.036009 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.049850 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). 
InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:25:45 crc kubenswrapper[4762]: E1009 13:25:45.050289 4762 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 09 13:25:45 crc kubenswrapper[4762]: E1009 13:25:45.050323 4762 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 09 13:25:45 crc kubenswrapper[4762]: E1009 13:25:45.050344 4762 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 09 13:25:45 crc kubenswrapper[4762]: E1009 13:25:45.050437 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-09 13:25:45.550407213 +0000 UTC m=+21.324198272 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.052502 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.053056 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.053595 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.060180 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 09 13:25:45 crc kubenswrapper[4762]: E1009 13:25:45.061327 4762 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 09 13:25:45 crc kubenswrapper[4762]: E1009 13:25:45.061353 4762 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 09 13:25:45 crc kubenswrapper[4762]: E1009 13:25:45.061367 4762 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 09 13:25:45 crc kubenswrapper[4762]: E1009 13:25:45.061427 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-09 13:25:45.561407696 +0000 UTC m=+21.335198735 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.063703 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.079563 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.080077 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.080232 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.086949 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.087391 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.090225 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.094250 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.098227 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.098812 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.099935 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.101502 4762 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="c9179f90a1a9a3c70467429b0471320ccf51b67f27c4d28d22ebc477cedab17d" exitCode=255 Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.101586 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"c9179f90a1a9a3c70467429b0471320ccf51b67f27c4d28d22ebc477cedab17d"} Oct 09 13:25:45 crc kubenswrapper[4762]: E1009 13:25:45.108621 4762 kubelet.go:1929] "Failed creating a mirror pod for" err="pods \"kube-apiserver-crc\" already exists" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.108895 4762 scope.go:117] "RemoveContainer" containerID="c9179f90a1a9a3c70467429b0471320ccf51b67f27c4d28d22ebc477cedab17d" Oct 09 13:25:45 crc kubenswrapper[4762]: E1009 13:25:45.109500 4762 kubelet.go:1929] "Failed creating a mirror pod for" err="pods \"kube-controller-manager-crc\" already exists" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.110566 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/c847aae6-277a-45dc-86d0-9b175f7e8177-multus-cni-dir\") pod \"multus-9wtqb\" (UID: \"c847aae6-277a-45dc-86d0-9b175f7e8177\") " pod="openshift-multus/multus-9wtqb" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.110627 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/c847aae6-277a-45dc-86d0-9b175f7e8177-multus-conf-dir\") pod \"multus-9wtqb\" (UID: \"c847aae6-277a-45dc-86d0-9b175f7e8177\") " pod="openshift-multus/multus-9wtqb" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.110671 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8n248\" (UniqueName: \"kubernetes.io/projected/cf8b8ba7-96cd-4cdd-9925-94dd98242050-kube-api-access-8n248\") pod \"multus-additional-cni-plugins-n6lnd\" (UID: \"cf8b8ba7-96cd-4cdd-9925-94dd98242050\") " pod="openshift-multus/multus-additional-cni-plugins-n6lnd" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.110695 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4hhqx\" (UniqueName: \"kubernetes.io/projected/366049a3-acf6-488c-9f93-4557528d6d14-kube-api-access-4hhqx\") pod \"machine-config-daemon-5v6hv\" (UID: \"366049a3-acf6-488c-9f93-4557528d6d14\") " pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.110707 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/c847aae6-277a-45dc-86d0-9b175f7e8177-multus-cni-dir\") pod \"multus-9wtqb\" (UID: \"c847aae6-277a-45dc-86d0-9b175f7e8177\") " pod="openshift-multus/multus-9wtqb" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.110717 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/263d57f9-b10b-4ce1-adad-774600b977d8-hosts-file\") pod \"node-resolver-2vkbh\" (UID: \"263d57f9-b10b-4ce1-adad-774600b977d8\") " 
pod="openshift-dns/node-resolver-2vkbh" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.110742 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/cf8b8ba7-96cd-4cdd-9925-94dd98242050-cni-binary-copy\") pod \"multus-additional-cni-plugins-n6lnd\" (UID: \"cf8b8ba7-96cd-4cdd-9925-94dd98242050\") " pod="openshift-multus/multus-additional-cni-plugins-n6lnd" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.110765 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/cf8b8ba7-96cd-4cdd-9925-94dd98242050-system-cni-dir\") pod \"multus-additional-cni-plugins-n6lnd\" (UID: \"cf8b8ba7-96cd-4cdd-9925-94dd98242050\") " pod="openshift-multus/multus-additional-cni-plugins-n6lnd" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.110787 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/c847aae6-277a-45dc-86d0-9b175f7e8177-host-run-k8s-cni-cncf-io\") pod \"multus-9wtqb\" (UID: \"c847aae6-277a-45dc-86d0-9b175f7e8177\") " pod="openshift-multus/multus-9wtqb" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.110809 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/c847aae6-277a-45dc-86d0-9b175f7e8177-host-run-multus-certs\") pod \"multus-9wtqb\" (UID: \"c847aae6-277a-45dc-86d0-9b175f7e8177\") " pod="openshift-multus/multus-9wtqb" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.110830 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/366049a3-acf6-488c-9f93-4557528d6d14-proxy-tls\") pod \"machine-config-daemon-5v6hv\" (UID: \"366049a3-acf6-488c-9f93-4557528d6d14\") " pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.110850 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/c847aae6-277a-45dc-86d0-9b175f7e8177-host-var-lib-cni-multus\") pod \"multus-9wtqb\" (UID: \"c847aae6-277a-45dc-86d0-9b175f7e8177\") " pod="openshift-multus/multus-9wtqb" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.110870 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2kljt\" (UniqueName: \"kubernetes.io/projected/c847aae6-277a-45dc-86d0-9b175f7e8177-kube-api-access-2kljt\") pod \"multus-9wtqb\" (UID: \"c847aae6-277a-45dc-86d0-9b175f7e8177\") " pod="openshift-multus/multus-9wtqb" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.110889 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/c847aae6-277a-45dc-86d0-9b175f7e8177-host-run-netns\") pod \"multus-9wtqb\" (UID: \"c847aae6-277a-45dc-86d0-9b175f7e8177\") " pod="openshift-multus/multus-9wtqb" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.110909 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/c847aae6-277a-45dc-86d0-9b175f7e8177-hostroot\") pod \"multus-9wtqb\" (UID: \"c847aae6-277a-45dc-86d0-9b175f7e8177\") " pod="openshift-multus/multus-9wtqb" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 
13:25:45.110927 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/c847aae6-277a-45dc-86d0-9b175f7e8177-cnibin\") pod \"multus-9wtqb\" (UID: \"c847aae6-277a-45dc-86d0-9b175f7e8177\") " pod="openshift-multus/multus-9wtqb" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.110950 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/c847aae6-277a-45dc-86d0-9b175f7e8177-cni-binary-copy\") pod \"multus-9wtqb\" (UID: \"c847aae6-277a-45dc-86d0-9b175f7e8177\") " pod="openshift-multus/multus-9wtqb" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.110974 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/cf8b8ba7-96cd-4cdd-9925-94dd98242050-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-n6lnd\" (UID: \"cf8b8ba7-96cd-4cdd-9925-94dd98242050\") " pod="openshift-multus/multus-additional-cni-plugins-n6lnd" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.110997 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/c847aae6-277a-45dc-86d0-9b175f7e8177-etc-kubernetes\") pod \"multus-9wtqb\" (UID: \"c847aae6-277a-45dc-86d0-9b175f7e8177\") " pod="openshift-multus/multus-9wtqb" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.111019 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.111053 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/c847aae6-277a-45dc-86d0-9b175f7e8177-multus-daemon-config\") pod \"multus-9wtqb\" (UID: \"c847aae6-277a-45dc-86d0-9b175f7e8177\") " pod="openshift-multus/multus-9wtqb" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.111109 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/c847aae6-277a-45dc-86d0-9b175f7e8177-multus-socket-dir-parent\") pod \"multus-9wtqb\" (UID: \"c847aae6-277a-45dc-86d0-9b175f7e8177\") " pod="openshift-multus/multus-9wtqb" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.111131 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/c847aae6-277a-45dc-86d0-9b175f7e8177-host-var-lib-cni-bin\") pod \"multus-9wtqb\" (UID: \"c847aae6-277a-45dc-86d0-9b175f7e8177\") " pod="openshift-multus/multus-9wtqb" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.111138 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/c847aae6-277a-45dc-86d0-9b175f7e8177-multus-conf-dir\") pod \"multus-9wtqb\" (UID: \"c847aae6-277a-45dc-86d0-9b175f7e8177\") " pod="openshift-multus/multus-9wtqb" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.111180 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: 
\"kubernetes.io/host-path/366049a3-acf6-488c-9f93-4557528d6d14-rootfs\") pod \"machine-config-daemon-5v6hv\" (UID: \"366049a3-acf6-488c-9f93-4557528d6d14\") " pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.111152 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/366049a3-acf6-488c-9f93-4557528d6d14-rootfs\") pod \"machine-config-daemon-5v6hv\" (UID: \"366049a3-acf6-488c-9f93-4557528d6d14\") " pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.111208 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/c847aae6-277a-45dc-86d0-9b175f7e8177-host-run-netns\") pod \"multus-9wtqb\" (UID: \"c847aae6-277a-45dc-86d0-9b175f7e8177\") " pod="openshift-multus/multus-9wtqb" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.111272 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/263d57f9-b10b-4ce1-adad-774600b977d8-hosts-file\") pod \"node-resolver-2vkbh\" (UID: \"263d57f9-b10b-4ce1-adad-774600b977d8\") " pod="openshift-dns/node-resolver-2vkbh" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.111723 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-9wtqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c847aae6-277a-45dc-86d0-9b175f7e8177\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2kljt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-9wtqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.111816 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/c847aae6-277a-45dc-86d0-9b175f7e8177-host-var-lib-cni-multus\") pod \"multus-9wtqb\" (UID: \"c847aae6-277a-45dc-86d0-9b175f7e8177\") " pod="openshift-multus/multus-9wtqb" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.111835 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/cf8b8ba7-96cd-4cdd-9925-94dd98242050-system-cni-dir\") pod \"multus-additional-cni-plugins-n6lnd\" (UID: \"cf8b8ba7-96cd-4cdd-9925-94dd98242050\") " pod="openshift-multus/multus-additional-cni-plugins-n6lnd" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.111863 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.111865 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/c847aae6-277a-45dc-86d0-9b175f7e8177-host-run-k8s-cni-cncf-io\") pod \"multus-9wtqb\" (UID: \"c847aae6-277a-45dc-86d0-9b175f7e8177\") " pod="openshift-multus/multus-9wtqb" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.111878 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/c847aae6-277a-45dc-86d0-9b175f7e8177-host-run-multus-certs\") pod \"multus-9wtqb\" (UID: \"c847aae6-277a-45dc-86d0-9b175f7e8177\") " pod="openshift-multus/multus-9wtqb" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.111954 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/c847aae6-277a-45dc-86d0-9b175f7e8177-cni-binary-copy\") pod \"multus-9wtqb\" (UID: \"c847aae6-277a-45dc-86d0-9b175f7e8177\") " pod="openshift-multus/multus-9wtqb" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.112005 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/c847aae6-277a-45dc-86d0-9b175f7e8177-host-var-lib-cni-bin\") pod \"multus-9wtqb\" (UID: \"c847aae6-277a-45dc-86d0-9b175f7e8177\") " pod="openshift-multus/multus-9wtqb" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.112059 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/c847aae6-277a-45dc-86d0-9b175f7e8177-etc-kubernetes\") pod \"multus-9wtqb\" (UID: \"c847aae6-277a-45dc-86d0-9b175f7e8177\") " pod="openshift-multus/multus-9wtqb" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.112091 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/c847aae6-277a-45dc-86d0-9b175f7e8177-hostroot\") pod \"multus-9wtqb\" (UID: \"c847aae6-277a-45dc-86d0-9b175f7e8177\") " pod="openshift-multus/multus-9wtqb" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.112113 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/c847aae6-277a-45dc-86d0-9b175f7e8177-multus-socket-dir-parent\") pod \"multus-9wtqb\" (UID: \"c847aae6-277a-45dc-86d0-9b175f7e8177\") " pod="openshift-multus/multus-9wtqb" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.112499 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.112573 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/c847aae6-277a-45dc-86d0-9b175f7e8177-host-var-lib-kubelet\") pod \"multus-9wtqb\" (UID: \"c847aae6-277a-45dc-86d0-9b175f7e8177\") " 
pod="openshift-multus/multus-9wtqb" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.112614 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.112665 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/c847aae6-277a-45dc-86d0-9b175f7e8177-host-var-lib-kubelet\") pod \"multus-9wtqb\" (UID: \"c847aae6-277a-45dc-86d0-9b175f7e8177\") " pod="openshift-multus/multus-9wtqb" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.112723 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/366049a3-acf6-488c-9f93-4557528d6d14-mcd-auth-proxy-config\") pod \"machine-config-daemon-5v6hv\" (UID: \"366049a3-acf6-488c-9f93-4557528d6d14\") " pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.112763 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/cf8b8ba7-96cd-4cdd-9925-94dd98242050-cnibin\") pod \"multus-additional-cni-plugins-n6lnd\" (UID: \"cf8b8ba7-96cd-4cdd-9925-94dd98242050\") " pod="openshift-multus/multus-additional-cni-plugins-n6lnd" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.112785 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/c847aae6-277a-45dc-86d0-9b175f7e8177-system-cni-dir\") pod \"multus-9wtqb\" (UID: \"c847aae6-277a-45dc-86d0-9b175f7e8177\") " pod="openshift-multus/multus-9wtqb" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.112820 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/cf8b8ba7-96cd-4cdd-9925-94dd98242050-os-release\") pod \"multus-additional-cni-plugins-n6lnd\" (UID: \"cf8b8ba7-96cd-4cdd-9925-94dd98242050\") " pod="openshift-multus/multus-additional-cni-plugins-n6lnd" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.112836 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/c847aae6-277a-45dc-86d0-9b175f7e8177-os-release\") pod \"multus-9wtqb\" (UID: \"c847aae6-277a-45dc-86d0-9b175f7e8177\") " pod="openshift-multus/multus-9wtqb" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.112864 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/cf8b8ba7-96cd-4cdd-9925-94dd98242050-tuning-conf-dir\") pod \"multus-additional-cni-plugins-n6lnd\" (UID: \"cf8b8ba7-96cd-4cdd-9925-94dd98242050\") " pod="openshift-multus/multus-additional-cni-plugins-n6lnd" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.112903 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2gmcr\" (UniqueName: \"kubernetes.io/projected/263d57f9-b10b-4ce1-adad-774600b977d8-kube-api-access-2gmcr\") pod \"node-resolver-2vkbh\" (UID: \"263d57f9-b10b-4ce1-adad-774600b977d8\") " pod="openshift-dns/node-resolver-2vkbh" Oct 09 13:25:45 crc 
kubenswrapper[4762]: I1009 13:25:45.113089 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/c847aae6-277a-45dc-86d0-9b175f7e8177-os-release\") pod \"multus-9wtqb\" (UID: \"c847aae6-277a-45dc-86d0-9b175f7e8177\") " pod="openshift-multus/multus-9wtqb" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.113121 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/c847aae6-277a-45dc-86d0-9b175f7e8177-system-cni-dir\") pod \"multus-9wtqb\" (UID: \"c847aae6-277a-45dc-86d0-9b175f7e8177\") " pod="openshift-multus/multus-9wtqb" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.113133 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/c847aae6-277a-45dc-86d0-9b175f7e8177-multus-daemon-config\") pod \"multus-9wtqb\" (UID: \"c847aae6-277a-45dc-86d0-9b175f7e8177\") " pod="openshift-multus/multus-9wtqb" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.113200 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/cf8b8ba7-96cd-4cdd-9925-94dd98242050-os-release\") pod \"multus-additional-cni-plugins-n6lnd\" (UID: \"cf8b8ba7-96cd-4cdd-9925-94dd98242050\") " pod="openshift-multus/multus-additional-cni-plugins-n6lnd" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.113228 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/cf8b8ba7-96cd-4cdd-9925-94dd98242050-cnibin\") pod \"multus-additional-cni-plugins-n6lnd\" (UID: \"cf8b8ba7-96cd-4cdd-9925-94dd98242050\") " pod="openshift-multus/multus-additional-cni-plugins-n6lnd" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.113199 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/cf8b8ba7-96cd-4cdd-9925-94dd98242050-cni-binary-copy\") pod \"multus-additional-cni-plugins-n6lnd\" (UID: \"cf8b8ba7-96cd-4cdd-9925-94dd98242050\") " pod="openshift-multus/multus-additional-cni-plugins-n6lnd" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.113273 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/c847aae6-277a-45dc-86d0-9b175f7e8177-cnibin\") pod \"multus-9wtqb\" (UID: \"c847aae6-277a-45dc-86d0-9b175f7e8177\") " pod="openshift-multus/multus-9wtqb" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.113608 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/366049a3-acf6-488c-9f93-4557528d6d14-mcd-auth-proxy-config\") pod \"machine-config-daemon-5v6hv\" (UID: \"366049a3-acf6-488c-9f93-4557528d6d14\") " pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.114170 4762 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.114614 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/cf8b8ba7-96cd-4cdd-9925-94dd98242050-tuning-conf-dir\") pod \"multus-additional-cni-plugins-n6lnd\" (UID: 
\"cf8b8ba7-96cd-4cdd-9925-94dd98242050\") " pod="openshift-multus/multus-additional-cni-plugins-n6lnd" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.115429 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/cf8b8ba7-96cd-4cdd-9925-94dd98242050-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-n6lnd\" (UID: \"cf8b8ba7-96cd-4cdd-9925-94dd98242050\") " pod="openshift-multus/multus-additional-cni-plugins-n6lnd" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.121549 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7bd1ecbd-1492-4e6a-87e8-1c913e084d9d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9ff1efe69d256b491a039e5f35442c087ce3b52fc7abf98b338e24c3e020b99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb2071dd369674ca2de7de56dd1250c763b8733d72889b60eff864774dc3d81b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e
44b6f2021a1a4ccd714f86443c7cc235b9d77cd455e68f7e042281ff0917569\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9179f90a1a9a3c70467429b0471320ccf51b67f27c4d28d22ebc477cedab17d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://281873dc615f940d39a13cb0a18a2eb34eb7de3f9773d8845183edeb89d430f5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b0cc5e4351c64f1a4f07f8ec87ea48ddab393d4ac64228e8fbf20d3259fb630\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6b0cc5e4351c64f1a4f07f8ec87ea48ddab393d4ac64228e8fbf20d3259fb630\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:25Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.125313 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/366049a3-acf6-488c-9f93-4557528d6d14-proxy-tls\") pod \"machine-config-daemon-5v6hv\" (UID: \"366049a3-acf6-488c-9f93-4557528d6d14\") " pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.125951 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2kljt\" (UniqueName: \"kubernetes.io/projected/c847aae6-277a-45dc-86d0-9b175f7e8177-kube-api-access-2kljt\") pod \"multus-9wtqb\" (UID: \"c847aae6-277a-45dc-86d0-9b175f7e8177\") " pod="openshift-multus/multus-9wtqb" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.126119 4762 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.126155 4762 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.126167 4762 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.126179 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.126190 4762 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.126200 4762 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.126208 4762 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.126217 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.126229 4762 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.126238 4762 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: 
\"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.126248 4762 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.126258 4762 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.126267 4762 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.126276 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.126288 4762 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.126297 4762 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.126307 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.126317 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.126328 4762 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.126337 4762 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.126346 4762 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.126356 4762 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.126364 4762 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: 
\"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.126373 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.126384 4762 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.126393 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.126402 4762 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.126411 4762 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.126422 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.126434 4762 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.126446 4762 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.126456 4762 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.126465 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.126477 4762 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.126487 4762 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.126498 4762 reconciler_common.go:293] "Volume detached for volume 
\"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.126508 4762 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.126518 4762 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.126527 4762 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.126535 4762 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.126543 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.126552 4762 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.126562 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.126573 4762 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.126584 4762 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.126596 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.126606 4762 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.126618 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.126646 4762 reconciler_common.go:293] 
"Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.126662 4762 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.126674 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.126685 4762 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.126694 4762 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.126703 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.126733 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.126742 4762 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.126751 4762 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.126761 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.126769 4762 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.126778 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.126786 4762 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.126794 4762 
reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.126805 4762 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.126814 4762 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.126824 4762 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.126832 4762 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.126841 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.126849 4762 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.126857 4762 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.126867 4762 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.126878 4762 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.126887 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.126898 4762 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.126906 4762 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.126915 4762 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.126923 4762 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.126933 4762 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.126941 4762 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.126950 4762 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.126958 4762 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.126967 4762 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.126977 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.126986 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.126995 4762 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.127003 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.127012 4762 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.127021 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.127029 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.127038 4762 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.127048 4762 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.127057 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.127065 4762 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.127075 4762 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.127084 4762 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.127092 4762 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.127104 4762 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.127112 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.127122 4762 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.127130 4762 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.127138 4762 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.127146 4762 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.127155 4762 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.127164 4762 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.127172 4762 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.127180 4762 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.127189 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.127197 4762 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.127205 4762 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.127213 4762 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.127221 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.127231 4762 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.127239 4762 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.127248 4762 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.127258 4762 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.127268 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.127278 4762 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.127287 4762 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.127295 4762 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.127304 4762 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.127313 4762 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.127321 4762 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.127333 4762 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.127344 4762 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.127356 4762 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.127366 4762 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.127377 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.127389 4762 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.127400 4762 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.127413 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.127424 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.127435 4762 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.127472 4762 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.127487 4762 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.127499 4762 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.127512 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.127529 4762 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.127539 4762 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.127551 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.127563 4762 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.127575 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.127589 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.127601 4762 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.127614 4762 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.127626 4762 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.127655 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.127665 4762 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.127675 4762 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.127683 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.127692 4762 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.127702 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.127712 4762 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.127722 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.127732 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.127741 4762 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.127749 4762 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.127758 4762 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.127767 4762 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.127776 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\""
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.127876 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4hhqx\" (UniqueName: \"kubernetes.io/projected/366049a3-acf6-488c-9f93-4557528d6d14-kube-api-access-4hhqx\") pod \"machine-config-daemon-5v6hv\" (UID: \"366049a3-acf6-488c-9f93-4557528d6d14\") " pod="openshift-machine-config-operator/machine-config-daemon-5v6hv"
Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.128076 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2vkbh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"263d57f9-b10b-4ce1-adad-774600b977d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gmcr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2vkbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.128442 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2gmcr\" (UniqueName: \"kubernetes.io/projected/263d57f9-b10b-4ce1-adad-774600b977d8-kube-api-access-2gmcr\") pod \"node-resolver-2vkbh\" (UID: \"263d57f9-b10b-4ce1-adad-774600b977d8\") " pod="openshift-dns/node-resolver-2vkbh" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.130191 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8n248\" (UniqueName: \"kubernetes.io/projected/cf8b8ba7-96cd-4cdd-9925-94dd98242050-kube-api-access-8n248\") pod \"multus-additional-cni-plugins-n6lnd\" (UID: \"cf8b8ba7-96cd-4cdd-9925-94dd98242050\") " pod="openshift-multus/multus-additional-cni-plugins-n6lnd" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.137956 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the 
pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.147536 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-n6lnd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf8b8ba7-96cd-4cdd-9925-94dd98242050\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-n6lnd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.156832 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.164834 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"366049a3-acf6-488c-9f93-4557528d6d14\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5v6hv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.182385 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dd0d2d4c-667f-43da-8074-b6e14823b755\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78a7e23eb6d5024d626963a06cf5790fcd6c7c17c82c823b2650c55273e427fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f807722a8b6059afed30f7f1fd32bcc168b8bf9d5eee02d74a42ab70ae5ff048\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1706f7f512083d1da015da3c7cd09c6aa4d497b83f8dfcd4ce0e8e966aa00b37\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd9c69d0be5e859f86da1745bdf82f003681f064e2580bfd454e6ba875bdcb61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:25Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.193061 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.200829 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.207172 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.209485 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.219484 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.219693 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.230438 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.230608 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2vkbh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"263d57f9-b10b-4ce1-adad-774600b977d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gmcr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2vkbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 09 13:25:45 crc kubenswrapper[4762]: W1009 13:25:45.237139 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podef543e1b_8068_4ea3_b32a_61027b32e95d.slice/crio-4773e4d75a300a903e939217dc8934c628809e2e770211d6a9e2574f4b5441d6 WatchSource:0}: Error finding container 4773e4d75a300a903e939217dc8934c628809e2e770211d6a9e2574f4b5441d6: Status 404 returned error can't find the container with id 4773e4d75a300a903e939217dc8934c628809e2e770211d6a9e2574f4b5441d6 Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.243370 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/node-resolver-2vkbh" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.242332 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-9wtqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c847aae6-277a-45dc-86d0-9b175f7e8177\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2kljt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.1
26.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-9wtqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 09 13:25:45 crc kubenswrapper[4762]: W1009 13:25:45.250715 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd75a4c96_2883_4a0b_bab2_0fab2b6c0b49.slice/crio-44738f40cc8ed786ef8ae88ce91cc0ed5c0aaf1654fc26bc698dc6859a984cc5 WatchSource:0}: Error finding container 44738f40cc8ed786ef8ae88ce91cc0ed5c0aaf1654fc26bc698dc6859a984cc5: Status 404 returned error can't find the container with id 44738f40cc8ed786ef8ae88ce91cc0ed5c0aaf1654fc26bc698dc6859a984cc5 Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.251729 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 09 13:25:45 crc kubenswrapper[4762]: W1009 13:25:45.255415 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod366049a3_acf6_488c_9f93_4557528d6d14.slice/crio-ded1d5d2c9d20522a192d92faa895d0a61bef0227635ef9a2765ef9e0eecbdf8 WatchSource:0}: Error finding container ded1d5d2c9d20522a192d92faa895d0a61bef0227635ef9a2765ef9e0eecbdf8: Status 404 returned error can't find the container with id ded1d5d2c9d20522a192d92faa895d0a61bef0227635ef9a2765ef9e0eecbdf8 Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.256126 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7bd1ecbd-1492-4e6a-87e8-1c913e084d9d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9ff1efe69d256b491a039e5f35442c087ce3b52fc7abf98b338e24c3e020b99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb2071dd369674ca2de7de56dd1250c763b8733d72889b60eff864774dc3d81b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e44b6f2021a1a4ccd714f86443c7cc235b9d77cd455e68f7e042281ff0917569\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9179f90a1a9a3c70467429b0471320ccf51b67f27c4d28d22ebc477cedab17d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9179f90a1a9a3c70467429b0471320ccf51b67f27c4d28d22ebc477cedab17d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09
T13:25:44Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1009 13:25:38.564754 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 13:25:38.572923 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2393231961/tls.crt::/tmp/serving-cert-2393231961/tls.key\\\\\\\"\\\\nI1009 13:25:44.418137 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 13:25:44.425303 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 13:25:44.425330 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 13:25:44.425348 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 13:25:44.425353 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 13:25:44.434300 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1009 13:25:44.434319 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1009 13:25:44.434323 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 13:25:44.434340 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 13:25:44.434344 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 13:25:44.434347 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 13:25:44.434350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 13:25:44.434353 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1009 13:25:44.436492 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://281873dc615f940d39a13cb0a18a2eb34eb7de3f9773d8845183edeb89d430f5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b0cc5e4351c64f1a4f07f8ec87ea48ddab393d4ac64228e8fbf20d3259fb630\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6b0cc5e4351c64f1a4f07f8ec87ea48ddab393d4ac64228e8fbf20d3259fb630\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:25Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.259453 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-9wtqb" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.264790 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-n6lnd" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.272454 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-jl67g"] Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.277860 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.281843 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.281867 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.282770 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.284186 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.285241 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.285444 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.290498 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.296688 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.322458 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-n6lnd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf8b8ba7-96cd-4cdd-9925-94dd98242050\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-n6lnd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.334012 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/92662de9-9784-432a-92d2-a668f815e8fd-host-kubelet\") pod \"ovnkube-node-jl67g\" (UID: \"92662de9-9784-432a-92d2-a668f815e8fd\") " pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.334046 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/92662de9-9784-432a-92d2-a668f815e8fd-host-slash\") pod \"ovnkube-node-jl67g\" (UID: \"92662de9-9784-432a-92d2-a668f815e8fd\") " pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.334088 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/92662de9-9784-432a-92d2-a668f815e8fd-run-systemd\") pod \"ovnkube-node-jl67g\" (UID: \"92662de9-9784-432a-92d2-a668f815e8fd\") " pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.334113 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/92662de9-9784-432a-92d2-a668f815e8fd-node-log\") pod \"ovnkube-node-jl67g\" (UID: \"92662de9-9784-432a-92d2-a668f815e8fd\") " pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.334162 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/92662de9-9784-432a-92d2-a668f815e8fd-systemd-units\") pod \"ovnkube-node-jl67g\" (UID: \"92662de9-9784-432a-92d2-a668f815e8fd\") " pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.334182 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/92662de9-9784-432a-92d2-a668f815e8fd-etc-openvswitch\") pod \"ovnkube-node-jl67g\" (UID: \"92662de9-9784-432a-92d2-a668f815e8fd\") " pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.334207 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/92662de9-9784-432a-92d2-a668f815e8fd-ovnkube-script-lib\") pod \"ovnkube-node-jl67g\" (UID: \"92662de9-9784-432a-92d2-a668f815e8fd\") " pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.334231 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/92662de9-9784-432a-92d2-a668f815e8fd-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-jl67g\" (UID: \"92662de9-9784-432a-92d2-a668f815e8fd\") " pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.334254 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/92662de9-9784-432a-92d2-a668f815e8fd-ovnkube-config\") pod \"ovnkube-node-jl67g\" (UID: \"92662de9-9784-432a-92d2-a668f815e8fd\") " pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.334274 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/92662de9-9784-432a-92d2-a668f815e8fd-host-cni-netd\") pod \"ovnkube-node-jl67g\" (UID: \"92662de9-9784-432a-92d2-a668f815e8fd\") " pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" Oct 09 13:25:45 crc 
kubenswrapper[4762]: I1009 13:25:45.334315 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/92662de9-9784-432a-92d2-a668f815e8fd-ovn-node-metrics-cert\") pod \"ovnkube-node-jl67g\" (UID: \"92662de9-9784-432a-92d2-a668f815e8fd\") " pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.334340 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/92662de9-9784-432a-92d2-a668f815e8fd-run-ovn\") pod \"ovnkube-node-jl67g\" (UID: \"92662de9-9784-432a-92d2-a668f815e8fd\") " pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.334358 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/92662de9-9784-432a-92d2-a668f815e8fd-log-socket\") pod \"ovnkube-node-jl67g\" (UID: \"92662de9-9784-432a-92d2-a668f815e8fd\") " pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.334374 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/92662de9-9784-432a-92d2-a668f815e8fd-var-lib-openvswitch\") pod \"ovnkube-node-jl67g\" (UID: \"92662de9-9784-432a-92d2-a668f815e8fd\") " pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.334391 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/92662de9-9784-432a-92d2-a668f815e8fd-host-run-netns\") pod \"ovnkube-node-jl67g\" (UID: \"92662de9-9784-432a-92d2-a668f815e8fd\") " pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.334409 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/92662de9-9784-432a-92d2-a668f815e8fd-host-run-ovn-kubernetes\") pod \"ovnkube-node-jl67g\" (UID: \"92662de9-9784-432a-92d2-a668f815e8fd\") " pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.334426 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kg6s6\" (UniqueName: \"kubernetes.io/projected/92662de9-9784-432a-92d2-a668f815e8fd-kube-api-access-kg6s6\") pod \"ovnkube-node-jl67g\" (UID: \"92662de9-9784-432a-92d2-a668f815e8fd\") " pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.334442 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/92662de9-9784-432a-92d2-a668f815e8fd-run-openvswitch\") pod \"ovnkube-node-jl67g\" (UID: \"92662de9-9784-432a-92d2-a668f815e8fd\") " pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.334459 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/92662de9-9784-432a-92d2-a668f815e8fd-env-overrides\") pod \"ovnkube-node-jl67g\" (UID: 
\"92662de9-9784-432a-92d2-a668f815e8fd\") " pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.334476 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/92662de9-9784-432a-92d2-a668f815e8fd-host-cni-bin\") pod \"ovnkube-node-jl67g\" (UID: \"92662de9-9784-432a-92d2-a668f815e8fd\") " pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.339881 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.381074 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.392430 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"366049a3-acf6-488c-9f93-4557528d6d14\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5v6hv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.410996 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dd0d2d4c-667f-43da-8074-b6e14823b755\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78a7e23eb6d5024d626963a06cf5790fcd6c7c17c82c823b2650c55273e427fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f807722a8b6059afed30f7f1fd32bcc168b8bf9d5eee02d74a42ab70ae5ff048\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1706f7f512083d1da015da3c7cd09c6aa4d497b83f8dfcd4ce0e8e966aa00b37\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd9c69d0be5e859f86da1745bdf82f003681f064e2580bfd454e6ba875bdcb61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:25Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.430486 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"92662de9-9784-432a-92d2-a668f815e8fd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jl67g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.436052 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/92662de9-9784-432a-92d2-a668f815e8fd-run-ovn\") pod \"ovnkube-node-jl67g\" (UID: \"92662de9-9784-432a-92d2-a668f815e8fd\") " pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.436093 4762 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/92662de9-9784-432a-92d2-a668f815e8fd-log-socket\") pod \"ovnkube-node-jl67g\" (UID: \"92662de9-9784-432a-92d2-a668f815e8fd\") " pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.436118 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/92662de9-9784-432a-92d2-a668f815e8fd-ovn-node-metrics-cert\") pod \"ovnkube-node-jl67g\" (UID: \"92662de9-9784-432a-92d2-a668f815e8fd\") " pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.436155 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/92662de9-9784-432a-92d2-a668f815e8fd-var-lib-openvswitch\") pod \"ovnkube-node-jl67g\" (UID: \"92662de9-9784-432a-92d2-a668f815e8fd\") " pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.436179 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/92662de9-9784-432a-92d2-a668f815e8fd-host-run-netns\") pod \"ovnkube-node-jl67g\" (UID: \"92662de9-9784-432a-92d2-a668f815e8fd\") " pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.436201 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/92662de9-9784-432a-92d2-a668f815e8fd-host-run-ovn-kubernetes\") pod \"ovnkube-node-jl67g\" (UID: \"92662de9-9784-432a-92d2-a668f815e8fd\") " pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.436225 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/92662de9-9784-432a-92d2-a668f815e8fd-run-openvswitch\") pod \"ovnkube-node-jl67g\" (UID: \"92662de9-9784-432a-92d2-a668f815e8fd\") " pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.436247 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/92662de9-9784-432a-92d2-a668f815e8fd-env-overrides\") pod \"ovnkube-node-jl67g\" (UID: \"92662de9-9784-432a-92d2-a668f815e8fd\") " pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.436231 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/92662de9-9784-432a-92d2-a668f815e8fd-run-ovn\") pod \"ovnkube-node-jl67g\" (UID: \"92662de9-9784-432a-92d2-a668f815e8fd\") " pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.436325 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/92662de9-9784-432a-92d2-a668f815e8fd-log-socket\") pod \"ovnkube-node-jl67g\" (UID: \"92662de9-9784-432a-92d2-a668f815e8fd\") " pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.436270 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kg6s6\" (UniqueName: 
\"kubernetes.io/projected/92662de9-9784-432a-92d2-a668f815e8fd-kube-api-access-kg6s6\") pod \"ovnkube-node-jl67g\" (UID: \"92662de9-9784-432a-92d2-a668f815e8fd\") " pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.436611 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/92662de9-9784-432a-92d2-a668f815e8fd-host-cni-bin\") pod \"ovnkube-node-jl67g\" (UID: \"92662de9-9784-432a-92d2-a668f815e8fd\") " pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.436665 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/92662de9-9784-432a-92d2-a668f815e8fd-host-slash\") pod \"ovnkube-node-jl67g\" (UID: \"92662de9-9784-432a-92d2-a668f815e8fd\") " pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.436681 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/92662de9-9784-432a-92d2-a668f815e8fd-run-systemd\") pod \"ovnkube-node-jl67g\" (UID: \"92662de9-9784-432a-92d2-a668f815e8fd\") " pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.436698 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/92662de9-9784-432a-92d2-a668f815e8fd-node-log\") pod \"ovnkube-node-jl67g\" (UID: \"92662de9-9784-432a-92d2-a668f815e8fd\") " pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.436713 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/92662de9-9784-432a-92d2-a668f815e8fd-host-kubelet\") pod \"ovnkube-node-jl67g\" (UID: \"92662de9-9784-432a-92d2-a668f815e8fd\") " pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.436730 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/92662de9-9784-432a-92d2-a668f815e8fd-systemd-units\") pod \"ovnkube-node-jl67g\" (UID: \"92662de9-9784-432a-92d2-a668f815e8fd\") " pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.436747 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/92662de9-9784-432a-92d2-a668f815e8fd-etc-openvswitch\") pod \"ovnkube-node-jl67g\" (UID: \"92662de9-9784-432a-92d2-a668f815e8fd\") " pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.436767 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/92662de9-9784-432a-92d2-a668f815e8fd-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-jl67g\" (UID: \"92662de9-9784-432a-92d2-a668f815e8fd\") " pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.436783 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: 
\"kubernetes.io/configmap/92662de9-9784-432a-92d2-a668f815e8fd-ovnkube-config\") pod \"ovnkube-node-jl67g\" (UID: \"92662de9-9784-432a-92d2-a668f815e8fd\") " pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.436794 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/92662de9-9784-432a-92d2-a668f815e8fd-node-log\") pod \"ovnkube-node-jl67g\" (UID: \"92662de9-9784-432a-92d2-a668f815e8fd\") " pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.436840 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/92662de9-9784-432a-92d2-a668f815e8fd-var-lib-openvswitch\") pod \"ovnkube-node-jl67g\" (UID: \"92662de9-9784-432a-92d2-a668f815e8fd\") " pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.436797 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/92662de9-9784-432a-92d2-a668f815e8fd-ovnkube-script-lib\") pod \"ovnkube-node-jl67g\" (UID: \"92662de9-9784-432a-92d2-a668f815e8fd\") " pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.436950 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/92662de9-9784-432a-92d2-a668f815e8fd-host-cni-netd\") pod \"ovnkube-node-jl67g\" (UID: \"92662de9-9784-432a-92d2-a668f815e8fd\") " pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.436886 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/92662de9-9784-432a-92d2-a668f815e8fd-host-cni-bin\") pod \"ovnkube-node-jl67g\" (UID: \"92662de9-9784-432a-92d2-a668f815e8fd\") " pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.436993 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/92662de9-9784-432a-92d2-a668f815e8fd-host-cni-netd\") pod \"ovnkube-node-jl67g\" (UID: \"92662de9-9784-432a-92d2-a668f815e8fd\") " pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.437021 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/92662de9-9784-432a-92d2-a668f815e8fd-host-run-netns\") pod \"ovnkube-node-jl67g\" (UID: \"92662de9-9784-432a-92d2-a668f815e8fd\") " pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.437139 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/92662de9-9784-432a-92d2-a668f815e8fd-run-systemd\") pod \"ovnkube-node-jl67g\" (UID: \"92662de9-9784-432a-92d2-a668f815e8fd\") " pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.437144 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/92662de9-9784-432a-92d2-a668f815e8fd-systemd-units\") pod \"ovnkube-node-jl67g\" (UID: \"92662de9-9784-432a-92d2-a668f815e8fd\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.437189 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/92662de9-9784-432a-92d2-a668f815e8fd-host-slash\") pod \"ovnkube-node-jl67g\" (UID: \"92662de9-9784-432a-92d2-a668f815e8fd\") " pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.437208 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/92662de9-9784-432a-92d2-a668f815e8fd-etc-openvswitch\") pod \"ovnkube-node-jl67g\" (UID: \"92662de9-9784-432a-92d2-a668f815e8fd\") " pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.437250 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/92662de9-9784-432a-92d2-a668f815e8fd-host-kubelet\") pod \"ovnkube-node-jl67g\" (UID: \"92662de9-9784-432a-92d2-a668f815e8fd\") " pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.437288 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/92662de9-9784-432a-92d2-a668f815e8fd-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-jl67g\" (UID: \"92662de9-9784-432a-92d2-a668f815e8fd\") " pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.437402 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/92662de9-9784-432a-92d2-a668f815e8fd-ovnkube-script-lib\") pod \"ovnkube-node-jl67g\" (UID: \"92662de9-9784-432a-92d2-a668f815e8fd\") " pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.437471 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/92662de9-9784-432a-92d2-a668f815e8fd-host-run-ovn-kubernetes\") pod \"ovnkube-node-jl67g\" (UID: \"92662de9-9784-432a-92d2-a668f815e8fd\") " pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.437523 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/92662de9-9784-432a-92d2-a668f815e8fd-run-openvswitch\") pod \"ovnkube-node-jl67g\" (UID: \"92662de9-9784-432a-92d2-a668f815e8fd\") " pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.437536 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/92662de9-9784-432a-92d2-a668f815e8fd-env-overrides\") pod \"ovnkube-node-jl67g\" (UID: \"92662de9-9784-432a-92d2-a668f815e8fd\") " pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.438080 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/92662de9-9784-432a-92d2-a668f815e8fd-ovnkube-config\") pod \"ovnkube-node-jl67g\" (UID: \"92662de9-9784-432a-92d2-a668f815e8fd\") " pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 
13:25:45.446107 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.448930 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/92662de9-9784-432a-92d2-a668f815e8fd-ovn-node-metrics-cert\") pod \"ovnkube-node-jl67g\" (UID: \"92662de9-9784-432a-92d2-a668f815e8fd\") " pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.455085 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kg6s6\" (UniqueName: \"kubernetes.io/projected/92662de9-9784-432a-92d2-a668f815e8fd-kube-api-access-kg6s6\") pod \"ovnkube-node-jl67g\" (UID: \"92662de9-9784-432a-92d2-a668f815e8fd\") " pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.461964 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.477941 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.494839 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7bd1ecbd-1492-4e6a-87e8-1c913e084d9d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9ff1efe69d256b491a039e5f35442c087ce3b52fc7abf98b338e24c3e020b99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb2071dd369674ca2de7de56dd1250c763b8733d72889b60eff864774dc3d81b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e44b6f2021a1a4ccd714f86443c7cc235b9d77cd455e68f7e042281ff0917569\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9179f90a1a9a3c70467429b0471320ccf51b67f27c4d28d22ebc477cedab17d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9179f90a1a9a3c70467429b0471320ccf51b67f27c4d28d22ebc477cedab17d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09
T13:25:44Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1009 13:25:38.564754 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 13:25:38.572923 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2393231961/tls.crt::/tmp/serving-cert-2393231961/tls.key\\\\\\\"\\\\nI1009 13:25:44.418137 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 13:25:44.425303 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 13:25:44.425330 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 13:25:44.425348 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 13:25:44.425353 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 13:25:44.434300 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1009 13:25:44.434319 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1009 13:25:44.434323 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 13:25:44.434340 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 13:25:44.434344 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 13:25:44.434347 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 13:25:44.434350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 13:25:44.434353 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1009 13:25:44.436492 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://281873dc615f940d39a13cb0a18a2eb34eb7de3f9773d8845183edeb89d430f5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b0cc5e4351c64f1a4f07f8ec87ea48ddab393d4ac64228e8fbf20d3259fb630\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6b0cc5e4351c64f1a4f07f8ec87ea48ddab393d4ac64228e8fbf20d3259fb630\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:25Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.506420 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2vkbh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"263d57f9-b10b-4ce1-adad-774600b977d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gmcr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2vkbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.523310 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-9wtqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c847aae6-277a-45dc-86d0-9b175f7e8177\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2kljt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-9wtqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.533361 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.537382 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.537466 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.537508 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 13:25:45 crc kubenswrapper[4762]: E1009 13:25:45.537578 4762 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 09 13:25:45 crc kubenswrapper[4762]: E1009 13:25:45.537624 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-09 13:25:46.537612405 +0000 UTC m=+22.311403434 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 09 13:25:45 crc kubenswrapper[4762]: E1009 13:25:45.537856 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 13:25:46.53782848 +0000 UTC m=+22.311619549 (durationBeforeRetry 1s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 13:25:45 crc kubenswrapper[4762]: E1009 13:25:45.537915 4762 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 09 13:25:45 crc kubenswrapper[4762]: E1009 13:25:45.538078 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-09 13:25:46.538009835 +0000 UTC m=+22.311800864 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.548220 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-n6lnd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf8b8ba7-96cd-4cdd-9925-94dd98242050\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-n6lnd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.555506 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"366049a3-acf6-488c-9f93-4557528d6d14\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5v6hv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.574321 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dd0d2d4c-667f-43da-8074-b6e14823b755\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78a7e23eb6d5024d626963a06cf5790fcd6c7c17c82c823b2650c55273e427fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f807722a8b6059afed30f7f1fd32bcc168b8bf9d5eee02d74a42ab70ae5ff048\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1706f7f512083d1da015da3c7cd09c6aa4d497b83f8dfcd4ce0e8e966aa00b37\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd9c69d0be5e859f86da1745bdf82f003681f064e2580bfd454e6ba875bdcb61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:25Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.585866 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.604527 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.619760 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.637827 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 13:25:45 crc kubenswrapper[4762]: I1009 13:25:45.637864 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 13:25:45 crc kubenswrapper[4762]: E1009 13:25:45.637978 4762 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 09 13:25:45 crc kubenswrapper[4762]: E1009 13:25:45.638002 4762 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 09 13:25:45 crc kubenswrapper[4762]: E1009 13:25:45.638014 4762 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 09 13:25:45 crc kubenswrapper[4762]: E1009 13:25:45.638065 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-09 13:25:46.638050069 +0000 UTC m=+22.411841108 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 09 13:25:45 crc kubenswrapper[4762]: E1009 13:25:45.638118 4762 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 09 13:25:45 crc kubenswrapper[4762]: E1009 13:25:45.638153 4762 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 09 13:25:45 crc kubenswrapper[4762]: E1009 13:25:45.638918 4762 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 09 13:25:45 crc kubenswrapper[4762]: E1009 13:25:45.640005 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-09 13:25:46.639983128 +0000 UTC m=+22.413774197 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 09 13:25:46 crc kubenswrapper[4762]: I1009 13:25:46.108079 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Oct 09 13:25:46 crc kubenswrapper[4762]: I1009 13:25:46.111031 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"f33b9070a56fe51d2f39d9d509fc8cea2fada696703209c911b75f5c8f53e96d"} Oct 09 13:25:46 crc kubenswrapper[4762]: I1009 13:25:46.111204 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 09 13:25:46 crc kubenswrapper[4762]: I1009 13:25:46.113168 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"42c46f24d4579b9ef6d5f7a351830fc24872a571e6bc26a163bb5ace1e688037"} Oct 09 13:25:46 crc kubenswrapper[4762]: I1009 13:25:46.113224 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"c547c13e2feeb6c4218404f36a4864f31868751e77656df4cf66329771401bfe"} Oct 09 13:25:46 crc kubenswrapper[4762]: I1009 13:25:46.115164 4762 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" event={"ID":"366049a3-acf6-488c-9f93-4557528d6d14","Type":"ContainerStarted","Data":"fbaae79b08d94e58f88c25cf641c2c24edc8f8ed5d5ffbf5fd3c68b24246a964"} Oct 09 13:25:46 crc kubenswrapper[4762]: I1009 13:25:46.115197 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" event={"ID":"366049a3-acf6-488c-9f93-4557528d6d14","Type":"ContainerStarted","Data":"be56bae2e58091d7381288b22608ea1d9ff05c002d923b3dc62b87fe4d4dfdc6"} Oct 09 13:25:46 crc kubenswrapper[4762]: I1009 13:25:46.115239 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" event={"ID":"366049a3-acf6-488c-9f93-4557528d6d14","Type":"ContainerStarted","Data":"ded1d5d2c9d20522a192d92faa895d0a61bef0227635ef9a2765ef9e0eecbdf8"} Oct 09 13:25:46 crc kubenswrapper[4762]: I1009 13:25:46.116480 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-9wtqb" event={"ID":"c847aae6-277a-45dc-86d0-9b175f7e8177","Type":"ContainerStarted","Data":"cbc232c96b60c8678588d4902c8dfbf6fc0b30f8af768295c963aad3a9f4d644"} Oct 09 13:25:46 crc kubenswrapper[4762]: I1009 13:25:46.116506 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-9wtqb" event={"ID":"c847aae6-277a-45dc-86d0-9b175f7e8177","Type":"ContainerStarted","Data":"9ff8747be5b6f17f47f70fab402be149e9f1931e415caeda3f4911bfab6b67de"} Oct 09 13:25:46 crc kubenswrapper[4762]: I1009 13:25:46.118211 4762 generic.go:334] "Generic (PLEG): container finished" podID="cf8b8ba7-96cd-4cdd-9925-94dd98242050" containerID="38f8c3c9395cbaf4a6426349a070b2d3b4ba4f83af8f5272a33d617f456c2e38" exitCode=0 Oct 09 13:25:46 crc kubenswrapper[4762]: I1009 13:25:46.118262 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-n6lnd" event={"ID":"cf8b8ba7-96cd-4cdd-9925-94dd98242050","Type":"ContainerDied","Data":"38f8c3c9395cbaf4a6426349a070b2d3b4ba4f83af8f5272a33d617f456c2e38"} Oct 09 13:25:46 crc kubenswrapper[4762]: I1009 13:25:46.118325 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-n6lnd" event={"ID":"cf8b8ba7-96cd-4cdd-9925-94dd98242050","Type":"ContainerStarted","Data":"72762fee513cdc8c1b176a94f104ba9bef01f54887be37d3e1d374e752c604c6"} Oct 09 13:25:46 crc kubenswrapper[4762]: I1009 13:25:46.119669 4762 generic.go:334] "Generic (PLEG): container finished" podID="92662de9-9784-432a-92d2-a668f815e8fd" containerID="bd03e6b064630a1cd71d2e88a4e99d513b30d0f225516ce8030cba879fe2422e" exitCode=0 Oct 09 13:25:46 crc kubenswrapper[4762]: I1009 13:25:46.119689 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" event={"ID":"92662de9-9784-432a-92d2-a668f815e8fd","Type":"ContainerDied","Data":"bd03e6b064630a1cd71d2e88a4e99d513b30d0f225516ce8030cba879fe2422e"} Oct 09 13:25:46 crc kubenswrapper[4762]: I1009 13:25:46.119723 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" event={"ID":"92662de9-9784-432a-92d2-a668f815e8fd","Type":"ContainerStarted","Data":"f408a16ef88e2fa16ef441be0512eb3c8832da8b38734f105b06f7f0682d5668"} Oct 09 13:25:46 crc kubenswrapper[4762]: I1009 13:25:46.121601 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-2vkbh" 
event={"ID":"263d57f9-b10b-4ce1-adad-774600b977d8","Type":"ContainerStarted","Data":"732b66ff58c48b0703e0fd4585768652035af6797f66b586fc6f17ef3937d9d9"} Oct 09 13:25:46 crc kubenswrapper[4762]: I1009 13:25:46.121663 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-2vkbh" event={"ID":"263d57f9-b10b-4ce1-adad-774600b977d8","Type":"ContainerStarted","Data":"3a249abb38f525e5bbb4e931394d26a3b57d129aa7dda95ac7ca031b884e507b"} Oct 09 13:25:46 crc kubenswrapper[4762]: I1009 13:25:46.122672 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"44738f40cc8ed786ef8ae88ce91cc0ed5c0aaf1654fc26bc698dc6859a984cc5"} Oct 09 13:25:46 crc kubenswrapper[4762]: I1009 13:25:46.124304 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"3d2fc9df0c1cc14b4b9f8caff51e87059aeffaa2daeeb271d55585f7297d003c"} Oct 09 13:25:46 crc kubenswrapper[4762]: I1009 13:25:46.124330 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"4194b9211ce31f24383b3cabd274bfb9afef0c56583b802cb2c934ba81b05c43"} Oct 09 13:25:46 crc kubenswrapper[4762]: I1009 13:25:46.124345 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"4773e4d75a300a903e939217dc8934c628809e2e770211d6a9e2574f4b5441d6"} Oct 09 13:25:46 crc kubenswrapper[4762]: I1009 13:25:46.146936 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-n6lnd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf8b8ba7-96cd-4cdd-9925-94dd98242050\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-n6lnd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:46Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:46 crc kubenswrapper[4762]: I1009 13:25:46.175125 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:46Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:46 crc kubenswrapper[4762]: I1009 13:25:46.225588 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:46Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:46 crc kubenswrapper[4762]: I1009 13:25:46.242557 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:46Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:46 crc kubenswrapper[4762]: I1009 13:25:46.256251 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"366049a3-acf6-488c-9f93-4557528d6d14\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5v6hv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:46Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:46 crc kubenswrapper[4762]: I1009 13:25:46.269679 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dd0d2d4c-667f-43da-8074-b6e14823b755\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78a7e23eb6d5024d626963a06cf5790fcd6c7c17c82c823b2650c55273e427fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f807722a8b6059afed30f7f1fd32bcc168b8bf9d5eee02d74a42ab70ae5ff048\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1706f7f512083d1da015da3c7cd09c6aa4d497b83f8dfcd4ce0e8e966aa00b37\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd9c69d0be5e859f86da1745bdf82f003681f064e2580bfd454e6ba875bdcb61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:25Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:46Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:46 crc kubenswrapper[4762]: I1009 13:25:46.286472 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:46Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:46 crc kubenswrapper[4762]: I1009 13:25:46.302222 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:46Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:46 crc kubenswrapper[4762]: I1009 13:25:46.322807 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:46Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:46 crc kubenswrapper[4762]: I1009 13:25:46.342448 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"92662de9-9784-432a-92d2-a668f815e8fd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jl67g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:46Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:46 crc kubenswrapper[4762]: I1009 13:25:46.355662 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2vkbh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"263d57f9-b10b-4ce1-adad-774600b977d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gmcr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2vkbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:46Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:46 crc kubenswrapper[4762]: I1009 13:25:46.373697 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-9wtqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c847aae6-277a-45dc-86d0-9b175f7e8177\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2kljt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-9wtqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:46Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:46 crc kubenswrapper[4762]: I1009 13:25:46.389518 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7bd1ecbd-1492-4e6a-87e8-1c913e084d9d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9ff1efe69d256b491a039e5f35442c087ce3b52fc7abf98b338e24c3e020b99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb2071dd369674ca2de7de56dd1250c763b8733d72889b60eff864774dc3d81b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e44b6f2021a1a4ccd714f86443c7cc235b9d77cd455e68f7e042281ff0917569\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f33b9070a56fe51d2f39d9d509fc8cea2fada696703209c911b75f5c8f53e96d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9179f90a1a9a3c70467429b0471320ccf51b67f27c4d28d22ebc477cedab17d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1009 13:25:38.564754 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 13:25:38.572923 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2393231961/tls.crt::/tmp/serving-cert-2393231961/tls.key\\\\\\\"\\\\nI1009 13:25:44.418137 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 13:25:44.425303 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 13:25:44.425330 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 13:25:44.425348 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 13:25:44.425353 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 13:25:44.434300 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1009 13:25:44.434319 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1009 13:25:44.434323 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 13:25:44.434340 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 13:25:44.434344 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 13:25:44.434347 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 13:25:44.434350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 13:25:44.434353 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1009 13:25:44.436492 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:28Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://281873dc615f940d39a13cb0a18a2eb34eb7de3f9773d8845183edeb89d430f5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b0cc5e4351c64f1a4f07f8ec87ea48ddab393d4ac64228e8fbf20d3259fb630\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6b0cc5e4351c64f1a4f07f8ec87ea48ddab393d4ac64228e8fbf20d3259fb630\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:25Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:46Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:46 crc kubenswrapper[4762]: I1009 13:25:46.409190 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42c46f24d4579b9ef6d5f7a351830fc24872a571e6bc26a163bb5ace1e688037\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:46Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:46 crc kubenswrapper[4762]: I1009 13:25:46.427099 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:46Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:46 crc kubenswrapper[4762]: I1009 13:25:46.440841 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:46Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:46 crc kubenswrapper[4762]: I1009 13:25:46.460501 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"92662de9-9784-432a-92d2-a668f815e8fd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bd03e6b064630a1cd71d2e88a4e99d513b30d0f225516ce8030cba879fe2422e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd03e6b064630a1cd71d2e88a4e99d513b30d0f225516ce8030cba879fe2422e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jl67g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:46Z 
is after 2025-08-24T17:21:41Z" Oct 09 13:25:46 crc kubenswrapper[4762]: I1009 13:25:46.475125 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7bd1ecbd-1492-4e6a-87e8-1c913e084d9d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9ff1efe69d256b491a039e5f35442c087ce3b52fc7abf98b338e24c3e020b99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb2071dd369674ca2de7de56dd1250c763b8733d72889b60eff864774dc3d81b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e44b6f2021a1a4ccd714f86443c7cc235b9d77cd455e68f7e042281ff0917569\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\
\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f33b9070a56fe51d2f39d9d509fc8cea2fada696703209c911b75f5c8f53e96d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9179f90a1a9a3c70467429b0471320ccf51b67f27c4d28d22ebc477cedab17d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1009 13:25:38.564754 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 13:25:38.572923 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2393231961/tls.crt::/tmp/serving-cert-2393231961/tls.key\\\\\\\"\\\\nI1009 13:25:44.418137 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 13:25:44.425303 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 13:25:44.425330 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 13:25:44.425348 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 13:25:44.425353 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 13:25:44.434300 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1009 13:25:44.434319 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1009 13:25:44.434323 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 13:25:44.434340 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 13:25:44.434344 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 13:25:44.434347 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 13:25:44.434350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 13:25:44.434353 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1009 13:25:44.436492 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:28Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://281873dc615f940d39a13cb0a18a2eb34eb7de3f9773d8845183edeb89d430f5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b0cc5e4351c64f1a4f07f8ec87ea48ddab393d4ac64228e8fbf20d3259fb630\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6b0cc5e4351c64f1a4f07f8ec87ea48ddab393d4ac64228e8fbf20d3259fb630\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:25Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:46Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:46 crc kubenswrapper[4762]: I1009 13:25:46.490056 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2vkbh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"263d57f9-b10b-4ce1-adad-774600b977d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://732b66ff58c48b0703e0fd4585768652035af6797f66b586fc6f17ef3937d9d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gmcr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2vkbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:46Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:46 crc kubenswrapper[4762]: I1009 13:25:46.506871 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-9wtqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c847aae6-277a-45dc-86d0-9b175f7e8177\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbc232c96b60c8678588d4902c8dfbf6fc0b30f8af768295c963aad3a9f4d644\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2kljt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-9wtqb\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:46Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:46 crc kubenswrapper[4762]: I1009 13:25:46.521076 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:46Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:46 crc kubenswrapper[4762]: I1009 13:25:46.537849 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-n6lnd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf8b8ba7-96cd-4cdd-9925-94dd98242050\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin 
routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://38f8c3c9395cbaf4a6426349a070b2d3b4ba4f83af8f5272a33d617f456c2e38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://38f8c3c9395cbaf4a6426349a070b2d3b4ba4f83af8f5272a33d617f456c2e38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mou
ntPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursi
veReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-n6lnd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:46Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:46 crc kubenswrapper[4762]: I1009 13:25:46.550282 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 13:25:46 crc kubenswrapper[4762]: I1009 13:25:46.550423 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 13:25:46 crc kubenswrapper[4762]: I1009 13:25:46.550475 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 13:25:46 crc kubenswrapper[4762]: E1009 13:25:46.550569 4762 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 09 13:25:46 crc kubenswrapper[4762]: E1009 13:25:46.550626 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-09 13:25:48.550610101 +0000 UTC m=+24.324401140 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 09 13:25:46 crc kubenswrapper[4762]: E1009 13:25:46.550972 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 13:25:48.55096135 +0000 UTC m=+24.324752389 (durationBeforeRetry 2s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 13:25:46 crc kubenswrapper[4762]: E1009 13:25:46.551044 4762 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 09 13:25:46 crc kubenswrapper[4762]: E1009 13:25:46.551077 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-09 13:25:48.551066123 +0000 UTC m=+24.324857162 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 09 13:25:46 crc kubenswrapper[4762]: I1009 13:25:46.554441 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dd0d2d4c-667f-43da-8074-b6e14823b755\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78a7e23eb6d5024d626963a06cf5790fcd6c7c17c82c823b2650c55273e427fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f807722a8b6059afed30f7f1fd32bcc168b8bf9d5eee02d74a42ab70ae5ff048\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee122
0d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1706f7f512083d1da015da3c7cd09c6aa4d497b83f8dfcd4ce0e8e966aa00b37\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd9c69d0be5e859f86da1745bdf82f003681f064e2580bfd454e6ba875bdcb61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:25Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:46Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:46 crc kubenswrapper[4762]: I1009 13:25:46.568868 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:46Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:46 crc kubenswrapper[4762]: I1009 13:25:46.588877 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d2fc9df0c1cc14b4b9f8caff51e87059aeffaa2daeeb271d55585f7297d003c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4194b9211ce31f24383b3cabd274bfb9afef0c56583b802cb2c934ba81b05c43\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:46Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:46 crc kubenswrapper[4762]: I1009 13:25:46.612569 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"366049a3-acf6-488c-9f93-4557528d6d14\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbaae79b08d94e58f88c25cf641c2c24edc8f8ed5d5ffbf5fd3c68b24246a964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://be56bae2e58091d7381288b22608ea1d9ff05c002d923b3dc62b87fe4d4dfdc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5v6hv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:46Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:46 crc kubenswrapper[4762]: I1009 13:25:46.651492 4762 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 13:25:46 crc kubenswrapper[4762]: I1009 13:25:46.651562 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 13:25:46 crc kubenswrapper[4762]: E1009 13:25:46.651754 4762 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 09 13:25:46 crc kubenswrapper[4762]: E1009 13:25:46.651791 4762 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 09 13:25:46 crc kubenswrapper[4762]: E1009 13:25:46.651804 4762 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 09 13:25:46 crc kubenswrapper[4762]: E1009 13:25:46.651763 4762 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 09 13:25:46 crc kubenswrapper[4762]: E1009 13:25:46.651865 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-09 13:25:48.651845745 +0000 UTC m=+24.425636974 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 09 13:25:46 crc kubenswrapper[4762]: E1009 13:25:46.651890 4762 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 09 13:25:46 crc kubenswrapper[4762]: E1009 13:25:46.651909 4762 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 09 13:25:46 crc kubenswrapper[4762]: E1009 13:25:46.651975 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. 
No retries permitted until 2025-10-09 13:25:48.651955228 +0000 UTC m=+24.425746267 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 09 13:25:46 crc kubenswrapper[4762]: I1009 13:25:46.964576 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 13:25:46 crc kubenswrapper[4762]: E1009 13:25:46.964956 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 13:25:46 crc kubenswrapper[4762]: I1009 13:25:46.964643 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 13:25:46 crc kubenswrapper[4762]: E1009 13:25:46.965440 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 13:25:46 crc kubenswrapper[4762]: I1009 13:25:46.965468 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 13:25:46 crc kubenswrapper[4762]: E1009 13:25:46.965523 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 13:25:46 crc kubenswrapper[4762]: I1009 13:25:46.968370 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Oct 09 13:25:46 crc kubenswrapper[4762]: I1009 13:25:46.969126 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Oct 09 13:25:46 crc kubenswrapper[4762]: I1009 13:25:46.970422 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Oct 09 13:25:46 crc kubenswrapper[4762]: I1009 13:25:46.971114 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Oct 09 13:25:46 crc kubenswrapper[4762]: I1009 13:25:46.972165 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Oct 09 13:25:46 crc kubenswrapper[4762]: I1009 13:25:46.972768 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Oct 09 13:25:46 crc kubenswrapper[4762]: I1009 13:25:46.973466 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Oct 09 13:25:46 crc kubenswrapper[4762]: I1009 13:25:46.974578 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Oct 09 13:25:46 crc kubenswrapper[4762]: I1009 13:25:46.975252 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Oct 09 13:25:46 crc kubenswrapper[4762]: I1009 13:25:46.976183 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Oct 09 13:25:46 crc kubenswrapper[4762]: I1009 13:25:46.976814 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Oct 09 13:25:46 crc kubenswrapper[4762]: I1009 13:25:46.979011 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Oct 09 13:25:46 crc kubenswrapper[4762]: I1009 13:25:46.980197 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Oct 09 13:25:46 crc kubenswrapper[4762]: I1009 13:25:46.980794 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" 
path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Oct 09 13:25:46 crc kubenswrapper[4762]: I1009 13:25:46.983092 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Oct 09 13:25:46 crc kubenswrapper[4762]: I1009 13:25:46.983934 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Oct 09 13:25:46 crc kubenswrapper[4762]: I1009 13:25:46.984987 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Oct 09 13:25:46 crc kubenswrapper[4762]: I1009 13:25:46.985516 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Oct 09 13:25:46 crc kubenswrapper[4762]: I1009 13:25:46.986213 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Oct 09 13:25:46 crc kubenswrapper[4762]: I1009 13:25:46.987552 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Oct 09 13:25:46 crc kubenswrapper[4762]: I1009 13:25:46.988104 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Oct 09 13:25:46 crc kubenswrapper[4762]: I1009 13:25:46.994583 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Oct 09 13:25:46 crc kubenswrapper[4762]: I1009 13:25:46.995111 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Oct 09 13:25:46 crc kubenswrapper[4762]: I1009 13:25:46.996491 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Oct 09 13:25:46 crc kubenswrapper[4762]: I1009 13:25:46.997196 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Oct 09 13:25:46 crc kubenswrapper[4762]: I1009 13:25:46.997977 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Oct 09 13:25:46 crc kubenswrapper[4762]: I1009 13:25:46.999185 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Oct 09 13:25:46 crc kubenswrapper[4762]: I1009 13:25:46.999780 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" 
path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Oct 09 13:25:47 crc kubenswrapper[4762]: I1009 13:25:47.000801 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Oct 09 13:25:47 crc kubenswrapper[4762]: I1009 13:25:47.001407 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Oct 09 13:25:47 crc kubenswrapper[4762]: I1009 13:25:47.002329 4762 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Oct 09 13:25:47 crc kubenswrapper[4762]: I1009 13:25:47.002436 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Oct 09 13:25:47 crc kubenswrapper[4762]: I1009 13:25:47.004159 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Oct 09 13:25:47 crc kubenswrapper[4762]: I1009 13:25:47.005054 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Oct 09 13:25:47 crc kubenswrapper[4762]: I1009 13:25:47.005534 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Oct 09 13:25:47 crc kubenswrapper[4762]: I1009 13:25:47.007256 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Oct 09 13:25:47 crc kubenswrapper[4762]: I1009 13:25:47.008072 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Oct 09 13:25:47 crc kubenswrapper[4762]: I1009 13:25:47.009034 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Oct 09 13:25:47 crc kubenswrapper[4762]: I1009 13:25:47.009768 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Oct 09 13:25:47 crc kubenswrapper[4762]: I1009 13:25:47.010885 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Oct 09 13:25:47 crc kubenswrapper[4762]: I1009 13:25:47.011461 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Oct 09 13:25:47 crc kubenswrapper[4762]: I1009 13:25:47.012900 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" 
path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Oct 09 13:25:47 crc kubenswrapper[4762]: I1009 13:25:47.013775 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Oct 09 13:25:47 crc kubenswrapper[4762]: I1009 13:25:47.014946 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Oct 09 13:25:47 crc kubenswrapper[4762]: I1009 13:25:47.015491 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Oct 09 13:25:47 crc kubenswrapper[4762]: I1009 13:25:47.016682 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Oct 09 13:25:47 crc kubenswrapper[4762]: I1009 13:25:47.017364 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Oct 09 13:25:47 crc kubenswrapper[4762]: I1009 13:25:47.018420 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Oct 09 13:25:47 crc kubenswrapper[4762]: I1009 13:25:47.018997 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Oct 09 13:25:47 crc kubenswrapper[4762]: I1009 13:25:47.019837 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Oct 09 13:25:47 crc kubenswrapper[4762]: I1009 13:25:47.020293 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Oct 09 13:25:47 crc kubenswrapper[4762]: I1009 13:25:47.021199 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Oct 09 13:25:47 crc kubenswrapper[4762]: I1009 13:25:47.021908 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Oct 09 13:25:47 crc kubenswrapper[4762]: I1009 13:25:47.022430 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Oct 09 13:25:47 crc kubenswrapper[4762]: I1009 13:25:47.132194 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" event={"ID":"92662de9-9784-432a-92d2-a668f815e8fd","Type":"ContainerStarted","Data":"e1fe15b6fa2a4089c0ef0b19180a44b570bf28aeb719e8fb5c960c16f3bc3ee5"} Oct 09 13:25:47 crc kubenswrapper[4762]: I1009 13:25:47.132248 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" event={"ID":"92662de9-9784-432a-92d2-a668f815e8fd","Type":"ContainerStarted","Data":"9dbd3d536f2ff0e46947e1516b3b9def208d490f5e62bbde5bebf37690d26ac0"} Oct 09 13:25:47 crc kubenswrapper[4762]: I1009 13:25:47.132262 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" event={"ID":"92662de9-9784-432a-92d2-a668f815e8fd","Type":"ContainerStarted","Data":"5c409df5880861cef6885822a19dc9bbe481342a849c18ac11c85a60fcee0f15"} Oct 09 13:25:47 crc kubenswrapper[4762]: I1009 13:25:47.132276 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" event={"ID":"92662de9-9784-432a-92d2-a668f815e8fd","Type":"ContainerStarted","Data":"443d1d116c58f5d8b2c5fc9051baf914244cb0776b1f912d11fe4316a0ec0567"} Oct 09 13:25:47 crc kubenswrapper[4762]: I1009 13:25:47.132289 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" event={"ID":"92662de9-9784-432a-92d2-a668f815e8fd","Type":"ContainerStarted","Data":"4a6b026d474235bbb7b31530b4628a10c35b22baf4ee49759f28a9beb8177989"} Oct 09 13:25:47 crc kubenswrapper[4762]: I1009 13:25:47.136442 4762 generic.go:334] "Generic (PLEG): container finished" podID="cf8b8ba7-96cd-4cdd-9925-94dd98242050" containerID="7a83c8fa2a9b44c19879eaa27ee0aceb5aa4f0c2d70347e497a62fc1ca236807" exitCode=0 Oct 09 13:25:47 crc kubenswrapper[4762]: I1009 13:25:47.136473 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-n6lnd" event={"ID":"cf8b8ba7-96cd-4cdd-9925-94dd98242050","Type":"ContainerDied","Data":"7a83c8fa2a9b44c19879eaa27ee0aceb5aa4f0c2d70347e497a62fc1ca236807"} Oct 09 13:25:47 crc kubenswrapper[4762]: I1009 13:25:47.153449 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:47Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:47 crc kubenswrapper[4762]: I1009 13:25:47.167205 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-n6lnd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf8b8ba7-96cd-4cdd-9925-94dd98242050\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://38f8c3c9395cbaf4a6426349a070b2d3b4ba4f83af8f5272a33d617f456c2e38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://38f8c3c9395cbaf4a6426349a070b2d3b4ba4f83af8f5272a33d617f456c2e38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a83c8fa2a9b44c19879eaa27ee0aceb5aa4f0c2d70347e497a62fc1ca236807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a83c8fa2a9b44c19879eaa27ee0aceb5aa4f0c2d70347e497a62fc1ca236807\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-
09T13:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-n6lnd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:47Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:47 crc kubenswrapper[4762]: I1009 13:25:47.190529 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d2fc9df0c1cc14b4b9f8caff51e87059aeffaa2daeeb271d55585f7297d003c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4194b9211ce31f24383b3cabd274bfb9afef0c56583b802cb2c934ba81b05c43\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: 
failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:47Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:47 crc kubenswrapper[4762]: I1009 13:25:47.203789 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"366049a3-acf6-488c-9f93-4557528d6d14\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbaae79b08d94e58f88c25cf641c2c24edc8f8ed5d5ffbf5fd3c68b24246a964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://be56bae2e58091d7381288b22608ea1d9ff05c002d923b3dc62b87fe4d4dfdc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5v6hv\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:47Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:47 crc kubenswrapper[4762]: I1009 13:25:47.218903 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dd0d2d4c-667f-43da-8074-b6e14823b755\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78a7e23eb6d5024d626963a06cf5790fcd6c7c17c82c823b2650c55273e427fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f807722a8b6059afed30f7f1fd32bcc168b8bf9d5eee02d74a42ab70ae5ff048\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1706f7f512083d1da015da3c7cd09c6aa4d497b83f8dfcd4ce0e8e966aa00b37\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"st
arted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd9c69d0be5e859f86da1745bdf82f003681f064e2580bfd454e6ba875bdcb61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:25Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:47Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:47 crc kubenswrapper[4762]: I1009 13:25:47.232778 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:47Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:47 crc kubenswrapper[4762]: I1009 13:25:47.247860 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:47Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:47 crc kubenswrapper[4762]: I1009 13:25:47.272337 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"92662de9-9784-432a-92d2-a668f815e8fd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bd03e6b064630a1cd71d2e88a4e99d513b30d0f225516ce8030cba879fe2422e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd03e6b064630a1cd71d2e88a4e99d513b30d0f225516ce8030cba879fe2422e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jl67g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:47Z 
is after 2025-08-24T17:21:41Z" Oct 09 13:25:47 crc kubenswrapper[4762]: I1009 13:25:47.285879 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42c46f24d4579b9ef6d5f7a351830fc24872a571e6bc26a163bb5ace1e688037\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:47Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:47 crc kubenswrapper[4762]: I1009 13:25:47.299021 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:47Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:47 crc kubenswrapper[4762]: I1009 13:25:47.325610 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-9wtqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c847aae6-277a-45dc-86d0-9b175f7e8177\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbc232c96b60c8678588d4902c8dfbf6fc0b30f8af768295c963aad3a9f4d644\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2kljt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-9wtqb\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:47Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:47 crc kubenswrapper[4762]: I1009 13:25:47.351766 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7bd1ecbd-1492-4e6a-87e8-1c913e084d9d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9ff1efe69d256b491a039e5f35442c087ce3b52fc7abf98b338e24c3e020b99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb2071dd369674ca2de7de56dd1250c763b8733d72889b60eff864774dc3d81b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e44b6f2021a1a4ccd714f86443c7cc235b9d77cd455e68f7e042281ff0917569\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.i
o/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f33b9070a56fe51d2f39d9d509fc8cea2fada696703209c911b75f5c8f53e96d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9179f90a1a9a3c70467429b0471320ccf51b67f27c4d28d22ebc477cedab17d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1009 13:25:38.564754 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 13:25:38.572923 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2393231961/tls.crt::/tmp/serving-cert-2393231961/tls.key\\\\\\\"\\\\nI1009 13:25:44.418137 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 13:25:44.425303 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 13:25:44.425330 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 13:25:44.425348 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 13:25:44.425353 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 13:25:44.434300 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1009 13:25:44.434319 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1009 13:25:44.434323 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 13:25:44.434340 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 13:25:44.434344 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 13:25:44.434347 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 13:25:44.434350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 13:25:44.434353 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1009 13:25:44.436492 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:28Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://281873dc615f940d39a13cb0a18a2eb34eb7de3f9773d8845183edeb89d430f5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b0cc5e4351c64f1a4f07f8ec87ea48ddab393d4ac64228e8fbf20d3259fb630\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6b0cc5e4351c64f1a4f07f8ec87ea48ddab393d4ac64228e8fbf20d3259fb630\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:25Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:47Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:47 crc kubenswrapper[4762]: I1009 13:25:47.362898 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2vkbh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"263d57f9-b10b-4ce1-adad-774600b977d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://732b66ff58c48b0703e0fd4585768652035af6797f66b586fc6f17ef3937d9d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gmcr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2vkbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:47Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:48 crc kubenswrapper[4762]: I1009 13:25:48.142554 4762 generic.go:334] "Generic (PLEG): container finished" podID="cf8b8ba7-96cd-4cdd-9925-94dd98242050" containerID="ece4883665cbaf9a3045d5ddde5584cc4cd6c3acb5a8bb3acbf7eaee644796e9" exitCode=0 Oct 09 13:25:48 crc kubenswrapper[4762]: I1009 13:25:48.142599 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-n6lnd" event={"ID":"cf8b8ba7-96cd-4cdd-9925-94dd98242050","Type":"ContainerDied","Data":"ece4883665cbaf9a3045d5ddde5584cc4cd6c3acb5a8bb3acbf7eaee644796e9"} Oct 09 13:25:48 crc kubenswrapper[4762]: I1009 13:25:48.150362 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" event={"ID":"92662de9-9784-432a-92d2-a668f815e8fd","Type":"ContainerStarted","Data":"1a6b9ddcf6f9632e0ab1ac7f145c90d4c1e404b44f6e4fdc547fa42a4736448d"} Oct 09 13:25:48 crc kubenswrapper[4762]: I1009 13:25:48.153220 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" 
event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"b59b56cd547bd0e2f72ad712b04651a4ae65cf3a0df7865c0b0c16478261a06a"} Oct 09 13:25:48 crc kubenswrapper[4762]: I1009 13:25:48.168429 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-9wtqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c847aae6-277a-45dc-86d0-9b175f7e8177\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbc232c96b60c8678588d4902c8dfbf6fc0b30f8af768295c963aad3a9f4d644\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2kljt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\
",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-9wtqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:48Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:48 crc kubenswrapper[4762]: I1009 13:25:48.194556 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7bd1ecbd-1492-4e6a-87e8-1c913e084d9d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9ff1efe69d256b491a039e5f35442c087ce3b52fc7abf98b338e24c3e020b99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb2071dd369674ca2de7de56dd1250c763b8733d72889b60eff864774dc3d81b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\
",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e44b6f2021a1a4ccd714f86443c7cc235b9d77cd455e68f7e042281ff0917569\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f33b9070a56fe51d2f39d9d509fc8cea2fada696703209c911b75f5c8f53e96d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9179f90a1a9a3c70467429b0471320ccf51b67f27c4d28d22ebc477cedab17d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1009 13:25:38.564754 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 13:25:38.572923 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2393231961/tls.crt::/tmp/serving-cert-2393231961/tls.key\\\\\\\"\\\\nI1009 13:25:44.418137 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 13:25:44.425303 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 13:25:44.425330 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 13:25:44.425348 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 13:25:44.425353 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 13:25:44.434300 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1009 13:25:44.434319 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1009 13:25:44.434323 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 13:25:44.434340 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 13:25:44.434344 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 13:25:44.434347 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 13:25:44.434350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 13:25:44.434353 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' 
detected.\\\\nF1009 13:25:44.436492 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:28Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://281873dc615f940d39a13cb0a18a2eb34eb7de3f9773d8845183edeb89d430f5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b0cc5e4351c64f1a4f07f8ec87ea48ddab393d4ac64228e8fbf20d3259fb630\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6b0cc5e4351c64f1a4f07f8ec87ea48ddab393d4ac64228e8fbf20d3259fb630\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:25Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:48Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:48 crc kubenswrapper[4762]: I1009 13:25:48.209490 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2vkbh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"263d57f9-b10b-4ce1-adad-774600b977d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://732b66ff58c48b0703e0fd4585768652035af6797f66b586fc6f17ef3937d9d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gmcr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2vkbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:48Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:48 crc kubenswrapper[4762]: I1009 13:25:48.230127 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:48Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:48 crc kubenswrapper[4762]: I1009 13:25:48.251028 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-n6lnd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf8b8ba7-96cd-4cdd-9925-94dd98242050\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://38f8c3c9395cbaf4a6426349a070b2d3b4ba4f83af8f5272a33d617f456c2e38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://38f8c3c9395cbaf4a6426349a070b2d3b4ba4f83af8f5272a33d617f456c2e38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a83c8fa2a9b44c19879eaa27ee0aceb5aa4f0c2d70347e497a62fc1ca236807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a83c8fa2a9b44c19879eaa27ee0aceb5aa4f0c2d70347e497a62fc1ca236807\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ece4883665cbaf9a3045d5ddde5584cc4cd6c3acb5a8bb3acbf7eaee644796e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ece4883665cbaf9a3045d5ddde5584cc4cd6c3acb5a8bb3acbf7eaee644796e9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/
cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-n6lnd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:48Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:48 crc kubenswrapper[4762]: I1009 13:25:48.271512 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d2fc9df0c1cc14b4b9f8caff51e87059aeffaa2daeeb271d55585f7297d003c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4194b9211ce31f24383b3cabd274bfb9afef0c56583b802cb2c934ba81b05c43\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mo
untPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:48Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:48 crc kubenswrapper[4762]: I1009 13:25:48.291961 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"366049a3-acf6-488c-9f93-4557528d6d14\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbaae79b08d94e58f88c25cf641c2c24edc8f8ed5d5ffbf5fd3c68b24246a964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://be56bae2e58091d7381288b22608ea1d9ff05c002d923b3dc62b87fe4d4dfdc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhqx\\\",\\\"readOnly\\\":true,\\\"recursiveR
eadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5v6hv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:48Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:48 crc kubenswrapper[4762]: I1009 13:25:48.311201 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dd0d2d4c-667f-43da-8074-b6e14823b755\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78a7e23eb6d5024d626963a06cf5790fcd6c7c17c82c823b2650c55273e427fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f807722a8b6059afed30f7f1fd32bcc168b8bf9d5eee02d74a42ab70ae5ff048\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1706f7f512083d1da015da3c7cd09c6aa4d497b83f8dfcd4ce0e8e966aa00b37\\\",\\\"image\\\":\\\"quay.io/crcont/
openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd9c69d0be5e859f86da1745bdf82f003681f064e2580bfd454e6ba875bdcb61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:25Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:48Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:48 crc kubenswrapper[4762]: I1009 13:25:48.331920 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:48Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:48 crc kubenswrapper[4762]: I1009 13:25:48.345076 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:48Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:48 crc kubenswrapper[4762]: I1009 13:25:48.370821 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"92662de9-9784-432a-92d2-a668f815e8fd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bd03e6b064630a1cd71d2e88a4e99d513b30d0f225516ce8030cba879fe2422e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd03e6b064630a1cd71d2e88a4e99d513b30d0f225516ce8030cba879fe2422e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jl67g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:48Z 
is after 2025-08-24T17:21:41Z" Oct 09 13:25:48 crc kubenswrapper[4762]: I1009 13:25:48.389303 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42c46f24d4579b9ef6d5f7a351830fc24872a571e6bc26a163bb5ace1e688037\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:48Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:48 crc kubenswrapper[4762]: I1009 13:25:48.404920 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:48Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:48 crc kubenswrapper[4762]: I1009 13:25:48.419420 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:48Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:48 crc kubenswrapper[4762]: I1009 13:25:48.432404 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d2fc9df0c1cc14b4b9f8caff51e87059aeffaa2daeeb271d55585f7297d003c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4194b9211ce31f24383b3cabd274bfb9afef0c56583b802cb2c934ba81b05c43\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:48Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:48 crc kubenswrapper[4762]: I1009 13:25:48.451396 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"366049a3-acf6-488c-9f93-4557528d6d14\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbaae79b08d94e58f88c25cf641c2c24edc8f8ed5d5ffbf5fd3c68b24246a964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://be56bae2e58091d7381288b22608ea1d9ff05c002d923b3dc62b87fe4d4dfdc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5v6hv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:48Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:48 crc kubenswrapper[4762]: I1009 13:25:48.468822 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dd0d2d4c-667f-43da-8074-b6e14823b755\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78a7e23eb6d5024d626963a06cf5790fcd6c7c17c82c823b2650c55273e427fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f807722a8b6059afed30f7f1fd32bcc168b8bf9d5eee02d74a42ab70ae5ff048\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://170
6f7f512083d1da015da3c7cd09c6aa4d497b83f8dfcd4ce0e8e966aa00b37\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd9c69d0be5e859f86da1745bdf82f003681f064e2580bfd454e6ba875bdcb61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:25Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:48Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:48 crc kubenswrapper[4762]: I1009 13:25:48.484005 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b59b56cd547bd0e2f72ad712b04651a4ae65cf3a0df7865c0b0c16478261a06a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:48Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:48 crc kubenswrapper[4762]: I1009 13:25:48.496742 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:48Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:48 crc kubenswrapper[4762]: I1009 13:25:48.516168 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"92662de9-9784-432a-92d2-a668f815e8fd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bd03e6b064630a1cd71d2e88a4e99d513b30d0f225516ce8030cba879fe2422e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd03e6b064630a1cd71d2e88a4e99d513b30d0f225516ce8030cba879fe2422e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jl67g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:48Z 
is after 2025-08-24T17:21:41Z" Oct 09 13:25:48 crc kubenswrapper[4762]: I1009 13:25:48.528380 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42c46f24d4579b9ef6d5f7a351830fc24872a571e6bc26a163bb5ace1e688037\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:48Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:48 crc kubenswrapper[4762]: I1009 13:25:48.539478 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2vkbh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"263d57f9-b10b-4ce1-adad-774600b977d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://732b66ff58c48b0703e0fd4585768652035af6797f66b586fc6f17ef3937d9d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gmcr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2vkbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:48Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:48 crc kubenswrapper[4762]: I1009 13:25:48.551897 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-9wtqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c847aae6-277a-45dc-86d0-9b175f7e8177\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbc232c96b60c8678588d4902c8dfbf6fc0b30f8af768295c963aad3a9f4d644\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2kljt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-9wtqb\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:48Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:48 crc kubenswrapper[4762]: I1009 13:25:48.568533 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7bd1ecbd-1492-4e6a-87e8-1c913e084d9d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9ff1efe69d256b491a039e5f35442c087ce3b52fc7abf98b338e24c3e020b99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb2071dd369674ca2de7de56dd1250c763b8733d72889b60eff864774dc3d81b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e44b6f2021a1a4ccd714f86443c7cc235b9d77cd455e68f7e042281ff0917569\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.i
o/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f33b9070a56fe51d2f39d9d509fc8cea2fada696703209c911b75f5c8f53e96d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9179f90a1a9a3c70467429b0471320ccf51b67f27c4d28d22ebc477cedab17d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1009 13:25:38.564754 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 13:25:38.572923 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2393231961/tls.crt::/tmp/serving-cert-2393231961/tls.key\\\\\\\"\\\\nI1009 13:25:44.418137 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 13:25:44.425303 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 13:25:44.425330 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 13:25:44.425348 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 13:25:44.425353 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 13:25:44.434300 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1009 13:25:44.434319 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1009 13:25:44.434323 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 13:25:44.434340 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 13:25:44.434344 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 13:25:44.434347 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 13:25:44.434350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 13:25:44.434353 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1009 13:25:44.436492 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:28Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://281873dc615f940d39a13cb0a18a2eb34eb7de3f9773d8845183edeb89d430f5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b0cc5e4351c64f1a4f07f8ec87ea48ddab393d4ac64228e8fbf20d3259fb630\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6b0cc5e4351c64f1a4f07f8ec87ea48ddab393d4ac64228e8fbf20d3259fb630\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:25Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:48Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:48 crc kubenswrapper[4762]: I1009 13:25:48.572018 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 13:25:48 crc kubenswrapper[4762]: I1009 13:25:48.572118 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 13:25:48 crc 
kubenswrapper[4762]: I1009 13:25:48.572161 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 13:25:48 crc kubenswrapper[4762]: E1009 13:25:48.572245 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 13:25:52.57221371 +0000 UTC m=+28.346004759 (durationBeforeRetry 4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 13:25:48 crc kubenswrapper[4762]: E1009 13:25:48.572279 4762 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 09 13:25:48 crc kubenswrapper[4762]: E1009 13:25:48.572298 4762 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 09 13:25:48 crc kubenswrapper[4762]: E1009 13:25:48.572402 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-09 13:25:52.572371284 +0000 UTC m=+28.346162363 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 09 13:25:48 crc kubenswrapper[4762]: E1009 13:25:48.572429 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-09 13:25:52.572416975 +0000 UTC m=+28.346208024 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 09 13:25:48 crc kubenswrapper[4762]: I1009 13:25:48.584886 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:48Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:48 crc kubenswrapper[4762]: I1009 13:25:48.602769 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-n6lnd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf8b8ba7-96cd-4cdd-9925-94dd98242050\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://38f8c3c9395cbaf4a6426349a070b2d3b4ba4f83af8f5272a33d617f456c2e38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://38f8c3c9395cbaf4a6426349a070b2d3b4ba4f83af8f5272a33d617f456c2e38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a83c8fa2a9b44c19879eaa27ee0aceb5aa4f0c2d70347e497a62fc1ca236807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a83c8fa2a9b44c19879eaa27ee0aceb5aa4f0c2d70347e497a62fc1ca236807\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ece4883665cbaf9a3045d5ddde5584cc4cd6c3acb5a8bb3acbf7eaee644796e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ece4883665cbaf9a3045d5ddde5584cc4cd6c3acb5a8bb3acbf7eaee644796e9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/
cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-n6lnd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:48Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:48 crc kubenswrapper[4762]: I1009 13:25:48.673614 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 13:25:48 crc kubenswrapper[4762]: I1009 13:25:48.673740 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 13:25:48 crc kubenswrapper[4762]: E1009 13:25:48.673889 4762 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 09 13:25:48 crc kubenswrapper[4762]: E1009 13:25:48.673960 4762 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 09 13:25:48 crc kubenswrapper[4762]: E1009 13:25:48.673983 4762 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 09 13:25:48 crc kubenswrapper[4762]: E1009 13:25:48.674067 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-09 13:25:52.674040119 +0000 UTC m=+28.447831198 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 09 13:25:48 crc kubenswrapper[4762]: E1009 13:25:48.673914 4762 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 09 13:25:48 crc kubenswrapper[4762]: E1009 13:25:48.674107 4762 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 09 13:25:48 crc kubenswrapper[4762]: E1009 13:25:48.674126 4762 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 09 13:25:48 crc kubenswrapper[4762]: E1009 13:25:48.674208 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-09 13:25:52.674187752 +0000 UTC m=+28.447978871 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 09 13:25:48 crc kubenswrapper[4762]: I1009 13:25:48.965079 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 13:25:48 crc kubenswrapper[4762]: I1009 13:25:48.965098 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 13:25:48 crc kubenswrapper[4762]: E1009 13:25:48.965234 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 13:25:48 crc kubenswrapper[4762]: E1009 13:25:48.965674 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 13:25:48 crc kubenswrapper[4762]: I1009 13:25:48.965818 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 13:25:48 crc kubenswrapper[4762]: E1009 13:25:48.966128 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 13:25:49 crc kubenswrapper[4762]: I1009 13:25:49.159968 4762 generic.go:334] "Generic (PLEG): container finished" podID="cf8b8ba7-96cd-4cdd-9925-94dd98242050" containerID="1aae59e2ff2b1d48f9d2b5d13c30ccc155f0ba8e2657ec99020d6c6a8977495e" exitCode=0 Oct 09 13:25:49 crc kubenswrapper[4762]: I1009 13:25:49.160062 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-n6lnd" event={"ID":"cf8b8ba7-96cd-4cdd-9925-94dd98242050","Type":"ContainerDied","Data":"1aae59e2ff2b1d48f9d2b5d13c30ccc155f0ba8e2657ec99020d6c6a8977495e"} Oct 09 13:25:49 crc kubenswrapper[4762]: I1009 13:25:49.177974 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:49Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:49 crc kubenswrapper[4762]: I1009 13:25:49.196293 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-n6lnd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf8b8ba7-96cd-4cdd-9925-94dd98242050\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://38f8c3c9395cbaf4a6426349a070b2d3b4ba4f83af8f5272a33d617f456c2e38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://38f8c3c9395cbaf4a6426349a070b2d3b4ba4f83af8f5272a33d617f456c2e38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a83c8fa2a9b44c19879eaa27ee0aceb5aa4f0c2d70347e497a62fc1ca236807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a83c8fa2a9b44c19879eaa27ee0aceb5aa4f0c2d70347e497a62fc1ca236807\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ece4883665cbaf9a3045d5ddde5584cc4cd6c3acb5a8bb3acbf7eaee644796e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ece4883665cbaf9a3045d5ddde5584cc4cd6c3acb5a8bb3acbf7eaee644796e9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1aae59e2ff2b1d48f9d2b5d13c30ccc155f0ba8e2657ec99020d6c6a8977495e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1aae59e2ff2b1d48f9d2b5d13c30ccc155f0ba8e2657ec99020d6c6a8977495e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disa
bled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-n6lnd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:49Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:49 crc kubenswrapper[4762]: I1009 13:25:49.214258 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dd0d2d4c-667f-43da-8074-b6e14823b755\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78a7e23eb6d5024d626963a06cf5790fcd6c7c17c82c823b2650c55273e427fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f807722a8b6059afed30f7f1fd32bcc168b8bf9d5eee02d74a42ab70ae5ff048\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de259712
6bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1706f7f512083d1da015da3c7cd09c6aa4d497b83f8dfcd4ce0e8e966aa00b37\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd9c69d0be5e859f86da1745bdf82f003681f064e2580bfd454e6ba875bdcb61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:25Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:49Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:49 crc kubenswrapper[4762]: I1009 13:25:49.226375 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:49Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:49 crc kubenswrapper[4762]: I1009 13:25:49.241347 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d2fc9df0c1cc14b4b9f8caff51e87059aeffaa2daeeb271d55585f7297d003c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4194b9211ce31f24383b3cabd274bfb9afef0c56583b802cb2c934ba81b05c43\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:49Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:49 crc kubenswrapper[4762]: I1009 13:25:49.252678 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"366049a3-acf6-488c-9f93-4557528d6d14\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbaae79b08d94e58f88c25cf641c2c24edc8f8ed5d5ffbf5fd3c68b24246a964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://be56bae2e58091d7381288b22608ea1d9ff05c002d923b3dc62b87fe4d4dfdc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5v6hv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:49Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:49 crc kubenswrapper[4762]: I1009 13:25:49.264916 4762 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42c46f24d4579b9ef6d5f7a351830fc24872a571e6bc26a163bb5ace1e688037\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:49Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:49 crc kubenswrapper[4762]: I1009 13:25:49.280138 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b59b56cd547bd0e2f72ad712b04651a4ae65cf3a0df7865c0b0c16478261a06a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:49Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:49 crc kubenswrapper[4762]: I1009 13:25:49.293832 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:49Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:49 crc kubenswrapper[4762]: I1009 13:25:49.312232 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"92662de9-9784-432a-92d2-a668f815e8fd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bd03e6b064630a1cd71d2e88a4e99d513b30d0f225516ce8030cba879fe2422e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd03e6b064630a1cd71d2e88a4e99d513b30d0f225516ce8030cba879fe2422e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jl67g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:49Z 
is after 2025-08-24T17:21:41Z" Oct 09 13:25:49 crc kubenswrapper[4762]: I1009 13:25:49.325205 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7bd1ecbd-1492-4e6a-87e8-1c913e084d9d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9ff1efe69d256b491a039e5f35442c087ce3b52fc7abf98b338e24c3e020b99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb2071dd369674ca2de7de56dd1250c763b8733d72889b60eff864774dc3d81b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e44b6f2021a1a4ccd714f86443c7cc235b9d77cd455e68f7e042281ff0917569\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\
\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f33b9070a56fe51d2f39d9d509fc8cea2fada696703209c911b75f5c8f53e96d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9179f90a1a9a3c70467429b0471320ccf51b67f27c4d28d22ebc477cedab17d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1009 13:25:38.564754 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 13:25:38.572923 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2393231961/tls.crt::/tmp/serving-cert-2393231961/tls.key\\\\\\\"\\\\nI1009 13:25:44.418137 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 13:25:44.425303 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 13:25:44.425330 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 13:25:44.425348 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 13:25:44.425353 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 13:25:44.434300 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1009 13:25:44.434319 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1009 13:25:44.434323 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 13:25:44.434340 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 13:25:44.434344 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 13:25:44.434347 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 13:25:44.434350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 13:25:44.434353 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1009 13:25:44.436492 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:28Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://281873dc615f940d39a13cb0a18a2eb34eb7de3f9773d8845183edeb89d430f5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b0cc5e4351c64f1a4f07f8ec87ea48ddab393d4ac64228e8fbf20d3259fb630\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6b0cc5e4351c64f1a4f07f8ec87ea48ddab393d4ac64228e8fbf20d3259fb630\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:25Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:49Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:49 crc kubenswrapper[4762]: I1009 13:25:49.336496 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2vkbh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"263d57f9-b10b-4ce1-adad-774600b977d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://732b66ff58c48b0703e0fd4585768652035af6797f66b586fc6f17ef3937d9d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gmcr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2vkbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:49Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:49 crc kubenswrapper[4762]: I1009 13:25:49.348165 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-9wtqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c847aae6-277a-45dc-86d0-9b175f7e8177\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbc232c96b60c8678588d4902c8dfbf6fc0b30f8af768295c963aad3a9f4d644\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2kljt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-9wtqb\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:49Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:50 crc kubenswrapper[4762]: I1009 13:25:50.169414 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" event={"ID":"92662de9-9784-432a-92d2-a668f815e8fd","Type":"ContainerStarted","Data":"8f5be977653547c33b4d2d5184688120b32866045e3b18a08be1c7c406d6b498"} Oct 09 13:25:50 crc kubenswrapper[4762]: I1009 13:25:50.172891 4762 generic.go:334] "Generic (PLEG): container finished" podID="cf8b8ba7-96cd-4cdd-9925-94dd98242050" containerID="96f1ff0c4b2b07617d5ea81b1d88ea74124e4739d667f1ad72eb36dad3d48e7f" exitCode=0 Oct 09 13:25:50 crc kubenswrapper[4762]: I1009 13:25:50.172946 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-n6lnd" event={"ID":"cf8b8ba7-96cd-4cdd-9925-94dd98242050","Type":"ContainerDied","Data":"96f1ff0c4b2b07617d5ea81b1d88ea74124e4739d667f1ad72eb36dad3d48e7f"} Oct 09 13:25:50 crc kubenswrapper[4762]: I1009 13:25:50.194276 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:50Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:50 crc kubenswrapper[4762]: I1009 13:25:50.212945 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-n6lnd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf8b8ba7-96cd-4cdd-9925-94dd98242050\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://38f8c3c9395cbaf4a6426349a070b2d3b4ba4f83af8f5272a33d617f456c2e38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://38f8c3c9395cbaf4a6426349a070b2d3b4ba4f83af8f5272a33d617f456c2e38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a83c8fa2a9b44c19879eaa27ee0aceb5aa4f0c2d70347e497a62fc1ca236807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a83c8fa2a9b44c19879eaa27ee0aceb5aa4f0c2d70347e497a62fc1ca236807\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ece4883665cbaf9a3045d5ddde5584cc4cd6c3acb5a8bb3acbf7eaee644796e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ece4883665cbaf9a3045d5ddde5584cc4cd6c3acb5a8bb3acbf7eaee644796e9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1aae59e2ff2b1d48f9d2b5d13c30ccc155f0ba8e2657ec99020d6c6a8977495e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1aae59e2ff2b1d48f9d2b5d13c30ccc155f0ba8e2657ec99020d6c6a8977495e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f1ff0c4b2b07617d5ea81b1d88ea74124e4739d667f1ad72eb36dad3d48e7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96f1ff0c4b2b07617d5ea81b1d88ea74124e4739d667f1ad72eb36dad3d48e7f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-n6lnd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:50Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:50 crc kubenswrapper[4762]: I1009 13:25:50.231846 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:50Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:50 crc kubenswrapper[4762]: I1009 13:25:50.244343 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d2fc9df0c1cc14b4b9f8caff51e87059aeffaa2daeeb271d55585f7297d003c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4194b9211ce31f24383b3cabd274bfb9afef0c56583b802cb2c934ba81b05c43\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:50Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:50 crc kubenswrapper[4762]: I1009 13:25:50.258239 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"366049a3-acf6-488c-9f93-4557528d6d14\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbaae79b08d94e58f88c25cf641c2c24edc8f8ed5d5ffbf5fd3c68b24246a964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://be56bae2e58091d7381288b22608ea1d9ff05c002d923b3dc62b87fe4d4dfdc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5v6hv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:50Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:50 crc kubenswrapper[4762]: I1009 13:25:50.273054 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dd0d2d4c-667f-43da-8074-b6e14823b755\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78a7e23eb6d5024d626963a06cf5790fcd6c7c17c82c823b2650c55273e427fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f807722a8b6059afed30f7f1fd32bcc168b8bf9d5eee02d74a42ab70ae5ff048\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://170
6f7f512083d1da015da3c7cd09c6aa4d497b83f8dfcd4ce0e8e966aa00b37\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd9c69d0be5e859f86da1745bdf82f003681f064e2580bfd454e6ba875bdcb61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:25Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:50Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:50 crc kubenswrapper[4762]: I1009 13:25:50.285463 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b59b56cd547bd0e2f72ad712b04651a4ae65cf3a0df7865c0b0c16478261a06a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:50Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:50 crc kubenswrapper[4762]: I1009 13:25:50.297647 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:50Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:50 crc kubenswrapper[4762]: I1009 13:25:50.317136 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"92662de9-9784-432a-92d2-a668f815e8fd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bd03e6b064630a1cd71d2e88a4e99d513b30d0f225516ce8030cba879fe2422e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd03e6b064630a1cd71d2e88a4e99d513b30d0f225516ce8030cba879fe2422e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jl67g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:50Z 
is after 2025-08-24T17:21:41Z" Oct 09 13:25:50 crc kubenswrapper[4762]: I1009 13:25:50.329742 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42c46f24d4579b9ef6d5f7a351830fc24872a571e6bc26a163bb5ace1e688037\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:50Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:50 crc kubenswrapper[4762]: I1009 13:25:50.339801 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2vkbh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"263d57f9-b10b-4ce1-adad-774600b977d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://732b66ff58c48b0703e0fd4585768652035af6797f66b586fc6f17ef3937d9d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gmcr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2vkbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:50Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:50 crc kubenswrapper[4762]: I1009 13:25:50.352827 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-9wtqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c847aae6-277a-45dc-86d0-9b175f7e8177\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbc232c96b60c8678588d4902c8dfbf6fc0b30f8af768295c963aad3a9f4d644\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2kljt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-9wtqb\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:50Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:50 crc kubenswrapper[4762]: I1009 13:25:50.365025 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7bd1ecbd-1492-4e6a-87e8-1c913e084d9d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9ff1efe69d256b491a039e5f35442c087ce3b52fc7abf98b338e24c3e020b99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb2071dd369674ca2de7de56dd1250c763b8733d72889b60eff864774dc3d81b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e44b6f2021a1a4ccd714f86443c7cc235b9d77cd455e68f7e042281ff0917569\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.i
o/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f33b9070a56fe51d2f39d9d509fc8cea2fada696703209c911b75f5c8f53e96d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9179f90a1a9a3c70467429b0471320ccf51b67f27c4d28d22ebc477cedab17d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1009 13:25:38.564754 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 13:25:38.572923 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2393231961/tls.crt::/tmp/serving-cert-2393231961/tls.key\\\\\\\"\\\\nI1009 13:25:44.418137 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 13:25:44.425303 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 13:25:44.425330 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 13:25:44.425348 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 13:25:44.425353 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 13:25:44.434300 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1009 13:25:44.434319 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1009 13:25:44.434323 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 13:25:44.434340 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 13:25:44.434344 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 13:25:44.434347 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 13:25:44.434350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 13:25:44.434353 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1009 13:25:44.436492 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:28Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://281873dc615f940d39a13cb0a18a2eb34eb7de3f9773d8845183edeb89d430f5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b0cc5e4351c64f1a4f07f8ec87ea48ddab393d4ac64228e8fbf20d3259fb630\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6b0cc5e4351c64f1a4f07f8ec87ea48ddab393d4ac64228e8fbf20d3259fb630\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:25Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:50Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:50 crc kubenswrapper[4762]: I1009 13:25:50.789316 4762 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 13:25:50 crc kubenswrapper[4762]: I1009 13:25:50.792819 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:50 crc kubenswrapper[4762]: I1009 13:25:50.792885 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:50 crc kubenswrapper[4762]: I1009 13:25:50.792908 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:50 crc kubenswrapper[4762]: I1009 13:25:50.793068 4762 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 09 13:25:50 crc 
kubenswrapper[4762]: I1009 13:25:50.801302 4762 kubelet_node_status.go:115] "Node was previously registered" node="crc" Oct 09 13:25:50 crc kubenswrapper[4762]: I1009 13:25:50.801932 4762 kubelet_node_status.go:79] "Successfully registered node" node="crc" Oct 09 13:25:50 crc kubenswrapper[4762]: I1009 13:25:50.803541 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:50 crc kubenswrapper[4762]: I1009 13:25:50.803815 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:50 crc kubenswrapper[4762]: I1009 13:25:50.803965 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:50 crc kubenswrapper[4762]: I1009 13:25:50.804164 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:25:50 crc kubenswrapper[4762]: I1009 13:25:50.804317 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:25:50Z","lastTransitionTime":"2025-10-09T13:25:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:25:50 crc kubenswrapper[4762]: E1009 13:25:50.828128 4762 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:25:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:25:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:50Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:25:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:25:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:50Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9a48ddb1-1645-4cf1-ba92-96ea5fd03a1b\\\",\\\"systemUUID\\\":\\\"cb0479c9-186e-453b-880a-de1db201ede6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:50Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:50 crc kubenswrapper[4762]: I1009 13:25:50.833778 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:50 crc kubenswrapper[4762]: I1009 13:25:50.833828 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 09 13:25:50 crc kubenswrapper[4762]: I1009 13:25:50.833838 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:50 crc kubenswrapper[4762]: I1009 13:25:50.833853 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:25:50 crc kubenswrapper[4762]: I1009 13:25:50.833863 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:25:50Z","lastTransitionTime":"2025-10-09T13:25:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:25:50 crc kubenswrapper[4762]: E1009 13:25:50.857167 4762 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status [node status patch payload elided; byte-for-byte identical to the 13:25:50.828128 entry above] for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:50Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:50 crc kubenswrapper[4762]: I1009 13:25:50.862360 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:50 crc kubenswrapper[4762]: I1009 13:25:50.862410 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc"
event="NodeHasNoDiskPressure" Oct 09 13:25:50 crc kubenswrapper[4762]: I1009 13:25:50.862426 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:50 crc kubenswrapper[4762]: I1009 13:25:50.862446 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:25:50 crc kubenswrapper[4762]: I1009 13:25:50.862461 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:25:50Z","lastTransitionTime":"2025-10-09T13:25:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:25:50 crc kubenswrapper[4762]: E1009 13:25:50.880743 4762 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status [node status patch payload elided; byte-for-byte identical to the 13:25:50.828128 entry above] for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:50Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:50 crc kubenswrapper[4762]: I1009 13:25:50.884793 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:50 crc kubenswrapper[4762]: I1009 13:25:50.884830 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc"
event="NodeHasNoDiskPressure" Oct 09 13:25:50 crc kubenswrapper[4762]: I1009 13:25:50.884841 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:50 crc kubenswrapper[4762]: I1009 13:25:50.884859 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:25:50 crc kubenswrapper[4762]: I1009 13:25:50.884870 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:25:50Z","lastTransitionTime":"2025-10-09T13:25:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:25:50 crc kubenswrapper[4762]: E1009 13:25:50.898768 4762 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:25:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:25:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:50Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:25:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:25:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:50Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9a48ddb1-1645-4cf1-ba92-96ea5fd03a1b\\\",\\\"systemUUID\\\":\\\"cb0479c9-186e-453b-880a-de1db201ede6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:50Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:50 crc kubenswrapper[4762]: I1009 13:25:50.902714 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:50 crc kubenswrapper[4762]: I1009 13:25:50.902853 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
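The x509 failure in the retries above is the standard Go validity-window check: crypto/x509 compares the verification time against the certificate's NotBefore and NotAfter, and the wording "certificate has expired or is not yet valid: current time ... is after ..." is what Go produces when NotAfter has passed. A minimal, self-contained sketch of that comparison; the certificate path is hypothetical:

    package main

    import (
        "crypto/x509"
        "encoding/pem"
        "fmt"
        "os"
        "time"
    )

    func main() {
        // Hypothetical path to the webhook's serving certificate.
        pemBytes, err := os.ReadFile("webhook-serving.crt")
        if err != nil {
            panic(err)
        }
        block, _ := pem.Decode(pemBytes)
        if block == nil {
            panic("no PEM block found")
        }
        cert, err := x509.ParseCertificate(block.Bytes)
        if err != nil {
            panic(err)
        }
        now := time.Now()
        switch {
        case now.Before(cert.NotBefore):
            fmt.Printf("x509: certificate is not yet valid: current time %s is before %s\n",
                now.UTC().Format(time.RFC3339), cert.NotBefore.UTC().Format(time.RFC3339))
        case now.After(cert.NotAfter):
            fmt.Printf("x509: certificate has expired: current time %s is after %s\n",
                now.UTC().Format(time.RFC3339), cert.NotAfter.UTC().Format(time.RFC3339))
        default:
            fmt.Println("certificate is inside its validity window")
        }
    }
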
event="NodeHasNoDiskPressure" Oct 09 13:25:50 crc kubenswrapper[4762]: I1009 13:25:50.902935 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:50 crc kubenswrapper[4762]: I1009 13:25:50.903019 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:25:50 crc kubenswrapper[4762]: I1009 13:25:50.903097 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:25:50Z","lastTransitionTime":"2025-10-09T13:25:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:25:50 crc kubenswrapper[4762]: E1009 13:25:50.922808 4762 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:25:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:25:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:50Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:25:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:25:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:50Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9a48ddb1-1645-4cf1-ba92-96ea5fd03a1b\\\",\\\"systemUUID\\\":\\\"cb0479c9-186e-453b-880a-de1db201ede6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:50Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:50 crc kubenswrapper[4762]: E1009 13:25:50.922950 4762 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 09 13:25:50 crc kubenswrapper[4762]: I1009 13:25:50.924616 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
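The "will retry" / "exceeds retry count" pair above reflects the kubelet's bounded retry around node-status patches in kubelet_node_status.go. A condensed sketch of that loop; the retry budget of 5 and the exact wiring are from memory of that code, so treat them as approximate:

    package main

    import (
        "errors"
        "fmt"
    )

    // Retry budget for node-status updates; kubelet uses a small constant
    // (5 in the versions I recall) -- treat the exact value as approximate.
    const nodeStatusUpdateRetry = 5

    // updateNodeStatus mirrors the bounded loop behind the log lines:
    // each failed patch logs "will retry", and exhausting the budget
    // surfaces as "update node status exceeds retry count".
    func updateNodeStatus(tryPatch func() error) error {
        for i := 0; i < nodeStatusUpdateRetry; i++ {
            if err := tryPatch(); err != nil {
                fmt.Printf("Error updating node status, will retry: %v\n", err)
                continue
            }
            return nil
        }
        return errors.New("update node status exceeds retry count")
    }

    func main() {
        // Stand-in for the failing PATCH: the admission webhook's expired
        // certificate makes every attempt fail the same way.
        webhookDown := func() error {
            return errors.New("failed calling webhook \"node.network-node-identity.openshift.io\": certificate has expired")
        }
        if err := updateNodeStatus(webhookDown); err != nil {
            fmt.Println("Unable to update node status:", err)
        }
    }
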
event="NodeHasSufficientMemory" Oct 09 13:25:50 crc kubenswrapper[4762]: I1009 13:25:50.924677 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:50 crc kubenswrapper[4762]: I1009 13:25:50.924692 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:50 crc kubenswrapper[4762]: I1009 13:25:50.924709 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:25:50 crc kubenswrapper[4762]: I1009 13:25:50.924721 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:25:50Z","lastTransitionTime":"2025-10-09T13:25:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:25:50 crc kubenswrapper[4762]: I1009 13:25:50.965043 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 13:25:50 crc kubenswrapper[4762]: I1009 13:25:50.965069 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 13:25:50 crc kubenswrapper[4762]: I1009 13:25:50.965162 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 13:25:50 crc kubenswrapper[4762]: E1009 13:25:50.965512 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 13:25:50 crc kubenswrapper[4762]: E1009 13:25:50.965600 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 13:25:50 crc kubenswrapper[4762]: E1009 13:25:50.965458 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 13:25:51 crc kubenswrapper[4762]: I1009 13:25:51.027940 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:51 crc kubenswrapper[4762]: I1009 13:25:51.028030 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:51 crc kubenswrapper[4762]: I1009 13:25:51.028057 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:51 crc kubenswrapper[4762]: I1009 13:25:51.028088 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:25:51 crc kubenswrapper[4762]: I1009 13:25:51.028114 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:25:51Z","lastTransitionTime":"2025-10-09T13:25:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:25:51 crc kubenswrapper[4762]: I1009 13:25:51.131572 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:51 crc kubenswrapper[4762]: I1009 13:25:51.131645 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:51 crc kubenswrapper[4762]: I1009 13:25:51.131661 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:51 crc kubenswrapper[4762]: I1009 13:25:51.131677 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:25:51 crc kubenswrapper[4762]: I1009 13:25:51.131689 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:25:51Z","lastTransitionTime":"2025-10-09T13:25:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:25:51 crc kubenswrapper[4762]: I1009 13:25:51.182095 4762 generic.go:334] "Generic (PLEG): container finished" podID="cf8b8ba7-96cd-4cdd-9925-94dd98242050" containerID="aa9e24a4bd37675e1c6b4f6ba08d60b722add0bbcdf9eb51ace2e3fb35143d0d" exitCode=0 Oct 09 13:25:51 crc kubenswrapper[4762]: I1009 13:25:51.182759 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-n6lnd" event={"ID":"cf8b8ba7-96cd-4cdd-9925-94dd98242050","Type":"ContainerDied","Data":"aa9e24a4bd37675e1c6b4f6ba08d60b722add0bbcdf9eb51ace2e3fb35143d0d"} Oct 09 13:25:51 crc kubenswrapper[4762]: I1009 13:25:51.209418 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7bd1ecbd-1492-4e6a-87e8-1c913e084d9d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9ff1efe69d256b491a039e5f35442c087ce3b52fc7abf98b338e24c3e020b99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb2071dd369674ca2de7de56dd1250c763b8733d72889b60eff864774dc3d81b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kuber
netes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e44b6f2021a1a4ccd714f86443c7cc235b9d77cd455e68f7e042281ff0917569\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f33b9070a56fe51d2f39d9d509fc8cea2fada696703209c911b75f5c8f53e96d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9179f90a1a9a3c70467429b0471320ccf51b67f27c4d28d22ebc477cedab17d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1009 13:25:38.564754 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 13:25:38.572923 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2393231961/tls.crt::/tmp/serving-cert-2393231961/tls.key\\\\\\\"\\\\nI1009 13:25:44.418137 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 13:25:44.425303 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 13:25:44.425330 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 13:25:44.425348 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 13:25:44.425353 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 13:25:44.434300 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1009 13:25:44.434319 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1009 13:25:44.434323 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 13:25:44.434340 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 13:25:44.434344 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 13:25:44.434347 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 13:25:44.434350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 13:25:44.434353 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1009 13:25:44.436492 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:28Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://281873dc615f940d39a13cb0a18a2eb34eb7de3f9773d8845183edeb89d430f5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b0cc5e4351c64f1a4f07f8ec87ea48ddab393d4ac64228e8fbf20d3259fb630\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6b0cc5e4351c64f1a4f07f8ec87ea48ddab393d4ac64228e8fbf20d3259fb630\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:25Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:51Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:51 crc kubenswrapper[4762]: I1009 13:25:51.210441 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-bj499"] Oct 09 13:25:51 crc kubenswrapper[4762]: I1009 13:25:51.211144 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-bj499" Oct 09 13:25:51 crc kubenswrapper[4762]: I1009 13:25:51.216166 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Oct 09 13:25:51 crc kubenswrapper[4762]: I1009 13:25:51.216516 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Oct 09 13:25:51 crc kubenswrapper[4762]: I1009 13:25:51.217122 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Oct 09 13:25:51 crc kubenswrapper[4762]: I1009 13:25:51.217461 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Oct 09 13:25:51 crc kubenswrapper[4762]: I1009 13:25:51.234100 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:51 crc kubenswrapper[4762]: I1009 13:25:51.234136 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:51 crc kubenswrapper[4762]: I1009 13:25:51.234148 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:51 crc kubenswrapper[4762]: I1009 13:25:51.234166 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:25:51 crc kubenswrapper[4762]: I1009 13:25:51.234182 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:25:51Z","lastTransitionTime":"2025-10-09T13:25:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:25:51 crc kubenswrapper[4762]: I1009 13:25:51.236865 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2vkbh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"263d57f9-b10b-4ce1-adad-774600b977d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://732b66ff58c48b0703e0fd4585768652035af6797f66b586fc6f17ef3937d9d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gmcr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2vkbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:51Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:51 crc kubenswrapper[4762]: I1009 13:25:51.252430 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-9wtqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c847aae6-277a-45dc-86d0-9b175f7e8177\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbc232c96b60c8678588d4902c8dfbf6fc0b30f8af768295c963aad3a9f4d644\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2kljt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-9wtqb\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:51Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:51 crc kubenswrapper[4762]: I1009 13:25:51.267266 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:51Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:51 crc kubenswrapper[4762]: I1009 13:25:51.282378 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-n6lnd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf8b8ba7-96cd-4cdd-9925-94dd98242050\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://38f8c3c9395cbaf4a6426349a070b2d3b4ba4f83af8f5272a33d617f456c2e38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://38f8c3c9395cbaf4a6426349a070b2d3b4ba4f83af8f5272a33d617f456c2e38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a83c8fa2a9b44c19879eaa27ee0aceb5aa4f0c2d70347e497a62fc1ca236807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a83c8fa2a9b44c19879eaa27ee0aceb5aa4f0c2d70347e497a62fc1ca236807\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ece4883665cbaf9a3045d5ddde5584cc4cd6c3acb5a8bb3acbf7eaee644796e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ece4883665cbaf9a3045d5ddde5584cc4cd6c3acb5a8bb3acbf7eaee644796e9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1aae59e2ff2b1d48f9d2b5d13c30ccc155f0ba8e2657ec99020d6c6a8977495e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1aae59e2ff2b1d48f9d2b5d13c30ccc155f0ba8e2657ec99020d6c6a8977495e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"D
isabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f1ff0c4b2b07617d5ea81b1d88ea74124e4739d667f1ad72eb36dad3d48e7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96f1ff0c4b2b07617d5ea81b1d88ea74124e4739d667f1ad72eb36dad3d48e7f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa9e24a4bd37675e1c6b4f6ba08d60b722add0bbcdf9eb51ace2e3fb35143d0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa9e24a4bd37675e1c6b4f6ba08d60b722add0bbcdf9eb51ace2e3fb35143d0d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-n6lnd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:51Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:51 crc kubenswrapper[4762]: I1009 13:25:51.295287 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dd0d2d4c-667f-43da-8074-b6e14823b755\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78a7e23eb6d5024d626963a06cf5790fcd6c7c17c82c823b2650c55273e427fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f807722a8b6059afed30f7f1fd32bcc168b8bf9d5eee02d74a42ab70ae5ff048\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1706f7f512083d1da015da3c7cd09c6aa4d497b83f8dfcd4ce0e8e966aa00b37\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd9c69d0be5e859f86da1745bdf82f003681f064e2580bfd454e6ba875bdcb61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:25Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:51Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:51 crc kubenswrapper[4762]: I1009 13:25:51.303447 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/b164d4fd-19df-4902-971f-5efe403e61e0-serviceca\") pod \"node-ca-bj499\" (UID: \"b164d4fd-19df-4902-971f-5efe403e61e0\") " pod="openshift-image-registry/node-ca-bj499" Oct 09 13:25:51 crc kubenswrapper[4762]: I1009 13:25:51.303507 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tqf4b\" (UniqueName: \"kubernetes.io/projected/b164d4fd-19df-4902-971f-5efe403e61e0-kube-api-access-tqf4b\") pod \"node-ca-bj499\" (UID: \"b164d4fd-19df-4902-971f-5efe403e61e0\") " pod="openshift-image-registry/node-ca-bj499" Oct 09 13:25:51 crc kubenswrapper[4762]: I1009 13:25:51.303539 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b164d4fd-19df-4902-971f-5efe403e61e0-host\") pod \"node-ca-bj499\" (UID: \"b164d4fd-19df-4902-971f-5efe403e61e0\") " pod="openshift-image-registry/node-ca-bj499" Oct 09 13:25:51 crc kubenswrapper[4762]: I1009 13:25:51.306179 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:51Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:51 crc kubenswrapper[4762]: I1009 13:25:51.317028 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d2fc9df0c1cc14b4b9f8caff51e87059aeffaa2daeeb271d55585f7297d003c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4194b9211ce31f24383b3cabd274bfb9afef0c56583b802cb2c934ba81b05c43\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io
/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:51Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:51 crc kubenswrapper[4762]: I1009 13:25:51.325492 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"366049a3-acf6-488c-9f93-4557528d6d14\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbaae79b08d94e58f88c25cf641c2c24edc8f8ed5d5ffbf5fd3c68b24246a964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://be56bae2e58091d7381288b22608ea1d9ff05c002d923b3dc62b87fe4d4dfdc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"
,\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5v6hv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:51Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:51 crc kubenswrapper[4762]: I1009 13:25:51.337008 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42c46f24d4579b9ef6d5f7a351830fc24872a571e6bc26a163bb5ace1e688037\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current 
time 2025-10-09T13:25:51Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:51 crc kubenswrapper[4762]: I1009 13:25:51.337098 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:51 crc kubenswrapper[4762]: I1009 13:25:51.337170 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:51 crc kubenswrapper[4762]: I1009 13:25:51.337191 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:51 crc kubenswrapper[4762]: I1009 13:25:51.337247 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:25:51 crc kubenswrapper[4762]: I1009 13:25:51.337261 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:25:51Z","lastTransitionTime":"2025-10-09T13:25:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:25:51 crc kubenswrapper[4762]: I1009 13:25:51.348174 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b59b56cd547bd0e2f72ad712b04651a4ae65cf3a0df7865c0b0c16478261a06a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:51Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:51 
crc kubenswrapper[4762]: I1009 13:25:51.359536 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:51Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:51 crc kubenswrapper[4762]: I1009 13:25:51.383220 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"92662de9-9784-432a-92d2-a668f815e8fd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging 
kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-li
b\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\
\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bd03e6b064630a1cd71d2e88a4e99d513b30d0f225516ce8030cba879fe2422e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd03e6b064630a1cd71d2e88a4e99d513b30d0f225516ce8030cba879fe2422e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":
\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jl67g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:51Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:51 crc kubenswrapper[4762]: I1009 13:25:51.396112 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7bd1ecbd-1492-4e6a-87e8-1c913e084d9d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9ff1efe69d256b491a039e5f35442c087ce3b52fc7abf98b338e24c3e020b99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb2071dd369674ca2de7de56dd1250c763b8733d72889b60eff864774dc3d81b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e44b6f2021a1a4ccd714f86443c7cc235b9d77cd455e68f7e042281ff0917569\
\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f33b9070a56fe51d2f39d9d509fc8cea2fada696703209c911b75f5c8f53e96d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9179f90a1a9a3c70467429b0471320ccf51b67f27c4d28d22ebc477cedab17d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1009 13:25:38.564754 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 13:25:38.572923 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2393231961/tls.crt::/tmp/serving-cert-2393231961/tls.key\\\\\\\"\\\\nI1009 13:25:44.418137 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 13:25:44.425303 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 13:25:44.425330 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 13:25:44.425348 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 13:25:44.425353 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 13:25:44.434300 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1009 13:25:44.434319 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1009 13:25:44.434323 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 13:25:44.434340 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 13:25:44.434344 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 13:25:44.434347 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 13:25:44.434350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 13:25:44.434353 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1009 13:25:44.436492 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:28Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://281873dc615f940d39a13cb0a18a2eb34eb7de3f9773d8845183edeb89d430f5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b0cc5e4351c64f1a4f07f8ec87ea48ddab393d4ac64228e8fbf20d3259fb630\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6b0cc5e4351c64f1a4f07f8ec87ea48ddab393d4ac64228e8fbf20d3259fb630\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:25Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:51Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:51 crc kubenswrapper[4762]: I1009 13:25:51.404037 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2vkbh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"263d57f9-b10b-4ce1-adad-774600b977d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://732b66ff58c48b0703e0fd4585768652035af6797f66b586fc6f17ef3937d9d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gmcr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2vkbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:51Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:51 crc kubenswrapper[4762]: I1009 13:25:51.404087 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/b164d4fd-19df-4902-971f-5efe403e61e0-serviceca\") pod \"node-ca-bj499\" (UID: \"b164d4fd-19df-4902-971f-5efe403e61e0\") " pod="openshift-image-registry/node-ca-bj499" Oct 09 13:25:51 crc kubenswrapper[4762]: I1009 13:25:51.404156 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tqf4b\" (UniqueName: \"kubernetes.io/projected/b164d4fd-19df-4902-971f-5efe403e61e0-kube-api-access-tqf4b\") pod \"node-ca-bj499\" (UID: \"b164d4fd-19df-4902-971f-5efe403e61e0\") " pod="openshift-image-registry/node-ca-bj499" Oct 09 13:25:51 crc kubenswrapper[4762]: I1009 13:25:51.404182 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b164d4fd-19df-4902-971f-5efe403e61e0-host\") pod \"node-ca-bj499\" (UID: \"b164d4fd-19df-4902-971f-5efe403e61e0\") " 
pod="openshift-image-registry/node-ca-bj499" Oct 09 13:25:51 crc kubenswrapper[4762]: I1009 13:25:51.404237 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b164d4fd-19df-4902-971f-5efe403e61e0-host\") pod \"node-ca-bj499\" (UID: \"b164d4fd-19df-4902-971f-5efe403e61e0\") " pod="openshift-image-registry/node-ca-bj499" Oct 09 13:25:51 crc kubenswrapper[4762]: I1009 13:25:51.405173 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/b164d4fd-19df-4902-971f-5efe403e61e0-serviceca\") pod \"node-ca-bj499\" (UID: \"b164d4fd-19df-4902-971f-5efe403e61e0\") " pod="openshift-image-registry/node-ca-bj499" Oct 09 13:25:51 crc kubenswrapper[4762]: I1009 13:25:51.417581 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-9wtqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c847aae6-277a-45dc-86d0-9b175f7e8177\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbc232c96b60c8678588d4902c8dfbf6fc0b30f8af768295c963aad3a9f4d644\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\
\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2kljt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-9wtqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:51Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:51 crc kubenswrapper[4762]: I1009 13:25:51.422198 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tqf4b\" (UniqueName: \"kubernetes.io/projected/b164d4fd-19df-4902-971f-5efe403e61e0-kube-api-access-tqf4b\") pod \"node-ca-bj499\" (UID: \"b164d4fd-19df-4902-971f-5efe403e61e0\") " pod="openshift-image-registry/node-ca-bj499" Oct 09 13:25:51 crc kubenswrapper[4762]: I1009 13:25:51.431998 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:51Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:51 crc kubenswrapper[4762]: I1009 13:25:51.440370 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:51 crc kubenswrapper[4762]: I1009 13:25:51.440399 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:51 crc kubenswrapper[4762]: I1009 13:25:51.440407 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:51 crc kubenswrapper[4762]: I1009 13:25:51.440420 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:25:51 crc kubenswrapper[4762]: I1009 13:25:51.440428 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:25:51Z","lastTransitionTime":"2025-10-09T13:25:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:25:51 crc kubenswrapper[4762]: I1009 13:25:51.447690 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-n6lnd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf8b8ba7-96cd-4cdd-9925-94dd98242050\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://38f8c3c9395cbaf4a6426349a070b2d3b4ba4f83af8f5272a33d617f456c2e38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://38f8c3c9395cbaf4a6426349a070b2d3b4ba4f83af8f5272a33d617f456c2e38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a83c8fa2a9b44c198
79eaa27ee0aceb5aa4f0c2d70347e497a62fc1ca236807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a83c8fa2a9b44c19879eaa27ee0aceb5aa4f0c2d70347e497a62fc1ca236807\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ece4883665cbaf9a3045d5ddde5584cc4cd6c3acb5a8bb3acbf7eaee644796e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ece4883665cbaf9a3045d5ddde5584cc4cd6c3acb5a8bb3acbf7eaee644796e9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1aae59e2ff2b1d48f9d2b5d13c30ccc155f0ba8e2657ec99020d6c6a8977495e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1aae59e2ff2b1d48f9d2b5d13c30ccc155f0ba8e2657ec99020d6c6a8977495e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\
\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f1ff0c4b2b07617d5ea81b1d88ea74124e4739d667f1ad72eb36dad3d48e7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96f1ff0c4b2b07617d5ea81b1d88ea74124e4739d667f1ad72eb36dad3d48e7f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa9e24a4bd37675e1c6b4f6ba08d60b722add0bbcdf9eb51ace2e3fb35143d0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa9e24a4bd37675e1c6b4f6ba08d60b722add0bbcdf9eb51ace2e3fb35143d0d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-n6lnd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:51Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:51 crc kubenswrapper[4762]: I1009 13:25:51.457338 4762 status_manager.go:875] "Failed to update status for 
pod" pod="openshift-image-registry/node-ca-bj499" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b164d4fd-19df-4902-971f-5efe403e61e0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:51Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:51Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqf4b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:51Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bj499\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:51Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:51 crc kubenswrapper[4762]: I1009 13:25:51.467959 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dd0d2d4c-667f-43da-8074-b6e14823b755\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78a7e23eb6d5024d626963a06cf5790fcd6c7c17c82c823b2650c55273e427fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f807722a8b6059afed30f7f1fd32bcc168b8bf9d5eee02d74a42ab70ae5ff048\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1706f7f512083d1da015da3c7cd09c6aa4d497b83f8dfcd4ce0e8e966aa00b37\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd9c69d0be5e859f86da1745bdf82f003681f064e2580bfd454e6ba875bdcb61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:25Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:51Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:51 crc kubenswrapper[4762]: I1009 13:25:51.479892 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:51Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:51 crc kubenswrapper[4762]: I1009 13:25:51.490744 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d2fc9df0c1cc14b4b9f8caff51e87059aeffaa2daeeb271d55585f7297d003c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4194b9211ce31f24383b3cabd274bfb9afef0c56583b802cb2c934ba81b05c43\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:51Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:51 crc kubenswrapper[4762]: I1009 13:25:51.509811 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"366049a3-acf6-488c-9f93-4557528d6d14\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbaae79b08d94e58f88c25cf641c2c24edc8f8ed5d5ffbf5fd3c68b24246a964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://be56bae2e58091d7381288b22608ea1d9ff05c002d923b3dc62b87fe4d4dfdc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5v6hv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:51Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:51 crc kubenswrapper[4762]: I1009 13:25:51.525169 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42c46f24d4579b9ef6d5f7a351830fc24872a571e6bc26a163bb5ace1e688037\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:51Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:51 crc kubenswrapper[4762]: I1009 13:25:51.531171 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-bj499" Oct 09 13:25:51 crc kubenswrapper[4762]: I1009 13:25:51.536040 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b59b56cd547bd0e2f72ad712b04651a4ae65cf3a0df7865c0b0c16478261a06a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:51Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:51 crc kubenswrapper[4762]: I1009 13:25:51.544541 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:51 crc kubenswrapper[4762]: I1009 13:25:51.544577 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:51 crc kubenswrapper[4762]: I1009 13:25:51.544587 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:51 crc kubenswrapper[4762]: I1009 13:25:51.544604 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:25:51 crc kubenswrapper[4762]: I1009 13:25:51.544614 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:25:51Z","lastTransitionTime":"2025-10-09T13:25:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:25:51 crc kubenswrapper[4762]: I1009 13:25:51.551981 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:51Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:51 crc kubenswrapper[4762]: I1009 13:25:51.577716 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"92662de9-9784-432a-92d2-a668f815e8fd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts
\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host
-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bd03e6b064630a1cd71d2e88a4e99d513b30d0f225516ce8030cba879fe2422e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd03e6b064630a1cd71d2e88a4e99d513b30d0f225516ce8030cba879fe2422e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168
.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jl67g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:51Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:51 crc kubenswrapper[4762]: I1009 13:25:51.645999 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:51 crc kubenswrapper[4762]: I1009 13:25:51.646040 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:51 crc kubenswrapper[4762]: I1009 13:25:51.646053 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:51 crc kubenswrapper[4762]: I1009 13:25:51.646070 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:25:51 crc kubenswrapper[4762]: I1009 13:25:51.646081 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:25:51Z","lastTransitionTime":"2025-10-09T13:25:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:25:51 crc kubenswrapper[4762]: I1009 13:25:51.748335 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:51 crc kubenswrapper[4762]: I1009 13:25:51.748561 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:51 crc kubenswrapper[4762]: I1009 13:25:51.748569 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:51 crc kubenswrapper[4762]: I1009 13:25:51.748582 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:25:51 crc kubenswrapper[4762]: I1009 13:25:51.748590 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:25:51Z","lastTransitionTime":"2025-10-09T13:25:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:25:51 crc kubenswrapper[4762]: I1009 13:25:51.850432 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:51 crc kubenswrapper[4762]: I1009 13:25:51.850462 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:51 crc kubenswrapper[4762]: I1009 13:25:51.850469 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:51 crc kubenswrapper[4762]: I1009 13:25:51.850482 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:25:51 crc kubenswrapper[4762]: I1009 13:25:51.850492 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:25:51Z","lastTransitionTime":"2025-10-09T13:25:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:25:51 crc kubenswrapper[4762]: I1009 13:25:51.952767 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:51 crc kubenswrapper[4762]: I1009 13:25:51.952795 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:51 crc kubenswrapper[4762]: I1009 13:25:51.952803 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:51 crc kubenswrapper[4762]: I1009 13:25:51.952816 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:25:51 crc kubenswrapper[4762]: I1009 13:25:51.952825 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:25:51Z","lastTransitionTime":"2025-10-09T13:25:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:25:52 crc kubenswrapper[4762]: I1009 13:25:52.056422 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:52 crc kubenswrapper[4762]: I1009 13:25:52.056479 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:52 crc kubenswrapper[4762]: I1009 13:25:52.056498 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:52 crc kubenswrapper[4762]: I1009 13:25:52.056520 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:25:52 crc kubenswrapper[4762]: I1009 13:25:52.056541 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:25:52Z","lastTransitionTime":"2025-10-09T13:25:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:25:52 crc kubenswrapper[4762]: I1009 13:25:52.159689 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:52 crc kubenswrapper[4762]: I1009 13:25:52.159728 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:52 crc kubenswrapper[4762]: I1009 13:25:52.159739 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:52 crc kubenswrapper[4762]: I1009 13:25:52.159759 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:25:52 crc kubenswrapper[4762]: I1009 13:25:52.159770 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:25:52Z","lastTransitionTime":"2025-10-09T13:25:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:25:52 crc kubenswrapper[4762]: I1009 13:25:52.187530 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-bj499" event={"ID":"b164d4fd-19df-4902-971f-5efe403e61e0","Type":"ContainerStarted","Data":"4ab43a3e93731e2de1e0260a1933cc78188e7b561cb450d645c7b4fc00b691ef"} Oct 09 13:25:52 crc kubenswrapper[4762]: I1009 13:25:52.187588 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-bj499" event={"ID":"b164d4fd-19df-4902-971f-5efe403e61e0","Type":"ContainerStarted","Data":"44ff8431822738931e7a0424e85aec1518b1eb9474e046eafdd3df8aef4b85ba"} Oct 09 13:25:52 crc kubenswrapper[4762]: I1009 13:25:52.191606 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-n6lnd" event={"ID":"cf8b8ba7-96cd-4cdd-9925-94dd98242050","Type":"ContainerStarted","Data":"1ff1d6f7e6b424ebf8005fa3d140897100a8e7ed0095e7af05531cf5ad9f69b4"} Oct 09 13:25:52 crc kubenswrapper[4762]: I1009 13:25:52.195759 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" event={"ID":"92662de9-9784-432a-92d2-a668f815e8fd","Type":"ContainerStarted","Data":"aa009d3f8b7f73cb12d99c5110020d32d469a604fc5b3715343b754861cbf2cb"} Oct 09 13:25:52 crc kubenswrapper[4762]: I1009 13:25:52.196092 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" Oct 09 13:25:52 crc kubenswrapper[4762]: I1009 13:25:52.196140 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" Oct 09 13:25:52 crc kubenswrapper[4762]: I1009 13:25:52.212487 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2vkbh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"263d57f9-b10b-4ce1-adad-774600b977d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://732b66ff58c48b0703e0fd4585768652035af6797f66b586fc6f17ef3937d9d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gmcr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2vkbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:52Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:52 crc kubenswrapper[4762]: I1009 13:25:52.226788 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" Oct 09 13:25:52 crc kubenswrapper[4762]: I1009 13:25:52.229855 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-9wtqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c847aae6-277a-45dc-86d0-9b175f7e8177\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbc232c96b60c8678588d4902c8dfbf6fc0b30f8af768295c963aad3a9f4d644\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2kljt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-9wtqb\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:52Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:52 crc kubenswrapper[4762]: I1009 13:25:52.229939 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" Oct 09 13:25:52 crc kubenswrapper[4762]: I1009 13:25:52.241211 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7bd1ecbd-1492-4e6a-87e8-1c913e084d9d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9ff1efe69d256b491a039e5f35442c087ce3b52fc7abf98b338e24c3e020b99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb2071dd369674ca2de7de56dd1250c763b8733d72889b60eff864774dc3d81b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e44b6f2021a1a4ccd714f86443c7cc235b9d77cd455e68f7e042281ff091756
9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f33b9070a56fe51d2f39d9d509fc8cea2fada696703209c911b75f5c8f53e96d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9179f90a1a9a3c70467429b0471320ccf51b67f27c4d28d22ebc477cedab17d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1009 13:25:38.564754 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 13:25:38.572923 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2393231961/tls.crt::/tmp/serving-cert-2393231961/tls.key\\\\\\\"\\\\nI1009 13:25:44.418137 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 13:25:44.425303 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 13:25:44.425330 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 13:25:44.425348 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 13:25:44.425353 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 13:25:44.434300 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1009 13:25:44.434319 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1009 13:25:44.434323 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 13:25:44.434340 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 13:25:44.434344 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 13:25:44.434347 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 13:25:44.434350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 13:25:44.434353 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1009 13:25:44.436492 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:28Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://281873dc615f940d39a13cb0a18a2eb34eb7de3f9773d8845183edeb89d430f5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b0cc5e4351c64f1a4f07f8ec87ea48ddab393d4ac64228e8fbf20d3259fb630\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6b0cc5e4351c64f1a4f07f8ec87ea48ddab393d4ac64228e8fbf20d3259fb630\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:25Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:52Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:52 crc kubenswrapper[4762]: I1009 13:25:52.256962 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-n6lnd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf8b8ba7-96cd-4cdd-9925-94dd98242050\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://38f8c3c9395cbaf4a6426349a070b2d3b4ba4f83af8f5272a33d617f456c2e38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://38f8c3c9395cbaf4a6426349a070b2d3b4ba4f83af8f5272a33d617f456c2e38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a83c8fa2a9b44c19879eaa27ee0aceb5aa4f0c2d70347e497a62fc1ca236807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a83c8fa2a9b44c19879eaa27ee0aceb5aa4f0c2d70347e497a62fc1ca236807\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ece4883665cbaf9a3045d5ddde5584cc4cd6c3acb5a8bb3acbf7eaee644796e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ece4883665cbaf9a3045d5ddde5584cc4cd6c3acb5a8bb3acbf7eaee644796e9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1aae59e2ff2b1d48f9d2b5d13c30ccc155f0ba8e2657ec99020d6c6a8977495e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1aae59e2ff2b1d48f9d2b5d13c30ccc155f0ba8e2657ec99020d6c6a8977495e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"D
isabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f1ff0c4b2b07617d5ea81b1d88ea74124e4739d667f1ad72eb36dad3d48e7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96f1ff0c4b2b07617d5ea81b1d88ea74124e4739d667f1ad72eb36dad3d48e7f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa9e24a4bd37675e1c6b4f6ba08d60b722add0bbcdf9eb51ace2e3fb35143d0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa9e24a4bd37675e1c6b4f6ba08d60b722add0bbcdf9eb51ace2e3fb35143d0d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-n6lnd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:52Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:52 crc kubenswrapper[4762]: I1009 13:25:52.261559 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:52 crc kubenswrapper[4762]: I1009 13:25:52.261601 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:52 crc 
kubenswrapper[4762]: I1009 13:25:52.261612 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:52 crc kubenswrapper[4762]: I1009 13:25:52.261648 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:25:52 crc kubenswrapper[4762]: I1009 13:25:52.261660 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:25:52Z","lastTransitionTime":"2025-10-09T13:25:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:25:52 crc kubenswrapper[4762]: I1009 13:25:52.268129 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bj499" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b164d4fd-19df-4902-971f-5efe403e61e0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ab43a3e93731e2de1e0260a1933cc78188e7b561cb450d645c7b4fc00b691ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqf4b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:51Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bj499\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:52Z is after 2025-08-24T17:21:41Z" Oct 
09 13:25:52 crc kubenswrapper[4762]: I1009 13:25:52.282076 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:52Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:52 crc kubenswrapper[4762]: I1009 13:25:52.294228 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:52Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:52 crc kubenswrapper[4762]: I1009 13:25:52.305756 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d2fc9df0c1cc14b4b9f8caff51e87059aeffaa2daeeb271d55585f7297d003c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4194b9211ce31f24383b3cabd274bfb9afef0c56583b802cb2c934ba81b05c43\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io
/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:52Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:52 crc kubenswrapper[4762]: I1009 13:25:52.316209 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"366049a3-acf6-488c-9f93-4557528d6d14\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbaae79b08d94e58f88c25cf641c2c24edc8f8ed5d5ffbf5fd3c68b24246a964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://be56bae2e58091d7381288b22608ea1d9ff05c002d923b3dc62b87fe4d4dfdc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"
,\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5v6hv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:52Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:52 crc kubenswrapper[4762]: I1009 13:25:52.327261 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dd0d2d4c-667f-43da-8074-b6e14823b755\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78a7e23eb6d5024d626963a06cf5790fcd6c7c17c82c823b2650c55273e427fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f807722a8b6059afed30f7f1fd32bcc168b8bf9d5eee02d74a42ab70ae5ff048\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8
b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1706f7f512083d1da015da3c7cd09c6aa4d497b83f8dfcd4ce0e8e966aa00b37\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd9c69d0be5e859f86da1745bdf82f003681f064e2580bfd454e6ba875bdcb61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:25Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:52Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:52 crc kubenswrapper[4762]: I1009 13:25:52.343006 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42c46f24d4579b9ef6d5f7a351830fc24872a571e6bc26a163bb5ace1e688037\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:52Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:52 crc kubenswrapper[4762]: I1009 13:25:52.353089 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b59b56cd547bd0e2f72ad712b04651a4ae65cf3a0df7865c0b0c16478261a06a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:52Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:52 crc kubenswrapper[4762]: I1009 13:25:52.364273 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:52 crc kubenswrapper[4762]: I1009 13:25:52.364332 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:52 crc kubenswrapper[4762]: I1009 13:25:52.364350 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:52 crc kubenswrapper[4762]: I1009 13:25:52.364374 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:25:52 crc kubenswrapper[4762]: I1009 13:25:52.364392 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:25:52Z","lastTransitionTime":"2025-10-09T13:25:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:25:52 crc kubenswrapper[4762]: I1009 13:25:52.368039 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:52Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:52 crc kubenswrapper[4762]: I1009 13:25:52.389144 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"92662de9-9784-432a-92d2-a668f815e8fd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts
\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host
-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bd03e6b064630a1cd71d2e88a4e99d513b30d0f225516ce8030cba879fe2422e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd03e6b064630a1cd71d2e88a4e99d513b30d0f225516ce8030cba879fe2422e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168
.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jl67g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:52Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:52 crc kubenswrapper[4762]: I1009 13:25:52.408935 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d2fc9df0c1cc14b4b9f8caff51e87059aeffaa2daeeb271d55585f7297d003c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4194b9211ce31f24383b3cabd274bfb9afef0c56583b802cb2c934ba81b05c43\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:52Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:52 crc kubenswrapper[4762]: I1009 13:25:52.424027 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"366049a3-acf6-488c-9f93-4557528d6d14\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbaae79b08d94e58f88c25cf641c2c24edc8f8ed5d5ffbf5fd3c68b24246a964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://be56bae2e58091d7381288b22608ea1d9ff05c002d923b3dc62b87fe4d4dfdc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-5v6hv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:52Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:52 crc kubenswrapper[4762]: I1009 13:25:52.441017 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dd0d2d4c-667f-43da-8074-b6e14823b755\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78a7e23eb6d5024d626963a06cf5790fcd6c7c17c82c823b2650c55273e427fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f807722a8b6059afed30f7f1fd32bcc168b8bf9d5eee02d74a42ab70ae5ff048\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1706f7f512083d1da015da3c7cd09c6aa4d497b83f8dfcd4ce0e8e966aa00b37\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"las
tState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd9c69d0be5e859f86da1745bdf82f003681f064e2580bfd454e6ba875bdcb61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:25Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:52Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:52 crc kubenswrapper[4762]: I1009 13:25:52.455463 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:52Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:52 crc kubenswrapper[4762]: I1009 13:25:52.467181 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:52 crc kubenswrapper[4762]: I1009 13:25:52.467408 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:52 crc kubenswrapper[4762]: I1009 13:25:52.467615 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:52 crc kubenswrapper[4762]: I1009 13:25:52.467822 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:25:52 crc kubenswrapper[4762]: I1009 13:25:52.468010 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:25:52Z","lastTransitionTime":"2025-10-09T13:25:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:25:52 crc kubenswrapper[4762]: I1009 13:25:52.469505 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:52Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:52 crc kubenswrapper[4762]: I1009 13:25:52.493401 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"92662de9-9784-432a-92d2-a668f815e8fd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c409df5880861cef6885822a19dc9bbe481342a849c18ac11c85a60fcee0f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9dbd3d536f2ff0e46947e1516b3b9def208d490f5e62bbde5bebf37690d26ac0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a6b9ddcf6f9632e0ab1ac7f145c90d4c1e404b44f6e4fdc547fa42a4736448d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1fe15b6fa2a4089c0ef0b19180a44b570bf28aeb719e8fb5c960c16f3bc3ee5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://443d1d116c58f5d8b2c5fc9051baf914244cb0776b1f912d11fe4316a0ec0567\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a6b026d474235bbb7b31530b4628a10c35b22baf4ee49759f28a9beb8177989\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-s
ocket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa009d3f8b7f73cb12d99c5110020d32d469a604fc5b3715343b754861cbf2cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f5be977653547c33b4d2d5184688120b32866045e3b18a08be1c7c406d6b498\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"
mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bd03e6b064630a1cd71d2e88a4e99d513b30d0f225516ce8030cba879fe2422e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd03e6b064630a1cd71d2e88a4e99d513b30d0f225516ce8030cba879fe2422e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jl67g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:52Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:52 crc kubenswrapper[4762]: I1009 13:25:52.512754 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42c46f24d4579b9ef6d5f7a351830fc24872a571e6bc26a163bb5ace1e688037\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:52Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:52 crc kubenswrapper[4762]: I1009 13:25:52.527245 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b59b56cd547bd0e2f72ad712b04651a4ae65cf3a0df7865c0b0c16478261a06a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:52Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:52 crc kubenswrapper[4762]: I1009 13:25:52.543911 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-9wtqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c847aae6-277a-45dc-86d0-9b175f7e8177\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbc232c96b60c8678588d4902c8dfbf6fc0b30f8af768295c963aad3a9f4d644\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2kljt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-9wtqb\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:52Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:52 crc kubenswrapper[4762]: I1009 13:25:52.557313 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7bd1ecbd-1492-4e6a-87e8-1c913e084d9d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9ff1efe69d256b491a039e5f35442c087ce3b52fc7abf98b338e24c3e020b99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb2071dd369674ca2de7de56dd1250c763b8733d72889b60eff864774dc3d81b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e44b6f2021a1a4ccd714f86443c7cc235b9d77cd455e68f7e042281ff0917569\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.i
o/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f33b9070a56fe51d2f39d9d509fc8cea2fada696703209c911b75f5c8f53e96d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9179f90a1a9a3c70467429b0471320ccf51b67f27c4d28d22ebc477cedab17d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1009 13:25:38.564754 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 13:25:38.572923 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2393231961/tls.crt::/tmp/serving-cert-2393231961/tls.key\\\\\\\"\\\\nI1009 13:25:44.418137 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 13:25:44.425303 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 13:25:44.425330 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 13:25:44.425348 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 13:25:44.425353 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 13:25:44.434300 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1009 13:25:44.434319 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1009 13:25:44.434323 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 13:25:44.434340 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 13:25:44.434344 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 13:25:44.434347 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 13:25:44.434350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 13:25:44.434353 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1009 13:25:44.436492 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:28Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://281873dc615f940d39a13cb0a18a2eb34eb7de3f9773d8845183edeb89d430f5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b0cc5e4351c64f1a4f07f8ec87ea48ddab393d4ac64228e8fbf20d3259fb630\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6b0cc5e4351c64f1a4f07f8ec87ea48ddab393d4ac64228e8fbf20d3259fb630\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:25Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:52Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:52 crc kubenswrapper[4762]: I1009 13:25:52.569347 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2vkbh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"263d57f9-b10b-4ce1-adad-774600b977d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://732b66ff58c48b0703e0fd4585768652035af6797f66b586fc6f17ef3937d9d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gmcr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2vkbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:52Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:52 crc kubenswrapper[4762]: I1009 13:25:52.571067 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:52 crc kubenswrapper[4762]: I1009 13:25:52.571115 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:52 crc kubenswrapper[4762]: I1009 13:25:52.571132 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:52 crc kubenswrapper[4762]: I1009 13:25:52.571156 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:25:52 crc kubenswrapper[4762]: I1009 13:25:52.571175 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:25:52Z","lastTransitionTime":"2025-10-09T13:25:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:25:52 crc kubenswrapper[4762]: I1009 13:25:52.584752 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:52Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:52 crc kubenswrapper[4762]: I1009 13:25:52.601132 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-n6lnd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf8b8ba7-96cd-4cdd-9925-94dd98242050\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ff1d6f7e6b424ebf8005fa3d140897100a8e7ed0095e7af05531cf5ad9f69b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://38f8c3c9395cbaf4a6426349a070b2d3b4ba4f83af8f5272a33d617f456c2e38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://38f8c3c9395cbaf4a6426349a070b2d3b4ba4f83af8f5272a33d617f456c2e38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a83c8fa2a9b44c19879eaa27ee0aceb5aa4f0c2d70347e497a62fc1ca236807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a83c8fa2a9b44c19879eaa27ee0aceb5aa4f0c2d70347e497a62fc1ca236807\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ece4883665cbaf9a3045d5ddde5584cc4cd6c3acb5a8bb3acbf7eaee644796e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ece4883665cbaf9a3045d5ddde5584cc4cd6c3acb5a8bb3acbf7eaee644796e9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1aae59e2ff2b1d48f9d2b5d13c30ccc155f0ba8e2657ec99020d6c6a8977495e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1aae59e2ff2b1d48f9d2b5d13c30ccc155f0ba8e2657ec99020d6c6a8977495e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f1ff0c4b2b07617d5ea81b1d88ea74124e4739d667f1ad72eb36dad3d48e7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96f1ff0c4b2b07617d5ea81b1d88ea74124e4739d667f1ad72eb36dad3d48e7f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa9e24a4bd37675e1c6b4f6ba08d60b722add0bbcdf9eb51ace2e3fb35143d0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa9e24a4bd37675e1c6b4f6ba08d60b722add0bbcdf9eb51ace2e3fb35143d0d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-n6lnd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:52Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:52 crc kubenswrapper[4762]: I1009 13:25:52.614859 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bj499" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b164d4fd-19df-4902-971f-5efe403e61e0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ab43a3e93731e2de1e0260a1933cc78188e7b561cb450d645c7b4fc00b691ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqf4b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:51Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bj499\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:52Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:52 crc kubenswrapper[4762]: I1009 13:25:52.617168 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 13:25:52 crc kubenswrapper[4762]: I1009 13:25:52.617307 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 13:25:52 crc kubenswrapper[4762]: E1009 13:25:52.617390 4762 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 13:26:00.617360128 +0000 UTC m=+36.391151187 (durationBeforeRetry 8s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 13:25:52 crc kubenswrapper[4762]: E1009 13:25:52.617405 4762 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 09 13:25:52 crc kubenswrapper[4762]: E1009 13:25:52.617464 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-09 13:26:00.61744975 +0000 UTC m=+36.391240799 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 09 13:25:52 crc kubenswrapper[4762]: I1009 13:25:52.617508 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 13:25:52 crc kubenswrapper[4762]: E1009 13:25:52.617692 4762 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 09 13:25:52 crc kubenswrapper[4762]: E1009 13:25:52.617752 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-09 13:26:00.617735807 +0000 UTC m=+36.391526866 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 09 13:25:52 crc kubenswrapper[4762]: I1009 13:25:52.674856 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:52 crc kubenswrapper[4762]: I1009 13:25:52.674929 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:52 crc kubenswrapper[4762]: I1009 13:25:52.674954 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:52 crc kubenswrapper[4762]: I1009 13:25:52.674985 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:25:52 crc kubenswrapper[4762]: I1009 13:25:52.675010 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:25:52Z","lastTransitionTime":"2025-10-09T13:25:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:25:52 crc kubenswrapper[4762]: I1009 13:25:52.718808 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 13:25:52 crc kubenswrapper[4762]: I1009 13:25:52.718870 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 13:25:52 crc kubenswrapper[4762]: E1009 13:25:52.718980 4762 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 09 13:25:52 crc kubenswrapper[4762]: E1009 13:25:52.718999 4762 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 09 13:25:52 crc kubenswrapper[4762]: E1009 13:25:52.719011 4762 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 09 13:25:52 crc kubenswrapper[4762]: E1009 13:25:52.719076 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. 
No retries permitted until 2025-10-09 13:26:00.719059613 +0000 UTC m=+36.492850652 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 09 13:25:52 crc kubenswrapper[4762]: E1009 13:25:52.719189 4762 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 09 13:25:52 crc kubenswrapper[4762]: E1009 13:25:52.719257 4762 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 09 13:25:52 crc kubenswrapper[4762]: E1009 13:25:52.719289 4762 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 09 13:25:52 crc kubenswrapper[4762]: E1009 13:25:52.719403 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-09 13:26:00.719370821 +0000 UTC m=+36.493161890 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 09 13:25:52 crc kubenswrapper[4762]: I1009 13:25:52.777510 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:52 crc kubenswrapper[4762]: I1009 13:25:52.777561 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:52 crc kubenswrapper[4762]: I1009 13:25:52.777579 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:52 crc kubenswrapper[4762]: I1009 13:25:52.777602 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:25:52 crc kubenswrapper[4762]: I1009 13:25:52.777620 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:25:52Z","lastTransitionTime":"2025-10-09T13:25:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:25:52 crc kubenswrapper[4762]: I1009 13:25:52.880386 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:52 crc kubenswrapper[4762]: I1009 13:25:52.880425 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:52 crc kubenswrapper[4762]: I1009 13:25:52.880433 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:52 crc kubenswrapper[4762]: I1009 13:25:52.880447 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:25:52 crc kubenswrapper[4762]: I1009 13:25:52.880457 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:25:52Z","lastTransitionTime":"2025-10-09T13:25:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:25:52 crc kubenswrapper[4762]: I1009 13:25:52.964793 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 13:25:52 crc kubenswrapper[4762]: I1009 13:25:52.964857 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 13:25:52 crc kubenswrapper[4762]: E1009 13:25:52.964914 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 13:25:52 crc kubenswrapper[4762]: I1009 13:25:52.964793 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 13:25:52 crc kubenswrapper[4762]: E1009 13:25:52.965013 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 13:25:52 crc kubenswrapper[4762]: E1009 13:25:52.965085 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 13:25:52 crc kubenswrapper[4762]: I1009 13:25:52.984510 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:52 crc kubenswrapper[4762]: I1009 13:25:52.984573 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:52 crc kubenswrapper[4762]: I1009 13:25:52.984586 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:52 crc kubenswrapper[4762]: I1009 13:25:52.984602 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:25:52 crc kubenswrapper[4762]: I1009 13:25:52.984614 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:25:52Z","lastTransitionTime":"2025-10-09T13:25:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:25:53 crc kubenswrapper[4762]: I1009 13:25:53.086677 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:53 crc kubenswrapper[4762]: I1009 13:25:53.086712 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:53 crc kubenswrapper[4762]: I1009 13:25:53.086723 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:53 crc kubenswrapper[4762]: I1009 13:25:53.086741 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:25:53 crc kubenswrapper[4762]: I1009 13:25:53.086752 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:25:53Z","lastTransitionTime":"2025-10-09T13:25:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:25:53 crc kubenswrapper[4762]: I1009 13:25:53.189580 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:53 crc kubenswrapper[4762]: I1009 13:25:53.189665 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:53 crc kubenswrapper[4762]: I1009 13:25:53.189689 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:53 crc kubenswrapper[4762]: I1009 13:25:53.189731 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:25:53 crc kubenswrapper[4762]: I1009 13:25:53.189751 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:25:53Z","lastTransitionTime":"2025-10-09T13:25:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:25:53 crc kubenswrapper[4762]: I1009 13:25:53.200307 4762 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 09 13:25:53 crc kubenswrapper[4762]: I1009 13:25:53.293489 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:53 crc kubenswrapper[4762]: I1009 13:25:53.293950 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:53 crc kubenswrapper[4762]: I1009 13:25:53.294096 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:53 crc kubenswrapper[4762]: I1009 13:25:53.294226 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:25:53 crc kubenswrapper[4762]: I1009 13:25:53.294363 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:25:53Z","lastTransitionTime":"2025-10-09T13:25:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:25:53 crc kubenswrapper[4762]: I1009 13:25:53.397186 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:53 crc kubenswrapper[4762]: I1009 13:25:53.397221 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:53 crc kubenswrapper[4762]: I1009 13:25:53.397230 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:53 crc kubenswrapper[4762]: I1009 13:25:53.397246 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:25:53 crc kubenswrapper[4762]: I1009 13:25:53.397256 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:25:53Z","lastTransitionTime":"2025-10-09T13:25:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:25:53 crc kubenswrapper[4762]: I1009 13:25:53.500377 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:53 crc kubenswrapper[4762]: I1009 13:25:53.500439 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:53 crc kubenswrapper[4762]: I1009 13:25:53.500462 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:53 crc kubenswrapper[4762]: I1009 13:25:53.500490 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:25:53 crc kubenswrapper[4762]: I1009 13:25:53.500512 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:25:53Z","lastTransitionTime":"2025-10-09T13:25:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:25:53 crc kubenswrapper[4762]: I1009 13:25:53.604336 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:53 crc kubenswrapper[4762]: I1009 13:25:53.604400 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:53 crc kubenswrapper[4762]: I1009 13:25:53.604417 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:53 crc kubenswrapper[4762]: I1009 13:25:53.604443 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:25:53 crc kubenswrapper[4762]: I1009 13:25:53.604461 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:25:53Z","lastTransitionTime":"2025-10-09T13:25:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:25:53 crc kubenswrapper[4762]: I1009 13:25:53.707214 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:53 crc kubenswrapper[4762]: I1009 13:25:53.707254 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:53 crc kubenswrapper[4762]: I1009 13:25:53.707264 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:53 crc kubenswrapper[4762]: I1009 13:25:53.707278 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:25:53 crc kubenswrapper[4762]: I1009 13:25:53.707288 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:25:53Z","lastTransitionTime":"2025-10-09T13:25:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:25:53 crc kubenswrapper[4762]: I1009 13:25:53.809880 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:53 crc kubenswrapper[4762]: I1009 13:25:53.809914 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:53 crc kubenswrapper[4762]: I1009 13:25:53.809926 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:53 crc kubenswrapper[4762]: I1009 13:25:53.809943 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:25:53 crc kubenswrapper[4762]: I1009 13:25:53.809955 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:25:53Z","lastTransitionTime":"2025-10-09T13:25:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:25:53 crc kubenswrapper[4762]: I1009 13:25:53.912421 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:53 crc kubenswrapper[4762]: I1009 13:25:53.912784 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:53 crc kubenswrapper[4762]: I1009 13:25:53.912797 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:53 crc kubenswrapper[4762]: I1009 13:25:53.912817 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:25:53 crc kubenswrapper[4762]: I1009 13:25:53.912833 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:25:53Z","lastTransitionTime":"2025-10-09T13:25:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:25:54 crc kubenswrapper[4762]: I1009 13:25:54.015711 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:54 crc kubenswrapper[4762]: I1009 13:25:54.015762 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:54 crc kubenswrapper[4762]: I1009 13:25:54.015776 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:54 crc kubenswrapper[4762]: I1009 13:25:54.015794 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:25:54 crc kubenswrapper[4762]: I1009 13:25:54.015808 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:25:54Z","lastTransitionTime":"2025-10-09T13:25:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:25:54 crc kubenswrapper[4762]: I1009 13:25:54.118232 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:54 crc kubenswrapper[4762]: I1009 13:25:54.118276 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:54 crc kubenswrapper[4762]: I1009 13:25:54.118286 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:54 crc kubenswrapper[4762]: I1009 13:25:54.118301 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:25:54 crc kubenswrapper[4762]: I1009 13:25:54.118311 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:25:54Z","lastTransitionTime":"2025-10-09T13:25:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:25:54 crc kubenswrapper[4762]: I1009 13:25:54.202751 4762 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 09 13:25:54 crc kubenswrapper[4762]: I1009 13:25:54.221443 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:54 crc kubenswrapper[4762]: I1009 13:25:54.221493 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:54 crc kubenswrapper[4762]: I1009 13:25:54.221505 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:54 crc kubenswrapper[4762]: I1009 13:25:54.221523 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:25:54 crc kubenswrapper[4762]: I1009 13:25:54.221536 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:25:54Z","lastTransitionTime":"2025-10-09T13:25:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:25:54 crc kubenswrapper[4762]: I1009 13:25:54.324235 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:54 crc kubenswrapper[4762]: I1009 13:25:54.324280 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:54 crc kubenswrapper[4762]: I1009 13:25:54.324292 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:54 crc kubenswrapper[4762]: I1009 13:25:54.324313 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:25:54 crc kubenswrapper[4762]: I1009 13:25:54.324327 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:25:54Z","lastTransitionTime":"2025-10-09T13:25:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:25:54 crc kubenswrapper[4762]: I1009 13:25:54.427569 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:54 crc kubenswrapper[4762]: I1009 13:25:54.427654 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:54 crc kubenswrapper[4762]: I1009 13:25:54.427665 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:54 crc kubenswrapper[4762]: I1009 13:25:54.427687 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:25:54 crc kubenswrapper[4762]: I1009 13:25:54.427702 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:25:54Z","lastTransitionTime":"2025-10-09T13:25:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:25:54 crc kubenswrapper[4762]: I1009 13:25:54.530668 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:54 crc kubenswrapper[4762]: I1009 13:25:54.530745 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:54 crc kubenswrapper[4762]: I1009 13:25:54.530768 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:54 crc kubenswrapper[4762]: I1009 13:25:54.530798 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:25:54 crc kubenswrapper[4762]: I1009 13:25:54.530865 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:25:54Z","lastTransitionTime":"2025-10-09T13:25:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:25:54 crc kubenswrapper[4762]: I1009 13:25:54.633428 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:54 crc kubenswrapper[4762]: I1009 13:25:54.633470 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:54 crc kubenswrapper[4762]: I1009 13:25:54.633479 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:54 crc kubenswrapper[4762]: I1009 13:25:54.633492 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:25:54 crc kubenswrapper[4762]: I1009 13:25:54.633500 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:25:54Z","lastTransitionTime":"2025-10-09T13:25:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:25:54 crc kubenswrapper[4762]: I1009 13:25:54.736812 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:54 crc kubenswrapper[4762]: I1009 13:25:54.736899 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:54 crc kubenswrapper[4762]: I1009 13:25:54.736927 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:54 crc kubenswrapper[4762]: I1009 13:25:54.736959 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:25:54 crc kubenswrapper[4762]: I1009 13:25:54.736980 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:25:54Z","lastTransitionTime":"2025-10-09T13:25:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:25:54 crc kubenswrapper[4762]: I1009 13:25:54.839569 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:54 crc kubenswrapper[4762]: I1009 13:25:54.839677 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:54 crc kubenswrapper[4762]: I1009 13:25:54.839702 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:54 crc kubenswrapper[4762]: I1009 13:25:54.839732 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:25:54 crc kubenswrapper[4762]: I1009 13:25:54.839753 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:25:54Z","lastTransitionTime":"2025-10-09T13:25:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:25:54 crc kubenswrapper[4762]: I1009 13:25:54.943422 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:54 crc kubenswrapper[4762]: I1009 13:25:54.943512 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:54 crc kubenswrapper[4762]: I1009 13:25:54.943537 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:54 crc kubenswrapper[4762]: I1009 13:25:54.943570 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:25:54 crc kubenswrapper[4762]: I1009 13:25:54.943594 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:25:54Z","lastTransitionTime":"2025-10-09T13:25:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:25:54 crc kubenswrapper[4762]: I1009 13:25:54.964901 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 13:25:54 crc kubenswrapper[4762]: I1009 13:25:54.964997 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 13:25:54 crc kubenswrapper[4762]: E1009 13:25:54.965067 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 13:25:54 crc kubenswrapper[4762]: I1009 13:25:54.964997 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 13:25:54 crc kubenswrapper[4762]: E1009 13:25:54.965204 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 13:25:54 crc kubenswrapper[4762]: E1009 13:25:54.965336 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 13:25:54 crc kubenswrapper[4762]: I1009 13:25:54.988268 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:54Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:55 crc kubenswrapper[4762]: I1009 13:25:55.011341 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-n6lnd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf8b8ba7-96cd-4cdd-9925-94dd98242050\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ff1d6f7e6b424ebf8005fa3d140897100a8e7ed0095e7af05531cf5ad9f69b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://38f8c3c9395cbaf4a6426349a070b2d3b4ba4f83af8f5272a33d617f456c2e38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://38f8c3c9395cbaf4a6426349a070b2d3b4ba4f83af8f5272a33d617f456c2e38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a83c8fa2a9b44c19879eaa27ee0aceb5aa4f0c2d70347e497a62fc1ca236807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a83c8fa2a9b44c19879eaa27ee0aceb5aa4f0c2d70347e497a62fc1ca236807\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ece4883665cbaf9a3045d5ddde5584cc4cd6c3acb5a8bb3acbf7eaee644796e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ece4883665cbaf9a3045d5ddde5584cc4cd6c3acb5a8bb3acbf7eaee644796e9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1aae59e2ff2b1d48f9d2b5d13c30ccc155f0ba8e2657ec99020d6c6a8977495e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1aae59e2ff2b1d48f9d2b5d13c30ccc155f0ba8e2657ec99020d6c6a8977495e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f1ff0c4b2b07617d5ea81b1d88ea74124e4739d667f1ad72eb36dad3d48e7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96f1ff0c4b2b07617d5ea81b1d88ea74124e4739d667f1ad72eb36dad3d48e7f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa9e24a4bd37675e1c6b4f6ba08d60b722add0bbcdf9eb51ace2e3fb35143d0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa9e24a4bd37675e1c6b4f6ba08d60b722add0bbcdf9eb51ace2e3fb35143d0d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-n6lnd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:55Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:55 crc kubenswrapper[4762]: I1009 13:25:55.024111 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bj499" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b164d4fd-19df-4902-971f-5efe403e61e0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ab43a3e93731e2de1e0260a1933cc78188e7b561cb450d645c7b4fc00b691ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqf4b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:51Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bj499\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:55Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:55 crc kubenswrapper[4762]: I1009 13:25:55.040425 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dd0d2d4c-667f-43da-8074-b6e14823b755\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78a7e23eb6d5024d626963a06cf5790fcd6c7c17c82c823b2650c55273e427fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f807722a8b6059afed30f7f1fd32bcc168b8bf9d5eee02d74a42ab70ae5ff048\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1706f7f512083d1da015da3c7cd09c6aa4d497b83f8dfcd4ce0e8e966aa00b37\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd9c69d0be5e859f86da1745bdf82f003681f064e2580bfd454e6ba875bdcb61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:25Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:55Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:55 crc kubenswrapper[4762]: I1009 13:25:55.046164 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:55 crc kubenswrapper[4762]: I1009 13:25:55.046230 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:55 crc kubenswrapper[4762]: I1009 13:25:55.046248 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:55 crc kubenswrapper[4762]: I1009 13:25:55.046274 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:25:55 crc kubenswrapper[4762]: I1009 13:25:55.046292 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:25:55Z","lastTransitionTime":"2025-10-09T13:25:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:25:55 crc kubenswrapper[4762]: I1009 13:25:55.062083 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:55Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:55 crc kubenswrapper[4762]: I1009 13:25:55.080697 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d2fc9df0c1cc14b4b9f8caff51e87059aeffaa2daeeb271d55585f7297d003c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4194b9211ce31f24383b3cabd274bfb9afef0c56583b802cb2c934ba81b05c43\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:55Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:55 crc kubenswrapper[4762]: I1009 13:25:55.092371 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"366049a3-acf6-488c-9f93-4557528d6d14\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbaae79b08d94e58f88c25cf641c2c24edc8f8ed5d5ffbf5fd3c68b24246a964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://be56bae2e58091d7381288b22608ea1d9ff05c002d923b3dc62b87fe4d4dfdc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5v6hv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:55Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:55 crc kubenswrapper[4762]: I1009 13:25:55.106130 4762 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42c46f24d4579b9ef6d5f7a351830fc24872a571e6bc26a163bb5ace1e688037\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:55Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:55 crc kubenswrapper[4762]: I1009 13:25:55.119426 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b59b56cd547bd0e2f72ad712b04651a4ae65cf3a0df7865c0b0c16478261a06a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:55Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:55 crc kubenswrapper[4762]: I1009 13:25:55.134898 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:55Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:55 crc kubenswrapper[4762]: I1009 13:25:55.148098 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:55 crc kubenswrapper[4762]: I1009 13:25:55.148130 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:55 crc kubenswrapper[4762]: I1009 13:25:55.148138 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:55 crc kubenswrapper[4762]: I1009 13:25:55.148151 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:25:55 crc kubenswrapper[4762]: I1009 13:25:55.148159 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:25:55Z","lastTransitionTime":"2025-10-09T13:25:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:25:55 crc kubenswrapper[4762]: I1009 13:25:55.152976 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"92662de9-9784-432a-92d2-a668f815e8fd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c409df5880861cef6885822a19dc9bbe481342a849c18ac11c85a60fcee0f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9dbd3d536f2ff0e46947e1516b3b9def208d490f5e62bbde5bebf37690d26ac0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://1a6b9ddcf6f9632e0ab1ac7f145c90d4c1e404b44f6e4fdc547fa42a4736448d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1fe15b6fa2a4089c0ef0b19180a44b570bf28aeb719e8fb5c960c16f3bc3ee5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://443d1d116c58f5d8b2c5fc9051baf914244cb0776b1f912d11fe4316a0ec0567\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a6b026d474235bbb7b31530b4628a10c35b22baf4ee49759f28a9beb8177989\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa009d3f8b7f73cb12d99c5110020d32d469a604fc5b3715343b754861cbf2cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\
"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f5be977653547c33b4d2d5184688120b32866045e3b18a08be1c7c406d6b498\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bd03e6b064630a1cd71d2e88a4e99d513b30d0f225516ce8030cba879fe2422e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd03e6b064630a1cd71d2e88a4e99d513b30d0f225516ce8030cba879fe2422e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jl67g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:55Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:55 crc kubenswrapper[4762]: I1009 13:25:55.166587 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7bd1ecbd-1492-4e6a-87e8-1c913e084d9d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9ff1efe69d256b491a039e5f35442c087ce3b52fc7abf98b338e24c3e020b99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb2071dd369674ca2de7de56dd1250c763b8733d72889b60eff864774dc3d81b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e44b6f2021a1a4ccd714f86443c7cc235b9d77cd455e68f7e042281ff0917569\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f33b9070a56fe51d2f39d9d509fc8cea2fada696703209c911b75f5c8f53e96d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9179f90a1a9a3c70467429b0471320ccf51b67f27c4d28d22ebc477cedab17d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1009 13:25:38.564754 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 13:25:38.572923 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2393231961/tls.crt::/tmp/serving-cert-2393231961/tls.key\\\\\\\"\\\\nI1009 13:25:44.418137 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 13:25:44.425303 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 13:25:44.425330 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 13:25:44.425348 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 13:25:44.425353 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 13:25:44.434300 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1009 13:25:44.434319 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1009 13:25:44.434323 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 13:25:44.434340 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 13:25:44.434344 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 13:25:44.434347 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 13:25:44.434350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 13:25:44.434353 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1009 13:25:44.436492 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:28Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://281873dc615f940d39a13cb0a18a2eb34eb7de3f9773d8845183edeb89d430f5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b0cc5e4351c64f1a4f07f8ec87ea48ddab393d4ac64228e8fbf20d3259fb630\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6b0cc5e4351c64f1a4f07f8ec87ea48ddab393d4ac64228e8fbf20d3259fb630\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:25Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:55Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:55 crc kubenswrapper[4762]: I1009 13:25:55.176006 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2vkbh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"263d57f9-b10b-4ce1-adad-774600b977d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://732b66ff58c48b0703e0fd4585768652035af6797f66b586fc6f17ef3937d9d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gmcr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2vkbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:55Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:55 crc kubenswrapper[4762]: I1009 13:25:55.188180 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-9wtqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c847aae6-277a-45dc-86d0-9b175f7e8177\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbc232c96b60c8678588d4902c8dfbf6fc0b30f8af768295c963aad3a9f4d644\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2kljt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-9wtqb\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:55Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:55 crc kubenswrapper[4762]: I1009 13:25:55.205982 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-jl67g_92662de9-9784-432a-92d2-a668f815e8fd/ovnkube-controller/0.log" Oct 09 13:25:55 crc kubenswrapper[4762]: I1009 13:25:55.209147 4762 generic.go:334] "Generic (PLEG): container finished" podID="92662de9-9784-432a-92d2-a668f815e8fd" containerID="aa009d3f8b7f73cb12d99c5110020d32d469a604fc5b3715343b754861cbf2cb" exitCode=1 Oct 09 13:25:55 crc kubenswrapper[4762]: I1009 13:25:55.209193 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" event={"ID":"92662de9-9784-432a-92d2-a668f815e8fd","Type":"ContainerDied","Data":"aa009d3f8b7f73cb12d99c5110020d32d469a604fc5b3715343b754861cbf2cb"} Oct 09 13:25:55 crc kubenswrapper[4762]: I1009 13:25:55.210989 4762 scope.go:117] "RemoveContainer" containerID="aa009d3f8b7f73cb12d99c5110020d32d469a604fc5b3715343b754861cbf2cb" Oct 09 13:25:55 crc kubenswrapper[4762]: I1009 13:25:55.221939 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:55Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:55 crc kubenswrapper[4762]: I1009 13:25:55.241909 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"92662de9-9784-432a-92d2-a668f815e8fd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c409df5880861cef6885822a19dc9bbe481342a849c18ac11c85a60fcee0f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9dbd3d536f2ff0e46947e1516b3b9def208d490f5e62bbde5bebf37690d26ac0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a6b9ddcf6f9632e0ab1ac7f145c90d4c1e404b44f6e4fdc547fa42a4736448d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1fe15b6fa2a4089c0ef0b19180a44b570bf28aeb719e8fb5c960c16f3bc3ee5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://443d1d116c58f5d8b2c5fc9051baf914244cb0776b1f912d11fe4316a0ec0567\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a6b026d474235bbb7b31530b4628a10c35b22baf4ee49759f28a9beb8177989\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa009d3f8b7f73cb12d99c5110020d32d469a604
fc5b3715343b754861cbf2cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa009d3f8b7f73cb12d99c5110020d32d469a604fc5b3715343b754861cbf2cb\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T13:25:54Z\\\",\\\"message\\\":\\\"y (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1009 13:25:54.441462 6005 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1009 13:25:54.441489 6005 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1009 13:25:54.441528 6005 handler.go:208] Removed *v1.Node event handler 2\\\\nI1009 13:25:54.441590 6005 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1009 13:25:54.441617 6005 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1009 13:25:54.441694 6005 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1009 13:25:54.441722 6005 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1009 13:25:54.441760 6005 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1009 13:25:54.441766 6005 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1009 13:25:54.441812 6005 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1009 13:25:54.441615 6005 handler.go:208] Removed *v1.Node event handler 7\\\\nI1009 13:25:54.441858 6005 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1009 13:25:54.441874 6005 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1009 13:25:54.441915 6005 factory.go:656] Stopping watch factory\\\\nI1009 13:25:54.441939 6005 ovnkube.go:599] Stopped ovnkube\\\\nI1009 
13:25:5\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f5be977653547c33b4d2d5184688120b32866045e3b18a08be1c7c406d6b498\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bd03e6b064630a1cd71d2e88a4e99d513b30d0f225516ce8030cba879fe2422e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0
d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd03e6b064630a1cd71d2e88a4e99d513b30d0f225516ce8030cba879fe2422e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jl67g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:55Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:55 crc kubenswrapper[4762]: I1009 13:25:55.250171 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:55 crc kubenswrapper[4762]: I1009 13:25:55.250216 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:55 crc kubenswrapper[4762]: I1009 13:25:55.250232 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:55 crc kubenswrapper[4762]: I1009 13:25:55.250256 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:25:55 crc kubenswrapper[4762]: I1009 13:25:55.250270 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:25:55Z","lastTransitionTime":"2025-10-09T13:25:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:25:55 crc kubenswrapper[4762]: I1009 13:25:55.259471 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42c46f24d4579b9ef6d5f7a351830fc24872a571e6bc26a163bb5ace1e688037\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:55Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:55 crc kubenswrapper[4762]: I1009 13:25:55.271349 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b59b56cd547bd0e2f72ad712b04651a4ae65cf3a0df7865c0b0c16478261a06a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:55Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:55 crc kubenswrapper[4762]: I1009 13:25:55.285508 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-9wtqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c847aae6-277a-45dc-86d0-9b175f7e8177\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbc232c96b60c8678588d4902c8dfbf6fc0b30f8af768295c963aad3a9f4d644\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2kljt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-9wtqb\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:55Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:55 crc kubenswrapper[4762]: I1009 13:25:55.301453 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7bd1ecbd-1492-4e6a-87e8-1c913e084d9d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9ff1efe69d256b491a039e5f35442c087ce3b52fc7abf98b338e24c3e020b99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb2071dd369674ca2de7de56dd1250c763b8733d72889b60eff864774dc3d81b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e44b6f2021a1a4ccd714f86443c7cc235b9d77cd455e68f7e042281ff0917569\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.i
o/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f33b9070a56fe51d2f39d9d509fc8cea2fada696703209c911b75f5c8f53e96d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9179f90a1a9a3c70467429b0471320ccf51b67f27c4d28d22ebc477cedab17d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1009 13:25:38.564754 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 13:25:38.572923 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2393231961/tls.crt::/tmp/serving-cert-2393231961/tls.key\\\\\\\"\\\\nI1009 13:25:44.418137 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 13:25:44.425303 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 13:25:44.425330 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 13:25:44.425348 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 13:25:44.425353 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 13:25:44.434300 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1009 13:25:44.434319 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1009 13:25:44.434323 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 13:25:44.434340 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 13:25:44.434344 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 13:25:44.434347 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 13:25:44.434350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 13:25:44.434353 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1009 13:25:44.436492 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:28Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://281873dc615f940d39a13cb0a18a2eb34eb7de3f9773d8845183edeb89d430f5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b0cc5e4351c64f1a4f07f8ec87ea48ddab393d4ac64228e8fbf20d3259fb630\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6b0cc5e4351c64f1a4f07f8ec87ea48ddab393d4ac64228e8fbf20d3259fb630\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:25Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:55Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:55 crc kubenswrapper[4762]: I1009 13:25:55.310919 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2vkbh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"263d57f9-b10b-4ce1-adad-774600b977d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://732b66ff58c48b0703e0fd4585768652035af6797f66b586fc6f17ef3937d9d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gmcr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2vkbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:55Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:55 crc kubenswrapper[4762]: I1009 13:25:55.323175 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:55Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:55 crc kubenswrapper[4762]: I1009 13:25:55.342509 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-n6lnd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf8b8ba7-96cd-4cdd-9925-94dd98242050\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ff1d6f7e6b424ebf8005fa3d140897100a8e7ed0095e7af05531cf5ad9f69b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\
\\":[{\\\"containerID\\\":\\\"cri-o://38f8c3c9395cbaf4a6426349a070b2d3b4ba4f83af8f5272a33d617f456c2e38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://38f8c3c9395cbaf4a6426349a070b2d3b4ba4f83af8f5272a33d617f456c2e38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a83c8fa2a9b44c19879eaa27ee0aceb5aa4f0c2d70347e497a62fc1ca236807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a83c8fa2a9b44c19879eaa27ee0aceb5aa4f0c2d70347e497a62fc1ca236807\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ece4883665cbaf9a3045d5ddde5584cc4cd6c3acb5a8bb3acbf7eaee644796e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ece4883665cbaf9a3045d5ddde5584cc4cd6c3acb5a8bb3acbf7eaee644796e9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:47Z
\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1aae59e2ff2b1d48f9d2b5d13c30ccc155f0ba8e2657ec99020d6c6a8977495e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1aae59e2ff2b1d48f9d2b5d13c30ccc155f0ba8e2657ec99020d6c6a8977495e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f1ff0c4b2b07617d5ea81b1d88ea74124e4739d667f1ad72eb36dad3d48e7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96f1ff0c4b2b07617d5ea81b1d88ea74124e4739d667f1ad72eb36dad3d48e7f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa9e24a4bd37675e1c6b4f6ba08d60b722add0bbcdf9eb51ace2e3fb35143d0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\"
,\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa9e24a4bd37675e1c6b4f6ba08d60b722add0bbcdf9eb51ace2e3fb35143d0d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-n6lnd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:55Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:55 crc kubenswrapper[4762]: I1009 13:25:55.352593 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:55 crc kubenswrapper[4762]: I1009 13:25:55.352644 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:55 crc kubenswrapper[4762]: I1009 13:25:55.352657 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:55 crc kubenswrapper[4762]: I1009 13:25:55.352672 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:25:55 crc kubenswrapper[4762]: I1009 13:25:55.352683 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:25:55Z","lastTransitionTime":"2025-10-09T13:25:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:25:55 crc kubenswrapper[4762]: I1009 13:25:55.354032 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bj499" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b164d4fd-19df-4902-971f-5efe403e61e0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ab43a3e93731e2de1e0260a1933cc78188e7b561cb450d645c7b4fc00b691ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqf4b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:51Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bj499\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:55Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:55 crc kubenswrapper[4762]: I1009 13:25:55.366239 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d2fc9df0c1cc14b4b9f8caff51e87059aeffaa2daeeb271d55585f7297d003c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4194b9211ce31f24383b3cabd274bfb9afef0c56583b802cb2c934ba81b05c43\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:55Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:55 crc kubenswrapper[4762]: I1009 13:25:55.377537 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"366049a3-acf6-488c-9f93-4557528d6d14\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbaae79b08d94e58f88c25cf641c2c24edc8f8ed5d5ffbf5fd3c68b24246a964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://be56bae2e58091d7381288b22608ea1d9ff05c002d923b3dc62b87fe4d4dfdc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5v6hv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:55Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:55 crc kubenswrapper[4762]: I1009 13:25:55.390078 4762 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dd0d2d4c-667f-43da-8074-b6e14823b755\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78a7e23eb6d5024d626963a06cf5790fcd6c7c17c82c823b2650c55273e427fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f807722a8b6059afed30f7f1fd32bcc168b8bf9d5eee02d74a42ab70ae5ff048\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1706f7f512083d1da015da3c7cd09c6aa4d497b83f8dfcd4ce0e8e966aa00b37\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd9c69d0be5e859f86da1745bdf
82f003681f064e2580bfd454e6ba875bdcb61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:25Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:55Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:55 crc kubenswrapper[4762]: I1009 13:25:55.405153 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:55Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:55 crc kubenswrapper[4762]: I1009 13:25:55.459050 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:55 crc kubenswrapper[4762]: I1009 13:25:55.459086 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:55 crc kubenswrapper[4762]: I1009 13:25:55.459095 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:55 crc kubenswrapper[4762]: I1009 13:25:55.459109 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:25:55 crc kubenswrapper[4762]: I1009 13:25:55.459118 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:25:55Z","lastTransitionTime":"2025-10-09T13:25:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:25:55 crc kubenswrapper[4762]: I1009 13:25:55.562330 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:55 crc kubenswrapper[4762]: I1009 13:25:55.562375 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:55 crc kubenswrapper[4762]: I1009 13:25:55.562391 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:55 crc kubenswrapper[4762]: I1009 13:25:55.562413 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:25:55 crc kubenswrapper[4762]: I1009 13:25:55.562430 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:25:55Z","lastTransitionTime":"2025-10-09T13:25:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:25:55 crc kubenswrapper[4762]: I1009 13:25:55.665136 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:55 crc kubenswrapper[4762]: I1009 13:25:55.665183 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:55 crc kubenswrapper[4762]: I1009 13:25:55.665198 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:55 crc kubenswrapper[4762]: I1009 13:25:55.665217 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:25:55 crc kubenswrapper[4762]: I1009 13:25:55.665231 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:25:55Z","lastTransitionTime":"2025-10-09T13:25:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:25:55 crc kubenswrapper[4762]: I1009 13:25:55.767695 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:55 crc kubenswrapper[4762]: I1009 13:25:55.767734 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:55 crc kubenswrapper[4762]: I1009 13:25:55.767742 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:55 crc kubenswrapper[4762]: I1009 13:25:55.767757 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:25:55 crc kubenswrapper[4762]: I1009 13:25:55.767766 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:25:55Z","lastTransitionTime":"2025-10-09T13:25:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:25:55 crc kubenswrapper[4762]: I1009 13:25:55.869962 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:55 crc kubenswrapper[4762]: I1009 13:25:55.870262 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:55 crc kubenswrapper[4762]: I1009 13:25:55.870359 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:55 crc kubenswrapper[4762]: I1009 13:25:55.870444 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:25:55 crc kubenswrapper[4762]: I1009 13:25:55.870543 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:25:55Z","lastTransitionTime":"2025-10-09T13:25:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:25:55 crc kubenswrapper[4762]: I1009 13:25:55.973317 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:55 crc kubenswrapper[4762]: I1009 13:25:55.973367 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:55 crc kubenswrapper[4762]: I1009 13:25:55.973387 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:55 crc kubenswrapper[4762]: I1009 13:25:55.973408 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:25:55 crc kubenswrapper[4762]: I1009 13:25:55.973422 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:25:55Z","lastTransitionTime":"2025-10-09T13:25:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:25:56 crc kubenswrapper[4762]: I1009 13:25:56.076102 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:56 crc kubenswrapper[4762]: I1009 13:25:56.076159 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:56 crc kubenswrapper[4762]: I1009 13:25:56.076169 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:56 crc kubenswrapper[4762]: I1009 13:25:56.076186 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:25:56 crc kubenswrapper[4762]: I1009 13:25:56.076196 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:25:56Z","lastTransitionTime":"2025-10-09T13:25:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:25:56 crc kubenswrapper[4762]: I1009 13:25:56.178814 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:56 crc kubenswrapper[4762]: I1009 13:25:56.178883 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:56 crc kubenswrapper[4762]: I1009 13:25:56.178902 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:56 crc kubenswrapper[4762]: I1009 13:25:56.178928 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:25:56 crc kubenswrapper[4762]: I1009 13:25:56.178946 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:25:56Z","lastTransitionTime":"2025-10-09T13:25:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:25:56 crc kubenswrapper[4762]: I1009 13:25:56.214732 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-jl67g_92662de9-9784-432a-92d2-a668f815e8fd/ovnkube-controller/1.log" Oct 09 13:25:56 crc kubenswrapper[4762]: I1009 13:25:56.215461 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-jl67g_92662de9-9784-432a-92d2-a668f815e8fd/ovnkube-controller/0.log" Oct 09 13:25:56 crc kubenswrapper[4762]: I1009 13:25:56.218691 4762 generic.go:334] "Generic (PLEG): container finished" podID="92662de9-9784-432a-92d2-a668f815e8fd" containerID="3a5b3dbf7adcc609a6fbc414eb3eba302ecf6355c195221fa03ebe90a8b4eb84" exitCode=1 Oct 09 13:25:56 crc kubenswrapper[4762]: I1009 13:25:56.218738 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" event={"ID":"92662de9-9784-432a-92d2-a668f815e8fd","Type":"ContainerDied","Data":"3a5b3dbf7adcc609a6fbc414eb3eba302ecf6355c195221fa03ebe90a8b4eb84"} Oct 09 13:25:56 crc kubenswrapper[4762]: I1009 13:25:56.218782 4762 scope.go:117] "RemoveContainer" containerID="aa009d3f8b7f73cb12d99c5110020d32d469a604fc5b3715343b754861cbf2cb" Oct 09 13:25:56 crc kubenswrapper[4762]: I1009 13:25:56.219651 4762 scope.go:117] "RemoveContainer" containerID="3a5b3dbf7adcc609a6fbc414eb3eba302ecf6355c195221fa03ebe90a8b4eb84" Oct 09 13:25:56 crc kubenswrapper[4762]: E1009 13:25:56.220077 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-jl67g_openshift-ovn-kubernetes(92662de9-9784-432a-92d2-a668f815e8fd)\"" pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" podUID="92662de9-9784-432a-92d2-a668f815e8fd" Oct 09 13:25:56 crc kubenswrapper[4762]: I1009 13:25:56.242429 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d2fc9df0c1cc14b4b9f8caff51e87059aeffaa2daeeb271d55585f7297d003c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4194b9211ce31f24383b3cabd274bfb9afef0c56583b802cb2c934ba81b05c43\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:56Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:56 crc kubenswrapper[4762]: I1009 13:25:56.258387 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"366049a3-acf6-488c-9f93-4557528d6d14\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbaae79b08d94e58f88c25cf641c2c24edc8f8ed5d5ffbf5fd3c68b24246a964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://be56bae2e58091d7381288b22608ea1d9ff05c002d923b3dc62b87fe4d4dfdc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5v6hv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:56Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:56 crc kubenswrapper[4762]: I1009 13:25:56.275412 4762 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dd0d2d4c-667f-43da-8074-b6e14823b755\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78a7e23eb6d5024d626963a06cf5790fcd6c7c17c82c823b2650c55273e427fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f807722a8b6059afed30f7f1fd32bcc168b8bf9d5eee02d74a42ab70ae5ff048\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1706f7f512083d1da015da3c7cd09c6aa4d497b83f8dfcd4ce0e8e966aa00b37\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd9c69d0be5e859f86da1745bdf
82f003681f064e2580bfd454e6ba875bdcb61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:25Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:56Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:56 crc kubenswrapper[4762]: I1009 13:25:56.281681 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:56 crc kubenswrapper[4762]: I1009 13:25:56.281731 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:56 crc kubenswrapper[4762]: I1009 13:25:56.281747 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:56 crc kubenswrapper[4762]: I1009 13:25:56.281771 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:25:56 crc kubenswrapper[4762]: I1009 13:25:56.281788 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:25:56Z","lastTransitionTime":"2025-10-09T13:25:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:25:56 crc kubenswrapper[4762]: I1009 13:25:56.293164 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:56Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:56 crc kubenswrapper[4762]: I1009 13:25:56.305960 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:56Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:56 crc kubenswrapper[4762]: I1009 13:25:56.334259 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"92662de9-9784-432a-92d2-a668f815e8fd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c409df5880861cef6885822a19dc9bbe481342a849c18ac11c85a60fcee0f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9dbd3d536f2ff0e46947e1516b3b9def208d490f5e62bbde5bebf37690d26ac0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a6b9ddcf6f9632e0ab1ac7f145c90d4c1e404b44f6e4fdc547fa42a4736448d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1fe15b6fa2a4089c0ef0b19180a44b570bf28aeb719e8fb5c960c16f3bc3ee5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://443d1d116c58f5d8b2c5fc9051baf914244cb0776b1f912d11fe4316a0ec0567\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a6b026d474235bbb7b31530b4628a10c35b22baf4ee49759f28a9beb8177989\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a5b3dbf7adcc609a6fbc414eb3eba302ecf6355
c195221fa03ebe90a8b4eb84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa009d3f8b7f73cb12d99c5110020d32d469a604fc5b3715343b754861cbf2cb\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T13:25:54Z\\\",\\\"message\\\":\\\"y (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1009 13:25:54.441462 6005 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1009 13:25:54.441489 6005 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1009 13:25:54.441528 6005 handler.go:208] Removed *v1.Node event handler 2\\\\nI1009 13:25:54.441590 6005 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1009 13:25:54.441617 6005 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1009 13:25:54.441694 6005 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1009 13:25:54.441722 6005 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1009 13:25:54.441760 6005 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1009 13:25:54.441766 6005 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1009 13:25:54.441812 6005 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1009 13:25:54.441615 6005 handler.go:208] Removed *v1.Node event handler 7\\\\nI1009 13:25:54.441858 6005 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1009 13:25:54.441874 6005 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1009 13:25:54.441915 6005 factory.go:656] Stopping watch factory\\\\nI1009 13:25:54.441939 6005 ovnkube.go:599] Stopped ovnkube\\\\nI1009 13:25:5\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:51Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a5b3dbf7adcc609a6fbc414eb3eba302ecf6355c195221fa03ebe90a8b4eb84\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T13:25:56Z\\\",\\\"message\\\":\\\"81 6162 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:56Z is after 2025-08-24T17:21:41Z]\\\\nI1009 13:25:56.084172 6162 services_controller.go:451] Built service openshift-ingress/router-internal-default cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-ingress/router-internal-default_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", 
ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-ingress/router-internal-default\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]serv\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f5be977653547c33b4d2d5184688120b32866045e3b18a08be1c7c406d6b498\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Di
sabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bd03e6b064630a1cd71d2e88a4e99d513b30d0f225516ce8030cba879fe2422e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd03e6b064630a1cd71d2e88a4e99d513b30d0f225516ce8030cba879fe2422e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jl67g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:56Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:56 crc kubenswrapper[4762]: I1009 13:25:56.351510 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42c46f24d4579b9ef6d5f7a351830fc24872a571e6bc26a163bb5ace1e688037\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:56Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:56 crc kubenswrapper[4762]: I1009 13:25:56.371628 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b59b56cd547bd0e2f72ad712b04651a4ae65cf3a0df7865c0b0c16478261a06a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:56Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:56 crc kubenswrapper[4762]: I1009 13:25:56.386220 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:56 crc kubenswrapper[4762]: I1009 13:25:56.386318 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:56 crc kubenswrapper[4762]: I1009 13:25:56.386337 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:56 crc kubenswrapper[4762]: I1009 13:25:56.386382 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:25:56 crc kubenswrapper[4762]: I1009 13:25:56.386407 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:25:56Z","lastTransitionTime":"2025-10-09T13:25:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:25:56 crc kubenswrapper[4762]: I1009 13:25:56.391937 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-9wtqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c847aae6-277a-45dc-86d0-9b175f7e8177\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbc232c96b60c8678588d4902c8dfbf6fc0b30f8af768295c963aad3a9f4d644\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2kljt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-9wtqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:56Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:56 crc kubenswrapper[4762]: I1009 13:25:56.411743 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7bd1ecbd-1492-4e6a-87e8-1c913e084d9d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9ff1efe69d256b491a039e5f35442c087ce3b52fc7abf98b338e24c3e020b99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb2071dd369674ca2de7de56dd1250c763b8733d72889b60eff864774dc3d81b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e44b6f2021a1a4ccd714f86443c7cc235b9d77cd455
e68f7e042281ff0917569\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f33b9070a56fe51d2f39d9d509fc8cea2fada696703209c911b75f5c8f53e96d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9179f90a1a9a3c70467429b0471320ccf51b67f27c4d28d22ebc477cedab17d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1009 13:25:38.564754 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 13:25:38.572923 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2393231961/tls.crt::/tmp/serving-cert-2393231961/tls.key\\\\\\\"\\\\nI1009 13:25:44.418137 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 13:25:44.425303 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 13:25:44.425330 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 13:25:44.425348 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 13:25:44.425353 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 13:25:44.434300 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1009 13:25:44.434319 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1009 13:25:44.434323 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 13:25:44.434340 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 13:25:44.434344 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 13:25:44.434347 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 13:25:44.434350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 13:25:44.434353 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1009 13:25:44.436492 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:28Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://281873dc615f940d39a13cb0a18a2eb34eb7de3f9773d8845183edeb89d430f5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b0cc5e4351c64f1a4f07f8ec87ea48ddab393d4ac64228e8fbf20d3259fb630\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6b0cc5e4351c64f1a4f07f8ec87ea48ddab393d4ac64228e8fbf20d3259fb630\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:25Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:56Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:56 crc kubenswrapper[4762]: I1009 13:25:56.425996 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2vkbh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"263d57f9-b10b-4ce1-adad-774600b977d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://732b66ff58c48b0703e0fd4585768652035af6797f66b586fc6f17ef3937d9d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gmcr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2vkbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:56Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:56 crc kubenswrapper[4762]: I1009 13:25:56.440953 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:56Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:56 crc kubenswrapper[4762]: I1009 13:25:56.458914 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-n6lnd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf8b8ba7-96cd-4cdd-9925-94dd98242050\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ff1d6f7e6b424ebf8005fa3d140897100a8e7ed0095e7af05531cf5ad9f69b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\
\\":[{\\\"containerID\\\":\\\"cri-o://38f8c3c9395cbaf4a6426349a070b2d3b4ba4f83af8f5272a33d617f456c2e38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://38f8c3c9395cbaf4a6426349a070b2d3b4ba4f83af8f5272a33d617f456c2e38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a83c8fa2a9b44c19879eaa27ee0aceb5aa4f0c2d70347e497a62fc1ca236807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a83c8fa2a9b44c19879eaa27ee0aceb5aa4f0c2d70347e497a62fc1ca236807\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ece4883665cbaf9a3045d5ddde5584cc4cd6c3acb5a8bb3acbf7eaee644796e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ece4883665cbaf9a3045d5ddde5584cc4cd6c3acb5a8bb3acbf7eaee644796e9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:47Z
\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1aae59e2ff2b1d48f9d2b5d13c30ccc155f0ba8e2657ec99020d6c6a8977495e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1aae59e2ff2b1d48f9d2b5d13c30ccc155f0ba8e2657ec99020d6c6a8977495e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f1ff0c4b2b07617d5ea81b1d88ea74124e4739d667f1ad72eb36dad3d48e7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96f1ff0c4b2b07617d5ea81b1d88ea74124e4739d667f1ad72eb36dad3d48e7f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa9e24a4bd37675e1c6b4f6ba08d60b722add0bbcdf9eb51ace2e3fb35143d0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\"
,\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa9e24a4bd37675e1c6b4f6ba08d60b722add0bbcdf9eb51ace2e3fb35143d0d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-n6lnd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:56Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:56 crc kubenswrapper[4762]: I1009 13:25:56.468675 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bj499" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b164d4fd-19df-4902-971f-5efe403e61e0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ab43a3e93731e2de1e0260a1933cc78188e7b561cb450d645c7b4fc00b691ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqf4b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\
":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:51Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bj499\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:56Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:56 crc kubenswrapper[4762]: I1009 13:25:56.488287 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:56 crc kubenswrapper[4762]: I1009 13:25:56.488330 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:56 crc kubenswrapper[4762]: I1009 13:25:56.488342 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:56 crc kubenswrapper[4762]: I1009 13:25:56.488360 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:25:56 crc kubenswrapper[4762]: I1009 13:25:56.488371 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:25:56Z","lastTransitionTime":"2025-10-09T13:25:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:25:56 crc kubenswrapper[4762]: I1009 13:25:56.590910 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:56 crc kubenswrapper[4762]: I1009 13:25:56.590966 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:56 crc kubenswrapper[4762]: I1009 13:25:56.590978 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:56 crc kubenswrapper[4762]: I1009 13:25:56.590995 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:25:56 crc kubenswrapper[4762]: I1009 13:25:56.591006 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:25:56Z","lastTransitionTime":"2025-10-09T13:25:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:25:56 crc kubenswrapper[4762]: I1009 13:25:56.693690 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:56 crc kubenswrapper[4762]: I1009 13:25:56.693740 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:56 crc kubenswrapper[4762]: I1009 13:25:56.693767 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:56 crc kubenswrapper[4762]: I1009 13:25:56.693784 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:25:56 crc kubenswrapper[4762]: I1009 13:25:56.693796 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:25:56Z","lastTransitionTime":"2025-10-09T13:25:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:25:56 crc kubenswrapper[4762]: I1009 13:25:56.797015 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:56 crc kubenswrapper[4762]: I1009 13:25:56.797088 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:56 crc kubenswrapper[4762]: I1009 13:25:56.797111 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:56 crc kubenswrapper[4762]: I1009 13:25:56.797144 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:25:56 crc kubenswrapper[4762]: I1009 13:25:56.797168 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:25:56Z","lastTransitionTime":"2025-10-09T13:25:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:25:56 crc kubenswrapper[4762]: I1009 13:25:56.900047 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:56 crc kubenswrapper[4762]: I1009 13:25:56.900108 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:56 crc kubenswrapper[4762]: I1009 13:25:56.900127 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:56 crc kubenswrapper[4762]: I1009 13:25:56.900152 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:25:56 crc kubenswrapper[4762]: I1009 13:25:56.900169 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:25:56Z","lastTransitionTime":"2025-10-09T13:25:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:25:56 crc kubenswrapper[4762]: I1009 13:25:56.965102 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 13:25:56 crc kubenswrapper[4762]: I1009 13:25:56.965203 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 13:25:56 crc kubenswrapper[4762]: E1009 13:25:56.965280 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 13:25:56 crc kubenswrapper[4762]: I1009 13:25:56.965330 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 13:25:56 crc kubenswrapper[4762]: E1009 13:25:56.965427 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 13:25:56 crc kubenswrapper[4762]: E1009 13:25:56.965533 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 13:25:57 crc kubenswrapper[4762]: I1009 13:25:57.004126 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:57 crc kubenswrapper[4762]: I1009 13:25:57.004171 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:57 crc kubenswrapper[4762]: I1009 13:25:57.004184 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:57 crc kubenswrapper[4762]: I1009 13:25:57.004201 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:25:57 crc kubenswrapper[4762]: I1009 13:25:57.004212 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:25:57Z","lastTransitionTime":"2025-10-09T13:25:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:25:57 crc kubenswrapper[4762]: I1009 13:25:57.106905 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:57 crc kubenswrapper[4762]: I1009 13:25:57.106965 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:57 crc kubenswrapper[4762]: I1009 13:25:57.106982 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:57 crc kubenswrapper[4762]: I1009 13:25:57.107006 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:25:57 crc kubenswrapper[4762]: I1009 13:25:57.107024 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:25:57Z","lastTransitionTime":"2025-10-09T13:25:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:25:57 crc kubenswrapper[4762]: I1009 13:25:57.210142 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:57 crc kubenswrapper[4762]: I1009 13:25:57.210235 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:57 crc kubenswrapper[4762]: I1009 13:25:57.210254 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:57 crc kubenswrapper[4762]: I1009 13:25:57.210277 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:25:57 crc kubenswrapper[4762]: I1009 13:25:57.210294 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:25:57Z","lastTransitionTime":"2025-10-09T13:25:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:25:57 crc kubenswrapper[4762]: I1009 13:25:57.223908 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-jl67g_92662de9-9784-432a-92d2-a668f815e8fd/ovnkube-controller/1.log" Oct 09 13:25:57 crc kubenswrapper[4762]: I1009 13:25:57.314013 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:57 crc kubenswrapper[4762]: I1009 13:25:57.314102 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:57 crc kubenswrapper[4762]: I1009 13:25:57.314126 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:57 crc kubenswrapper[4762]: I1009 13:25:57.314156 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:25:57 crc kubenswrapper[4762]: I1009 13:25:57.314178 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:25:57Z","lastTransitionTime":"2025-10-09T13:25:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:25:57 crc kubenswrapper[4762]: I1009 13:25:57.417994 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:57 crc kubenswrapper[4762]: I1009 13:25:57.418061 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:57 crc kubenswrapper[4762]: I1009 13:25:57.418083 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:57 crc kubenswrapper[4762]: I1009 13:25:57.418117 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:25:57 crc kubenswrapper[4762]: I1009 13:25:57.418139 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:25:57Z","lastTransitionTime":"2025-10-09T13:25:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:25:57 crc kubenswrapper[4762]: I1009 13:25:57.521610 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:57 crc kubenswrapper[4762]: I1009 13:25:57.521734 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:57 crc kubenswrapper[4762]: I1009 13:25:57.521758 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:57 crc kubenswrapper[4762]: I1009 13:25:57.521787 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:25:57 crc kubenswrapper[4762]: I1009 13:25:57.521808 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:25:57Z","lastTransitionTime":"2025-10-09T13:25:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:25:57 crc kubenswrapper[4762]: I1009 13:25:57.601229 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fx92z"] Oct 09 13:25:57 crc kubenswrapper[4762]: I1009 13:25:57.602067 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fx92z" Oct 09 13:25:57 crc kubenswrapper[4762]: I1009 13:25:57.604990 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Oct 09 13:25:57 crc kubenswrapper[4762]: I1009 13:25:57.605046 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Oct 09 13:25:57 crc kubenswrapper[4762]: I1009 13:25:57.627006 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42c46f24d4579b9ef6d5f7a351830fc24872a571e6bc26a163bb5ace1e688037\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:57Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:57 crc kubenswrapper[4762]: I1009 13:25:57.627056 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:57 crc kubenswrapper[4762]: I1009 13:25:57.627112 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:57 crc kubenswrapper[4762]: I1009 13:25:57.627129 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:57 crc kubenswrapper[4762]: I1009 13:25:57.627154 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:25:57 crc kubenswrapper[4762]: I1009 13:25:57.627172 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:25:57Z","lastTransitionTime":"2025-10-09T13:25:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:25:57 crc kubenswrapper[4762]: I1009 13:25:57.648842 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b59b56cd547bd0e2f72ad712b04651a4ae65cf3a0df7865c0b0c16478261a06a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:57Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:57 crc kubenswrapper[4762]: I1009 13:25:57.664779 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:57Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:57 crc kubenswrapper[4762]: I1009 13:25:57.683325 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/cf0dada3-5765-4a2e-b28a-f9291c2d6428-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-fx92z\" (UID: \"cf0dada3-5765-4a2e-b28a-f9291c2d6428\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fx92z" Oct 09 13:25:57 crc kubenswrapper[4762]: I1009 13:25:57.683374 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8xpd4\" (UniqueName: \"kubernetes.io/projected/cf0dada3-5765-4a2e-b28a-f9291c2d6428-kube-api-access-8xpd4\") pod \"ovnkube-control-plane-749d76644c-fx92z\" (UID: \"cf0dada3-5765-4a2e-b28a-f9291c2d6428\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fx92z" Oct 09 13:25:57 crc kubenswrapper[4762]: I1009 13:25:57.683404 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/cf0dada3-5765-4a2e-b28a-f9291c2d6428-env-overrides\") pod \"ovnkube-control-plane-749d76644c-fx92z\" (UID: \"cf0dada3-5765-4a2e-b28a-f9291c2d6428\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fx92z" Oct 09 13:25:57 crc kubenswrapper[4762]: I1009 13:25:57.683442 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/cf0dada3-5765-4a2e-b28a-f9291c2d6428-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-fx92z\" (UID: \"cf0dada3-5765-4a2e-b28a-f9291c2d6428\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fx92z" Oct 09 13:25:57 crc kubenswrapper[4762]: I1009 13:25:57.696494 4762 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"92662de9-9784-432a-92d2-a668f815e8fd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c409df5880861cef6885822a19dc9bbe481342a849c18ac11c85a60fcee0f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9dbd3d536f2ff0e46947e1516b3b9def208d490f5e62bbde5bebf37690d26ac0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a6b9ddcf6f9632e0ab1ac7f145c90d4c1e404b44f6e4fdc547fa42a4736448d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36c
dd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1fe15b6fa2a4089c0ef0b19180a44b570bf28aeb719e8fb5c960c16f3bc3ee5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://443d1d116c58f5d8b2c5fc9051baf914244cb0776b1f912d11fe4316a0ec0567\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a6b026d474235bbb7b31530b4628a10c35b22baf4ee49759f28a9beb8177989\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-con
troller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a5b3dbf7adcc609a6fbc414eb3eba302ecf6355c195221fa03ebe90a8b4eb84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa009d3f8b7f73cb12d99c5110020d32d469a604fc5b3715343b754861cbf2cb\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T13:25:54Z\\\",\\\"message\\\":\\\"y (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1009 13:25:54.441462 6005 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1009 13:25:54.441489 6005 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1009 13:25:54.441528 6005 handler.go:208] Removed *v1.Node event handler 2\\\\nI1009 13:25:54.441590 6005 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1009 13:25:54.441617 6005 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1009 13:25:54.441694 6005 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1009 13:25:54.441722 6005 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1009 13:25:54.441760 6005 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1009 13:25:54.441766 6005 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1009 13:25:54.441812 6005 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1009 13:25:54.441615 6005 handler.go:208] Removed *v1.Node event handler 7\\\\nI1009 13:25:54.441858 6005 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1009 13:25:54.441874 6005 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1009 13:25:54.441915 6005 factory.go:656] Stopping watch factory\\\\nI1009 13:25:54.441939 6005 ovnkube.go:599] Stopped ovnkube\\\\nI1009 13:25:5\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:51Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a5b3dbf7adcc609a6fbc414eb3eba302ecf6355c195221fa03ebe90a8b4eb84\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T13:25:56Z\\\",\\\"message\\\":\\\"81 6162 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to 
start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:56Z is after 2025-08-24T17:21:41Z]\\\\nI1009 13:25:56.084172 6162 services_controller.go:451] Built service openshift-ingress/router-internal-default cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-ingress/router-internal-default_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-ingress/router-internal-default\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]serv\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f5be977653547c33b4d2d
5184688120b32866045e3b18a08be1c7c406d6b498\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bd03e6b064630a1cd71d2e88a4e99d513b30d0f225516ce8030cba879fe2422e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd03e6b064630a1cd71d2e88a4e99d513b30d0f225516ce8030cba879fe2422e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jl67g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:57Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:57 crc kubenswrapper[4762]: I1009 13:25:57.716352 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7bd1ecbd-1492-4e6a-87e8-1c913e084d9d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9ff1efe69d256b491a039e5f35442c087ce3b52fc7abf98b338e24c3e020b99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb2071dd369674ca2de7de56dd1250c763b8733d72889b60eff864774dc3d81b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e44b6f2021a1a4ccd714f86443c7cc235b9d77cd455e68f7e042281ff0917569\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f33b9070a56fe51d2f39d9d509fc8cea2fada696703209c911b75f5c8f53e96d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9179f90a1a9a3c70467429b0471320ccf51b67f27c4d28d22ebc477cedab17d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1009 13:25:38.564754 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 13:25:38.572923 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2393231961/tls.crt::/tmp/serving-cert-2393231961/tls.key\\\\\\\"\\\\nI1009 13:25:44.418137 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 13:25:44.425303 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 13:25:44.425330 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 13:25:44.425348 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 13:25:44.425353 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 13:25:44.434300 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1009 13:25:44.434319 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1009 13:25:44.434323 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 13:25:44.434340 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 13:25:44.434344 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 13:25:44.434347 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 13:25:44.434350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 13:25:44.434353 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1009 13:25:44.436492 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:28Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://281873dc615f940d39a13cb0a18a2eb34eb7de3f9773d8845183edeb89d430f5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b0cc5e4351c64f1a4f07f8ec87ea48ddab393d4ac64228e8fbf20d3259fb630\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6b0cc5e4351c64f1a4f07f8ec87ea48ddab393d4ac64228e8fbf20d3259fb630\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:25Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:57Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:57 crc kubenswrapper[4762]: I1009 13:25:57.730306 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:57 crc kubenswrapper[4762]: I1009 13:25:57.730351 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:57 crc kubenswrapper[4762]: I1009 13:25:57.730367 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:57 crc kubenswrapper[4762]: I1009 13:25:57.730388 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:25:57 crc kubenswrapper[4762]: I1009 13:25:57.730404 4762 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:25:57Z","lastTransitionTime":"2025-10-09T13:25:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:25:57 crc kubenswrapper[4762]: I1009 13:25:57.731264 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2vkbh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"263d57f9-b10b-4ce1-adad-774600b977d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://732b66ff58c48b0703e0fd4585768652035af6797f66b586fc6f17ef3937d9d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gmcr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2vkbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:57Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:57 crc kubenswrapper[4762]: I1009 13:25:57.750941 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-9wtqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c847aae6-277a-45dc-86d0-9b175f7e8177\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbc232c96b60c8678588d4902c8dfbf6fc0b30f8af768295c963aad3a9f4d644\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2kljt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-9wtqb\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:57Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:57 crc kubenswrapper[4762]: I1009 13:25:57.765580 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fx92z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf0dada3-5765-4a2e-b28a-f9291c2d6428\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xpd4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xpd4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fx92z\": Internal error occurred: 
failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:57Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:57 crc kubenswrapper[4762]: I1009 13:25:57.784326 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:57Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:57 crc kubenswrapper[4762]: I1009 13:25:57.784735 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/cf0dada3-5765-4a2e-b28a-f9291c2d6428-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-fx92z\" (UID: \"cf0dada3-5765-4a2e-b28a-f9291c2d6428\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fx92z" Oct 09 13:25:57 crc kubenswrapper[4762]: I1009 13:25:57.784798 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8xpd4\" (UniqueName: \"kubernetes.io/projected/cf0dada3-5765-4a2e-b28a-f9291c2d6428-kube-api-access-8xpd4\") pod \"ovnkube-control-plane-749d76644c-fx92z\" (UID: \"cf0dada3-5765-4a2e-b28a-f9291c2d6428\") " 
pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fx92z" Oct 09 13:25:57 crc kubenswrapper[4762]: I1009 13:25:57.784835 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/cf0dada3-5765-4a2e-b28a-f9291c2d6428-env-overrides\") pod \"ovnkube-control-plane-749d76644c-fx92z\" (UID: \"cf0dada3-5765-4a2e-b28a-f9291c2d6428\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fx92z" Oct 09 13:25:57 crc kubenswrapper[4762]: I1009 13:25:57.784891 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/cf0dada3-5765-4a2e-b28a-f9291c2d6428-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-fx92z\" (UID: \"cf0dada3-5765-4a2e-b28a-f9291c2d6428\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fx92z" Oct 09 13:25:57 crc kubenswrapper[4762]: I1009 13:25:57.785617 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/cf0dada3-5765-4a2e-b28a-f9291c2d6428-env-overrides\") pod \"ovnkube-control-plane-749d76644c-fx92z\" (UID: \"cf0dada3-5765-4a2e-b28a-f9291c2d6428\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fx92z" Oct 09 13:25:57 crc kubenswrapper[4762]: I1009 13:25:57.786018 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/cf0dada3-5765-4a2e-b28a-f9291c2d6428-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-fx92z\" (UID: \"cf0dada3-5765-4a2e-b28a-f9291c2d6428\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fx92z" Oct 09 13:25:57 crc kubenswrapper[4762]: I1009 13:25:57.789478 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/cf0dada3-5765-4a2e-b28a-f9291c2d6428-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-fx92z\" (UID: \"cf0dada3-5765-4a2e-b28a-f9291c2d6428\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fx92z" Oct 09 13:25:57 crc kubenswrapper[4762]: I1009 13:25:57.797472 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-n6lnd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf8b8ba7-96cd-4cdd-9925-94dd98242050\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ff1d6f7e6b424ebf8005fa3d140897100a8e7ed0095e7af05531cf5ad9f69b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://38f8c3c9395cbaf4a6426349a070b2d3b4ba4f83af8f5272a33d617f456c2e38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://38f8c3c9395cbaf4a6426349a070b2d3b4ba4f83af8f5272a33d617f456c2e38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a83c8fa2a9b44c19879eaa27ee0aceb5aa4f0c2d70347e497a62fc1ca236807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a83c8fa2a9b44c19879eaa27ee0aceb5aa4f0c2d70347e497a62fc1ca236807\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ece4883665cbaf9a3045d5ddde5584cc4cd6c3acb5a8bb3acbf7eaee644796e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ece4883665cbaf9a3045d5ddde5584cc4cd6c3acb5a8bb3acbf7eaee644796e9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1aae59e2ff2b1d48f9d2b5d13c30ccc155f0ba8e2657ec99020d6c6a8977495e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1aae59e2ff2b1d48f9d2b5d13c30ccc155f0ba8e2657ec99020d6c6a8977495e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f1ff0c4b2b07617d5ea81b1d88ea74124e4739d667f1ad72eb36dad3d48e7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96f1ff0c4b2b07617d5ea81b1d88ea74124e4739d667f1ad72eb36dad3d48e7f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa9e24a4bd37675e1c6b4f6ba08d60b722add0bbcdf9eb51ace2e3fb35143d0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa9e24a4bd37675e1c6b4f6ba08d60b722add0bbcdf9eb51ace2e3fb35143d0d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-n6lnd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:57Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:57 crc kubenswrapper[4762]: I1009 13:25:57.808449 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bj499" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b164d4fd-19df-4902-971f-5efe403e61e0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ab43a3e93731e2de1e0260a1933cc78188e7b561cb450d645c7b4fc00b691ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqf4b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:51Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bj499\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:57Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:57 crc kubenswrapper[4762]: I1009 13:25:57.814236 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8xpd4\" (UniqueName: \"kubernetes.io/projected/cf0dada3-5765-4a2e-b28a-f9291c2d6428-kube-api-access-8xpd4\") pod \"ovnkube-control-plane-749d76644c-fx92z\" (UID: \"cf0dada3-5765-4a2e-b28a-f9291c2d6428\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fx92z" Oct 09 13:25:57 crc kubenswrapper[4762]: I1009 13:25:57.826786 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dd0d2d4c-667f-43da-8074-b6e14823b755\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78a7e23eb6d5024d626963a06cf5790fcd6c7c17c82c823b2650c55273e427fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f807722a8b6059afed30f7f1fd32bcc168b8bf9d5eee02d74a42ab70ae5ff048\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1706f7f512083d1da015da3c7cd09c6aa4d497b83f8dfcd4ce0e8e966aa00b37\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd9c69d0be5e859f86da1745bdf82f003681f064e2580bfd454e6ba875bdcb61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:25Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:57Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:57 crc kubenswrapper[4762]: I1009 13:25:57.833978 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:57 crc kubenswrapper[4762]: I1009 13:25:57.834009 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:57 crc kubenswrapper[4762]: I1009 13:25:57.834019 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:57 crc kubenswrapper[4762]: I1009 13:25:57.834034 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:25:57 crc kubenswrapper[4762]: I1009 13:25:57.834045 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:25:57Z","lastTransitionTime":"2025-10-09T13:25:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:25:57 crc kubenswrapper[4762]: I1009 13:25:57.843827 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:57Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:57 crc kubenswrapper[4762]: I1009 13:25:57.859246 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d2fc9df0c1cc14b4b9f8caff51e87059aeffaa2daeeb271d55585f7297d003c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4194b9211ce31f24383b3cabd274bfb9afef0c56583b802cb2c934ba81b05c43\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:57Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:57 crc kubenswrapper[4762]: I1009 13:25:57.872892 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"366049a3-acf6-488c-9f93-4557528d6d14\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbaae79b08d94e58f88c25cf641c2c24edc8f8ed5d5ffbf5fd3c68b24246a964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://be56bae2e58091d7381288b22608ea1d9ff05c002d923b3dc62b87fe4d4dfdc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5v6hv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:57Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:57 crc kubenswrapper[4762]: I1009 13:25:57.927410 4762 util.go:30] "No sandbox for pod can 
be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fx92z" Oct 09 13:25:57 crc kubenswrapper[4762]: I1009 13:25:57.938765 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:57 crc kubenswrapper[4762]: I1009 13:25:57.938841 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:57 crc kubenswrapper[4762]: I1009 13:25:57.938864 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:57 crc kubenswrapper[4762]: I1009 13:25:57.938893 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:25:57 crc kubenswrapper[4762]: I1009 13:25:57.938915 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:25:57Z","lastTransitionTime":"2025-10-09T13:25:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:25:57 crc kubenswrapper[4762]: W1009 13:25:57.941346 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcf0dada3_5765_4a2e_b28a_f9291c2d6428.slice/crio-a362fc0a13324236f2cefe56e115ab784b8d41e458ab5791ce4973fa9f8f79c5 WatchSource:0}: Error finding container a362fc0a13324236f2cefe56e115ab784b8d41e458ab5791ce4973fa9f8f79c5: Status 404 returned error can't find the container with id a362fc0a13324236f2cefe56e115ab784b8d41e458ab5791ce4973fa9f8f79c5 Oct 09 13:25:58 crc kubenswrapper[4762]: I1009 13:25:58.042323 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:58 crc kubenswrapper[4762]: I1009 13:25:58.042363 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:58 crc kubenswrapper[4762]: I1009 13:25:58.042379 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:58 crc kubenswrapper[4762]: I1009 13:25:58.042399 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:25:58 crc kubenswrapper[4762]: I1009 13:25:58.042410 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:25:58Z","lastTransitionTime":"2025-10-09T13:25:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:25:58 crc kubenswrapper[4762]: I1009 13:25:58.144992 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:58 crc kubenswrapper[4762]: I1009 13:25:58.145020 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:58 crc kubenswrapper[4762]: I1009 13:25:58.145030 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:58 crc kubenswrapper[4762]: I1009 13:25:58.145047 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:25:58 crc kubenswrapper[4762]: I1009 13:25:58.145056 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:25:58Z","lastTransitionTime":"2025-10-09T13:25:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:25:58 crc kubenswrapper[4762]: I1009 13:25:58.233482 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fx92z" event={"ID":"cf0dada3-5765-4a2e-b28a-f9291c2d6428","Type":"ContainerStarted","Data":"6368fc75a63ebf7915390457c69a6a9b77e19726ba182437f5c616bea12245f7"} Oct 09 13:25:58 crc kubenswrapper[4762]: I1009 13:25:58.233560 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fx92z" event={"ID":"cf0dada3-5765-4a2e-b28a-f9291c2d6428","Type":"ContainerStarted","Data":"a362fc0a13324236f2cefe56e115ab784b8d41e458ab5791ce4973fa9f8f79c5"} Oct 09 13:25:58 crc kubenswrapper[4762]: I1009 13:25:58.247424 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:58 crc kubenswrapper[4762]: I1009 13:25:58.247453 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:58 crc kubenswrapper[4762]: I1009 13:25:58.247462 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:58 crc kubenswrapper[4762]: I1009 13:25:58.247474 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:25:58 crc kubenswrapper[4762]: I1009 13:25:58.247483 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:25:58Z","lastTransitionTime":"2025-10-09T13:25:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:25:58 crc kubenswrapper[4762]: I1009 13:25:58.350831 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:58 crc kubenswrapper[4762]: I1009 13:25:58.350898 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:58 crc kubenswrapper[4762]: I1009 13:25:58.350915 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:58 crc kubenswrapper[4762]: I1009 13:25:58.350944 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:25:58 crc kubenswrapper[4762]: I1009 13:25:58.350962 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:25:58Z","lastTransitionTime":"2025-10-09T13:25:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:25:58 crc kubenswrapper[4762]: I1009 13:25:58.392868 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 09 13:25:58 crc kubenswrapper[4762]: I1009 13:25:58.415257 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42c46f24d4579b9ef6d5f7a351830fc24872a571e6bc26a163bb5ace1e688037\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:58Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:58 crc kubenswrapper[4762]: I1009 13:25:58.427513 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b59b56cd547bd0e2f72ad712b04651a4ae65cf3a0df7865c0b0c16478261a06a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:58Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:58 crc kubenswrapper[4762]: I1009 13:25:58.438124 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:58Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:58 crc kubenswrapper[4762]: I1009 13:25:58.452985 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:58 crc kubenswrapper[4762]: I1009 13:25:58.453019 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:58 crc kubenswrapper[4762]: I1009 13:25:58.453031 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:58 crc kubenswrapper[4762]: I1009 13:25:58.453049 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:25:58 crc kubenswrapper[4762]: I1009 13:25:58.453062 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:25:58Z","lastTransitionTime":"2025-10-09T13:25:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:25:58 crc kubenswrapper[4762]: I1009 13:25:58.456748 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"92662de9-9784-432a-92d2-a668f815e8fd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c409df5880861cef6885822a19dc9bbe481342a849c18ac11c85a60fcee0f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9dbd3d536f2ff0e46947e1516b3b9def208d490f5e62bbde5bebf37690d26ac0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://1a6b9ddcf6f9632e0ab1ac7f145c90d4c1e404b44f6e4fdc547fa42a4736448d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1fe15b6fa2a4089c0ef0b19180a44b570bf28aeb719e8fb5c960c16f3bc3ee5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://443d1d116c58f5d8b2c5fc9051baf914244cb0776b1f912d11fe4316a0ec0567\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a6b026d474235bbb7b31530b4628a10c35b22baf4ee49759f28a9beb8177989\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a5b3dbf7adcc609a6fbc414eb3eba302ecf6355c195221fa03ebe90a8b4eb84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa009d3f8b7f73cb12d99c5110020d32d469a604fc5b3715343b754861cbf2cb\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T13:25:54Z\\\",\\\"message\\\":\\\"y (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1009 13:25:54.441462 6005 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1009 13:25:54.441489 6005 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1009 13:25:54.441528 6005 handler.go:208] Removed *v1.Node event handler 2\\\\nI1009 13:25:54.441590 6005 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1009 13:25:54.441617 6005 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1009 13:25:54.441694 6005 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1009 13:25:54.441722 6005 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1009 13:25:54.441760 6005 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1009 13:25:54.441766 6005 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1009 13:25:54.441812 6005 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1009 13:25:54.441615 6005 handler.go:208] Removed *v1.Node event handler 7\\\\nI1009 13:25:54.441858 6005 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1009 13:25:54.441874 6005 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1009 13:25:54.441915 6005 factory.go:656] Stopping watch factory\\\\nI1009 13:25:54.441939 6005 ovnkube.go:599] Stopped ovnkube\\\\nI1009 
13:25:5\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:51Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a5b3dbf7adcc609a6fbc414eb3eba302ecf6355c195221fa03ebe90a8b4eb84\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T13:25:56Z\\\",\\\"message\\\":\\\"81 6162 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:56Z is after 2025-08-24T17:21:41Z]\\\\nI1009 13:25:56.084172 6162 services_controller.go:451] Built service openshift-ingress/router-internal-default cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-ingress/router-internal-default_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-ingress/router-internal-default\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, 
Rules:[]serv\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f5be977653547c33b4d2d5184688120b32866045e3b18a08be1c7c406d6b498\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bd03e6b064630a1cd71d2e88a4e99d513b30d0f225516ce8030cba879fe2422e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd
47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd03e6b064630a1cd71d2e88a4e99d513b30d0f225516ce8030cba879fe2422e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jl67g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:58Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:58 crc kubenswrapper[4762]: I1009 13:25:58.469350 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7bd1ecbd-1492-4e6a-87e8-1c913e084d9d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9ff1efe69d256b491a039e5f35442c087ce3b52fc7abf98b338e24c3e020b99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb2071dd369674ca2de7de56dd1250c763b8733d72889b60eff864774dc3d81b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift
-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e44b6f2021a1a4ccd714f86443c7cc235b9d77cd455e68f7e042281ff0917569\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f33b9070a56fe51d2f39d9d509fc8cea2fada696703209c911b75f5c8f53e96d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9179f90a1a9a3c70467429b0471320ccf51b67f27c4d28d22ebc477cedab17d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1009 13:25:38.564754 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 13:25:38.572923 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2393231961/tls.crt::/tmp/serving-cert-2393231961/tls.key\\\\\\\"\\\\nI1009 13:25:44.418137 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 13:25:44.425303 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 13:25:44.425330 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 13:25:44.425348 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 13:25:44.425353 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 13:25:44.434300 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1009 13:25:44.434319 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1009 13:25:44.434323 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' 
detected.\\\\nW1009 13:25:44.434340 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 13:25:44.434344 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 13:25:44.434347 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 13:25:44.434350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 13:25:44.434353 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1009 13:25:44.436492 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:28Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://281873dc615f940d39a13cb0a18a2eb34eb7de3f9773d8845183edeb89d430f5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b0cc5e4351c64f1a4f07f8ec87ea48ddab393d4ac64228e8fbf20d3259fb630\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6b0cc5e4351c64f1a4f07f8ec87ea48ddab393d4ac64228e8fbf20d3259fb630\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:25Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:58Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:58 crc kubenswrapper[4762]: I1009 13:25:58.479224 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2vkbh" err="failed 
to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"263d57f9-b10b-4ce1-adad-774600b977d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://732b66ff58c48b0703e0fd4585768652035af6797f66b586fc6f17ef3937d9d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gmcr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2vkbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:58Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:58 crc kubenswrapper[4762]: I1009 13:25:58.491575 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-9wtqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c847aae6-277a-45dc-86d0-9b175f7e8177\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbc232c96b60c8678588d4902c8dfbf6fc0b30f8af768295c963aad3a9f4d644\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2kljt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-9wtqb\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:58Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:58 crc kubenswrapper[4762]: I1009 13:25:58.502733 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fx92z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf0dada3-5765-4a2e-b28a-f9291c2d6428\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xpd4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xpd4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fx92z\": Internal error occurred: 
failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:58Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:58 crc kubenswrapper[4762]: I1009 13:25:58.514324 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:58Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:58 crc kubenswrapper[4762]: I1009 13:25:58.526465 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-n6lnd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf8b8ba7-96cd-4cdd-9925-94dd98242050\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ff1d6f7e6b424ebf8005fa3d140897100a8e7ed0095e7af05531cf5ad9f69b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://38f8c3c9395cbaf4a6426349a070b2d3b4ba4f83af8f5272a33d617f456c2e38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://38f8c3c9395cbaf4a6426349a070b2d3b4ba4f83af8f5272a33d617f456c2e38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a83c8fa2a9b44c19879eaa27ee0aceb5aa4f0c2d70347e497a62fc1ca236807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a83c8fa2a9b44c19879eaa27ee0aceb5aa4f0c2d70347e497a62fc1ca236807\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ece4883665cbaf9a3045d5ddde5584cc4cd6c3acb5a8bb3acbf7eaee644796e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ece4883665cbaf9a3045d5ddde5584cc4cd6c3acb5a8bb3acbf7eaee644796e9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1aae59e2ff2b1d48f9d2b5d13c30ccc155f0ba8e2657ec99020d6c6a8977495e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1aae59e2ff2b1d48f9d2b5d13c30ccc155f0ba8e2657ec99020d6c6a8977495e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f1ff0c4b2b07617d5ea81b1d88ea74124e4739d667f1ad72eb36dad3d48e7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96f1ff0c4b2b07617d5ea81b1d88ea74124e4739d667f1ad72eb36dad3d48e7f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa9e24a4bd37675e1c6b4f6ba08d60b722add0bbcdf9eb51ace2e3fb35143d0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa9e24a4bd37675e1c6b4f6ba08d60b722add0bbcdf9eb51ace2e3fb35143d0d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-n6lnd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:58Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:58 crc kubenswrapper[4762]: I1009 13:25:58.535789 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bj499" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b164d4fd-19df-4902-971f-5efe403e61e0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ab43a3e93731e2de1e0260a1933cc78188e7b561cb450d645c7b4fc00b691ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqf4b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:51Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bj499\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:58Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:58 crc kubenswrapper[4762]: I1009 13:25:58.546169 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dd0d2d4c-667f-43da-8074-b6e14823b755\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78a7e23eb6d5024d626963a06cf5790fcd6c7c17c82c823b2650c55273e427fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f807722a8b6059afed30f7f1fd32bcc168b8bf9d5eee02d74a42ab70ae5ff048\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1706f7f512083d1da015da3c7cd09c6aa4d497b83f8dfcd4ce0e8e966aa00b37\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd9c69d0be5e859f86da1745bdf82f003681f064e2580bfd454e6ba875bdcb61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:25Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:58Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:58 crc kubenswrapper[4762]: I1009 13:25:58.554691 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:58 crc kubenswrapper[4762]: I1009 13:25:58.554738 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:58 crc kubenswrapper[4762]: I1009 13:25:58.554747 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:58 crc kubenswrapper[4762]: I1009 13:25:58.554764 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:25:58 crc kubenswrapper[4762]: I1009 13:25:58.554773 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:25:58Z","lastTransitionTime":"2025-10-09T13:25:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:25:58 crc kubenswrapper[4762]: I1009 13:25:58.557059 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:58Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:58 crc kubenswrapper[4762]: I1009 13:25:58.568529 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d2fc9df0c1cc14b4b9f8caff51e87059aeffaa2daeeb271d55585f7297d003c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4194b9211ce31f24383b3cabd274bfb9afef0c56583b802cb2c934ba81b05c43\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:58Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:58 crc kubenswrapper[4762]: I1009 13:25:58.578140 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"366049a3-acf6-488c-9f93-4557528d6d14\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbaae79b08d94e58f88c25cf641c2c24edc8f8ed5d5ffbf5fd3c68b24246a964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://be56bae2e58091d7381288b22608ea1d9ff05c002d923b3dc62b87fe4d4dfdc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5v6hv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:58Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:58 crc kubenswrapper[4762]: I1009 13:25:58.657606 4762 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:58 crc kubenswrapper[4762]: I1009 13:25:58.657697 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:58 crc kubenswrapper[4762]: I1009 13:25:58.657716 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:58 crc kubenswrapper[4762]: I1009 13:25:58.657749 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:25:58 crc kubenswrapper[4762]: I1009 13:25:58.657767 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:25:58Z","lastTransitionTime":"2025-10-09T13:25:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:25:58 crc kubenswrapper[4762]: I1009 13:25:58.705892 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-k4bwn"] Oct 09 13:25:58 crc kubenswrapper[4762]: I1009 13:25:58.706551 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k4bwn" Oct 09 13:25:58 crc kubenswrapper[4762]: E1009 13:25:58.706680 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k4bwn" podUID="f9a76399-c2ae-487b-a52c-f0e271fb1d20" Oct 09 13:25:58 crc kubenswrapper[4762]: I1009 13:25:58.724812 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:58Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:58 crc kubenswrapper[4762]: I1009 13:25:58.743072 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-n6lnd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf8b8ba7-96cd-4cdd-9925-94dd98242050\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ff1d6f7e6b424ebf8005fa3d140897100a8e7ed0095e7af05531cf5ad9f69b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://38f8c3c9395cbaf4a6426349a070b2d3b4ba4f83af8f5272a33d617f456c2e38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"s
tarted\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://38f8c3c9395cbaf4a6426349a070b2d3b4ba4f83af8f5272a33d617f456c2e38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a83c8fa2a9b44c19879eaa27ee0aceb5aa4f0c2d70347e497a62fc1ca236807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a83c8fa2a9b44c19879eaa27ee0aceb5aa4f0c2d70347e497a62fc1ca236807\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ece4883665cbaf9a3045d5ddde5584cc4cd6c3acb5a8bb3acbf7eaee644796e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ece4883665cbaf9a3045d5ddde5584cc4cd6c3acb5a8bb3acbf7eaee644796e9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"
}]},{\\\"containerID\\\":\\\"cri-o://1aae59e2ff2b1d48f9d2b5d13c30ccc155f0ba8e2657ec99020d6c6a8977495e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1aae59e2ff2b1d48f9d2b5d13c30ccc155f0ba8e2657ec99020d6c6a8977495e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f1ff0c4b2b07617d5ea81b1d88ea74124e4739d667f1ad72eb36dad3d48e7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96f1ff0c4b2b07617d5ea81b1d88ea74124e4739d667f1ad72eb36dad3d48e7f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa9e24a4bd37675e1c6b4f6ba08d60b722add0bbcdf9eb51ace2e3fb35143d0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa9e24a4bd37675e1c6b4f6ba08d60b722add0bbcdf9eb51ace2e3fb35143d0d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":
\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-n6lnd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:58Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:58 crc kubenswrapper[4762]: I1009 13:25:58.756340 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bj499" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b164d4fd-19df-4902-971f-5efe403e61e0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ab43a3e93731e2de1e0260a1933cc78188e7b561cb450d645c7b4fc00b691ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqf4b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:51Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bj499\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:58Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:58 crc kubenswrapper[4762]: I1009 
13:25:58.760357    4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 09 13:25:58 crc kubenswrapper[4762]: I1009 13:25:58.760411    4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 09 13:25:58 crc kubenswrapper[4762]: I1009 13:25:58.760429    4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 09 13:25:58 crc kubenswrapper[4762]: I1009 13:25:58.760451    4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 09 13:25:58 crc kubenswrapper[4762]: I1009 13:25:58.760467    4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:25:58Z","lastTransitionTime":"2025-10-09T13:25:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 09 13:25:58 crc kubenswrapper[4762]: I1009 13:25:58.775131    4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dd0d2d4c-667f-43da-8074-b6e14823b755\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78a7e23eb6d5024d626963a06cf5790fcd6c7c17c82c823b2650c55273e427fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f807722a8b6059afed30f7f1fd32bcc168b8bf9d5eee02d74a42ab70ae5ff048\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"s
tarted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1706f7f512083d1da015da3c7cd09c6aa4d497b83f8dfcd4ce0e8e966aa00b37\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd9c69d0be5e859f86da1745bdf82f003681f064e2580bfd454e6ba875bdcb61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:25Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:58Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:58 crc kubenswrapper[4762]: I1009 13:25:58.791404 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:58Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:58 crc kubenswrapper[4762]: I1009 13:25:58.794672 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2vrjl\" (UniqueName: \"kubernetes.io/projected/f9a76399-c2ae-487b-a52c-f0e271fb1d20-kube-api-access-2vrjl\") pod \"network-metrics-daemon-k4bwn\" (UID: \"f9a76399-c2ae-487b-a52c-f0e271fb1d20\") " pod="openshift-multus/network-metrics-daemon-k4bwn" Oct 09 13:25:58 crc kubenswrapper[4762]: I1009 13:25:58.794723 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f9a76399-c2ae-487b-a52c-f0e271fb1d20-metrics-certs\") pod \"network-metrics-daemon-k4bwn\" (UID: \"f9a76399-c2ae-487b-a52c-f0e271fb1d20\") " pod="openshift-multus/network-metrics-daemon-k4bwn" Oct 09 13:25:58 crc kubenswrapper[4762]: I1009 13:25:58.811119 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d2fc9df0c1cc14b4b9f8caff51e87059aeffaa2daeeb271d55585f7297d003c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4194b9211ce31f24383b3cabd274bfb9afef0c56583b802cb2c934ba81b05c43\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:58Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:58 crc kubenswrapper[4762]: I1009 13:25:58.823662 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"366049a3-acf6-488c-9f93-4557528d6d14\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbaae79b08d94e58f88c25cf641c2c24edc8f8ed5d5ffbf5fd3c68b24246a964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://be56bae2e58091d7381288b22608ea1d9ff05c002d923b3dc62b87fe4d4dfdc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5v6hv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:58Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:58 crc kubenswrapper[4762]: I1009 13:25:58.835911 4762 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/network-metrics-daemon-k4bwn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9a76399-c2ae-487b-a52c-f0e271fb1d20\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vrjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vrjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:58Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-k4bwn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:58Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:58 crc kubenswrapper[4762]: I1009 13:25:58.854553 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42c46f24d4579b9ef6d5f7a351830fc24872a571e6bc26a163bb5ace1e688037\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:58Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:58 crc kubenswrapper[4762]: I1009 13:25:58.863833 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:58 crc kubenswrapper[4762]: I1009 13:25:58.863871 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:58 crc kubenswrapper[4762]: I1009 13:25:58.863880 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:58 crc kubenswrapper[4762]: I1009 13:25:58.863895 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:25:58 crc kubenswrapper[4762]: I1009 13:25:58.863906 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:25:58Z","lastTransitionTime":"2025-10-09T13:25:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Oct 09 13:25:58 crc kubenswrapper[4762]: I1009 13:25:58.866244    4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b59b56cd547bd0e2f72ad712b04651a4ae65cf3a0df7865c0b0c16478261a06a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:58Z is after 2025-08-24T17:21:41Z"
Oct 09 13:25:58 crc kubenswrapper[4762]: I1009 13:25:58.881571    4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:58Z is after 2025-08-24T17:21:41Z"
Oct 09 13:25:58 crc kubenswrapper[4762]: I1009 13:25:58.895555    4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2vrjl\" (UniqueName: \"kubernetes.io/projected/f9a76399-c2ae-487b-a52c-f0e271fb1d20-kube-api-access-2vrjl\") pod \"network-metrics-daemon-k4bwn\" (UID: \"f9a76399-c2ae-487b-a52c-f0e271fb1d20\") " pod="openshift-multus/network-metrics-daemon-k4bwn"
Oct 09 13:25:58 crc kubenswrapper[4762]: I1009 13:25:58.895599    4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f9a76399-c2ae-487b-a52c-f0e271fb1d20-metrics-certs\") pod \"network-metrics-daemon-k4bwn\" (UID: \"f9a76399-c2ae-487b-a52c-f0e271fb1d20\") " pod="openshift-multus/network-metrics-daemon-k4bwn"
Oct 09 13:25:58 crc kubenswrapper[4762]: E1009 13:25:58.895721    4762 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Oct 09 13:25:58 crc kubenswrapper[4762]: E1009 13:25:58.895777    4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f9a76399-c2ae-487b-a52c-f0e271fb1d20-metrics-certs podName:f9a76399-c2ae-487b-a52c-f0e271fb1d20 nodeName:}" failed. No retries permitted until 2025-10-09 13:25:59.395761419 +0000 UTC m=+35.169552458 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/f9a76399-c2ae-487b-a52c-f0e271fb1d20-metrics-certs") pod "network-metrics-daemon-k4bwn" (UID: "f9a76399-c2ae-487b-a52c-f0e271fb1d20") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 09 13:25:58 crc kubenswrapper[4762]: I1009 13:25:58.905237 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"92662de9-9784-432a-92d2-a668f815e8fd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c409df5880861cef6885822a19dc9bbe481342a849c18ac11c85a60fcee0f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9dbd3d536f2ff0e46947e1516b3b9def208d490f5e62bbde5bebf37690d26ac0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnl
y\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a6b9ddcf6f9632e0ab1ac7f145c90d4c1e404b44f6e4fdc547fa42a4736448d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1fe15b6fa2a4089c0ef0b19180a44b570bf28aeb719e8fb5c960c16f3bc3ee5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://443d1d116c58f5d8b2c5fc9051baf914244cb0776b1f912d11fe4316a0ec0567\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"contai
nerID\\\":\\\"cri-o://4a6b026d474235bbb7b31530b4628a10c35b22baf4ee49759f28a9beb8177989\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a5b3dbf7adcc609a6fbc414eb3eba302ecf6355c195221fa03ebe90a8b4eb84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa009d3f8b7f73cb12d99c5110020d32d469a604fc5b3715343b754861cbf2cb\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T13:25:54Z\\\",\\\"message\\\":\\\"y (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1009 13:25:54.441462 6005 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1009 13:25:54.441489 6005 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1009 13:25:54.441528 6005 handler.go:208] Removed *v1.Node event handler 2\\\\nI1009 13:25:54.441590 6005 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1009 13:25:54.441617 6005 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1009 13:25:54.441694 6005 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1009 13:25:54.441722 6005 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1009 13:25:54.441760 6005 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1009 13:25:54.441766 6005 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1009 13:25:54.441812 6005 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1009 13:25:54.441615 6005 handler.go:208] Removed *v1.Node event handler 7\\\\nI1009 13:25:54.441858 6005 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1009 13:25:54.441874 6005 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1009 13:25:54.441915 6005 factory.go:656] Stopping watch factory\\\\nI1009 13:25:54.441939 6005 ovnkube.go:599] Stopped ovnkube\\\\nI1009 
13:25:5\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:51Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a5b3dbf7adcc609a6fbc414eb3eba302ecf6355c195221fa03ebe90a8b4eb84\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T13:25:56Z\\\",\\\"message\\\":\\\"81 6162 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:56Z is after 2025-08-24T17:21:41Z]\\\\nI1009 13:25:56.084172 6162 services_controller.go:451] Built service openshift-ingress/router-internal-default cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-ingress/router-internal-default_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-ingress/router-internal-default\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, 
Rules:[]serv\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f5be977653547c33b4d2d5184688120b32866045e3b18a08be1c7c406d6b498\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bd03e6b064630a1cd71d2e88a4e99d513b30d0f225516ce8030cba879fe2422e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd
47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd03e6b064630a1cd71d2e88a4e99d513b30d0f225516ce8030cba879fe2422e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jl67g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:58Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:58 crc kubenswrapper[4762]: I1009 13:25:58.921715 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7bd1ecbd-1492-4e6a-87e8-1c913e084d9d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9ff1efe69d256b491a039e5f35442c087ce3b52fc7abf98b338e24c3e020b99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb2071dd369674ca2de7de56dd1250c763b8733d72889b60eff864774dc3d81b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift
-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e44b6f2021a1a4ccd714f86443c7cc235b9d77cd455e68f7e042281ff0917569\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f33b9070a56fe51d2f39d9d509fc8cea2fada696703209c911b75f5c8f53e96d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9179f90a1a9a3c70467429b0471320ccf51b67f27c4d28d22ebc477cedab17d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1009 13:25:38.564754 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 13:25:38.572923 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2393231961/tls.crt::/tmp/serving-cert-2393231961/tls.key\\\\\\\"\\\\nI1009 13:25:44.418137 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 13:25:44.425303 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 13:25:44.425330 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 13:25:44.425348 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 13:25:44.425353 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 13:25:44.434300 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1009 13:25:44.434319 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1009 13:25:44.434323 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' 
detected.\\\\nW1009 13:25:44.434340 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 13:25:44.434344 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 13:25:44.434347 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 13:25:44.434350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 13:25:44.434353 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1009 13:25:44.436492 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:28Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://281873dc615f940d39a13cb0a18a2eb34eb7de3f9773d8845183edeb89d430f5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b0cc5e4351c64f1a4f07f8ec87ea48ddab393d4ac64228e8fbf20d3259fb630\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6b0cc5e4351c64f1a4f07f8ec87ea48ddab393d4ac64228e8fbf20d3259fb630\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:25Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:58Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:58 crc kubenswrapper[4762]: I1009 13:25:58.925609 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2vrjl\" (UniqueName: 
\"kubernetes.io/projected/f9a76399-c2ae-487b-a52c-f0e271fb1d20-kube-api-access-2vrjl\") pod \"network-metrics-daemon-k4bwn\" (UID: \"f9a76399-c2ae-487b-a52c-f0e271fb1d20\") " pod="openshift-multus/network-metrics-daemon-k4bwn" Oct 09 13:25:58 crc kubenswrapper[4762]: I1009 13:25:58.938211 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2vkbh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"263d57f9-b10b-4ce1-adad-774600b977d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://732b66ff58c48b0703e0fd4585768652035af6797f66b586fc6f17ef3937d9d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gmcr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2vkbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:58Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:58 crc kubenswrapper[4762]: I1009 13:25:58.953990 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-9wtqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c847aae6-277a-45dc-86d0-9b175f7e8177\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbc232c96b60c8678588d4902c8dfbf6fc0b30f8af768295c963aad3a9f4d644\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2kljt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-9wtqb\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:58Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:58 crc kubenswrapper[4762]: I1009 13:25:58.964714 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 13:25:58 crc kubenswrapper[4762]: I1009 13:25:58.964766 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 13:25:58 crc kubenswrapper[4762]: E1009 13:25:58.964815 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 13:25:58 crc kubenswrapper[4762]: I1009 13:25:58.964821 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 13:25:58 crc kubenswrapper[4762]: E1009 13:25:58.965065 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 13:25:58 crc kubenswrapper[4762]: E1009 13:25:58.965202 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 13:25:58 crc kubenswrapper[4762]: I1009 13:25:58.966310 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:58 crc kubenswrapper[4762]: I1009 13:25:58.966347 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:58 crc kubenswrapper[4762]: I1009 13:25:58.966362 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:58 crc kubenswrapper[4762]: I1009 13:25:58.966383 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:25:58 crc kubenswrapper[4762]: I1009 13:25:58.966397 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:25:58Z","lastTransitionTime":"2025-10-09T13:25:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:25:58 crc kubenswrapper[4762]: I1009 13:25:58.970276 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fx92z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf0dada3-5765-4a2e-b28a-f9291c2d6428\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xpd4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xpd4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fx92z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: 
current time 2025-10-09T13:25:58Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:59 crc kubenswrapper[4762]: I1009 13:25:59.068605 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:59 crc kubenswrapper[4762]: I1009 13:25:59.068664 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:59 crc kubenswrapper[4762]: I1009 13:25:59.068673 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:59 crc kubenswrapper[4762]: I1009 13:25:59.068686 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:25:59 crc kubenswrapper[4762]: I1009 13:25:59.068695 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:25:59Z","lastTransitionTime":"2025-10-09T13:25:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:25:59 crc kubenswrapper[4762]: I1009 13:25:59.172117 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:59 crc kubenswrapper[4762]: I1009 13:25:59.172257 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:59 crc kubenswrapper[4762]: I1009 13:25:59.172294 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:59 crc kubenswrapper[4762]: I1009 13:25:59.172322 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:25:59 crc kubenswrapper[4762]: I1009 13:25:59.172346 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:25:59Z","lastTransitionTime":"2025-10-09T13:25:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:25:59 crc kubenswrapper[4762]: I1009 13:25:59.240200 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fx92z" event={"ID":"cf0dada3-5765-4a2e-b28a-f9291c2d6428","Type":"ContainerStarted","Data":"aeff516f9d81e48c8bd350da51ebffc8f6f031f06100a5264cb7aca04674f79f"} Oct 09 13:25:59 crc kubenswrapper[4762]: I1009 13:25:59.258684 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:59Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:59 crc kubenswrapper[4762]: I1009 13:25:59.274970 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-n6lnd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf8b8ba7-96cd-4cdd-9925-94dd98242050\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ff1d6f7e6b424ebf8005fa3d140897100a8e7ed0095e7af05531cf5ad9f69b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://38f8c3c9395cbaf4a6426349a070b2d3b4ba4f83af8f5272a33d617f456c2e38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"s
tarted\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://38f8c3c9395cbaf4a6426349a070b2d3b4ba4f83af8f5272a33d617f456c2e38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a83c8fa2a9b44c19879eaa27ee0aceb5aa4f0c2d70347e497a62fc1ca236807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a83c8fa2a9b44c19879eaa27ee0aceb5aa4f0c2d70347e497a62fc1ca236807\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ece4883665cbaf9a3045d5ddde5584cc4cd6c3acb5a8bb3acbf7eaee644796e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ece4883665cbaf9a3045d5ddde5584cc4cd6c3acb5a8bb3acbf7eaee644796e9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"
}]},{\\\"containerID\\\":\\\"cri-o://1aae59e2ff2b1d48f9d2b5d13c30ccc155f0ba8e2657ec99020d6c6a8977495e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1aae59e2ff2b1d48f9d2b5d13c30ccc155f0ba8e2657ec99020d6c6a8977495e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f1ff0c4b2b07617d5ea81b1d88ea74124e4739d667f1ad72eb36dad3d48e7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96f1ff0c4b2b07617d5ea81b1d88ea74124e4739d667f1ad72eb36dad3d48e7f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa9e24a4bd37675e1c6b4f6ba08d60b722add0bbcdf9eb51ace2e3fb35143d0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa9e24a4bd37675e1c6b4f6ba08d60b722add0bbcdf9eb51ace2e3fb35143d0d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":
\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-n6lnd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:59Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:59 crc kubenswrapper[4762]: I1009 13:25:59.275949 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:59 crc kubenswrapper[4762]: I1009 13:25:59.276024 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:59 crc kubenswrapper[4762]: I1009 13:25:59.276039 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:59 crc kubenswrapper[4762]: I1009 13:25:59.276061 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:25:59 crc kubenswrapper[4762]: I1009 13:25:59.276073 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:25:59Z","lastTransitionTime":"2025-10-09T13:25:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:25:59 crc kubenswrapper[4762]: I1009 13:25:59.291494 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bj499" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b164d4fd-19df-4902-971f-5efe403e61e0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ab43a3e93731e2de1e0260a1933cc78188e7b561cb450d645c7b4fc00b691ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqf4b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:51Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bj499\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:59Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:59 crc kubenswrapper[4762]: I1009 13:25:59.304603 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"366049a3-acf6-488c-9f93-4557528d6d14\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbaae79b08d94e58f88c25cf641c2c24edc8f8ed5d5ffbf5fd3c68b24246a964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://be56bae2e58091d7381288b22608ea1d9ff05c002d923b3dc62b87fe4d4dfdc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5v6hv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:59Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:59 crc kubenswrapper[4762]: I1009 13:25:59.321505 4762 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/network-metrics-daemon-k4bwn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9a76399-c2ae-487b-a52c-f0e271fb1d20\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vrjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vrjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:58Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-k4bwn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:59Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:59 crc kubenswrapper[4762]: I1009 13:25:59.338209 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dd0d2d4c-667f-43da-8074-b6e14823b755\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78a7e23eb6d5024d626963a06cf5790fcd6c7c17c82c823b2650c55273e427fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f807722a8b6059afed30f7f1fd32bcc168b8bf9d5eee02d74a42ab70ae5ff048\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1706f7f512083d1da015da3c7cd09c6aa4d497b83f8dfcd4ce0e8e966aa00b37\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd9c69d0be5e859f86da1745bdf82f003681f064e2580bfd454e6ba875bdcb61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:25Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:59Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:59 crc kubenswrapper[4762]: I1009 13:25:59.353503 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:59Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:59 crc kubenswrapper[4762]: I1009 13:25:59.370263 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d2fc9df0c1cc14b4b9f8caff51e87059aeffaa2daeeb271d55585f7297d003c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4194b9211ce31f24383b3cabd274bfb9afef0c56583b802cb2c934ba81b05c43\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:59Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:59 crc kubenswrapper[4762]: I1009 13:25:59.378326 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:59 crc kubenswrapper[4762]: I1009 13:25:59.378390 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:59 crc kubenswrapper[4762]: I1009 13:25:59.378404 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:59 crc kubenswrapper[4762]: I1009 13:25:59.378422 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:25:59 crc kubenswrapper[4762]: I1009 13:25:59.378431 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:25:59Z","lastTransitionTime":"2025-10-09T13:25:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:25:59 crc kubenswrapper[4762]: I1009 13:25:59.395774 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"92662de9-9784-432a-92d2-a668f815e8fd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c409df5880861cef6885822a19dc9bbe481342a849c18ac11c85a60fcee0f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9dbd3d536f2ff0e46947e1516b3b9def208d490f5e62bbde5bebf37690d26ac0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://1a6b9ddcf6f9632e0ab1ac7f145c90d4c1e404b44f6e4fdc547fa42a4736448d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1fe15b6fa2a4089c0ef0b19180a44b570bf28aeb719e8fb5c960c16f3bc3ee5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://443d1d116c58f5d8b2c5fc9051baf914244cb0776b1f912d11fe4316a0ec0567\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a6b026d474235bbb7b31530b4628a10c35b22baf4ee49759f28a9beb8177989\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a5b3dbf7adcc609a6fbc414eb3eba302ecf6355c195221fa03ebe90a8b4eb84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa009d3f8b7f73cb12d99c5110020d32d469a604fc5b3715343b754861cbf2cb\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T13:25:54Z\\\",\\\"message\\\":\\\"y (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1009 13:25:54.441462 6005 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1009 13:25:54.441489 6005 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1009 13:25:54.441528 6005 handler.go:208] Removed *v1.Node event handler 2\\\\nI1009 13:25:54.441590 6005 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1009 13:25:54.441617 6005 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1009 13:25:54.441694 6005 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1009 13:25:54.441722 6005 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1009 13:25:54.441760 6005 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1009 13:25:54.441766 6005 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1009 13:25:54.441812 6005 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1009 13:25:54.441615 6005 handler.go:208] Removed *v1.Node event handler 7\\\\nI1009 13:25:54.441858 6005 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1009 13:25:54.441874 6005 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1009 13:25:54.441915 6005 factory.go:656] Stopping watch factory\\\\nI1009 13:25:54.441939 6005 ovnkube.go:599] Stopped ovnkube\\\\nI1009 
13:25:5\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:51Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a5b3dbf7adcc609a6fbc414eb3eba302ecf6355c195221fa03ebe90a8b4eb84\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T13:25:56Z\\\",\\\"message\\\":\\\"81 6162 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:56Z is after 2025-08-24T17:21:41Z]\\\\nI1009 13:25:56.084172 6162 services_controller.go:451] Built service openshift-ingress/router-internal-default cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-ingress/router-internal-default_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-ingress/router-internal-default\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, 
Rules:[]serv\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f5be977653547c33b4d2d5184688120b32866045e3b18a08be1c7c406d6b498\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bd03e6b064630a1cd71d2e88a4e99d513b30d0f225516ce8030cba879fe2422e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd
47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd03e6b064630a1cd71d2e88a4e99d513b30d0f225516ce8030cba879fe2422e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jl67g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:59Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:59 crc kubenswrapper[4762]: I1009 13:25:59.402346 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f9a76399-c2ae-487b-a52c-f0e271fb1d20-metrics-certs\") pod \"network-metrics-daemon-k4bwn\" (UID: \"f9a76399-c2ae-487b-a52c-f0e271fb1d20\") " pod="openshift-multus/network-metrics-daemon-k4bwn" Oct 09 13:25:59 crc kubenswrapper[4762]: E1009 13:25:59.402687 4762 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 09 13:25:59 crc kubenswrapper[4762]: E1009 13:25:59.402809 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f9a76399-c2ae-487b-a52c-f0e271fb1d20-metrics-certs podName:f9a76399-c2ae-487b-a52c-f0e271fb1d20 nodeName:}" failed. No retries permitted until 2025-10-09 13:26:00.40278215 +0000 UTC m=+36.176573399 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/f9a76399-c2ae-487b-a52c-f0e271fb1d20-metrics-certs") pod "network-metrics-daemon-k4bwn" (UID: "f9a76399-c2ae-487b-a52c-f0e271fb1d20") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 09 13:25:59 crc kubenswrapper[4762]: I1009 13:25:59.413918 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42c46f24d4579b9ef6d5f7a351830fc24872a571e6bc26a163bb5ace1e688037\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:59Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:59 crc kubenswrapper[4762]: I1009 13:25:59.427768 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b59b56cd547bd0e2f72ad712b04651a4ae65cf3a0df7865c0b0c16478261a06a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:59Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:59 crc kubenswrapper[4762]: I1009 13:25:59.448279 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:59Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:59 crc kubenswrapper[4762]: I1009 13:25:59.462922 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fx92z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf0dada3-5765-4a2e-b28a-f9291c2d6428\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6368fc75a63ebf7915390457c69a6a9b77e19726ba182437f5c616bea12245f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xpd4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aeff516f9d81e48c8bd350da51ebffc8f6f031f06100a5264cb7aca04674f79f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2
099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xpd4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fx92z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:59Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:59 crc kubenswrapper[4762]: I1009 13:25:59.483386 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:59 crc kubenswrapper[4762]: I1009 13:25:59.483459 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:59 crc kubenswrapper[4762]: I1009 13:25:59.483482 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:59 crc kubenswrapper[4762]: I1009 13:25:59.483518 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:25:59 crc kubenswrapper[4762]: I1009 13:25:59.483542 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:25:59Z","lastTransitionTime":"2025-10-09T13:25:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:25:59 crc kubenswrapper[4762]: I1009 13:25:59.499991 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7bd1ecbd-1492-4e6a-87e8-1c913e084d9d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9ff1efe69d256b491a039e5f35442c087ce3b52fc7abf98b338e24c3e020b99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb2071dd369674ca2de7de56dd1250c763b8733d72889b60eff864774dc3d81b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e44b6f2021a1a4ccd714f86443c7cc235b9d77cd455e68f7e042281ff0917569\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f33b9070a56fe51d2f39d9d509fc8cea2fada696703209c911b75f5c8f53e96d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9179f90a1a9a3c70467429b0471320ccf51b67f27c4d28d22ebc477cedab17d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1009 13:25:38.564754 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 13:25:38.572923 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2393231961/tls.crt::/tmp/serving-cert-2393231961/tls.key\\\\\\\"\\\\nI1009 13:25:44.418137 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 13:25:44.425303 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 13:25:44.425330 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 13:25:44.425348 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 13:25:44.425353 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 13:25:44.434300 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1009 13:25:44.434319 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1009 13:25:44.434323 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 13:25:44.434340 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 13:25:44.434344 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 13:25:44.434347 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 13:25:44.434350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 13:25:44.434353 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1009 13:25:44.436492 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:28Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://281873dc615f940d39a13cb0a18a2eb34eb7de3f9773d8845183edeb89d430f5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b0cc5e4351c64f1a4f07f8ec87ea48ddab393d4ac64228e8fbf20d3259fb630\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6b0cc5e4351c64f1a4f07f8ec87ea48ddab393d4ac64228e8fbf20d3259fb630\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:25Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:59Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:59 crc kubenswrapper[4762]: I1009 13:25:59.521555 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2vkbh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"263d57f9-b10b-4ce1-adad-774600b977d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://732b66ff58c48b0703e0fd4585768652035af6797f66b586fc6f17ef3937d9d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gmcr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2vkbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:59Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:59 crc kubenswrapper[4762]: I1009 13:25:59.552573 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-9wtqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c847aae6-277a-45dc-86d0-9b175f7e8177\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbc232c96b60c8678588d4902c8dfbf6fc0b30f8af768295c963aad3a9f4d644\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2kljt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-9wtqb\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:59Z is after 2025-08-24T17:21:41Z" Oct 09 13:25:59 crc kubenswrapper[4762]: I1009 13:25:59.586464 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:59 crc kubenswrapper[4762]: I1009 13:25:59.586502 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:59 crc kubenswrapper[4762]: I1009 13:25:59.586526 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:59 crc kubenswrapper[4762]: I1009 13:25:59.586543 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:25:59 crc kubenswrapper[4762]: I1009 13:25:59.586553 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:25:59Z","lastTransitionTime":"2025-10-09T13:25:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:25:59 crc kubenswrapper[4762]: I1009 13:25:59.689132 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:25:59 crc kubenswrapper[4762]: I1009 13:25:59.689190 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:25:59 crc kubenswrapper[4762]: I1009 13:25:59.689208 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:25:59 crc kubenswrapper[4762]: I1009 13:25:59.689231 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:25:59 crc kubenswrapper[4762]: I1009 13:25:59.689248 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:25:59Z","lastTransitionTime":"2025-10-09T13:25:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Oct 09 13:25:59 crc kubenswrapper[4762]: I1009 13:25:59.586464 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 09 13:25:59 crc kubenswrapper[4762]: I1009 13:25:59.586502 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 09 13:25:59 crc kubenswrapper[4762]: I1009 13:25:59.586526 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 09 13:25:59 crc kubenswrapper[4762]: I1009 13:25:59.586543 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 09 13:25:59 crc kubenswrapper[4762]: I1009 13:25:59.586553 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:25:59Z","lastTransitionTime":"2025-10-09T13:25:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 09 13:25:59 crc kubenswrapper[4762]: I1009 13:25:59.689132 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 09 13:25:59 crc kubenswrapper[4762]: I1009 13:25:59.689190 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 09 13:25:59 crc kubenswrapper[4762]: I1009 13:25:59.689208 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 09 13:25:59 crc kubenswrapper[4762]: I1009 13:25:59.689231 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 09 13:25:59 crc kubenswrapper[4762]: I1009 13:25:59.689248 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:25:59Z","lastTransitionTime":"2025-10-09T13:25:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 09 13:25:59 crc kubenswrapper[4762]: I1009 13:25:59.792020 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 09 13:25:59 crc kubenswrapper[4762]: I1009 13:25:59.792053 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 09 13:25:59 crc kubenswrapper[4762]: I1009 13:25:59.792061 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 09 13:25:59 crc kubenswrapper[4762]: I1009 13:25:59.792075 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 09 13:25:59 crc kubenswrapper[4762]: I1009 13:25:59.792084 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:25:59Z","lastTransitionTime":"2025-10-09T13:25:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 09 13:25:59 crc kubenswrapper[4762]: I1009 13:25:59.895111 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 09 13:25:59 crc kubenswrapper[4762]: I1009 13:25:59.895219 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 09 13:25:59 crc kubenswrapper[4762]: I1009 13:25:59.895240 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 09 13:25:59 crc kubenswrapper[4762]: I1009 13:25:59.895263 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 09 13:25:59 crc kubenswrapper[4762]: I1009 13:25:59.895279 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:25:59Z","lastTransitionTime":"2025-10-09T13:25:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 09 13:25:59 crc kubenswrapper[4762]: I1009 13:25:59.998198 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 09 13:25:59 crc kubenswrapper[4762]: I1009 13:25:59.998254 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 09 13:25:59 crc kubenswrapper[4762]: I1009 13:25:59.998273 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 09 13:25:59 crc kubenswrapper[4762]: I1009 13:25:59.998297 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 09 13:25:59 crc kubenswrapper[4762]: I1009 13:25:59.998316 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:25:59Z","lastTransitionTime":"2025-10-09T13:25:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
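
The flood of identical NodeHasSufficientMemory / NodeHasNoDiskPressure / NodeHasSufficientPID / NodeNotReady groups is the kubelet re-evaluating node conditions roughly every 100ms (offsets .689, .792, .895, .998) while the runtime keeps answering NetworkReady=false, because /etc/kubernetes/cni/net.d/ contains no network configuration yet. The readiness test amounts to a directory scan; here is a rough sketch with assumed extensions and error handling, not CRI-O's actual implementation:

    // cnicheck.go: the condition behind "no CNI configuration file in
    // /etc/kubernetes/cni/net.d/" is simply that no usable network config
    // exists in the directory the runtime was told to watch.
    package main

    import (
        "fmt"
        "os"
        "path/filepath"
    )

    func hasCNIConfig(dir string) bool {
        entries, err := os.ReadDir(dir)
        if err != nil {
            return false // an unreadable directory counts as "no config"
        }
        for _, e := range entries {
            switch filepath.Ext(e.Name()) {
            case ".conf", ".conflist", ".json":
                return true
            }
        }
        return false
    }

    func main() {
        if !hasCNIConfig("/etc/kubernetes/cni/net.d") {
            // The state the kubelet keeps reporting above.
            fmt.Println("NetworkReady=false: no CNI configuration file found")
        }
    }

Once a network configuration appears in that directory the condition can flip back to Ready, and this group of entries stops repeating.
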
Oct 09 13:26:00 crc kubenswrapper[4762]: I1009 13:26:00.101056 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 09 13:26:00 crc kubenswrapper[4762]: I1009 13:26:00.101092 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 09 13:26:00 crc kubenswrapper[4762]: I1009 13:26:00.101103 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 09 13:26:00 crc kubenswrapper[4762]: I1009 13:26:00.101119 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 09 13:26:00 crc kubenswrapper[4762]: I1009 13:26:00.101130 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:00Z","lastTransitionTime":"2025-10-09T13:26:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 09 13:26:00 crc kubenswrapper[4762]: I1009 13:26:00.204506 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 09 13:26:00 crc kubenswrapper[4762]: I1009 13:26:00.204844 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 09 13:26:00 crc kubenswrapper[4762]: I1009 13:26:00.204855 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 09 13:26:00 crc kubenswrapper[4762]: I1009 13:26:00.204872 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 09 13:26:00 crc kubenswrapper[4762]: I1009 13:26:00.204883 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:00Z","lastTransitionTime":"2025-10-09T13:26:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 09 13:26:00 crc kubenswrapper[4762]: I1009 13:26:00.307380 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 09 13:26:00 crc kubenswrapper[4762]: I1009 13:26:00.307464 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 09 13:26:00 crc kubenswrapper[4762]: I1009 13:26:00.307487 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 09 13:26:00 crc kubenswrapper[4762]: I1009 13:26:00.307516 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 09 13:26:00 crc kubenswrapper[4762]: I1009 13:26:00.307537 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:00Z","lastTransitionTime":"2025-10-09T13:26:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:00 crc kubenswrapper[4762]: I1009 13:26:00.410115 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:00 crc kubenswrapper[4762]: I1009 13:26:00.410168 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:00 crc kubenswrapper[4762]: I1009 13:26:00.410184 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:00 crc kubenswrapper[4762]: I1009 13:26:00.410233 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:00 crc kubenswrapper[4762]: I1009 13:26:00.410249 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:00Z","lastTransitionTime":"2025-10-09T13:26:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:00 crc kubenswrapper[4762]: I1009 13:26:00.413622 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f9a76399-c2ae-487b-a52c-f0e271fb1d20-metrics-certs\") pod \"network-metrics-daemon-k4bwn\" (UID: \"f9a76399-c2ae-487b-a52c-f0e271fb1d20\") " pod="openshift-multus/network-metrics-daemon-k4bwn" Oct 09 13:26:00 crc kubenswrapper[4762]: E1009 13:26:00.413908 4762 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 09 13:26:00 crc kubenswrapper[4762]: E1009 13:26:00.413993 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f9a76399-c2ae-487b-a52c-f0e271fb1d20-metrics-certs podName:f9a76399-c2ae-487b-a52c-f0e271fb1d20 nodeName:}" failed. No retries permitted until 2025-10-09 13:26:02.413973329 +0000 UTC m=+38.187764378 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/f9a76399-c2ae-487b-a52c-f0e271fb1d20-metrics-certs") pod "network-metrics-daemon-k4bwn" (UID: "f9a76399-c2ae-487b-a52c-f0e271fb1d20") : object "openshift-multus"/"metrics-daemon-secret" not registered
Oct 09 13:26:00 crc kubenswrapper[4762]: I1009 13:26:00.512675 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 09 13:26:00 crc kubenswrapper[4762]: I1009 13:26:00.512708 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 09 13:26:00 crc kubenswrapper[4762]: I1009 13:26:00.512715 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 09 13:26:00 crc kubenswrapper[4762]: I1009 13:26:00.512729 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 09 13:26:00 crc kubenswrapper[4762]: I1009 13:26:00.512738 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:00Z","lastTransitionTime":"2025-10-09T13:26:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 09 13:26:00 crc kubenswrapper[4762]: I1009 13:26:00.616175 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 09 13:26:00 crc kubenswrapper[4762]: I1009 13:26:00.616253 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 09 13:26:00 crc kubenswrapper[4762]: I1009 13:26:00.616272 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 09 13:26:00 crc kubenswrapper[4762]: I1009 13:26:00.616304 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 09 13:26:00 crc kubenswrapper[4762]: I1009 13:26:00.616324 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:00Z","lastTransitionTime":"2025-10-09T13:26:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 09 13:26:00 crc kubenswrapper[4762]: E1009 13:26:00.716481 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 13:26:16.716440249 +0000 UTC m=+52.490231338 (durationBeforeRetry 16s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
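
Note the retry bookkeeping in nestedpendingoperations.go: the metrics-certs mount above was pushed out by 2s, while these later operations are pushed out by 16s, the per-volume delay growing on each consecutive failure of the same operation. A generic Go sketch of that progression follows; the base delay and the cap are illustrative assumptions, not values read out of the kubelet:

    // backoff.go: per-operation exponential backoff matching the
    // "durationBeforeRetry 2s ... 16s" progression in these entries.
    package main

    import (
        "fmt"
        "time"
    )

    type backoff struct{ delay, cap time.Duration }

    func (b *backoff) next() time.Duration {
        d := b.delay
        if b.delay *= 2; b.delay > b.cap {
            b.delay = b.cap
        }
        return d
    }

    func main() {
        b := backoff{delay: 2 * time.Second, cap: 2 * time.Minute}
        for i := 1; i <= 5; i++ {
            // Each failed MountVolume/UnmountVolume attempt schedules the
            // next retry after the current delay, then doubles it.
            fmt.Printf("attempt %d: retry after %s\n", i, b.next())
        }
        // prints 2s, 4s, 8s, 16s, 32s ... capped at the maximum
    }

The 16s entries are therefore not a new failure mode, just later attempts at operations that have been failing since the kubelet restarted.
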
Oct 09 13:26:00 crc kubenswrapper[4762]: I1009 13:26:00.716840 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 09 13:26:00 crc kubenswrapper[4762]: I1009 13:26:00.717125 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 09 13:26:00 crc kubenswrapper[4762]: E1009 13:26:00.717378 4762 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Oct 09 13:26:00 crc kubenswrapper[4762]: E1009 13:26:00.717446 4762 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Oct 09 13:26:00 crc kubenswrapper[4762]: E1009 13:26:00.717513 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-09 13:26:16.717495846 +0000 UTC m=+52.491286985 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
Oct 09 13:26:00 crc kubenswrapper[4762]: E1009 13:26:00.717539 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-09 13:26:16.717527727 +0000 UTC m=+52.491318896 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 09 13:26:00 crc kubenswrapper[4762]: I1009 13:26:00.717229 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 13:26:00 crc kubenswrapper[4762]: I1009 13:26:00.718240 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:00 crc kubenswrapper[4762]: I1009 13:26:00.718265 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:00 crc kubenswrapper[4762]: I1009 13:26:00.718275 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:00 crc kubenswrapper[4762]: I1009 13:26:00.718290 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:00 crc kubenswrapper[4762]: I1009 13:26:00.718298 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:00Z","lastTransitionTime":"2025-10-09T13:26:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Oct 09 13:26:00 crc kubenswrapper[4762]: I1009 13:26:00.818445 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 09 13:26:00 crc kubenswrapper[4762]: I1009 13:26:00.818504 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 09 13:26:00 crc kubenswrapper[4762]: E1009 13:26:00.818692 4762 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Oct 09 13:26:00 crc kubenswrapper[4762]: E1009 13:26:00.818712 4762 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Oct 09 13:26:00 crc kubenswrapper[4762]: E1009 13:26:00.818725 4762 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Oct 09 13:26:00 crc kubenswrapper[4762]: E1009 13:26:00.818836 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-09 13:26:16.818812952 +0000 UTC m=+52.592604001 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
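
Unlike a plain secret or configmap volume, kube-api-access-* is a projected volume: the service-account token and the kube-root-ca.crt and openshift-service-ca.crt configmaps are combined into one mount, and the projection only succeeds if every source resolves. With the kubelet's object cache not yet synced ("not registered"), both configmap lookups fail and projected.go reports the aggregated error seen here. A compact sketch of that all-or-nothing behavior; the cache type is hypothetical, while the namespace and source names come from the log:

    // projected-sketch.go: every source of a projected volume must resolve
    // before any data is written, so one missing configmap fails the mount.
    package main

    import "fmt"

    // store stands in for the kubelet's cache of synced API objects.
    type store map[string]bool

    func (s store) has(ns, name string) bool { return s[ns+"/"+name] }

    func buildProjection(s store, ns string, sources []string) error {
        var missing []string
        for _, name := range sources {
            if !s.has(ns, name) {
                missing = append(missing, fmt.Sprintf("object %q/%q not registered", ns, name))
            }
        }
        if len(missing) > 0 {
            // One aggregated error per volume, as in the entries above.
            return fmt.Errorf("error preparing data for projected volume: %v", missing)
        }
        return nil
    }

    func main() {
        cache := store{} // nothing synced yet, as in the log
        fmt.Println(buildProjection(cache, "openshift-network-diagnostics",
            []string{"kube-root-ca.crt", "openshift-service-ca.crt"}))
    }
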
Oct 09 13:26:00 crc kubenswrapper[4762]: E1009 13:26:00.819216 4762 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Oct 09 13:26:00 crc kubenswrapper[4762]: E1009 13:26:00.819261 4762 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Oct 09 13:26:00 crc kubenswrapper[4762]: E1009 13:26:00.819278 4762 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Oct 09 13:26:00 crc kubenswrapper[4762]: E1009 13:26:00.819340 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-09 13:26:16.819320286 +0000 UTC m=+52.593111335 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Oct 09 13:26:00 crc kubenswrapper[4762]: I1009 13:26:00.821068 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 09 13:26:00 crc kubenswrapper[4762]: I1009 13:26:00.821109 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 09 13:26:00 crc kubenswrapper[4762]: I1009 13:26:00.821126 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 09 13:26:00 crc kubenswrapper[4762]: I1009 13:26:00.821148 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 09 13:26:00 crc kubenswrapper[4762]: I1009 13:26:00.821166 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:00Z","lastTransitionTime":"2025-10-09T13:26:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Oct 09 13:26:00 crc kubenswrapper[4762]: I1009 13:26:00.924685 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 09 13:26:00 crc kubenswrapper[4762]: I1009 13:26:00.924794 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 09 13:26:00 crc kubenswrapper[4762]: I1009 13:26:00.924814 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 09 13:26:00 crc kubenswrapper[4762]: I1009 13:26:00.924839 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 09 13:26:00 crc kubenswrapper[4762]: I1009 13:26:00.924869 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:00Z","lastTransitionTime":"2025-10-09T13:26:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 09 13:26:00 crc kubenswrapper[4762]: I1009 13:26:00.964786 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 09 13:26:00 crc kubenswrapper[4762]: E1009 13:26:00.964984 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 09 13:26:00 crc kubenswrapper[4762]: I1009 13:26:00.965043 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 09 13:26:00 crc kubenswrapper[4762]: E1009 13:26:00.965407 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 09 13:26:00 crc kubenswrapper[4762]: I1009 13:26:00.965474 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 09 13:26:00 crc kubenswrapper[4762]: I1009 13:26:00.965477 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k4bwn"
Oct 09 13:26:00 crc kubenswrapper[4762]: E1009 13:26:00.965616 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
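
Every sandbox-less pod now hits the same gate: the kubelet sees it needs a new pod sandbox, but creating one would require the CNI network, so the sync is skipped for any pod that is not host-network. That is why the host-network static pods earlier in the log keep running while these four workload pods stay stuck. A simplified sketch of the gate, not kubelet source and with illustrative names:

    // sandboxgate.go: why "No sandbox for pod can be found" is followed by
    // "Error syncing pod, skipping" while NetworkReady=false.
    package main

    import (
        "errors"
        "fmt"
    )

    func syncPod(pod string, hostNetwork, networkReady bool) error {
        fmt.Printf("No sandbox for pod can be found. Need to start a new one pod=%q\n", pod)
        if !hostNetwork && !networkReady {
            // Host-network pods skip this check, so static pods still run.
            return errors.New("network is not ready: container runtime network not ready: NetworkReady=false")
        }
        // ... create the sandbox and start the containers ...
        return nil
    }

    func main() {
        err := syncPod("openshift-multus/network-metrics-daemon-k4bwn", false, false)
        fmt.Println("Error syncing pod, skipping err:", err)
    }
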
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 13:26:00 crc kubenswrapper[4762]: E1009 13:26:00.965816 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k4bwn" podUID="f9a76399-c2ae-487b-a52c-f0e271fb1d20" Oct 09 13:26:01 crc kubenswrapper[4762]: I1009 13:26:01.027245 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:01 crc kubenswrapper[4762]: I1009 13:26:01.027287 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:01 crc kubenswrapper[4762]: I1009 13:26:01.027299 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:01 crc kubenswrapper[4762]: I1009 13:26:01.027315 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:01 crc kubenswrapper[4762]: I1009 13:26:01.027324 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:01Z","lastTransitionTime":"2025-10-09T13:26:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:01 crc kubenswrapper[4762]: I1009 13:26:01.055928 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:01 crc kubenswrapper[4762]: I1009 13:26:01.055990 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:01 crc kubenswrapper[4762]: I1009 13:26:01.056008 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:01 crc kubenswrapper[4762]: I1009 13:26:01.056033 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:01 crc kubenswrapper[4762]: I1009 13:26:01.056049 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:01Z","lastTransitionTime":"2025-10-09T13:26:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:01 crc kubenswrapper[4762]: E1009 13:26:01.075926 4762 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9a48ddb1-1645-4cf1-ba92-96ea5fd03a1b\\\",\\\"systemUUID\\\":\\\"cb0479c9-186e-453b-880a-de1db201ede6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:01Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:01 crc kubenswrapper[4762]: I1009 13:26:01.080618 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:01 crc kubenswrapper[4762]: I1009 13:26:01.080672 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 09 13:26:01 crc kubenswrapper[4762]: I1009 13:26:01.080684 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:01 crc kubenswrapper[4762]: I1009 13:26:01.080699 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:01 crc kubenswrapper[4762]: I1009 13:26:01.080712 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:01Z","lastTransitionTime":"2025-10-09T13:26:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:01 crc kubenswrapper[4762]: E1009 13:26:01.096100 4762 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9a48ddb1-1645-4cf1-ba92-96ea5fd03a1b\\\",\\\"systemUUID\\\":\\\"cb0479c9-186e-453b-880a-de1db201ede6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:01Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:01 crc kubenswrapper[4762]: I1009 13:26:01.102743 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:01 crc kubenswrapper[4762]: I1009 13:26:01.102805 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 09 13:26:01 crc kubenswrapper[4762]: I1009 13:26:01.102824 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:01 crc kubenswrapper[4762]: I1009 13:26:01.102850 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:01 crc kubenswrapper[4762]: I1009 13:26:01.102869 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:01Z","lastTransitionTime":"2025-10-09T13:26:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:01 crc kubenswrapper[4762]: E1009 13:26:01.117739 4762 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9a48ddb1-1645-4cf1-ba92-96ea5fd03a1b\\\",\\\"systemUUID\\\":\\\"cb0479c9-186e-453b-880a-de1db201ede6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:01Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:01 crc kubenswrapper[4762]: I1009 13:26:01.126022 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:01 crc kubenswrapper[4762]: I1009 13:26:01.126072 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 09 13:26:01 crc kubenswrapper[4762]: I1009 13:26:01.126090 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:01 crc kubenswrapper[4762]: I1009 13:26:01.126111 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:01 crc kubenswrapper[4762]: I1009 13:26:01.126127 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:01Z","lastTransitionTime":"2025-10-09T13:26:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:01 crc kubenswrapper[4762]: E1009 13:26:01.140265 4762 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9a48ddb1-1645-4cf1-ba92-96ea5fd03a1b\\\",\\\"systemUUID\\\":\\\"cb0479c9-186e-453b-880a-de1db201ede6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:01Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:01 crc kubenswrapper[4762]: I1009 13:26:01.144187 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:01 crc kubenswrapper[4762]: I1009 13:26:01.144271 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 09 13:26:01 crc kubenswrapper[4762]: I1009 13:26:01.144297 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:01 crc kubenswrapper[4762]: I1009 13:26:01.144332 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:01 crc kubenswrapper[4762]: I1009 13:26:01.144358 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:01Z","lastTransitionTime":"2025-10-09T13:26:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:01 crc kubenswrapper[4762]: E1009 13:26:01.160764 4762 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9a48ddb1-1645-4cf1-ba92-96ea5fd03a1b\\\",\\\"systemUUID\\\":\\\"cb0479c9-186e-453b-880a-de1db201ede6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:01Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:01 crc kubenswrapper[4762]: E1009 13:26:01.161016 4762 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 09 13:26:01 crc kubenswrapper[4762]: I1009 13:26:01.162488 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
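The failure that ends this retry loop is a plain x509 validity error surfaced by Go's TLS client: the kubelet cannot POST its status patch to the node.network-node-identity.openshift.io webhook on 127.0.0.1:9743 because the serving certificate's notAfter (2025-08-24T17:21:41Z) lies before the node's current clock (2025-10-09T13:26:01Z). As an illustration only (this probe is not part of the log; the endpoint is simply the one named in the error), a minimal Go program can read the presented certificate's validity window directly:

    package main

    import (
        "crypto/tls"
        "fmt"
        "log"
        "time"
    )

    func main() {
        // Endpoint taken from the log line above; adjust if the webhook listens elsewhere.
        // InsecureSkipVerify is used only so the handshake completes and the expired
        // certificate can be inspected; nothing returned here should be trusted.
        conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{InsecureSkipVerify: true})
        if err != nil {
            log.Fatalf("dial: %v", err)
        }
        defer conn.Close()

        cert := conn.ConnectionState().PeerCertificates[0]
        fmt.Printf("subject:    %s\n", cert.Subject)
        fmt.Printf("not before: %s\n", cert.NotBefore.UTC().Format(time.RFC3339))
        fmt.Printf("not after:  %s\n", cert.NotAfter.UTC().Format(time.RFC3339))
        fmt.Printf("expired:    %v\n", time.Now().UTC().After(cert.NotAfter))
    }

Against an endpoint in this state, such a probe would be expected to report "not after: 2025-08-24T17:21:41Z" and "expired: true", matching the error text above, which points at certificate rotation rather than networking as the fix.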
event="NodeHasSufficientMemory" Oct 09 13:26:01 crc kubenswrapper[4762]: I1009 13:26:01.162544 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:01 crc kubenswrapper[4762]: I1009 13:26:01.162563 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:01 crc kubenswrapper[4762]: I1009 13:26:01.162585 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:01 crc kubenswrapper[4762]: I1009 13:26:01.162600 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:01Z","lastTransitionTime":"2025-10-09T13:26:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:01 crc kubenswrapper[4762]: I1009 13:26:01.265661 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:01 crc kubenswrapper[4762]: I1009 13:26:01.265706 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:01 crc kubenswrapper[4762]: I1009 13:26:01.265721 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:01 crc kubenswrapper[4762]: I1009 13:26:01.265740 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:01 crc kubenswrapper[4762]: I1009 13:26:01.265754 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:01Z","lastTransitionTime":"2025-10-09T13:26:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:01 crc kubenswrapper[4762]: I1009 13:26:01.369085 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:01 crc kubenswrapper[4762]: I1009 13:26:01.369154 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:01 crc kubenswrapper[4762]: I1009 13:26:01.369171 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:01 crc kubenswrapper[4762]: I1009 13:26:01.369195 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:01 crc kubenswrapper[4762]: I1009 13:26:01.369214 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:01Z","lastTransitionTime":"2025-10-09T13:26:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:01 crc kubenswrapper[4762]: I1009 13:26:01.472927 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:01 crc kubenswrapper[4762]: I1009 13:26:01.473038 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:01 crc kubenswrapper[4762]: I1009 13:26:01.473065 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:01 crc kubenswrapper[4762]: I1009 13:26:01.473098 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:01 crc kubenswrapper[4762]: I1009 13:26:01.473122 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:01Z","lastTransitionTime":"2025-10-09T13:26:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:01 crc kubenswrapper[4762]: I1009 13:26:01.576567 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:01 crc kubenswrapper[4762]: I1009 13:26:01.576711 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:01 crc kubenswrapper[4762]: I1009 13:26:01.576742 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:01 crc kubenswrapper[4762]: I1009 13:26:01.576771 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:01 crc kubenswrapper[4762]: I1009 13:26:01.576793 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:01Z","lastTransitionTime":"2025-10-09T13:26:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:01 crc kubenswrapper[4762]: I1009 13:26:01.679874 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:01 crc kubenswrapper[4762]: I1009 13:26:01.679925 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:01 crc kubenswrapper[4762]: I1009 13:26:01.679944 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:01 crc kubenswrapper[4762]: I1009 13:26:01.679970 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:01 crc kubenswrapper[4762]: I1009 13:26:01.679987 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:01Z","lastTransitionTime":"2025-10-09T13:26:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
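Interleaved with those failures, the kubelet republishes the same NotReady condition roughly every 100 ms: the runtime reports NetworkReady=false because no CNI configuration exists yet under /etc/kubernetes/cni/net.d/, which on an OpenShift cluster is typically written by the network operator once its pods start. The underlying check is simple directory inspection; a stand-alone sketch (illustrative, not kubelet source; the path is the one from the message, and the *.conf/*.conflist suffixes are the usual convention for this directory):

    package main

    import (
        "fmt"
        "log"
        "os"
        "path/filepath"
        "strings"
    )

    func main() {
        // Directory named in the kubelet message; other runtimes default to /etc/cni/net.d.
        dir := "/etc/kubernetes/cni/net.d"
        entries, err := os.ReadDir(dir)
        if err != nil {
            log.Fatalf("read %s: %v", dir, err)
        }
        var found []string
        for _, e := range entries {
            // Container runtimes conventionally load *.conf and *.conflist files here.
            if strings.HasSuffix(e.Name(), ".conf") || strings.HasSuffix(e.Name(), ".conflist") {
                found = append(found, filepath.Join(dir, e.Name()))
            }
        }
        if len(found) == 0 {
            fmt.Println("no CNI configuration files: the network plugin has not written its config yet")
            return
        }
        for _, f := range found {
            fmt.Println(f)
        }
    }

Until something writes a config file there, the Ready condition keeps cycling exactly as in the entries above and below.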
Oct 09 13:26:01 crc kubenswrapper[4762]: I1009 13:26:01.783357 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:01 crc kubenswrapper[4762]: I1009 13:26:01.783449 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:01 crc kubenswrapper[4762]: I1009 13:26:01.783468 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:01 crc kubenswrapper[4762]: I1009 13:26:01.783494 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:01 crc kubenswrapper[4762]: I1009 13:26:01.783512 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:01Z","lastTransitionTime":"2025-10-09T13:26:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:01 crc kubenswrapper[4762]: I1009 13:26:01.886040 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:01 crc kubenswrapper[4762]: I1009 13:26:01.886122 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:01 crc kubenswrapper[4762]: I1009 13:26:01.886131 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:01 crc kubenswrapper[4762]: I1009 13:26:01.886155 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:01 crc kubenswrapper[4762]: I1009 13:26:01.886167 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:01Z","lastTransitionTime":"2025-10-09T13:26:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:01 crc kubenswrapper[4762]: I1009 13:26:01.989785 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:01 crc kubenswrapper[4762]: I1009 13:26:01.989846 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:01 crc kubenswrapper[4762]: I1009 13:26:01.989865 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:01 crc kubenswrapper[4762]: I1009 13:26:01.989891 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:01 crc kubenswrapper[4762]: I1009 13:26:01.989909 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:01Z","lastTransitionTime":"2025-10-09T13:26:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Oct 09 13:26:02 crc kubenswrapper[4762]: I1009 13:26:02.092084 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:02 crc kubenswrapper[4762]: I1009 13:26:02.092148 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:02 crc kubenswrapper[4762]: I1009 13:26:02.092164 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:02 crc kubenswrapper[4762]: I1009 13:26:02.092190 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:02 crc kubenswrapper[4762]: I1009 13:26:02.092211 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:02Z","lastTransitionTime":"2025-10-09T13:26:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:02 crc kubenswrapper[4762]: I1009 13:26:02.195216 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:02 crc kubenswrapper[4762]: I1009 13:26:02.195290 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:02 crc kubenswrapper[4762]: I1009 13:26:02.195306 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:02 crc kubenswrapper[4762]: I1009 13:26:02.195329 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:02 crc kubenswrapper[4762]: I1009 13:26:02.195345 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:02Z","lastTransitionTime":"2025-10-09T13:26:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:02 crc kubenswrapper[4762]: I1009 13:26:02.297518 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:02 crc kubenswrapper[4762]: I1009 13:26:02.297583 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:02 crc kubenswrapper[4762]: I1009 13:26:02.297598 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:02 crc kubenswrapper[4762]: I1009 13:26:02.297618 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:02 crc kubenswrapper[4762]: I1009 13:26:02.297671 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:02Z","lastTransitionTime":"2025-10-09T13:26:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:02 crc kubenswrapper[4762]: I1009 13:26:02.400262 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:02 crc kubenswrapper[4762]: I1009 13:26:02.400311 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:02 crc kubenswrapper[4762]: I1009 13:26:02.400323 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:02 crc kubenswrapper[4762]: I1009 13:26:02.400343 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:02 crc kubenswrapper[4762]: I1009 13:26:02.400355 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:02Z","lastTransitionTime":"2025-10-09T13:26:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:02 crc kubenswrapper[4762]: I1009 13:26:02.435127 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f9a76399-c2ae-487b-a52c-f0e271fb1d20-metrics-certs\") pod \"network-metrics-daemon-k4bwn\" (UID: \"f9a76399-c2ae-487b-a52c-f0e271fb1d20\") " pod="openshift-multus/network-metrics-daemon-k4bwn" Oct 09 13:26:02 crc kubenswrapper[4762]: E1009 13:26:02.435284 4762 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 09 13:26:02 crc kubenswrapper[4762]: E1009 13:26:02.435345 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f9a76399-c2ae-487b-a52c-f0e271fb1d20-metrics-certs podName:f9a76399-c2ae-487b-a52c-f0e271fb1d20 nodeName:}" failed. No retries permitted until 2025-10-09 13:26:06.435329391 +0000 UTC m=+42.209120440 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/f9a76399-c2ae-487b-a52c-f0e271fb1d20-metrics-certs") pod "network-metrics-daemon-k4bwn" (UID: "f9a76399-c2ae-487b-a52c-f0e271fb1d20") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 09 13:26:02 crc kubenswrapper[4762]: I1009 13:26:02.503237 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:02 crc kubenswrapper[4762]: I1009 13:26:02.503304 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:02 crc kubenswrapper[4762]: I1009 13:26:02.503345 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:02 crc kubenswrapper[4762]: I1009 13:26:02.503382 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:02 crc kubenswrapper[4762]: I1009 13:26:02.503406 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:02Z","lastTransitionTime":"2025-10-09T13:26:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:02 crc kubenswrapper[4762]: I1009 13:26:02.606957 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:02 crc kubenswrapper[4762]: I1009 13:26:02.607039 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:02 crc kubenswrapper[4762]: I1009 13:26:02.607062 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:02 crc kubenswrapper[4762]: I1009 13:26:02.607101 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:02 crc kubenswrapper[4762]: I1009 13:26:02.607124 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:02Z","lastTransitionTime":"2025-10-09T13:26:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
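The "durationBeforeRetry 4s" in the nestedpendingoperations entry above is one step of the exponential backoff the kubelet applies to failed volume operations: the secret cannot be mounted while the "metrics-daemon-secret" object is unregistered, so each MountVolume attempt fails and the wait doubles. A minimal Go sketch of that progression, assuming an initial 500 ms delay, a doubling factor, and an arbitrary two-minute cap (illustrative values, not taken from the kubelet source):

package main

import (
	"fmt"
	"time"
)

// backoff models a doubling retry delay with an upper bound, in the
// spirit of the kubelet's per-volume-operation backoff. All constants
// here are assumptions for the sketch.
type backoff struct {
	next time.Duration
	max  time.Duration
}

// step returns the current wait and doubles it for the next failure.
func (b *backoff) step() time.Duration {
	d := b.next
	b.next *= 2
	if b.next > b.max {
		b.next = b.max
	}
	return d
}

func main() {
	b := &backoff{next: 500 * time.Millisecond, max: 2 * time.Minute}
	for i := 1; i <= 5; i++ {
		fmt.Printf("failure %d: no retries permitted for %v\n", i, b.step())
	}
}

Under these assumed constants the fourth consecutive failure waits 4s, which is the durationBeforeRetry the entry at 13:26:02.435 reports.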
Has your network provider started?"} Oct 09 13:26:02 crc kubenswrapper[4762]: I1009 13:26:02.710054 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:02 crc kubenswrapper[4762]: I1009 13:26:02.710101 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:02 crc kubenswrapper[4762]: I1009 13:26:02.710117 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:02 crc kubenswrapper[4762]: I1009 13:26:02.710139 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:02 crc kubenswrapper[4762]: I1009 13:26:02.710156 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:02Z","lastTransitionTime":"2025-10-09T13:26:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:02 crc kubenswrapper[4762]: I1009 13:26:02.812983 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:02 crc kubenswrapper[4762]: I1009 13:26:02.813055 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:02 crc kubenswrapper[4762]: I1009 13:26:02.813072 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:02 crc kubenswrapper[4762]: I1009 13:26:02.813098 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:02 crc kubenswrapper[4762]: I1009 13:26:02.813118 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:02Z","lastTransitionTime":"2025-10-09T13:26:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:02 crc kubenswrapper[4762]: I1009 13:26:02.915793 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:02 crc kubenswrapper[4762]: I1009 13:26:02.915898 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:02 crc kubenswrapper[4762]: I1009 13:26:02.915917 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:02 crc kubenswrapper[4762]: I1009 13:26:02.915943 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:02 crc kubenswrapper[4762]: I1009 13:26:02.915960 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:02Z","lastTransitionTime":"2025-10-09T13:26:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:02 crc kubenswrapper[4762]: I1009 13:26:02.964808 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 13:26:02 crc kubenswrapper[4762]: I1009 13:26:02.964895 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k4bwn" Oct 09 13:26:02 crc kubenswrapper[4762]: I1009 13:26:02.964844 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 13:26:02 crc kubenswrapper[4762]: I1009 13:26:02.965032 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 13:26:02 crc kubenswrapper[4762]: E1009 13:26:02.965035 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 13:26:02 crc kubenswrapper[4762]: E1009 13:26:02.965228 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k4bwn" podUID="f9a76399-c2ae-487b-a52c-f0e271fb1d20" Oct 09 13:26:02 crc kubenswrapper[4762]: E1009 13:26:02.966393 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 13:26:02 crc kubenswrapper[4762]: E1009 13:26:02.966710 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 13:26:03 crc kubenswrapper[4762]: I1009 13:26:03.019142 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:03 crc kubenswrapper[4762]: I1009 13:26:03.019200 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:03 crc kubenswrapper[4762]: I1009 13:26:03.019216 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:03 crc kubenswrapper[4762]: I1009 13:26:03.019236 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:03 crc kubenswrapper[4762]: I1009 13:26:03.019251 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:03Z","lastTransitionTime":"2025-10-09T13:26:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:03 crc kubenswrapper[4762]: I1009 13:26:03.121984 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:03 crc kubenswrapper[4762]: I1009 13:26:03.122026 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:03 crc kubenswrapper[4762]: I1009 13:26:03.122038 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:03 crc kubenswrapper[4762]: I1009 13:26:03.122053 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:03 crc kubenswrapper[4762]: I1009 13:26:03.122064 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:03Z","lastTransitionTime":"2025-10-09T13:26:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:03 crc kubenswrapper[4762]: I1009 13:26:03.225006 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:03 crc kubenswrapper[4762]: I1009 13:26:03.225063 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:03 crc kubenswrapper[4762]: I1009 13:26:03.225077 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:03 crc kubenswrapper[4762]: I1009 13:26:03.225094 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:03 crc kubenswrapper[4762]: I1009 13:26:03.225108 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:03Z","lastTransitionTime":"2025-10-09T13:26:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:03 crc kubenswrapper[4762]: I1009 13:26:03.327520 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:03 crc kubenswrapper[4762]: I1009 13:26:03.327583 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:03 crc kubenswrapper[4762]: I1009 13:26:03.327597 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:03 crc kubenswrapper[4762]: I1009 13:26:03.327618 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:03 crc kubenswrapper[4762]: I1009 13:26:03.327649 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:03Z","lastTransitionTime":"2025-10-09T13:26:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:03 crc kubenswrapper[4762]: I1009 13:26:03.431386 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:03 crc kubenswrapper[4762]: I1009 13:26:03.431461 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:03 crc kubenswrapper[4762]: I1009 13:26:03.431480 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:03 crc kubenswrapper[4762]: I1009 13:26:03.431954 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:03 crc kubenswrapper[4762]: I1009 13:26:03.432008 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:03Z","lastTransitionTime":"2025-10-09T13:26:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:03 crc kubenswrapper[4762]: I1009 13:26:03.535505 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:03 crc kubenswrapper[4762]: I1009 13:26:03.535579 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:03 crc kubenswrapper[4762]: I1009 13:26:03.535593 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:03 crc kubenswrapper[4762]: I1009 13:26:03.535611 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:03 crc kubenswrapper[4762]: I1009 13:26:03.535623 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:03Z","lastTransitionTime":"2025-10-09T13:26:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:03 crc kubenswrapper[4762]: I1009 13:26:03.638460 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:03 crc kubenswrapper[4762]: I1009 13:26:03.638519 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:03 crc kubenswrapper[4762]: I1009 13:26:03.638582 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:03 crc kubenswrapper[4762]: I1009 13:26:03.638616 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:03 crc kubenswrapper[4762]: I1009 13:26:03.638681 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:03Z","lastTransitionTime":"2025-10-09T13:26:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:03 crc kubenswrapper[4762]: I1009 13:26:03.741069 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:03 crc kubenswrapper[4762]: I1009 13:26:03.741136 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:03 crc kubenswrapper[4762]: I1009 13:26:03.741159 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:03 crc kubenswrapper[4762]: I1009 13:26:03.741188 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:03 crc kubenswrapper[4762]: I1009 13:26:03.741211 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:03Z","lastTransitionTime":"2025-10-09T13:26:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:03 crc kubenswrapper[4762]: I1009 13:26:03.843820 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:03 crc kubenswrapper[4762]: I1009 13:26:03.843881 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:03 crc kubenswrapper[4762]: I1009 13:26:03.843902 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:03 crc kubenswrapper[4762]: I1009 13:26:03.843927 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:03 crc kubenswrapper[4762]: I1009 13:26:03.843945 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:03Z","lastTransitionTime":"2025-10-09T13:26:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:03 crc kubenswrapper[4762]: I1009 13:26:03.947289 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:03 crc kubenswrapper[4762]: I1009 13:26:03.947352 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:03 crc kubenswrapper[4762]: I1009 13:26:03.947369 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:03 crc kubenswrapper[4762]: I1009 13:26:03.947393 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:03 crc kubenswrapper[4762]: I1009 13:26:03.947410 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:03Z","lastTransitionTime":"2025-10-09T13:26:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:04 crc kubenswrapper[4762]: I1009 13:26:04.050456 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:04 crc kubenswrapper[4762]: I1009 13:26:04.050523 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:04 crc kubenswrapper[4762]: I1009 13:26:04.050541 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:04 crc kubenswrapper[4762]: I1009 13:26:04.050568 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:04 crc kubenswrapper[4762]: I1009 13:26:04.050588 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:04Z","lastTransitionTime":"2025-10-09T13:26:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:04 crc kubenswrapper[4762]: I1009 13:26:04.153798 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:04 crc kubenswrapper[4762]: I1009 13:26:04.153851 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:04 crc kubenswrapper[4762]: I1009 13:26:04.153900 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:04 crc kubenswrapper[4762]: I1009 13:26:04.153925 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:04 crc kubenswrapper[4762]: I1009 13:26:04.153941 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:04Z","lastTransitionTime":"2025-10-09T13:26:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:04 crc kubenswrapper[4762]: I1009 13:26:04.256610 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:04 crc kubenswrapper[4762]: I1009 13:26:04.256710 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:04 crc kubenswrapper[4762]: I1009 13:26:04.256730 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:04 crc kubenswrapper[4762]: I1009 13:26:04.256753 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:04 crc kubenswrapper[4762]: I1009 13:26:04.256768 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:04Z","lastTransitionTime":"2025-10-09T13:26:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:04 crc kubenswrapper[4762]: I1009 13:26:04.359201 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:04 crc kubenswrapper[4762]: I1009 13:26:04.359272 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:04 crc kubenswrapper[4762]: I1009 13:26:04.359289 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:04 crc kubenswrapper[4762]: I1009 13:26:04.359316 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:04 crc kubenswrapper[4762]: I1009 13:26:04.359334 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:04Z","lastTransitionTime":"2025-10-09T13:26:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:04 crc kubenswrapper[4762]: I1009 13:26:04.462600 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:04 crc kubenswrapper[4762]: I1009 13:26:04.462716 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:04 crc kubenswrapper[4762]: I1009 13:26:04.462740 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:04 crc kubenswrapper[4762]: I1009 13:26:04.462776 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:04 crc kubenswrapper[4762]: I1009 13:26:04.462799 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:04Z","lastTransitionTime":"2025-10-09T13:26:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:04 crc kubenswrapper[4762]: I1009 13:26:04.566209 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:04 crc kubenswrapper[4762]: I1009 13:26:04.566288 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:04 crc kubenswrapper[4762]: I1009 13:26:04.566314 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:04 crc kubenswrapper[4762]: I1009 13:26:04.566375 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:04 crc kubenswrapper[4762]: I1009 13:26:04.566401 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:04Z","lastTransitionTime":"2025-10-09T13:26:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:04 crc kubenswrapper[4762]: I1009 13:26:04.668544 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:04 crc kubenswrapper[4762]: I1009 13:26:04.668580 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:04 crc kubenswrapper[4762]: I1009 13:26:04.668604 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:04 crc kubenswrapper[4762]: I1009 13:26:04.668620 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:04 crc kubenswrapper[4762]: I1009 13:26:04.668651 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:04Z","lastTransitionTime":"2025-10-09T13:26:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:04 crc kubenswrapper[4762]: I1009 13:26:04.771768 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:04 crc kubenswrapper[4762]: I1009 13:26:04.771813 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:04 crc kubenswrapper[4762]: I1009 13:26:04.771824 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:04 crc kubenswrapper[4762]: I1009 13:26:04.771840 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:04 crc kubenswrapper[4762]: I1009 13:26:04.771848 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:04Z","lastTransitionTime":"2025-10-09T13:26:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:04 crc kubenswrapper[4762]: I1009 13:26:04.874102 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:04 crc kubenswrapper[4762]: I1009 13:26:04.874149 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:04 crc kubenswrapper[4762]: I1009 13:26:04.874161 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:04 crc kubenswrapper[4762]: I1009 13:26:04.874178 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:04 crc kubenswrapper[4762]: I1009 13:26:04.874189 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:04Z","lastTransitionTime":"2025-10-09T13:26:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:04 crc kubenswrapper[4762]: I1009 13:26:04.964858 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 13:26:04 crc kubenswrapper[4762]: I1009 13:26:04.964939 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 13:26:04 crc kubenswrapper[4762]: I1009 13:26:04.964969 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k4bwn" Oct 09 13:26:04 crc kubenswrapper[4762]: E1009 13:26:04.965126 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 13:26:04 crc kubenswrapper[4762]: I1009 13:26:04.965258 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 13:26:04 crc kubenswrapper[4762]: E1009 13:26:04.965421 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 13:26:04 crc kubenswrapper[4762]: E1009 13:26:04.965512 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k4bwn" podUID="f9a76399-c2ae-487b-a52c-f0e271fb1d20" Oct 09 13:26:04 crc kubenswrapper[4762]: E1009 13:26:04.965584 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 13:26:04 crc kubenswrapper[4762]: I1009 13:26:04.977008 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:04 crc kubenswrapper[4762]: I1009 13:26:04.977041 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:04 crc kubenswrapper[4762]: I1009 13:26:04.977050 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:04 crc kubenswrapper[4762]: I1009 13:26:04.977070 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:04 crc kubenswrapper[4762]: I1009 13:26:04.977080 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:04Z","lastTransitionTime":"2025-10-09T13:26:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:04 crc kubenswrapper[4762]: I1009 13:26:04.989056 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b59b56cd547bd0e2f72ad712b04651a4ae65cf3a0df7865c0b0c16478261a06a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:04Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:05 crc kubenswrapper[4762]: I1009 13:26:05.003406 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:05Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:05 crc kubenswrapper[4762]: I1009 13:26:05.030774 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"92662de9-9784-432a-92d2-a668f815e8fd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c409df5880861cef6885822a19dc9bbe481342a849c18ac11c85a60fcee0f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9dbd3d536f2ff0e46947e1516b3b9def208d490f5e62bbde5bebf37690d26ac0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a6b9ddcf6f9632e0ab1ac7f145c90d4c1e404b44f6e4fdc547fa42a4736448d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1fe15b6fa2a4089c0ef0b19180a44b570bf28aeb719e8fb5c960c16f3bc3ee5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://443d1d116c58f5d8b2c5fc9051baf914244cb0776b1f912d11fe4316a0ec0567\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a6b026d474235bbb7b31530b4628a10c35b22baf4ee49759f28a9beb8177989\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a5b3dbf7adcc609a6fbc414eb3eba302ecf6355
c195221fa03ebe90a8b4eb84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa009d3f8b7f73cb12d99c5110020d32d469a604fc5b3715343b754861cbf2cb\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T13:25:54Z\\\",\\\"message\\\":\\\"y (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1009 13:25:54.441462 6005 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1009 13:25:54.441489 6005 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1009 13:25:54.441528 6005 handler.go:208] Removed *v1.Node event handler 2\\\\nI1009 13:25:54.441590 6005 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1009 13:25:54.441617 6005 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1009 13:25:54.441694 6005 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1009 13:25:54.441722 6005 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1009 13:25:54.441760 6005 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1009 13:25:54.441766 6005 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1009 13:25:54.441812 6005 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1009 13:25:54.441615 6005 handler.go:208] Removed *v1.Node event handler 7\\\\nI1009 13:25:54.441858 6005 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1009 13:25:54.441874 6005 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1009 13:25:54.441915 6005 factory.go:656] Stopping watch factory\\\\nI1009 13:25:54.441939 6005 ovnkube.go:599] Stopped ovnkube\\\\nI1009 13:25:5\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:51Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a5b3dbf7adcc609a6fbc414eb3eba302ecf6355c195221fa03ebe90a8b4eb84\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T13:25:56Z\\\",\\\"message\\\":\\\"81 6162 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:56Z is after 2025-08-24T17:21:41Z]\\\\nI1009 13:25:56.084172 6162 services_controller.go:451] Built service openshift-ingress/router-internal-default cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-ingress/router-internal-default_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", 
ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-ingress/router-internal-default\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]serv\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f5be977653547c33b4d2d5184688120b32866045e3b18a08be1c7c406d6b498\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Di
sabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bd03e6b064630a1cd71d2e88a4e99d513b30d0f225516ce8030cba879fe2422e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd03e6b064630a1cd71d2e88a4e99d513b30d0f225516ce8030cba879fe2422e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jl67g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:05Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:05 crc kubenswrapper[4762]: I1009 13:26:05.050293 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42c46f24d4579b9ef6d5f7a351830fc24872a571e6bc26a163bb5ace1e688037\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:05Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:05 crc kubenswrapper[4762]: I1009 13:26:05.066509 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2vkbh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"263d57f9-b10b-4ce1-adad-774600b977d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://732b66ff58c48b0703e0fd4585768652035af6797f66b586fc6f17ef3937d9d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gmcr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2vkbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:05Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:05 crc kubenswrapper[4762]: I1009 13:26:05.079679 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:05 crc kubenswrapper[4762]: I1009 13:26:05.079737 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:05 crc kubenswrapper[4762]: I1009 13:26:05.079754 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:05 crc kubenswrapper[4762]: I1009 13:26:05.079778 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:05 crc kubenswrapper[4762]: I1009 13:26:05.079795 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:05Z","lastTransitionTime":"2025-10-09T13:26:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:05 crc kubenswrapper[4762]: I1009 13:26:05.085696 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-9wtqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c847aae6-277a-45dc-86d0-9b175f7e8177\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbc232c96b60c8678588d4902c8dfbf6fc0b30f8af768295c963aad3a9f4d644\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2kljt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\
\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-9wtqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:05Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:05 crc kubenswrapper[4762]: I1009 13:26:05.097769 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fx92z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf0dada3-5765-4a2e-b28a-f9291c2d6428\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6368fc75a63ebf7915390457c69a6a9b77e19726ba182437f5c616bea12245f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xpd4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aeff516f9d81e48c8bd350da51ebffc8f6f031f06100a5264cb7aca04674f79f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-
access-8xpd4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fx92z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:05Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:05 crc kubenswrapper[4762]: I1009 13:26:05.115413 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7bd1ecbd-1492-4e6a-87e8-1c913e084d9d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9ff1efe69d256b491a039e5f35442c087ce3b52fc7abf98b338e24c3e020b99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb2071dd369674ca2de7de56dd1250c763b8733d72889b60eff864774dc3d81b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e44b6f2021a1a4ccd714f86443c7cc235b9d
77cd455e68f7e042281ff0917569\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f33b9070a56fe51d2f39d9d509fc8cea2fada696703209c911b75f5c8f53e96d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9179f90a1a9a3c70467429b0471320ccf51b67f27c4d28d22ebc477cedab17d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1009 13:25:38.564754 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 13:25:38.572923 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2393231961/tls.crt::/tmp/serving-cert-2393231961/tls.key\\\\\\\"\\\\nI1009 13:25:44.418137 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 13:25:44.425303 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 13:25:44.425330 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 13:25:44.425348 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 13:25:44.425353 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 13:25:44.434300 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1009 13:25:44.434319 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1009 13:25:44.434323 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 13:25:44.434340 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 13:25:44.434344 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 13:25:44.434347 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 13:25:44.434350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 13:25:44.434353 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1009 13:25:44.436492 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:28Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://281873dc615f940d39a13cb0a18a2eb34eb7de3f9773d8845183edeb89d430f5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b0cc5e4351c64f1a4f07f8ec87ea48ddab393d4ac64228e8fbf20d3259fb630\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6b0cc5e4351c64f1a4f07f8ec87ea48ddab393d4ac64228e8fbf20d3259fb630\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:25Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:05Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:05 crc kubenswrapper[4762]: I1009 13:26:05.127838 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bj499" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b164d4fd-19df-4902-971f-5efe403e61e0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ab43a3e93731e2de1e0260a1933cc78188e7b561cb450d645c7b4fc00b691ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqf4b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:51Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bj499\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:05Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:05 crc kubenswrapper[4762]: I1009 13:26:05.144303 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:05Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:05 crc kubenswrapper[4762]: I1009 13:26:05.164801 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-n6lnd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf8b8ba7-96cd-4cdd-9925-94dd98242050\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ff1d6f7e6b424ebf8005fa3d140897100a8e7ed0095e7af05531cf5ad9f69b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\
\\":[{\\\"containerID\\\":\\\"cri-o://38f8c3c9395cbaf4a6426349a070b2d3b4ba4f83af8f5272a33d617f456c2e38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://38f8c3c9395cbaf4a6426349a070b2d3b4ba4f83af8f5272a33d617f456c2e38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a83c8fa2a9b44c19879eaa27ee0aceb5aa4f0c2d70347e497a62fc1ca236807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a83c8fa2a9b44c19879eaa27ee0aceb5aa4f0c2d70347e497a62fc1ca236807\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ece4883665cbaf9a3045d5ddde5584cc4cd6c3acb5a8bb3acbf7eaee644796e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ece4883665cbaf9a3045d5ddde5584cc4cd6c3acb5a8bb3acbf7eaee644796e9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:47Z
\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1aae59e2ff2b1d48f9d2b5d13c30ccc155f0ba8e2657ec99020d6c6a8977495e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1aae59e2ff2b1d48f9d2b5d13c30ccc155f0ba8e2657ec99020d6c6a8977495e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f1ff0c4b2b07617d5ea81b1d88ea74124e4739d667f1ad72eb36dad3d48e7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96f1ff0c4b2b07617d5ea81b1d88ea74124e4739d667f1ad72eb36dad3d48e7f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa9e24a4bd37675e1c6b4f6ba08d60b722add0bbcdf9eb51ace2e3fb35143d0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\"
,\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa9e24a4bd37675e1c6b4f6ba08d60b722add0bbcdf9eb51ace2e3fb35143d0d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-n6lnd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:05Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:05 crc kubenswrapper[4762]: I1009 13:26:05.181104 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:05Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:05 crc kubenswrapper[4762]: I1009 13:26:05.182336 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:05 crc kubenswrapper[4762]: I1009 13:26:05.182376 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:05 crc kubenswrapper[4762]: I1009 13:26:05.182388 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:05 crc kubenswrapper[4762]: I1009 13:26:05.182404 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:05 crc kubenswrapper[4762]: I1009 13:26:05.182415 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:05Z","lastTransitionTime":"2025-10-09T13:26:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:05 crc kubenswrapper[4762]: I1009 13:26:05.196677 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d2fc9df0c1cc14b4b9f8caff51e87059aeffaa2daeeb271d55585f7297d003c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4194b9211ce31f24383b3cabd274bfb9afef0c56583b802cb2c934ba81b05c43\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:05Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:05 crc kubenswrapper[4762]: I1009 13:26:05.209955 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"366049a3-acf6-488c-9f93-4557528d6d14\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbaae79b08d94e58f88c25cf641c2c24edc8f8ed5d5ffbf5fd3c68b24246a964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://be56bae2e58091d7381288b22608ea1d9ff05c002d923b3dc62b87fe4d4dfdc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5v6hv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:05Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:05 crc kubenswrapper[4762]: I1009 13:26:05.225875 4762 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/network-metrics-daemon-k4bwn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9a76399-c2ae-487b-a52c-f0e271fb1d20\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vrjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vrjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:58Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-k4bwn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:05Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:05 crc kubenswrapper[4762]: I1009 13:26:05.236484 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" Oct 09 13:26:05 crc kubenswrapper[4762]: I1009 13:26:05.237388 4762 scope.go:117] "RemoveContainer" containerID="3a5b3dbf7adcc609a6fbc414eb3eba302ecf6355c195221fa03ebe90a8b4eb84" Oct 09 13:26:05 crc 
kubenswrapper[4762]: E1009 13:26:05.237565 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-jl67g_openshift-ovn-kubernetes(92662de9-9784-432a-92d2-a668f815e8fd)\"" pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" podUID="92662de9-9784-432a-92d2-a668f815e8fd" Oct 09 13:26:05 crc kubenswrapper[4762]: I1009 13:26:05.241625 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dd0d2d4c-667f-43da-8074-b6e14823b755\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78a7e23eb6d5024d626963a06cf5790fcd6c7c17c82c823b2650c55273e427fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f807722a8b6059afed30f7f1fd32bcc168b8bf9d5eee02d74a42ab70ae5ff048\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1706f7f512083d1da015da3c7cd09c6aa4d497b83f8dfcd4ce0e8e966aa00b37\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488
ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd9c69d0be5e859f86da1745bdf82f003681f064e2580bfd454e6ba875bdcb61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:25Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:05Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:05 crc kubenswrapper[4762]: I1009 13:26:05.256354 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:05Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:05 crc kubenswrapper[4762]: I1009 13:26:05.272285 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-n6lnd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf8b8ba7-96cd-4cdd-9925-94dd98242050\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ff1d6f7e6b424ebf8005fa3d140897100a8e7ed0095e7af05531cf5ad9f69b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://38f8c3c9395cbaf4a6426349a070b2d3b4ba4f83af8f5272a33d617f456c2e38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"s
tarted\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://38f8c3c9395cbaf4a6426349a070b2d3b4ba4f83af8f5272a33d617f456c2e38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a83c8fa2a9b44c19879eaa27ee0aceb5aa4f0c2d70347e497a62fc1ca236807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a83c8fa2a9b44c19879eaa27ee0aceb5aa4f0c2d70347e497a62fc1ca236807\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ece4883665cbaf9a3045d5ddde5584cc4cd6c3acb5a8bb3acbf7eaee644796e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ece4883665cbaf9a3045d5ddde5584cc4cd6c3acb5a8bb3acbf7eaee644796e9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"
}]},{\\\"containerID\\\":\\\"cri-o://1aae59e2ff2b1d48f9d2b5d13c30ccc155f0ba8e2657ec99020d6c6a8977495e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1aae59e2ff2b1d48f9d2b5d13c30ccc155f0ba8e2657ec99020d6c6a8977495e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f1ff0c4b2b07617d5ea81b1d88ea74124e4739d667f1ad72eb36dad3d48e7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96f1ff0c4b2b07617d5ea81b1d88ea74124e4739d667f1ad72eb36dad3d48e7f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa9e24a4bd37675e1c6b4f6ba08d60b722add0bbcdf9eb51ace2e3fb35143d0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa9e24a4bd37675e1c6b4f6ba08d60b722add0bbcdf9eb51ace2e3fb35143d0d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":
\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-n6lnd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:05Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:05 crc kubenswrapper[4762]: I1009 13:26:05.284155 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bj499" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b164d4fd-19df-4902-971f-5efe403e61e0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ab43a3e93731e2de1e0260a1933cc78188e7b561cb450d645c7b4fc00b691ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqf4b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:51Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bj499\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:05Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:05 crc kubenswrapper[4762]: I1009 
13:26:05.284381 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:05 crc kubenswrapper[4762]: I1009 13:26:05.284423 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:05 crc kubenswrapper[4762]: I1009 13:26:05.284435 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:05 crc kubenswrapper[4762]: I1009 13:26:05.284451 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:05 crc kubenswrapper[4762]: I1009 13:26:05.284464 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:05Z","lastTransitionTime":"2025-10-09T13:26:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:05 crc kubenswrapper[4762]: I1009 13:26:05.297731 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dd0d2d4c-667f-43da-8074-b6e14823b755\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78a7e23eb6d5024d626963a06cf5790fcd6c7c17c82c823b2650c55273e427fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f807722a8b6059afed30f7f1fd32bcc168b8bf9d5eee02d74a42ab70ae5ff048\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"s
tarted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1706f7f512083d1da015da3c7cd09c6aa4d497b83f8dfcd4ce0e8e966aa00b37\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd9c69d0be5e859f86da1745bdf82f003681f064e2580bfd454e6ba875bdcb61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:25Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:05Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:05 crc kubenswrapper[4762]: I1009 13:26:05.316572 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:05Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:05 crc kubenswrapper[4762]: I1009 13:26:05.332064 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d2fc9df0c1cc14b4b9f8caff51e87059aeffaa2daeeb271d55585f7297d003c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4194b9211ce31f24383b3cabd274bfb9afef0c56583b802cb2c934ba81b05c43\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:05Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:05 crc kubenswrapper[4762]: I1009 13:26:05.345372 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"366049a3-acf6-488c-9f93-4557528d6d14\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbaae79b08d94e58f88c25cf641c2c24edc8f8ed5d5ffbf5fd3c68b24246a964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://be56bae2e58091d7381288b22608ea1d9ff05c002d923b3dc62b87fe4d4dfdc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5v6hv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:05Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:05 crc kubenswrapper[4762]: I1009 13:26:05.358744 4762 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/network-metrics-daemon-k4bwn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9a76399-c2ae-487b-a52c-f0e271fb1d20\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vrjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vrjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:58Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-k4bwn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:05Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:05 crc kubenswrapper[4762]: I1009 13:26:05.372569 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42c46f24d4579b9ef6d5f7a351830fc24872a571e6bc26a163bb5ace1e688037\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:05Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:05 crc kubenswrapper[4762]: I1009 13:26:05.383751 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b59b56cd547bd0e2f72ad712b04651a4ae65cf3a0df7865c0b0c16478261a06a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:05Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:05 crc kubenswrapper[4762]: I1009 13:26:05.386164 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:05 crc kubenswrapper[4762]: I1009 13:26:05.386207 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:05 crc kubenswrapper[4762]: I1009 13:26:05.386218 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:05 crc kubenswrapper[4762]: I1009 13:26:05.386234 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:05 crc kubenswrapper[4762]: I1009 13:26:05.386247 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:05Z","lastTransitionTime":"2025-10-09T13:26:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:05 crc kubenswrapper[4762]: I1009 13:26:05.395277 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:05Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:05 crc kubenswrapper[4762]: I1009 13:26:05.414465 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"92662de9-9784-432a-92d2-a668f815e8fd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c409df5880861cef6885822a19dc9bbe481342a849c18ac11c85a60fcee0f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9dbd3d536f2ff0e46947e1516b3b9def208d490f5e62bbde5bebf37690d26ac0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a6b9ddcf6f9632e0ab1ac7f145c90d4c1e404b44f6e4fdc547fa42a4736448d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1fe15b6fa2a4089c0ef0b19180a44b570bf28aeb719e8fb5c960c16f3bc3ee5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://443d1d116c58f5d8b2c5fc9051baf914244cb0776b1f912d11fe4316a0ec0567\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a6b026d474235bbb7b31530b4628a10c35b22baf4ee49759f28a9beb8177989\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-s
ocket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a5b3dbf7adcc609a6fbc414eb3eba302ecf6355c195221fa03ebe90a8b4eb84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a5b3dbf7adcc609a6fbc414eb3eba302ecf6355c195221fa03ebe90a8b4eb84\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T13:25:56Z\\\",\\\"message\\\":\\\"81 6162 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:56Z is after 2025-08-24T17:21:41Z]\\\\nI1009 13:25:56.084172 6162 services_controller.go:451] Built service openshift-ingress/router-internal-default cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-ingress/router-internal-default_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-ingress/router-internal-default\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]serv\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:55Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-jl67g_openshift-ovn-kubernetes(92662de9-9784-432a-92d2-a668f815e8fd)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f5be977653547c33b4d2d5184688120b32866045e3b18a08be1c7c406d6b498\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bd03e6b064630a1cd71d2e88a4e99d513b30d0f225516ce8030cba879fe2422e\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd03e6b064630a1cd71d2e88a4e99d513b30d0f225516ce8030cba879fe2422e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jl67g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:05Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:05 crc kubenswrapper[4762]: I1009 13:26:05.433669 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7bd1ecbd-1492-4e6a-87e8-1c913e084d9d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9ff1efe69d256b491a039e5f35442c087ce3b52fc7abf98b338e24c3e020b99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb2071dd369674ca2de7de56dd1250c763b8733d72889b60eff864774dc3d81b\\\",\\\"i
mage\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e44b6f2021a1a4ccd714f86443c7cc235b9d77cd455e68f7e042281ff0917569\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f33b9070a56fe51d2f39d9d509fc8cea2fada696703209c911b75f5c8f53e96d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9179f90a1a9a3c70467429b0471320ccf51b67f27c4d28d22ebc477cedab17d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1009 13:25:38.564754 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 13:25:38.572923 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2393231961/tls.crt::/tmp/serving-cert-2393231961/tls.key\\\\\\\"\\\\nI1009 13:25:44.418137 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 13:25:44.425303 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 13:25:44.425330 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 13:25:44.425348 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 13:25:44.425353 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 13:25:44.434300 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1009 13:25:44.434319 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1009 13:25:44.434323 1 secure_serving.go:69] Use of insecure cipher 
'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 13:25:44.434340 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 13:25:44.434344 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 13:25:44.434347 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 13:25:44.434350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 13:25:44.434353 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1009 13:25:44.436492 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:28Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://281873dc615f940d39a13cb0a18a2eb34eb7de3f9773d8845183edeb89d430f5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b0cc5e4351c64f1a4f07f8ec87ea48ddab393d4ac64228e8fbf20d3259fb630\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6b0cc5e4351c64f1a4f07f8ec87ea48ddab393d4ac64228e8fbf20d3259fb630\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:25Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:05Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:05 crc kubenswrapper[4762]: I1009 13:26:05.444628 4762 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-dns/node-resolver-2vkbh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"263d57f9-b10b-4ce1-adad-774600b977d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://732b66ff58c48b0703e0fd4585768652035af6797f66b586fc6f17ef3937d9d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gmcr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2vkbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:05Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:05 crc kubenswrapper[4762]: I1009 13:26:05.460501 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-9wtqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c847aae6-277a-45dc-86d0-9b175f7e8177\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbc232c96b60c8678588d4902c8dfbf6fc0b30f8af768295c963aad3a9f4d644\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2kljt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-9wtqb\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:05Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:05 crc kubenswrapper[4762]: I1009 13:26:05.471194 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fx92z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf0dada3-5765-4a2e-b28a-f9291c2d6428\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6368fc75a63ebf7915390457c69a6a9b77e19726ba182437f5c616bea12245f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xpd4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aeff516f9d81e48c8bd350da51ebffc8f6f031f06100a5264cb7aca04674f79f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xpd4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\
\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fx92z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:05Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:05 crc kubenswrapper[4762]: I1009 13:26:05.489368 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:05 crc kubenswrapper[4762]: I1009 13:26:05.489406 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:05 crc kubenswrapper[4762]: I1009 13:26:05.489415 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:05 crc kubenswrapper[4762]: I1009 13:26:05.489428 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:05 crc kubenswrapper[4762]: I1009 13:26:05.489436 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:05Z","lastTransitionTime":"2025-10-09T13:26:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:05 crc kubenswrapper[4762]: I1009 13:26:05.594402 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:05 crc kubenswrapper[4762]: I1009 13:26:05.594480 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:05 crc kubenswrapper[4762]: I1009 13:26:05.594505 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:05 crc kubenswrapper[4762]: I1009 13:26:05.594537 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:05 crc kubenswrapper[4762]: I1009 13:26:05.594559 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:05Z","lastTransitionTime":"2025-10-09T13:26:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:05 crc kubenswrapper[4762]: I1009 13:26:05.698031 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:05 crc kubenswrapper[4762]: I1009 13:26:05.698123 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:05 crc kubenswrapper[4762]: I1009 13:26:05.698152 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:05 crc kubenswrapper[4762]: I1009 13:26:05.698187 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:05 crc kubenswrapper[4762]: I1009 13:26:05.698211 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:05Z","lastTransitionTime":"2025-10-09T13:26:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:05 crc kubenswrapper[4762]: I1009 13:26:05.800900 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:05 crc kubenswrapper[4762]: I1009 13:26:05.800973 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:05 crc kubenswrapper[4762]: I1009 13:26:05.800996 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:05 crc kubenswrapper[4762]: I1009 13:26:05.801026 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:05 crc kubenswrapper[4762]: I1009 13:26:05.801049 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:05Z","lastTransitionTime":"2025-10-09T13:26:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:05 crc kubenswrapper[4762]: I1009 13:26:05.903986 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:05 crc kubenswrapper[4762]: I1009 13:26:05.904051 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:05 crc kubenswrapper[4762]: I1009 13:26:05.904071 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:05 crc kubenswrapper[4762]: I1009 13:26:05.904097 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:05 crc kubenswrapper[4762]: I1009 13:26:05.904118 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:05Z","lastTransitionTime":"2025-10-09T13:26:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:06 crc kubenswrapper[4762]: I1009 13:26:06.007317 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:06 crc kubenswrapper[4762]: I1009 13:26:06.007388 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:06 crc kubenswrapper[4762]: I1009 13:26:06.007406 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:06 crc kubenswrapper[4762]: I1009 13:26:06.007430 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:06 crc kubenswrapper[4762]: I1009 13:26:06.007450 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:06Z","lastTransitionTime":"2025-10-09T13:26:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:06 crc kubenswrapper[4762]: I1009 13:26:06.130725 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:06 crc kubenswrapper[4762]: I1009 13:26:06.130807 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:06 crc kubenswrapper[4762]: I1009 13:26:06.130834 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:06 crc kubenswrapper[4762]: I1009 13:26:06.130859 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:06 crc kubenswrapper[4762]: I1009 13:26:06.130877 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:06Z","lastTransitionTime":"2025-10-09T13:26:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:06 crc kubenswrapper[4762]: I1009 13:26:06.233164 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:06 crc kubenswrapper[4762]: I1009 13:26:06.233234 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:06 crc kubenswrapper[4762]: I1009 13:26:06.233253 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:06 crc kubenswrapper[4762]: I1009 13:26:06.233278 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:06 crc kubenswrapper[4762]: I1009 13:26:06.233296 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:06Z","lastTransitionTime":"2025-10-09T13:26:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:06 crc kubenswrapper[4762]: I1009 13:26:06.336242 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:06 crc kubenswrapper[4762]: I1009 13:26:06.336310 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:06 crc kubenswrapper[4762]: I1009 13:26:06.336322 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:06 crc kubenswrapper[4762]: I1009 13:26:06.336338 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:06 crc kubenswrapper[4762]: I1009 13:26:06.336347 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:06Z","lastTransitionTime":"2025-10-09T13:26:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:06 crc kubenswrapper[4762]: I1009 13:26:06.440058 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:06 crc kubenswrapper[4762]: I1009 13:26:06.440124 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:06 crc kubenswrapper[4762]: I1009 13:26:06.440146 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:06 crc kubenswrapper[4762]: I1009 13:26:06.440176 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:06 crc kubenswrapper[4762]: I1009 13:26:06.440201 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:06Z","lastTransitionTime":"2025-10-09T13:26:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:06 crc kubenswrapper[4762]: I1009 13:26:06.482623 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f9a76399-c2ae-487b-a52c-f0e271fb1d20-metrics-certs\") pod \"network-metrics-daemon-k4bwn\" (UID: \"f9a76399-c2ae-487b-a52c-f0e271fb1d20\") " pod="openshift-multus/network-metrics-daemon-k4bwn" Oct 09 13:26:06 crc kubenswrapper[4762]: E1009 13:26:06.482880 4762 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 09 13:26:06 crc kubenswrapper[4762]: E1009 13:26:06.483006 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f9a76399-c2ae-487b-a52c-f0e271fb1d20-metrics-certs podName:f9a76399-c2ae-487b-a52c-f0e271fb1d20 nodeName:}" failed. No retries permitted until 2025-10-09 13:26:14.482975783 +0000 UTC m=+50.256766852 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/f9a76399-c2ae-487b-a52c-f0e271fb1d20-metrics-certs") pod "network-metrics-daemon-k4bwn" (UID: "f9a76399-c2ae-487b-a52c-f0e271fb1d20") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 09 13:26:06 crc kubenswrapper[4762]: I1009 13:26:06.543175 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:06 crc kubenswrapper[4762]: I1009 13:26:06.543227 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:06 crc kubenswrapper[4762]: I1009 13:26:06.543239 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:06 crc kubenswrapper[4762]: I1009 13:26:06.543257 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:06 crc kubenswrapper[4762]: I1009 13:26:06.543269 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:06Z","lastTransitionTime":"2025-10-09T13:26:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:06 crc kubenswrapper[4762]: I1009 13:26:06.646434 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:06 crc kubenswrapper[4762]: I1009 13:26:06.646493 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:06 crc kubenswrapper[4762]: I1009 13:26:06.646552 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:06 crc kubenswrapper[4762]: I1009 13:26:06.646576 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:06 crc kubenswrapper[4762]: I1009 13:26:06.646594 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:06Z","lastTransitionTime":"2025-10-09T13:26:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:06 crc kubenswrapper[4762]: I1009 13:26:06.750531 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:06 crc kubenswrapper[4762]: I1009 13:26:06.750602 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:06 crc kubenswrapper[4762]: I1009 13:26:06.750669 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:06 crc kubenswrapper[4762]: I1009 13:26:06.750704 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:06 crc kubenswrapper[4762]: I1009 13:26:06.750727 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:06Z","lastTransitionTime":"2025-10-09T13:26:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:06 crc kubenswrapper[4762]: I1009 13:26:06.853560 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:06 crc kubenswrapper[4762]: I1009 13:26:06.853625 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:06 crc kubenswrapper[4762]: I1009 13:26:06.853670 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:06 crc kubenswrapper[4762]: I1009 13:26:06.853695 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:06 crc kubenswrapper[4762]: I1009 13:26:06.853714 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:06Z","lastTransitionTime":"2025-10-09T13:26:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:06 crc kubenswrapper[4762]: I1009 13:26:06.956786 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:06 crc kubenswrapper[4762]: I1009 13:26:06.956822 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:06 crc kubenswrapper[4762]: I1009 13:26:06.956836 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:06 crc kubenswrapper[4762]: I1009 13:26:06.956853 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:06 crc kubenswrapper[4762]: I1009 13:26:06.956863 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:06Z","lastTransitionTime":"2025-10-09T13:26:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:06 crc kubenswrapper[4762]: I1009 13:26:06.964695 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k4bwn" Oct 09 13:26:06 crc kubenswrapper[4762]: I1009 13:26:06.964722 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 13:26:06 crc kubenswrapper[4762]: I1009 13:26:06.964736 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 13:26:06 crc kubenswrapper[4762]: E1009 13:26:06.964810 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k4bwn" podUID="f9a76399-c2ae-487b-a52c-f0e271fb1d20" Oct 09 13:26:06 crc kubenswrapper[4762]: I1009 13:26:06.964821 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 13:26:06 crc kubenswrapper[4762]: E1009 13:26:06.964930 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 13:26:06 crc kubenswrapper[4762]: E1009 13:26:06.965055 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 13:26:06 crc kubenswrapper[4762]: E1009 13:26:06.965179 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 13:26:07 crc kubenswrapper[4762]: I1009 13:26:07.063771 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:07 crc kubenswrapper[4762]: I1009 13:26:07.063856 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:07 crc kubenswrapper[4762]: I1009 13:26:07.063881 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:07 crc kubenswrapper[4762]: I1009 13:26:07.063910 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:07 crc kubenswrapper[4762]: I1009 13:26:07.063930 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:07Z","lastTransitionTime":"2025-10-09T13:26:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:07 crc kubenswrapper[4762]: I1009 13:26:07.166495 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:07 crc kubenswrapper[4762]: I1009 13:26:07.166558 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:07 crc kubenswrapper[4762]: I1009 13:26:07.166580 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:07 crc kubenswrapper[4762]: I1009 13:26:07.166612 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:07 crc kubenswrapper[4762]: I1009 13:26:07.166664 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:07Z","lastTransitionTime":"2025-10-09T13:26:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:07 crc kubenswrapper[4762]: I1009 13:26:07.268890 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:07 crc kubenswrapper[4762]: I1009 13:26:07.268965 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:07 crc kubenswrapper[4762]: I1009 13:26:07.268989 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:07 crc kubenswrapper[4762]: I1009 13:26:07.269019 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:07 crc kubenswrapper[4762]: I1009 13:26:07.269049 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:07Z","lastTransitionTime":"2025-10-09T13:26:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:07 crc kubenswrapper[4762]: I1009 13:26:07.372302 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:07 crc kubenswrapper[4762]: I1009 13:26:07.372368 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:07 crc kubenswrapper[4762]: I1009 13:26:07.372386 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:07 crc kubenswrapper[4762]: I1009 13:26:07.372408 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:07 crc kubenswrapper[4762]: I1009 13:26:07.372427 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:07Z","lastTransitionTime":"2025-10-09T13:26:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:07 crc kubenswrapper[4762]: I1009 13:26:07.476221 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:07 crc kubenswrapper[4762]: I1009 13:26:07.476349 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:07 crc kubenswrapper[4762]: I1009 13:26:07.476374 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:07 crc kubenswrapper[4762]: I1009 13:26:07.476406 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:07 crc kubenswrapper[4762]: I1009 13:26:07.476428 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:07Z","lastTransitionTime":"2025-10-09T13:26:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:07 crc kubenswrapper[4762]: I1009 13:26:07.579776 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:07 crc kubenswrapper[4762]: I1009 13:26:07.579831 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:07 crc kubenswrapper[4762]: I1009 13:26:07.579847 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:07 crc kubenswrapper[4762]: I1009 13:26:07.579869 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:07 crc kubenswrapper[4762]: I1009 13:26:07.579881 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:07Z","lastTransitionTime":"2025-10-09T13:26:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:07 crc kubenswrapper[4762]: I1009 13:26:07.682628 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:07 crc kubenswrapper[4762]: I1009 13:26:07.682683 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:07 crc kubenswrapper[4762]: I1009 13:26:07.682693 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:07 crc kubenswrapper[4762]: I1009 13:26:07.682709 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:07 crc kubenswrapper[4762]: I1009 13:26:07.682719 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:07Z","lastTransitionTime":"2025-10-09T13:26:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:07 crc kubenswrapper[4762]: I1009 13:26:07.788233 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:07 crc kubenswrapper[4762]: I1009 13:26:07.788308 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:07 crc kubenswrapper[4762]: I1009 13:26:07.788326 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:07 crc kubenswrapper[4762]: I1009 13:26:07.788349 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:07 crc kubenswrapper[4762]: I1009 13:26:07.788365 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:07Z","lastTransitionTime":"2025-10-09T13:26:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:07 crc kubenswrapper[4762]: I1009 13:26:07.891483 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:07 crc kubenswrapper[4762]: I1009 13:26:07.891545 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:07 crc kubenswrapper[4762]: I1009 13:26:07.891564 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:07 crc kubenswrapper[4762]: I1009 13:26:07.891604 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:07 crc kubenswrapper[4762]: I1009 13:26:07.891623 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:07Z","lastTransitionTime":"2025-10-09T13:26:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:07 crc kubenswrapper[4762]: I1009 13:26:07.995081 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:07 crc kubenswrapper[4762]: I1009 13:26:07.995133 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:07 crc kubenswrapper[4762]: I1009 13:26:07.995143 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:07 crc kubenswrapper[4762]: I1009 13:26:07.995163 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:07 crc kubenswrapper[4762]: I1009 13:26:07.995177 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:07Z","lastTransitionTime":"2025-10-09T13:26:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:08 crc kubenswrapper[4762]: I1009 13:26:08.098224 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:08 crc kubenswrapper[4762]: I1009 13:26:08.098266 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:08 crc kubenswrapper[4762]: I1009 13:26:08.098277 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:08 crc kubenswrapper[4762]: I1009 13:26:08.098292 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:08 crc kubenswrapper[4762]: I1009 13:26:08.098304 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:08Z","lastTransitionTime":"2025-10-09T13:26:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:08 crc kubenswrapper[4762]: I1009 13:26:08.200956 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:08 crc kubenswrapper[4762]: I1009 13:26:08.201005 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:08 crc kubenswrapper[4762]: I1009 13:26:08.201016 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:08 crc kubenswrapper[4762]: I1009 13:26:08.201035 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:08 crc kubenswrapper[4762]: I1009 13:26:08.201048 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:08Z","lastTransitionTime":"2025-10-09T13:26:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:08 crc kubenswrapper[4762]: I1009 13:26:08.303251 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:08 crc kubenswrapper[4762]: I1009 13:26:08.303313 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:08 crc kubenswrapper[4762]: I1009 13:26:08.303330 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:08 crc kubenswrapper[4762]: I1009 13:26:08.303353 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:08 crc kubenswrapper[4762]: I1009 13:26:08.303370 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:08Z","lastTransitionTime":"2025-10-09T13:26:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:08 crc kubenswrapper[4762]: I1009 13:26:08.407057 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:08 crc kubenswrapper[4762]: I1009 13:26:08.407116 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:08 crc kubenswrapper[4762]: I1009 13:26:08.407135 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:08 crc kubenswrapper[4762]: I1009 13:26:08.407160 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:08 crc kubenswrapper[4762]: I1009 13:26:08.407178 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:08Z","lastTransitionTime":"2025-10-09T13:26:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:08 crc kubenswrapper[4762]: I1009 13:26:08.509904 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:08 crc kubenswrapper[4762]: I1009 13:26:08.509973 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:08 crc kubenswrapper[4762]: I1009 13:26:08.509992 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:08 crc kubenswrapper[4762]: I1009 13:26:08.510018 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:08 crc kubenswrapper[4762]: I1009 13:26:08.510035 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:08Z","lastTransitionTime":"2025-10-09T13:26:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:08 crc kubenswrapper[4762]: I1009 13:26:08.613996 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:08 crc kubenswrapper[4762]: I1009 13:26:08.614055 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:08 crc kubenswrapper[4762]: I1009 13:26:08.614072 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:08 crc kubenswrapper[4762]: I1009 13:26:08.614096 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:08 crc kubenswrapper[4762]: I1009 13:26:08.614113 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:08Z","lastTransitionTime":"2025-10-09T13:26:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:08 crc kubenswrapper[4762]: I1009 13:26:08.716443 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:08 crc kubenswrapper[4762]: I1009 13:26:08.716483 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:08 crc kubenswrapper[4762]: I1009 13:26:08.716605 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:08 crc kubenswrapper[4762]: I1009 13:26:08.716622 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:08 crc kubenswrapper[4762]: I1009 13:26:08.716846 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:08Z","lastTransitionTime":"2025-10-09T13:26:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:08 crc kubenswrapper[4762]: I1009 13:26:08.820498 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:08 crc kubenswrapper[4762]: I1009 13:26:08.820563 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:08 crc kubenswrapper[4762]: I1009 13:26:08.820586 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:08 crc kubenswrapper[4762]: I1009 13:26:08.820617 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:08 crc kubenswrapper[4762]: I1009 13:26:08.820673 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:08Z","lastTransitionTime":"2025-10-09T13:26:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:08 crc kubenswrapper[4762]: I1009 13:26:08.923762 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:08 crc kubenswrapper[4762]: I1009 13:26:08.923818 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:08 crc kubenswrapper[4762]: I1009 13:26:08.923837 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:08 crc kubenswrapper[4762]: I1009 13:26:08.923860 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:08 crc kubenswrapper[4762]: I1009 13:26:08.923876 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:08Z","lastTransitionTime":"2025-10-09T13:26:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:08 crc kubenswrapper[4762]: I1009 13:26:08.965170 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 13:26:08 crc kubenswrapper[4762]: I1009 13:26:08.965234 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 13:26:08 crc kubenswrapper[4762]: I1009 13:26:08.965245 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k4bwn" Oct 09 13:26:08 crc kubenswrapper[4762]: I1009 13:26:08.965184 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 13:26:08 crc kubenswrapper[4762]: E1009 13:26:08.965366 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 13:26:08 crc kubenswrapper[4762]: E1009 13:26:08.965814 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 13:26:08 crc kubenswrapper[4762]: E1009 13:26:08.965961 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 13:26:08 crc kubenswrapper[4762]: E1009 13:26:08.965617 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k4bwn" podUID="f9a76399-c2ae-487b-a52c-f0e271fb1d20" Oct 09 13:26:09 crc kubenswrapper[4762]: I1009 13:26:09.027172 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:09 crc kubenswrapper[4762]: I1009 13:26:09.027241 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:09 crc kubenswrapper[4762]: I1009 13:26:09.027262 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:09 crc kubenswrapper[4762]: I1009 13:26:09.027334 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:09 crc kubenswrapper[4762]: I1009 13:26:09.027358 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:09Z","lastTransitionTime":"2025-10-09T13:26:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:09 crc kubenswrapper[4762]: I1009 13:26:09.130484 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:09 crc kubenswrapper[4762]: I1009 13:26:09.130527 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:09 crc kubenswrapper[4762]: I1009 13:26:09.130538 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:09 crc kubenswrapper[4762]: I1009 13:26:09.130555 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:09 crc kubenswrapper[4762]: I1009 13:26:09.130566 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:09Z","lastTransitionTime":"2025-10-09T13:26:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:09 crc kubenswrapper[4762]: I1009 13:26:09.233142 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:09 crc kubenswrapper[4762]: I1009 13:26:09.233203 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:09 crc kubenswrapper[4762]: I1009 13:26:09.233214 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:09 crc kubenswrapper[4762]: I1009 13:26:09.233231 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:09 crc kubenswrapper[4762]: I1009 13:26:09.233244 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:09Z","lastTransitionTime":"2025-10-09T13:26:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:09 crc kubenswrapper[4762]: I1009 13:26:09.335971 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:09 crc kubenswrapper[4762]: I1009 13:26:09.336032 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:09 crc kubenswrapper[4762]: I1009 13:26:09.336045 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:09 crc kubenswrapper[4762]: I1009 13:26:09.336069 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:09 crc kubenswrapper[4762]: I1009 13:26:09.336081 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:09Z","lastTransitionTime":"2025-10-09T13:26:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:09 crc kubenswrapper[4762]: I1009 13:26:09.439515 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:09 crc kubenswrapper[4762]: I1009 13:26:09.439593 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:09 crc kubenswrapper[4762]: I1009 13:26:09.439613 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:09 crc kubenswrapper[4762]: I1009 13:26:09.439696 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:09 crc kubenswrapper[4762]: I1009 13:26:09.439734 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:09Z","lastTransitionTime":"2025-10-09T13:26:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:09 crc kubenswrapper[4762]: I1009 13:26:09.543003 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:09 crc kubenswrapper[4762]: I1009 13:26:09.543066 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:09 crc kubenswrapper[4762]: I1009 13:26:09.543095 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:09 crc kubenswrapper[4762]: I1009 13:26:09.543123 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:09 crc kubenswrapper[4762]: I1009 13:26:09.543141 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:09Z","lastTransitionTime":"2025-10-09T13:26:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:09 crc kubenswrapper[4762]: I1009 13:26:09.645477 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:09 crc kubenswrapper[4762]: I1009 13:26:09.645545 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:09 crc kubenswrapper[4762]: I1009 13:26:09.645558 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:09 crc kubenswrapper[4762]: I1009 13:26:09.645573 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:09 crc kubenswrapper[4762]: I1009 13:26:09.645585 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:09Z","lastTransitionTime":"2025-10-09T13:26:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:09 crc kubenswrapper[4762]: I1009 13:26:09.748001 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:09 crc kubenswrapper[4762]: I1009 13:26:09.748112 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:09 crc kubenswrapper[4762]: I1009 13:26:09.748133 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:09 crc kubenswrapper[4762]: I1009 13:26:09.748154 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:09 crc kubenswrapper[4762]: I1009 13:26:09.748169 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:09Z","lastTransitionTime":"2025-10-09T13:26:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:09 crc kubenswrapper[4762]: I1009 13:26:09.850250 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:09 crc kubenswrapper[4762]: I1009 13:26:09.850285 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:09 crc kubenswrapper[4762]: I1009 13:26:09.850293 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:09 crc kubenswrapper[4762]: I1009 13:26:09.850308 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:09 crc kubenswrapper[4762]: I1009 13:26:09.850317 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:09Z","lastTransitionTime":"2025-10-09T13:26:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:09 crc kubenswrapper[4762]: I1009 13:26:09.953321 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:09 crc kubenswrapper[4762]: I1009 13:26:09.953366 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:09 crc kubenswrapper[4762]: I1009 13:26:09.953383 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:09 crc kubenswrapper[4762]: I1009 13:26:09.953404 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:09 crc kubenswrapper[4762]: I1009 13:26:09.953421 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:09Z","lastTransitionTime":"2025-10-09T13:26:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:10 crc kubenswrapper[4762]: I1009 13:26:10.056319 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:10 crc kubenswrapper[4762]: I1009 13:26:10.056463 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:10 crc kubenswrapper[4762]: I1009 13:26:10.056478 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:10 crc kubenswrapper[4762]: I1009 13:26:10.056494 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:10 crc kubenswrapper[4762]: I1009 13:26:10.056506 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:10Z","lastTransitionTime":"2025-10-09T13:26:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:10 crc kubenswrapper[4762]: I1009 13:26:10.159685 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:10 crc kubenswrapper[4762]: I1009 13:26:10.159731 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:10 crc kubenswrapper[4762]: I1009 13:26:10.159743 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:10 crc kubenswrapper[4762]: I1009 13:26:10.159762 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:10 crc kubenswrapper[4762]: I1009 13:26:10.159774 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:10Z","lastTransitionTime":"2025-10-09T13:26:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:10 crc kubenswrapper[4762]: I1009 13:26:10.262470 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:10 crc kubenswrapper[4762]: I1009 13:26:10.262751 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:10 crc kubenswrapper[4762]: I1009 13:26:10.262853 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:10 crc kubenswrapper[4762]: I1009 13:26:10.262996 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:10 crc kubenswrapper[4762]: I1009 13:26:10.263111 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:10Z","lastTransitionTime":"2025-10-09T13:26:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:10 crc kubenswrapper[4762]: I1009 13:26:10.366901 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:10 crc kubenswrapper[4762]: I1009 13:26:10.366937 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:10 crc kubenswrapper[4762]: I1009 13:26:10.366951 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:10 crc kubenswrapper[4762]: I1009 13:26:10.366965 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:10 crc kubenswrapper[4762]: I1009 13:26:10.366976 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:10Z","lastTransitionTime":"2025-10-09T13:26:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:10 crc kubenswrapper[4762]: I1009 13:26:10.469806 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:10 crc kubenswrapper[4762]: I1009 13:26:10.469865 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:10 crc kubenswrapper[4762]: I1009 13:26:10.469896 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:10 crc kubenswrapper[4762]: I1009 13:26:10.469941 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:10 crc kubenswrapper[4762]: I1009 13:26:10.469963 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:10Z","lastTransitionTime":"2025-10-09T13:26:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:10 crc kubenswrapper[4762]: I1009 13:26:10.573380 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:10 crc kubenswrapper[4762]: I1009 13:26:10.573434 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:10 crc kubenswrapper[4762]: I1009 13:26:10.573455 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:10 crc kubenswrapper[4762]: I1009 13:26:10.573483 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:10 crc kubenswrapper[4762]: I1009 13:26:10.573505 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:10Z","lastTransitionTime":"2025-10-09T13:26:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:10 crc kubenswrapper[4762]: I1009 13:26:10.676607 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:10 crc kubenswrapper[4762]: I1009 13:26:10.676700 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:10 crc kubenswrapper[4762]: I1009 13:26:10.676713 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:10 crc kubenswrapper[4762]: I1009 13:26:10.676732 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:10 crc kubenswrapper[4762]: I1009 13:26:10.676742 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:10Z","lastTransitionTime":"2025-10-09T13:26:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:10 crc kubenswrapper[4762]: I1009 13:26:10.779309 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:10 crc kubenswrapper[4762]: I1009 13:26:10.779353 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:10 crc kubenswrapper[4762]: I1009 13:26:10.779364 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:10 crc kubenswrapper[4762]: I1009 13:26:10.779380 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:10 crc kubenswrapper[4762]: I1009 13:26:10.779393 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:10Z","lastTransitionTime":"2025-10-09T13:26:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:10 crc kubenswrapper[4762]: I1009 13:26:10.883448 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:10 crc kubenswrapper[4762]: I1009 13:26:10.883515 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:10 crc kubenswrapper[4762]: I1009 13:26:10.883533 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:10 crc kubenswrapper[4762]: I1009 13:26:10.883557 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:10 crc kubenswrapper[4762]: I1009 13:26:10.883575 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:10Z","lastTransitionTime":"2025-10-09T13:26:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:10 crc kubenswrapper[4762]: I1009 13:26:10.964532 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 13:26:10 crc kubenswrapper[4762]: I1009 13:26:10.964618 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 13:26:10 crc kubenswrapper[4762]: E1009 13:26:10.964807 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 13:26:10 crc kubenswrapper[4762]: I1009 13:26:10.964557 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 13:26:10 crc kubenswrapper[4762]: I1009 13:26:10.964902 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k4bwn" Oct 09 13:26:10 crc kubenswrapper[4762]: E1009 13:26:10.965069 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 13:26:10 crc kubenswrapper[4762]: E1009 13:26:10.965255 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 13:26:10 crc kubenswrapper[4762]: E1009 13:26:10.965410 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-k4bwn" podUID="f9a76399-c2ae-487b-a52c-f0e271fb1d20" Oct 09 13:26:10 crc kubenswrapper[4762]: I1009 13:26:10.986674 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:10 crc kubenswrapper[4762]: I1009 13:26:10.986715 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:10 crc kubenswrapper[4762]: I1009 13:26:10.986727 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:10 crc kubenswrapper[4762]: I1009 13:26:10.986742 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:10 crc kubenswrapper[4762]: I1009 13:26:10.986753 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:10Z","lastTransitionTime":"2025-10-09T13:26:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:11 crc kubenswrapper[4762]: I1009 13:26:11.089334 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:11 crc kubenswrapper[4762]: I1009 13:26:11.089418 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:11 crc kubenswrapper[4762]: I1009 13:26:11.089444 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:11 crc kubenswrapper[4762]: I1009 13:26:11.089479 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:11 crc kubenswrapper[4762]: I1009 13:26:11.089502 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:11Z","lastTransitionTime":"2025-10-09T13:26:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:11 crc kubenswrapper[4762]: I1009 13:26:11.192212 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:11 crc kubenswrapper[4762]: I1009 13:26:11.192289 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:11 crc kubenswrapper[4762]: I1009 13:26:11.192311 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:11 crc kubenswrapper[4762]: I1009 13:26:11.192341 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:11 crc kubenswrapper[4762]: I1009 13:26:11.192359 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:11Z","lastTransitionTime":"2025-10-09T13:26:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:11 crc kubenswrapper[4762]: I1009 13:26:11.294766 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:11 crc kubenswrapper[4762]: I1009 13:26:11.294905 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:11 crc kubenswrapper[4762]: I1009 13:26:11.294929 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:11 crc kubenswrapper[4762]: I1009 13:26:11.294959 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:11 crc kubenswrapper[4762]: I1009 13:26:11.294981 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:11Z","lastTransitionTime":"2025-10-09T13:26:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:11 crc kubenswrapper[4762]: I1009 13:26:11.395141 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:11 crc kubenswrapper[4762]: I1009 13:26:11.395185 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:11 crc kubenswrapper[4762]: I1009 13:26:11.395197 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:11 crc kubenswrapper[4762]: I1009 13:26:11.395212 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:11 crc kubenswrapper[4762]: I1009 13:26:11.395222 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:11Z","lastTransitionTime":"2025-10-09T13:26:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Oct 09 13:26:11 crc kubenswrapper[4762]: E1009 13:26:11.407834 4762 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:11Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:11Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[ ... 50-entry node image inventory elided ... ],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9a48ddb1-1645-4cf1-ba92-96ea5fd03a1b\\\",\\\"systemUUID\\\":\\\"cb0479c9-186e-453b-880a-de1db201ede6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:11Z is after 2025-08-24T17:21:41Z"
Oct 09 13:26:11 crc kubenswrapper[4762]: I1009 13:26:11.412229 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 09 13:26:11 crc kubenswrapper[4762]: I1009 13:26:11.412262 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
event="NodeHasNoDiskPressure" Oct 09 13:26:11 crc kubenswrapper[4762]: I1009 13:26:11.412273 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:11 crc kubenswrapper[4762]: I1009 13:26:11.412290 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:11 crc kubenswrapper[4762]: I1009 13:26:11.412301 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:11Z","lastTransitionTime":"2025-10-09T13:26:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:11 crc kubenswrapper[4762]: E1009 13:26:11.429100 4762 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:11Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:11Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9a48ddb1-1645-4cf1-ba92-96ea5fd03a1b\\\",\\\"systemUUID\\\":\\\"cb0479c9-186e-453b-880a-de1db201ede6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:11Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:11 crc kubenswrapper[4762]: I1009 13:26:11.432582 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:11 crc kubenswrapper[4762]: I1009 13:26:11.432612 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 09 13:26:11 crc kubenswrapper[4762]: I1009 13:26:11.432623 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:11 crc kubenswrapper[4762]: I1009 13:26:11.432663 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:11 crc kubenswrapper[4762]: I1009 13:26:11.432677 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:11Z","lastTransitionTime":"2025-10-09T13:26:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:11 crc kubenswrapper[4762]: E1009 13:26:11.447109 4762 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:11Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:11Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9a48ddb1-1645-4cf1-ba92-96ea5fd03a1b\\\",\\\"systemUUID\\\":\\\"cb0479c9-186e-453b-880a-de1db201ede6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:11Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:11 crc kubenswrapper[4762]: I1009 13:26:11.451089 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:11 crc kubenswrapper[4762]: I1009 13:26:11.451113 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 09 13:26:11 crc kubenswrapper[4762]: I1009 13:26:11.451121 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:11 crc kubenswrapper[4762]: I1009 13:26:11.451136 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:11 crc kubenswrapper[4762]: I1009 13:26:11.451147 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:11Z","lastTransitionTime":"2025-10-09T13:26:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:11 crc kubenswrapper[4762]: E1009 13:26:11.466944 4762 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:11Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:11Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9a48ddb1-1645-4cf1-ba92-96ea5fd03a1b\\\",\\\"systemUUID\\\":\\\"cb0479c9-186e-453b-880a-de1db201ede6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:11Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:11 crc kubenswrapper[4762]: I1009 13:26:11.471145 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:11 crc kubenswrapper[4762]: I1009 13:26:11.471221 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 09 13:26:11 crc kubenswrapper[4762]: I1009 13:26:11.471244 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:11 crc kubenswrapper[4762]: I1009 13:26:11.471275 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:11 crc kubenswrapper[4762]: I1009 13:26:11.471330 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:11Z","lastTransitionTime":"2025-10-09T13:26:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:11 crc kubenswrapper[4762]: E1009 13:26:11.486089 4762 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:11Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:11Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9a48ddb1-1645-4cf1-ba92-96ea5fd03a1b\\\",\\\"systemUUID\\\":\\\"cb0479c9-186e-453b-880a-de1db201ede6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:11Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:11 crc kubenswrapper[4762]: E1009 13:26:11.486246 4762 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 09 13:26:11 crc kubenswrapper[4762]: I1009 13:26:11.487974 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
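Every status-update retry above fails for the same recorded reason: the serving certificate of the node.network-node-identity.openshift.io webhook at 127.0.0.1:9743 expired on 2025-08-24T17:21:41Z, while the node clock reads 2025-10-09. Below is a minimal, illustrative sketch of how such an expiry could be confirmed from the affected host; it assumes Python 3 with the third-party cryptography package, and takes nothing from the log except the endpoint address and port.

    # Illustrative sketch: fetch the webhook's serving certificate and report
    # its validity window. Host/port come from the log above; everything else
    # here is an assumption, not part of the captured log.
    import socket
    import ssl
    from datetime import datetime, timezone

    from cryptography import x509  # third-party: pip install cryptography

    HOST, PORT = "127.0.0.1", 9743  # node-identity webhook endpoint from the log

    ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
    ctx.check_hostname = False       # must be disabled before verify_mode below
    ctx.verify_mode = ssl.CERT_NONE  # accept the expired cert so we can read it

    with socket.create_connection((HOST, PORT), timeout=5) as sock:
        with ctx.wrap_socket(sock, server_hostname=HOST) as tls:
            der = tls.getpeercert(binary_form=True)  # DER bytes of the leaf cert

    cert = x509.load_der_x509_certificate(der)
    now = datetime.now(timezone.utc).replace(tzinfo=None)  # naive UTC datetime
    print("notBefore:", cert.not_valid_before)
    print("notAfter: ", cert.not_valid_after)
    print("expired:  ", now > cert.not_valid_after)

Verification is disabled (CERT_NONE) deliberately so the handshake completes despite the expiry, which is exactly the check the kubelet cannot skip when it posts to the webhook.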
event="NodeHasSufficientMemory" Oct 09 13:26:11 crc kubenswrapper[4762]: I1009 13:26:11.488008 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:11 crc kubenswrapper[4762]: I1009 13:26:11.488019 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:11 crc kubenswrapper[4762]: I1009 13:26:11.488034 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:11 crc kubenswrapper[4762]: I1009 13:26:11.488045 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:11Z","lastTransitionTime":"2025-10-09T13:26:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:11 crc kubenswrapper[4762]: I1009 13:26:11.591198 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:11 crc kubenswrapper[4762]: I1009 13:26:11.591250 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:11 crc kubenswrapper[4762]: I1009 13:26:11.591258 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:11 crc kubenswrapper[4762]: I1009 13:26:11.591271 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:11 crc kubenswrapper[4762]: I1009 13:26:11.591279 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:11Z","lastTransitionTime":"2025-10-09T13:26:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:11 crc kubenswrapper[4762]: I1009 13:26:11.695272 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:11 crc kubenswrapper[4762]: I1009 13:26:11.695325 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:11 crc kubenswrapper[4762]: I1009 13:26:11.695351 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:11 crc kubenswrapper[4762]: I1009 13:26:11.695373 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:11 crc kubenswrapper[4762]: I1009 13:26:11.695387 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:11Z","lastTransitionTime":"2025-10-09T13:26:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:11 crc kubenswrapper[4762]: I1009 13:26:11.798169 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:11 crc kubenswrapper[4762]: I1009 13:26:11.798263 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:11 crc kubenswrapper[4762]: I1009 13:26:11.798302 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:11 crc kubenswrapper[4762]: I1009 13:26:11.798333 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:11 crc kubenswrapper[4762]: I1009 13:26:11.798357 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:11Z","lastTransitionTime":"2025-10-09T13:26:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:11 crc kubenswrapper[4762]: I1009 13:26:11.901526 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:11 crc kubenswrapper[4762]: I1009 13:26:11.901601 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:11 crc kubenswrapper[4762]: I1009 13:26:11.901625 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:11 crc kubenswrapper[4762]: I1009 13:26:11.901688 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:11 crc kubenswrapper[4762]: I1009 13:26:11.901711 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:11Z","lastTransitionTime":"2025-10-09T13:26:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:12 crc kubenswrapper[4762]: I1009 13:26:12.005079 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:12 crc kubenswrapper[4762]: I1009 13:26:12.005138 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:12 crc kubenswrapper[4762]: I1009 13:26:12.005160 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:12 crc kubenswrapper[4762]: I1009 13:26:12.005190 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:12 crc kubenswrapper[4762]: I1009 13:26:12.005212 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:12Z","lastTransitionTime":"2025-10-09T13:26:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:12 crc kubenswrapper[4762]: I1009 13:26:12.108590 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:12 crc kubenswrapper[4762]: I1009 13:26:12.108697 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:12 crc kubenswrapper[4762]: I1009 13:26:12.108722 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:12 crc kubenswrapper[4762]: I1009 13:26:12.108761 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:12 crc kubenswrapper[4762]: I1009 13:26:12.108783 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:12Z","lastTransitionTime":"2025-10-09T13:26:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:12 crc kubenswrapper[4762]: I1009 13:26:12.126145 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 09 13:26:12 crc kubenswrapper[4762]: I1009 13:26:12.135425 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Oct 09 13:26:12 crc kubenswrapper[4762]: I1009 13:26:12.150666 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7bd1ecbd-1492-4e6a-87e8-1c913e084d9d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9ff1efe69d256b491a039e5f35442c087ce3b52fc7abf98b338e24c3e020b99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb2071dd369674ca2de7de56dd1250c763b8733d72889b60eff864774dc3d81b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e44b6f2021a1a4ccd714f86443c7cc235b9d77cd455e68f7e042281ff0917569\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f33b9070a56fe51d2f39d9d509fc8cea2fada696703209c911b75f5c8f53e96d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9179f90a1a9a3c70467429b0471320ccf51b67f27c4d28d22ebc477cedab17d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1009 13:25:38.564754 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 13:25:38.572923 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2393231961/tls.crt::/tmp/serving-cert-2393231961/tls.key\\\\\\\"\\\\nI1009 13:25:44.418137 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 13:25:44.425303 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 13:25:44.425330 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 13:25:44.425348 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 13:25:44.425353 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 13:25:44.434300 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1009 13:25:44.434319 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1009 13:25:44.434323 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 13:25:44.434340 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 13:25:44.434344 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 13:25:44.434347 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 13:25:44.434350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 13:25:44.434353 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1009 13:25:44.436492 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:28Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://281873dc615f940d39a13cb0a18a2eb34eb7de3f9773d8845183edeb89d430f5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b0cc5e4351c64f1a4f07f8ec87ea48ddab393d4ac64228e8fbf20d3259fb630\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6b0cc5e4351c64f1a4f07f8ec87ea48ddab393d4ac64228e8fbf20d3259fb630\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:25Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:12Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:12 crc kubenswrapper[4762]: I1009 13:26:12.168082 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2vkbh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"263d57f9-b10b-4ce1-adad-774600b977d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://732b66ff58c48b0703e0fd4585768652035af6797f66b586fc6f17ef3937d9d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gmcr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2vkbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:12Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:12 crc kubenswrapper[4762]: I1009 13:26:12.189097 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-9wtqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c847aae6-277a-45dc-86d0-9b175f7e8177\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbc232c96b60c8678588d4902c8dfbf6fc0b30f8af768295c963aad3a9f4d644\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2kljt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-9wtqb\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:12Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:12 crc kubenswrapper[4762]: I1009 13:26:12.204931 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fx92z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf0dada3-5765-4a2e-b28a-f9291c2d6428\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6368fc75a63ebf7915390457c69a6a9b77e19726ba182437f5c616bea12245f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xpd4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aeff516f9d81e48c8bd350da51ebffc8f6f031f06100a5264cb7aca04674f79f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xpd4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\
\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fx92z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:12Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:12 crc kubenswrapper[4762]: I1009 13:26:12.211470 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:12 crc kubenswrapper[4762]: I1009 13:26:12.211538 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:12 crc kubenswrapper[4762]: I1009 13:26:12.211563 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:12 crc kubenswrapper[4762]: I1009 13:26:12.211591 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:12 crc kubenswrapper[4762]: I1009 13:26:12.211613 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:12Z","lastTransitionTime":"2025-10-09T13:26:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:12 crc kubenswrapper[4762]: I1009 13:26:12.227211 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:12Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:12 crc kubenswrapper[4762]: I1009 13:26:12.250696 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-n6lnd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf8b8ba7-96cd-4cdd-9925-94dd98242050\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ff1d6f7e6b424ebf8005fa3d140897100a8e7ed0095e7af05531cf5ad9f69b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://38f8c3c9395cbaf4a6426349a070b2d3b4ba4f83af8f5272a33d617f456c2e38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"s
tarted\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://38f8c3c9395cbaf4a6426349a070b2d3b4ba4f83af8f5272a33d617f456c2e38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a83c8fa2a9b44c19879eaa27ee0aceb5aa4f0c2d70347e497a62fc1ca236807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a83c8fa2a9b44c19879eaa27ee0aceb5aa4f0c2d70347e497a62fc1ca236807\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ece4883665cbaf9a3045d5ddde5584cc4cd6c3acb5a8bb3acbf7eaee644796e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ece4883665cbaf9a3045d5ddde5584cc4cd6c3acb5a8bb3acbf7eaee644796e9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"
}]},{\\\"containerID\\\":\\\"cri-o://1aae59e2ff2b1d48f9d2b5d13c30ccc155f0ba8e2657ec99020d6c6a8977495e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1aae59e2ff2b1d48f9d2b5d13c30ccc155f0ba8e2657ec99020d6c6a8977495e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f1ff0c4b2b07617d5ea81b1d88ea74124e4739d667f1ad72eb36dad3d48e7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96f1ff0c4b2b07617d5ea81b1d88ea74124e4739d667f1ad72eb36dad3d48e7f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa9e24a4bd37675e1c6b4f6ba08d60b722add0bbcdf9eb51ace2e3fb35143d0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa9e24a4bd37675e1c6b4f6ba08d60b722add0bbcdf9eb51ace2e3fb35143d0d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":
\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-n6lnd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:12Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:12 crc kubenswrapper[4762]: I1009 13:26:12.263099 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bj499" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b164d4fd-19df-4902-971f-5efe403e61e0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ab43a3e93731e2de1e0260a1933cc78188e7b561cb450d645c7b4fc00b691ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqf4b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:51Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bj499\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:12Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:12 crc kubenswrapper[4762]: I1009 
13:26:12.276459 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-k4bwn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9a76399-c2ae-487b-a52c-f0e271fb1d20\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vrjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vrjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:58Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-k4bwn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:12Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:12 crc kubenswrapper[4762]: I1009 13:26:12.289985 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dd0d2d4c-667f-43da-8074-b6e14823b755\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78a7e23eb6d5024d626963a06cf5790fcd6c7c17c82c823b2650c55273e427fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f807722a8b6059afed30f7f1fd32bcc168b8bf9d5eee02d74a42ab70ae5ff048\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1706f7f512083d1da015da3c7cd09c6aa4d497b83f8dfcd4ce0e8e966aa00b37\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd9c69d0be5e859f86da1745bdf82f003681f064e2580bfd454e6ba875bdcb61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:25Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:12Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:12 crc kubenswrapper[4762]: I1009 13:26:12.306099 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:12Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:12 crc kubenswrapper[4762]: I1009 13:26:12.313699 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:12 crc kubenswrapper[4762]: I1009 13:26:12.313741 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:12 crc kubenswrapper[4762]: I1009 13:26:12.313755 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:12 crc kubenswrapper[4762]: I1009 13:26:12.313775 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:12 crc kubenswrapper[4762]: I1009 13:26:12.313790 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:12Z","lastTransitionTime":"2025-10-09T13:26:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:12 crc kubenswrapper[4762]: I1009 13:26:12.322921 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d2fc9df0c1cc14b4b9f8caff51e87059aeffaa2daeeb271d55585f7297d003c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4194b9211ce31f24383b3cabd274bfb9afef0c56583b802cb2c934ba81b05c43\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:12Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:12 crc kubenswrapper[4762]: I1009 13:26:12.333292 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"366049a3-acf6-488c-9f93-4557528d6d14\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbaae79b08d94e58f88c25cf641c2c24edc8f8ed5d5ffbf5fd3c68b24246a964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://be56bae2e58091d7381288b22608ea1d9ff05c002d923b3dc62b87fe4d4dfdc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5v6hv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:12Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:12 crc kubenswrapper[4762]: I1009 13:26:12.347103 4762 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42c46f24d4579b9ef6d5f7a351830fc24872a571e6bc26a163bb5ace1e688037\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:12Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:12 crc kubenswrapper[4762]: I1009 13:26:12.359846 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b59b56cd547bd0e2f72ad712b04651a4ae65cf3a0df7865c0b0c16478261a06a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:12Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:12 crc kubenswrapper[4762]: I1009 13:26:12.375603 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:12Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:12 crc kubenswrapper[4762]: I1009 13:26:12.402489 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"92662de9-9784-432a-92d2-a668f815e8fd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c409df5880861cef6885822a19dc9bbe481342a849c18ac11c85a60fcee0f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9dbd3d536f2ff0e46947e1516b3b9def208d490f5e62bbde5bebf37690d26ac0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a6b9ddcf6f9632e0ab1ac7f145c90d4c1e404b44f6e4fdc547fa42a4736448d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1fe15b6fa2a4089c0ef0b19180a44b570bf28aeb719e8fb5c960c16f3bc3ee5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://443d1d116c58f5d8b2c5fc9051baf914244cb0776b1f912d11fe4316a0ec0567\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a6b026d474235bbb7b31530b4628a10c35b22baf4ee49759f28a9beb8177989\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a5b3dbf7adcc609a6fbc414eb3eba302ecf6355
c195221fa03ebe90a8b4eb84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a5b3dbf7adcc609a6fbc414eb3eba302ecf6355c195221fa03ebe90a8b4eb84\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T13:25:56Z\\\",\\\"message\\\":\\\"81 6162 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:56Z is after 2025-08-24T17:21:41Z]\\\\nI1009 13:25:56.084172 6162 services_controller.go:451] Built service openshift-ingress/router-internal-default cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-ingress/router-internal-default_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-ingress/router-internal-default\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]serv\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:55Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-jl67g_openshift-ovn-kubernetes(92662de9-9784-432a-92d2-a668f815e8fd)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f5be977653547c33b4d2d5184688120b32866045e3b18a08be1c7c406d6b498\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bd03e6b064630a1cd71d2e88a4e99d513b30d0f225516ce8030cba879fe2422e\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd03e6b064630a1cd71d2e88a4e99d513b30d0f225516ce8030cba879fe2422e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jl67g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:12Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:12 crc kubenswrapper[4762]: I1009 13:26:12.416742 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:12 crc kubenswrapper[4762]: I1009 13:26:12.416803 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:12 crc kubenswrapper[4762]: I1009 13:26:12.416827 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:12 crc kubenswrapper[4762]: I1009 13:26:12.416858 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:12 crc kubenswrapper[4762]: I1009 13:26:12.416886 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:12Z","lastTransitionTime":"2025-10-09T13:26:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:12 crc kubenswrapper[4762]: I1009 13:26:12.520526 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:12 crc kubenswrapper[4762]: I1009 13:26:12.520925 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:12 crc kubenswrapper[4762]: I1009 13:26:12.521066 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:12 crc kubenswrapper[4762]: I1009 13:26:12.521199 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:12 crc kubenswrapper[4762]: I1009 13:26:12.521343 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:12Z","lastTransitionTime":"2025-10-09T13:26:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:12 crc kubenswrapper[4762]: I1009 13:26:12.625542 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:12 crc kubenswrapper[4762]: I1009 13:26:12.625623 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:12 crc kubenswrapper[4762]: I1009 13:26:12.625689 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:12 crc kubenswrapper[4762]: I1009 13:26:12.625721 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:12 crc kubenswrapper[4762]: I1009 13:26:12.625745 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:12Z","lastTransitionTime":"2025-10-09T13:26:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:12 crc kubenswrapper[4762]: I1009 13:26:12.728377 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:12 crc kubenswrapper[4762]: I1009 13:26:12.728450 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:12 crc kubenswrapper[4762]: I1009 13:26:12.728476 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:12 crc kubenswrapper[4762]: I1009 13:26:12.728507 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:12 crc kubenswrapper[4762]: I1009 13:26:12.728529 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:12Z","lastTransitionTime":"2025-10-09T13:26:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:12 crc kubenswrapper[4762]: I1009 13:26:12.830547 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:12 crc kubenswrapper[4762]: I1009 13:26:12.830661 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:12 crc kubenswrapper[4762]: I1009 13:26:12.830684 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:12 crc kubenswrapper[4762]: I1009 13:26:12.830700 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:12 crc kubenswrapper[4762]: I1009 13:26:12.830711 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:12Z","lastTransitionTime":"2025-10-09T13:26:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:12 crc kubenswrapper[4762]: I1009 13:26:12.934521 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:12 crc kubenswrapper[4762]: I1009 13:26:12.934583 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:12 crc kubenswrapper[4762]: I1009 13:26:12.934602 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:12 crc kubenswrapper[4762]: I1009 13:26:12.934625 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:12 crc kubenswrapper[4762]: I1009 13:26:12.934682 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:12Z","lastTransitionTime":"2025-10-09T13:26:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:12 crc kubenswrapper[4762]: I1009 13:26:12.964504 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 13:26:12 crc kubenswrapper[4762]: I1009 13:26:12.964596 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 13:26:12 crc kubenswrapper[4762]: I1009 13:26:12.964529 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 13:26:12 crc kubenswrapper[4762]: E1009 13:26:12.964750 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 13:26:12 crc kubenswrapper[4762]: E1009 13:26:12.964885 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 13:26:12 crc kubenswrapper[4762]: E1009 13:26:12.965039 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 13:26:12 crc kubenswrapper[4762]: I1009 13:26:12.965220 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k4bwn" Oct 09 13:26:12 crc kubenswrapper[4762]: E1009 13:26:12.965364 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k4bwn" podUID="f9a76399-c2ae-487b-a52c-f0e271fb1d20" Oct 09 13:26:13 crc kubenswrapper[4762]: I1009 13:26:13.037173 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:13 crc kubenswrapper[4762]: I1009 13:26:13.037357 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:13 crc kubenswrapper[4762]: I1009 13:26:13.037370 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:13 crc kubenswrapper[4762]: I1009 13:26:13.037385 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:13 crc kubenswrapper[4762]: I1009 13:26:13.037394 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:13Z","lastTransitionTime":"2025-10-09T13:26:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:13 crc kubenswrapper[4762]: I1009 13:26:13.140840 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:13 crc kubenswrapper[4762]: I1009 13:26:13.140897 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:13 crc kubenswrapper[4762]: I1009 13:26:13.140914 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:13 crc kubenswrapper[4762]: I1009 13:26:13.140937 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:13 crc kubenswrapper[4762]: I1009 13:26:13.140952 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:13Z","lastTransitionTime":"2025-10-09T13:26:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:13 crc kubenswrapper[4762]: I1009 13:26:13.244075 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:13 crc kubenswrapper[4762]: I1009 13:26:13.244128 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:13 crc kubenswrapper[4762]: I1009 13:26:13.244143 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:13 crc kubenswrapper[4762]: I1009 13:26:13.244166 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:13 crc kubenswrapper[4762]: I1009 13:26:13.244183 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:13Z","lastTransitionTime":"2025-10-09T13:26:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:13 crc kubenswrapper[4762]: I1009 13:26:13.347479 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:13 crc kubenswrapper[4762]: I1009 13:26:13.347828 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:13 crc kubenswrapper[4762]: I1009 13:26:13.347973 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:13 crc kubenswrapper[4762]: I1009 13:26:13.348113 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:13 crc kubenswrapper[4762]: I1009 13:26:13.348233 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:13Z","lastTransitionTime":"2025-10-09T13:26:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:13 crc kubenswrapper[4762]: I1009 13:26:13.450874 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:13 crc kubenswrapper[4762]: I1009 13:26:13.450930 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:13 crc kubenswrapper[4762]: I1009 13:26:13.450948 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:13 crc kubenswrapper[4762]: I1009 13:26:13.450972 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:13 crc kubenswrapper[4762]: I1009 13:26:13.450990 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:13Z","lastTransitionTime":"2025-10-09T13:26:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:13 crc kubenswrapper[4762]: I1009 13:26:13.554699 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:13 crc kubenswrapper[4762]: I1009 13:26:13.554778 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:13 crc kubenswrapper[4762]: I1009 13:26:13.554803 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:13 crc kubenswrapper[4762]: I1009 13:26:13.554834 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:13 crc kubenswrapper[4762]: I1009 13:26:13.554857 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:13Z","lastTransitionTime":"2025-10-09T13:26:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:13 crc kubenswrapper[4762]: I1009 13:26:13.658692 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:13 crc kubenswrapper[4762]: I1009 13:26:13.658755 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:13 crc kubenswrapper[4762]: I1009 13:26:13.658769 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:13 crc kubenswrapper[4762]: I1009 13:26:13.658791 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:13 crc kubenswrapper[4762]: I1009 13:26:13.658804 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:13Z","lastTransitionTime":"2025-10-09T13:26:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:13 crc kubenswrapper[4762]: I1009 13:26:13.761603 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:13 crc kubenswrapper[4762]: I1009 13:26:13.761688 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:13 crc kubenswrapper[4762]: I1009 13:26:13.761705 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:13 crc kubenswrapper[4762]: I1009 13:26:13.761730 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:13 crc kubenswrapper[4762]: I1009 13:26:13.761748 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:13Z","lastTransitionTime":"2025-10-09T13:26:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:13 crc kubenswrapper[4762]: I1009 13:26:13.865040 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:13 crc kubenswrapper[4762]: I1009 13:26:13.865100 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:13 crc kubenswrapper[4762]: I1009 13:26:13.865121 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:13 crc kubenswrapper[4762]: I1009 13:26:13.865147 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:13 crc kubenswrapper[4762]: I1009 13:26:13.865164 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:13Z","lastTransitionTime":"2025-10-09T13:26:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:13 crc kubenswrapper[4762]: I1009 13:26:13.968237 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:13 crc kubenswrapper[4762]: I1009 13:26:13.968302 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:13 crc kubenswrapper[4762]: I1009 13:26:13.968318 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:13 crc kubenswrapper[4762]: I1009 13:26:13.968344 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:13 crc kubenswrapper[4762]: I1009 13:26:13.968363 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:13Z","lastTransitionTime":"2025-10-09T13:26:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:14 crc kubenswrapper[4762]: I1009 13:26:14.071099 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:14 crc kubenswrapper[4762]: I1009 13:26:14.071134 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:14 crc kubenswrapper[4762]: I1009 13:26:14.071142 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:14 crc kubenswrapper[4762]: I1009 13:26:14.071155 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:14 crc kubenswrapper[4762]: I1009 13:26:14.071164 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:14Z","lastTransitionTime":"2025-10-09T13:26:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:14 crc kubenswrapper[4762]: I1009 13:26:14.173165 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:14 crc kubenswrapper[4762]: I1009 13:26:14.173218 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:14 crc kubenswrapper[4762]: I1009 13:26:14.173231 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:14 crc kubenswrapper[4762]: I1009 13:26:14.173250 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:14 crc kubenswrapper[4762]: I1009 13:26:14.173261 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:14Z","lastTransitionTime":"2025-10-09T13:26:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:14 crc kubenswrapper[4762]: I1009 13:26:14.275248 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:14 crc kubenswrapper[4762]: I1009 13:26:14.275316 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:14 crc kubenswrapper[4762]: I1009 13:26:14.275330 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:14 crc kubenswrapper[4762]: I1009 13:26:14.275349 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:14 crc kubenswrapper[4762]: I1009 13:26:14.275364 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:14Z","lastTransitionTime":"2025-10-09T13:26:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:14 crc kubenswrapper[4762]: I1009 13:26:14.378101 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:14 crc kubenswrapper[4762]: I1009 13:26:14.378179 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:14 crc kubenswrapper[4762]: I1009 13:26:14.378204 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:14 crc kubenswrapper[4762]: I1009 13:26:14.378239 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:14 crc kubenswrapper[4762]: I1009 13:26:14.378262 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:14Z","lastTransitionTime":"2025-10-09T13:26:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:14 crc kubenswrapper[4762]: I1009 13:26:14.481349 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:14 crc kubenswrapper[4762]: I1009 13:26:14.481389 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:14 crc kubenswrapper[4762]: I1009 13:26:14.481406 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:14 crc kubenswrapper[4762]: I1009 13:26:14.481430 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:14 crc kubenswrapper[4762]: I1009 13:26:14.481447 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:14Z","lastTransitionTime":"2025-10-09T13:26:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:14 crc kubenswrapper[4762]: I1009 13:26:14.569865 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f9a76399-c2ae-487b-a52c-f0e271fb1d20-metrics-certs\") pod \"network-metrics-daemon-k4bwn\" (UID: \"f9a76399-c2ae-487b-a52c-f0e271fb1d20\") " pod="openshift-multus/network-metrics-daemon-k4bwn" Oct 09 13:26:14 crc kubenswrapper[4762]: E1009 13:26:14.570030 4762 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 09 13:26:14 crc kubenswrapper[4762]: E1009 13:26:14.570124 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f9a76399-c2ae-487b-a52c-f0e271fb1d20-metrics-certs podName:f9a76399-c2ae-487b-a52c-f0e271fb1d20 nodeName:}" failed. No retries permitted until 2025-10-09 13:26:30.570101376 +0000 UTC m=+66.343892445 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/f9a76399-c2ae-487b-a52c-f0e271fb1d20-metrics-certs") pod "network-metrics-daemon-k4bwn" (UID: "f9a76399-c2ae-487b-a52c-f0e271fb1d20") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 09 13:26:14 crc kubenswrapper[4762]: I1009 13:26:14.584147 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:14 crc kubenswrapper[4762]: I1009 13:26:14.584410 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:14 crc kubenswrapper[4762]: I1009 13:26:14.584474 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:14 crc kubenswrapper[4762]: I1009 13:26:14.584536 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:14 crc kubenswrapper[4762]: I1009 13:26:14.584597 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:14Z","lastTransitionTime":"2025-10-09T13:26:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:14 crc kubenswrapper[4762]: I1009 13:26:14.687515 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:14 crc kubenswrapper[4762]: I1009 13:26:14.687751 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:14 crc kubenswrapper[4762]: I1009 13:26:14.687841 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:14 crc kubenswrapper[4762]: I1009 13:26:14.687909 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:14 crc kubenswrapper[4762]: I1009 13:26:14.687974 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:14Z","lastTransitionTime":"2025-10-09T13:26:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:14 crc kubenswrapper[4762]: I1009 13:26:14.789936 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:14 crc kubenswrapper[4762]: I1009 13:26:14.790005 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:14 crc kubenswrapper[4762]: I1009 13:26:14.790019 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:14 crc kubenswrapper[4762]: I1009 13:26:14.790035 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:14 crc kubenswrapper[4762]: I1009 13:26:14.790046 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:14Z","lastTransitionTime":"2025-10-09T13:26:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:14 crc kubenswrapper[4762]: I1009 13:26:14.892759 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:14 crc kubenswrapper[4762]: I1009 13:26:14.892825 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:14 crc kubenswrapper[4762]: I1009 13:26:14.892842 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:14 crc kubenswrapper[4762]: I1009 13:26:14.892866 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:14 crc kubenswrapper[4762]: I1009 13:26:14.892883 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:14Z","lastTransitionTime":"2025-10-09T13:26:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:14 crc kubenswrapper[4762]: I1009 13:26:14.964684 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 13:26:14 crc kubenswrapper[4762]: E1009 13:26:14.964880 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 13:26:14 crc kubenswrapper[4762]: I1009 13:26:14.965013 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 13:26:14 crc kubenswrapper[4762]: I1009 13:26:14.965076 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-k4bwn" Oct 09 13:26:14 crc kubenswrapper[4762]: E1009 13:26:14.965162 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 13:26:14 crc kubenswrapper[4762]: I1009 13:26:14.965262 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 13:26:14 crc kubenswrapper[4762]: E1009 13:26:14.965268 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k4bwn" podUID="f9a76399-c2ae-487b-a52c-f0e271fb1d20" Oct 09 13:26:14 crc kubenswrapper[4762]: E1009 13:26:14.965333 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 13:26:14 crc kubenswrapper[4762]: I1009 13:26:14.983940 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:14Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:14 crc kubenswrapper[4762]: I1009 13:26:14.996216 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:14 crc kubenswrapper[4762]: I1009 13:26:14.996572 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:14 crc kubenswrapper[4762]: I1009 13:26:14.997137 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:14 crc kubenswrapper[4762]: I1009 13:26:14.997404 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:14 crc kubenswrapper[4762]: I1009 13:26:14.997728 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:14Z","lastTransitionTime":"2025-10-09T13:26:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:15 crc kubenswrapper[4762]: I1009 13:26:15.017875 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"92662de9-9784-432a-92d2-a668f815e8fd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c409df5880861cef6885822a19dc9bbe481342a849c18ac11c85a60fcee0f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9dbd3d536f2ff0e46947e1516b3b9def208d490f5e62bbde5bebf37690d26ac0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://1a6b9ddcf6f9632e0ab1ac7f145c90d4c1e404b44f6e4fdc547fa42a4736448d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1fe15b6fa2a4089c0ef0b19180a44b570bf28aeb719e8fb5c960c16f3bc3ee5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://443d1d116c58f5d8b2c5fc9051baf914244cb0776b1f912d11fe4316a0ec0567\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a6b026d474235bbb7b31530b4628a10c35b22baf4ee49759f28a9beb8177989\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a5b3dbf7adcc609a6fbc414eb3eba302ecf6355c195221fa03ebe90a8b4eb84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a5b3dbf7adcc609a6fbc414eb3eba302ecf6355c195221fa03ebe90a8b4eb84\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T13:25:56Z\\\",\\\"message\\\":\\\"81 6162 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:56Z is after 2025-08-24T17:21:41Z]\\\\nI1009 13:25:56.084172 6162 services_controller.go:451] Built service openshift-ingress/router-internal-default cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-ingress/router-internal-default_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-ingress/router-internal-default\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, 
Rules:[]serv\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:55Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-jl67g_openshift-ovn-kubernetes(92662de9-9784-432a-92d2-a668f815e8fd)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f5be977653547c33b4d2d5184688120b32866045e3b18a08be1c7c406d6b498\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"
recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bd03e6b064630a1cd71d2e88a4e99d513b30d0f225516ce8030cba879fe2422e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd03e6b064630a1cd71d2e88a4e99d513b30d0f225516ce8030cba879fe2422e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jl67g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:15Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:15 crc kubenswrapper[4762]: I1009 13:26:15.040324 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"804a251f-1e3f-4b9e-af10-eefa332e6e98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb1fed96810d93e59d24d69fb8a2eb9974e8a5e524465daea3ff2cca16f7226c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7ad35bed43586dfec502056ecb5226049a7fb25461c2774fb5377102fd2ce85d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d3cc3e07b5fc9069f7faa521fa94efde90d9c79940876a93849c44a14327e56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a492a7a956d54533afbd3869796642235d0c8ca621c550c14eeab5e988fc4364\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a492a7a956d54533afbd3869796642235d0c8ca621c550c14eeab5e988fc4364\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:25Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:15Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:15 crc kubenswrapper[4762]: I1009 13:26:15.061099 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42c46f24d4579b9ef6d5f7a351830fc24872a571e6bc26a163bb5ace1e688037\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:15Z is after 
2025-08-24T17:21:41Z" Oct 09 13:26:15 crc kubenswrapper[4762]: I1009 13:26:15.085706 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b59b56cd547bd0e2f72ad712b04651a4ae65cf3a0df7865c0b0c16478261a06a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:15Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:15 crc kubenswrapper[4762]: I1009 13:26:15.100533 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:15 crc kubenswrapper[4762]: I1009 13:26:15.100611 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:15 crc kubenswrapper[4762]: I1009 13:26:15.100666 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:15 crc kubenswrapper[4762]: I1009 13:26:15.100697 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:15 crc kubenswrapper[4762]: I1009 13:26:15.100710 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:15Z","lastTransitionTime":"2025-10-09T13:26:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:15 crc kubenswrapper[4762]: I1009 13:26:15.106710 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-9wtqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c847aae6-277a-45dc-86d0-9b175f7e8177\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbc232c96b60c8678588d4902c8dfbf6fc0b30f8af768295c963aad3a9f4d644\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2kljt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-9wtqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:15Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:15 crc kubenswrapper[4762]: I1009 13:26:15.122996 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fx92z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf0dada3-5765-4a2e-b28a-f9291c2d6428\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6368fc75a63ebf7915390457c69a6a9b77e19726ba182437f5c616bea12245f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xpd4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aeff516f9d81e48c8bd350da51ebffc8f6f031f06100a5264cb7aca04674f79f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xpd4\\\",\\\"readOnly\\\":true,\\\"recursiveReadO
nly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fx92z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:15Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:15 crc kubenswrapper[4762]: I1009 13:26:15.145237 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7bd1ecbd-1492-4e6a-87e8-1c913e084d9d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9ff1efe69d256b491a039e5f35442c087ce3b52fc7abf98b338e24c3e020b99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb2071dd369674ca2de7de56dd1250c763b8733d72889b60eff864774dc3d81b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e44b6f2021a1a4ccd714f86443c7cc235b9d77cd455e68f7e042281ff0917569\\\",\\\"image\\\":\\\"quay.i
o/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f33b9070a56fe51d2f39d9d509fc8cea2fada696703209c911b75f5c8f53e96d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9179f90a1a9a3c70467429b0471320ccf51b67f27c4d28d22ebc477cedab17d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1009 13:25:38.564754 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 13:25:38.572923 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2393231961/tls.crt::/tmp/serving-cert-2393231961/tls.key\\\\\\\"\\\\nI1009 13:25:44.418137 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 13:25:44.425303 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 13:25:44.425330 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 13:25:44.425348 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 13:25:44.425353 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 13:25:44.434300 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1009 13:25:44.434319 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1009 13:25:44.434323 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 13:25:44.434340 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 13:25:44.434344 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 13:25:44.434347 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 13:25:44.434350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 13:25:44.434353 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1009 13:25:44.436492 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:28Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://281873dc615f940d39a13cb0a18a2eb34eb7de3f9773d8845183edeb89d430f5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b0cc5e4351c64f1a4f07f8ec87ea48ddab393d4ac64228e8fbf20d3259fb630\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6b0cc5e4351c64f1a4f07f8ec87ea48ddab393d4ac64228e8fbf20d3259fb630\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:25Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:15Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:15 crc kubenswrapper[4762]: I1009 13:26:15.157468 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2vkbh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"263d57f9-b10b-4ce1-adad-774600b977d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://732b66ff58c48b0703e0fd4585768652035af6797f66b586fc6f17ef3937d9d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gmcr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2vkbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:15Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:15 crc kubenswrapper[4762]: I1009 13:26:15.169686 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:15Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:15 crc kubenswrapper[4762]: I1009 13:26:15.188271 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-n6lnd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf8b8ba7-96cd-4cdd-9925-94dd98242050\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ff1d6f7e6b424ebf8005fa3d140897100a8e7ed0095e7af05531cf5ad9f69b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\
\\":[{\\\"containerID\\\":\\\"cri-o://38f8c3c9395cbaf4a6426349a070b2d3b4ba4f83af8f5272a33d617f456c2e38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://38f8c3c9395cbaf4a6426349a070b2d3b4ba4f83af8f5272a33d617f456c2e38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a83c8fa2a9b44c19879eaa27ee0aceb5aa4f0c2d70347e497a62fc1ca236807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a83c8fa2a9b44c19879eaa27ee0aceb5aa4f0c2d70347e497a62fc1ca236807\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ece4883665cbaf9a3045d5ddde5584cc4cd6c3acb5a8bb3acbf7eaee644796e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ece4883665cbaf9a3045d5ddde5584cc4cd6c3acb5a8bb3acbf7eaee644796e9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:47Z
\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1aae59e2ff2b1d48f9d2b5d13c30ccc155f0ba8e2657ec99020d6c6a8977495e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1aae59e2ff2b1d48f9d2b5d13c30ccc155f0ba8e2657ec99020d6c6a8977495e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f1ff0c4b2b07617d5ea81b1d88ea74124e4739d667f1ad72eb36dad3d48e7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96f1ff0c4b2b07617d5ea81b1d88ea74124e4739d667f1ad72eb36dad3d48e7f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa9e24a4bd37675e1c6b4f6ba08d60b722add0bbcdf9eb51ace2e3fb35143d0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\"
,\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa9e24a4bd37675e1c6b4f6ba08d60b722add0bbcdf9eb51ace2e3fb35143d0d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-n6lnd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:15Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:15 crc kubenswrapper[4762]: I1009 13:26:15.197430 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bj499" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b164d4fd-19df-4902-971f-5efe403e61e0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ab43a3e93731e2de1e0260a1933cc78188e7b561cb450d645c7b4fc00b691ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqf4b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\
":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:51Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bj499\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:15Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:15 crc kubenswrapper[4762]: I1009 13:26:15.202851 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:15 crc kubenswrapper[4762]: I1009 13:26:15.202981 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:15 crc kubenswrapper[4762]: I1009 13:26:15.203038 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:15 crc kubenswrapper[4762]: I1009 13:26:15.203115 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:15 crc kubenswrapper[4762]: I1009 13:26:15.203175 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:15Z","lastTransitionTime":"2025-10-09T13:26:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:15 crc kubenswrapper[4762]: I1009 13:26:15.210448 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d2fc9df0c1cc14b4b9f8caff51e87059aeffaa2daeeb271d55585f7297d003c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4194b9211ce31f24383b3cabd274bfb9afef0c56583b802cb2c934ba81b05c43\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:15Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:15 crc kubenswrapper[4762]: I1009 13:26:15.222603 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"366049a3-acf6-488c-9f93-4557528d6d14\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbaae79b08d94e58f88c25cf641c2c24edc8f8ed5d5ffbf5fd3c68b24246a964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://be56bae2e58091d7381288b22608ea1d9ff05c002d923b3dc62b87fe4d4dfdc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5v6hv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:15Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:15 crc kubenswrapper[4762]: I1009 13:26:15.234020 4762 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/network-metrics-daemon-k4bwn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9a76399-c2ae-487b-a52c-f0e271fb1d20\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vrjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vrjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:58Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-k4bwn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:15Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:15 crc kubenswrapper[4762]: I1009 13:26:15.249315 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dd0d2d4c-667f-43da-8074-b6e14823b755\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78a7e23eb6d5024d626963a06cf5790fcd6c7c17c82c823b2650c55273e427fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f807722a8b6059afed30f7f1fd32bcc168b8bf9d5eee02d74a42ab70ae5ff048\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1706f7f512083d1da015da3c7cd09c6aa4d497b83f8dfcd4ce0e8e966aa00b37\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd9c69d0be5e859f86da1745bdf82f003681f064e2580bfd454e6ba875bdcb61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:25Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:15Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:15 crc kubenswrapper[4762]: I1009 13:26:15.262454 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:15Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:15 crc kubenswrapper[4762]: I1009 13:26:15.305711 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:15 crc kubenswrapper[4762]: I1009 13:26:15.305750 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:15 crc kubenswrapper[4762]: I1009 13:26:15.305762 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:15 crc kubenswrapper[4762]: I1009 13:26:15.305779 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:15 crc kubenswrapper[4762]: I1009 13:26:15.305792 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:15Z","lastTransitionTime":"2025-10-09T13:26:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:15 crc kubenswrapper[4762]: I1009 13:26:15.408092 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:15 crc kubenswrapper[4762]: I1009 13:26:15.408426 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:15 crc kubenswrapper[4762]: I1009 13:26:15.408572 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:15 crc kubenswrapper[4762]: I1009 13:26:15.408739 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:15 crc kubenswrapper[4762]: I1009 13:26:15.408885 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:15Z","lastTransitionTime":"2025-10-09T13:26:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:15 crc kubenswrapper[4762]: I1009 13:26:15.512049 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:15 crc kubenswrapper[4762]: I1009 13:26:15.512109 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:15 crc kubenswrapper[4762]: I1009 13:26:15.512125 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:15 crc kubenswrapper[4762]: I1009 13:26:15.512148 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:15 crc kubenswrapper[4762]: I1009 13:26:15.512166 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:15Z","lastTransitionTime":"2025-10-09T13:26:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:15 crc kubenswrapper[4762]: I1009 13:26:15.615406 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:15 crc kubenswrapper[4762]: I1009 13:26:15.615471 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:15 crc kubenswrapper[4762]: I1009 13:26:15.615491 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:15 crc kubenswrapper[4762]: I1009 13:26:15.615518 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:15 crc kubenswrapper[4762]: I1009 13:26:15.615536 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:15Z","lastTransitionTime":"2025-10-09T13:26:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:15 crc kubenswrapper[4762]: I1009 13:26:15.718109 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:15 crc kubenswrapper[4762]: I1009 13:26:15.718152 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:15 crc kubenswrapper[4762]: I1009 13:26:15.718164 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:15 crc kubenswrapper[4762]: I1009 13:26:15.718183 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:15 crc kubenswrapper[4762]: I1009 13:26:15.718194 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:15Z","lastTransitionTime":"2025-10-09T13:26:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:15 crc kubenswrapper[4762]: I1009 13:26:15.821653 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:15 crc kubenswrapper[4762]: I1009 13:26:15.821678 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:15 crc kubenswrapper[4762]: I1009 13:26:15.821687 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:15 crc kubenswrapper[4762]: I1009 13:26:15.821700 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:15 crc kubenswrapper[4762]: I1009 13:26:15.821708 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:15Z","lastTransitionTime":"2025-10-09T13:26:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:15 crc kubenswrapper[4762]: I1009 13:26:15.923764 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:15 crc kubenswrapper[4762]: I1009 13:26:15.923839 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:15 crc kubenswrapper[4762]: I1009 13:26:15.923867 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:15 crc kubenswrapper[4762]: I1009 13:26:15.923900 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:15 crc kubenswrapper[4762]: I1009 13:26:15.923922 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:15Z","lastTransitionTime":"2025-10-09T13:26:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:16 crc kubenswrapper[4762]: I1009 13:26:16.027524 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:16 crc kubenswrapper[4762]: I1009 13:26:16.027926 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:16 crc kubenswrapper[4762]: I1009 13:26:16.028138 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:16 crc kubenswrapper[4762]: I1009 13:26:16.028378 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:16 crc kubenswrapper[4762]: I1009 13:26:16.028583 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:16Z","lastTransitionTime":"2025-10-09T13:26:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:16 crc kubenswrapper[4762]: I1009 13:26:16.131899 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:16 crc kubenswrapper[4762]: I1009 13:26:16.132291 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:16 crc kubenswrapper[4762]: I1009 13:26:16.132536 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:16 crc kubenswrapper[4762]: I1009 13:26:16.132811 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:16 crc kubenswrapper[4762]: I1009 13:26:16.133020 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:16Z","lastTransitionTime":"2025-10-09T13:26:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:16 crc kubenswrapper[4762]: I1009 13:26:16.235470 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:16 crc kubenswrapper[4762]: I1009 13:26:16.235725 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:16 crc kubenswrapper[4762]: I1009 13:26:16.235845 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:16 crc kubenswrapper[4762]: I1009 13:26:16.235955 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:16 crc kubenswrapper[4762]: I1009 13:26:16.236044 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:16Z","lastTransitionTime":"2025-10-09T13:26:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:16 crc kubenswrapper[4762]: I1009 13:26:16.339027 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:16 crc kubenswrapper[4762]: I1009 13:26:16.339251 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:16 crc kubenswrapper[4762]: I1009 13:26:16.339343 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:16 crc kubenswrapper[4762]: I1009 13:26:16.339415 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:16 crc kubenswrapper[4762]: I1009 13:26:16.339471 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:16Z","lastTransitionTime":"2025-10-09T13:26:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:16 crc kubenswrapper[4762]: I1009 13:26:16.442192 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:16 crc kubenswrapper[4762]: I1009 13:26:16.442250 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:16 crc kubenswrapper[4762]: I1009 13:26:16.442267 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:16 crc kubenswrapper[4762]: I1009 13:26:16.442295 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:16 crc kubenswrapper[4762]: I1009 13:26:16.442312 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:16Z","lastTransitionTime":"2025-10-09T13:26:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:16 crc kubenswrapper[4762]: I1009 13:26:16.545787 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:16 crc kubenswrapper[4762]: I1009 13:26:16.545827 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:16 crc kubenswrapper[4762]: I1009 13:26:16.545836 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:16 crc kubenswrapper[4762]: I1009 13:26:16.545854 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:16 crc kubenswrapper[4762]: I1009 13:26:16.545864 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:16Z","lastTransitionTime":"2025-10-09T13:26:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:16 crc kubenswrapper[4762]: I1009 13:26:16.649291 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:16 crc kubenswrapper[4762]: I1009 13:26:16.649344 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:16 crc kubenswrapper[4762]: I1009 13:26:16.649354 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:16 crc kubenswrapper[4762]: I1009 13:26:16.649371 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:16 crc kubenswrapper[4762]: I1009 13:26:16.649383 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:16Z","lastTransitionTime":"2025-10-09T13:26:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:16 crc kubenswrapper[4762]: I1009 13:26:16.754273 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:16 crc kubenswrapper[4762]: I1009 13:26:16.754381 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:16 crc kubenswrapper[4762]: I1009 13:26:16.754414 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:16 crc kubenswrapper[4762]: I1009 13:26:16.754459 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:16 crc kubenswrapper[4762]: I1009 13:26:16.754504 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:16Z","lastTransitionTime":"2025-10-09T13:26:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:16 crc kubenswrapper[4762]: I1009 13:26:16.792510 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 13:26:16 crc kubenswrapper[4762]: I1009 13:26:16.792883 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 13:26:16 crc kubenswrapper[4762]: I1009 13:26:16.792984 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 13:26:16 crc kubenswrapper[4762]: E1009 13:26:16.793114 4762 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 09 13:26:16 crc kubenswrapper[4762]: E1009 13:26:16.793183 4762 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 09 13:26:16 crc kubenswrapper[4762]: E1009 13:26:16.793243 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 13:26:48.793097034 +0000 UTC m=+84.566888143 (durationBeforeRetry 32s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 13:26:16 crc kubenswrapper[4762]: E1009 13:26:16.793304 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-09 13:26:48.793273479 +0000 UTC m=+84.567064558 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 09 13:26:16 crc kubenswrapper[4762]: E1009 13:26:16.793352 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-09 13:26:48.79333709 +0000 UTC m=+84.567128169 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 09 13:26:16 crc kubenswrapper[4762]: I1009 13:26:16.857728 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:16 crc kubenswrapper[4762]: I1009 13:26:16.857783 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:16 crc kubenswrapper[4762]: I1009 13:26:16.857798 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:16 crc kubenswrapper[4762]: I1009 13:26:16.857819 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:16 crc kubenswrapper[4762]: I1009 13:26:16.857835 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:16Z","lastTransitionTime":"2025-10-09T13:26:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:16 crc kubenswrapper[4762]: I1009 13:26:16.894596 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 13:26:16 crc kubenswrapper[4762]: I1009 13:26:16.894714 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 13:26:16 crc kubenswrapper[4762]: E1009 13:26:16.894902 4762 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 09 13:26:16 crc kubenswrapper[4762]: E1009 13:26:16.894908 4762 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 09 13:26:16 crc kubenswrapper[4762]: E1009 13:26:16.894928 4762 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 09 13:26:16 crc kubenswrapper[4762]: E1009 13:26:16.894951 4762 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 09 13:26:16 crc kubenswrapper[4762]: E1009 13:26:16.894955 4762 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 09 13:26:16 crc kubenswrapper[4762]: E1009 13:26:16.894975 4762 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 09 13:26:16 crc kubenswrapper[4762]: E1009 13:26:16.895037 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-09 13:26:48.895017266 +0000 UTC m=+84.668808335 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 09 13:26:16 crc kubenswrapper[4762]: E1009 13:26:16.895062 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-09 13:26:48.895051297 +0000 UTC m=+84.668842366 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 09 13:26:16 crc kubenswrapper[4762]: I1009 13:26:16.961105 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:16 crc kubenswrapper[4762]: I1009 13:26:16.961162 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:16 crc kubenswrapper[4762]: I1009 13:26:16.961180 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:16 crc kubenswrapper[4762]: I1009 13:26:16.961205 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:16 crc kubenswrapper[4762]: I1009 13:26:16.961222 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:16Z","lastTransitionTime":"2025-10-09T13:26:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:16 crc kubenswrapper[4762]: I1009 13:26:16.964471 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 13:26:16 crc kubenswrapper[4762]: I1009 13:26:16.964616 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 13:26:16 crc kubenswrapper[4762]: I1009 13:26:16.964624 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 13:26:16 crc kubenswrapper[4762]: E1009 13:26:16.964806 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 13:26:16 crc kubenswrapper[4762]: I1009 13:26:16.964837 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k4bwn" Oct 09 13:26:16 crc kubenswrapper[4762]: E1009 13:26:16.964988 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 13:26:16 crc kubenswrapper[4762]: E1009 13:26:16.965168 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 13:26:16 crc kubenswrapper[4762]: E1009 13:26:16.965370 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k4bwn" podUID="f9a76399-c2ae-487b-a52c-f0e271fb1d20" Oct 09 13:26:17 crc kubenswrapper[4762]: I1009 13:26:17.064305 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:17 crc kubenswrapper[4762]: I1009 13:26:17.064359 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:17 crc kubenswrapper[4762]: I1009 13:26:17.064378 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:17 crc kubenswrapper[4762]: I1009 13:26:17.064403 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:17 crc kubenswrapper[4762]: I1009 13:26:17.064420 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:17Z","lastTransitionTime":"2025-10-09T13:26:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:17 crc kubenswrapper[4762]: I1009 13:26:17.167048 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:17 crc kubenswrapper[4762]: I1009 13:26:17.167115 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:17 crc kubenswrapper[4762]: I1009 13:26:17.167138 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:17 crc kubenswrapper[4762]: I1009 13:26:17.167185 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:17 crc kubenswrapper[4762]: I1009 13:26:17.167216 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:17Z","lastTransitionTime":"2025-10-09T13:26:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:17 crc kubenswrapper[4762]: I1009 13:26:17.270425 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:17 crc kubenswrapper[4762]: I1009 13:26:17.270494 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:17 crc kubenswrapper[4762]: I1009 13:26:17.270511 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:17 crc kubenswrapper[4762]: I1009 13:26:17.270540 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:17 crc kubenswrapper[4762]: I1009 13:26:17.270557 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:17Z","lastTransitionTime":"2025-10-09T13:26:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:17 crc kubenswrapper[4762]: I1009 13:26:17.372895 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:17 crc kubenswrapper[4762]: I1009 13:26:17.372980 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:17 crc kubenswrapper[4762]: I1009 13:26:17.373010 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:17 crc kubenswrapper[4762]: I1009 13:26:17.373045 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:17 crc kubenswrapper[4762]: I1009 13:26:17.373072 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:17Z","lastTransitionTime":"2025-10-09T13:26:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:17 crc kubenswrapper[4762]: I1009 13:26:17.476212 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:17 crc kubenswrapper[4762]: I1009 13:26:17.476311 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:17 crc kubenswrapper[4762]: I1009 13:26:17.476332 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:17 crc kubenswrapper[4762]: I1009 13:26:17.476366 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:17 crc kubenswrapper[4762]: I1009 13:26:17.476404 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:17Z","lastTransitionTime":"2025-10-09T13:26:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:17 crc kubenswrapper[4762]: I1009 13:26:17.579755 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:17 crc kubenswrapper[4762]: I1009 13:26:17.579828 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:17 crc kubenswrapper[4762]: I1009 13:26:17.579852 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:17 crc kubenswrapper[4762]: I1009 13:26:17.579884 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:17 crc kubenswrapper[4762]: I1009 13:26:17.579906 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:17Z","lastTransitionTime":"2025-10-09T13:26:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:17 crc kubenswrapper[4762]: I1009 13:26:17.683143 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:17 crc kubenswrapper[4762]: I1009 13:26:17.683207 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:17 crc kubenswrapper[4762]: I1009 13:26:17.683224 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:17 crc kubenswrapper[4762]: I1009 13:26:17.683248 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:17 crc kubenswrapper[4762]: I1009 13:26:17.683267 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:17Z","lastTransitionTime":"2025-10-09T13:26:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:17 crc kubenswrapper[4762]: I1009 13:26:17.786675 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:17 crc kubenswrapper[4762]: I1009 13:26:17.786754 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:17 crc kubenswrapper[4762]: I1009 13:26:17.786773 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:17 crc kubenswrapper[4762]: I1009 13:26:17.786801 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:17 crc kubenswrapper[4762]: I1009 13:26:17.786821 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:17Z","lastTransitionTime":"2025-10-09T13:26:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:17 crc kubenswrapper[4762]: I1009 13:26:17.889289 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:17 crc kubenswrapper[4762]: I1009 13:26:17.889326 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:17 crc kubenswrapper[4762]: I1009 13:26:17.889337 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:17 crc kubenswrapper[4762]: I1009 13:26:17.889354 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:17 crc kubenswrapper[4762]: I1009 13:26:17.889367 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:17Z","lastTransitionTime":"2025-10-09T13:26:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:17 crc kubenswrapper[4762]: I1009 13:26:17.993030 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:17 crc kubenswrapper[4762]: I1009 13:26:17.993129 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:17 crc kubenswrapper[4762]: I1009 13:26:17.993153 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:17 crc kubenswrapper[4762]: I1009 13:26:17.993182 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:17 crc kubenswrapper[4762]: I1009 13:26:17.993205 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:17Z","lastTransitionTime":"2025-10-09T13:26:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:18 crc kubenswrapper[4762]: I1009 13:26:18.095922 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:18 crc kubenswrapper[4762]: I1009 13:26:18.096043 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:18 crc kubenswrapper[4762]: I1009 13:26:18.096071 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:18 crc kubenswrapper[4762]: I1009 13:26:18.096101 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:18 crc kubenswrapper[4762]: I1009 13:26:18.096124 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:18Z","lastTransitionTime":"2025-10-09T13:26:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:18 crc kubenswrapper[4762]: I1009 13:26:18.199258 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:18 crc kubenswrapper[4762]: I1009 13:26:18.199289 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:18 crc kubenswrapper[4762]: I1009 13:26:18.199297 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:18 crc kubenswrapper[4762]: I1009 13:26:18.199363 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:18 crc kubenswrapper[4762]: I1009 13:26:18.199381 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:18Z","lastTransitionTime":"2025-10-09T13:26:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:18 crc kubenswrapper[4762]: I1009 13:26:18.302257 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:18 crc kubenswrapper[4762]: I1009 13:26:18.302293 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:18 crc kubenswrapper[4762]: I1009 13:26:18.302306 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:18 crc kubenswrapper[4762]: I1009 13:26:18.302323 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:18 crc kubenswrapper[4762]: I1009 13:26:18.302344 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:18Z","lastTransitionTime":"2025-10-09T13:26:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:18 crc kubenswrapper[4762]: I1009 13:26:18.406028 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:18 crc kubenswrapper[4762]: I1009 13:26:18.406095 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:18 crc kubenswrapper[4762]: I1009 13:26:18.406117 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:18 crc kubenswrapper[4762]: I1009 13:26:18.406145 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:18 crc kubenswrapper[4762]: I1009 13:26:18.406167 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:18Z","lastTransitionTime":"2025-10-09T13:26:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:18 crc kubenswrapper[4762]: I1009 13:26:18.509389 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:18 crc kubenswrapper[4762]: I1009 13:26:18.509497 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:18 crc kubenswrapper[4762]: I1009 13:26:18.509519 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:18 crc kubenswrapper[4762]: I1009 13:26:18.509541 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:18 crc kubenswrapper[4762]: I1009 13:26:18.509558 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:18Z","lastTransitionTime":"2025-10-09T13:26:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:18 crc kubenswrapper[4762]: I1009 13:26:18.612450 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:18 crc kubenswrapper[4762]: I1009 13:26:18.612530 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:18 crc kubenswrapper[4762]: I1009 13:26:18.612550 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:18 crc kubenswrapper[4762]: I1009 13:26:18.612585 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:18 crc kubenswrapper[4762]: I1009 13:26:18.612615 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:18Z","lastTransitionTime":"2025-10-09T13:26:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:18 crc kubenswrapper[4762]: I1009 13:26:18.715734 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:18 crc kubenswrapper[4762]: I1009 13:26:18.715806 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:18 crc kubenswrapper[4762]: I1009 13:26:18.715819 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:18 crc kubenswrapper[4762]: I1009 13:26:18.715841 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:18 crc kubenswrapper[4762]: I1009 13:26:18.715856 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:18Z","lastTransitionTime":"2025-10-09T13:26:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:18 crc kubenswrapper[4762]: I1009 13:26:18.820296 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:18 crc kubenswrapper[4762]: I1009 13:26:18.820360 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:18 crc kubenswrapper[4762]: I1009 13:26:18.820382 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:18 crc kubenswrapper[4762]: I1009 13:26:18.820417 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:18 crc kubenswrapper[4762]: I1009 13:26:18.820443 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:18Z","lastTransitionTime":"2025-10-09T13:26:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:18 crc kubenswrapper[4762]: I1009 13:26:18.923614 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:18 crc kubenswrapper[4762]: I1009 13:26:18.923749 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:18 crc kubenswrapper[4762]: I1009 13:26:18.923807 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:18 crc kubenswrapper[4762]: I1009 13:26:18.923845 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:18 crc kubenswrapper[4762]: I1009 13:26:18.923871 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:18Z","lastTransitionTime":"2025-10-09T13:26:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:18 crc kubenswrapper[4762]: I1009 13:26:18.964898 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 13:26:18 crc kubenswrapper[4762]: I1009 13:26:18.964995 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 13:26:18 crc kubenswrapper[4762]: E1009 13:26:18.965114 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 13:26:18 crc kubenswrapper[4762]: I1009 13:26:18.965221 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k4bwn" Oct 09 13:26:18 crc kubenswrapper[4762]: E1009 13:26:18.965249 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 13:26:18 crc kubenswrapper[4762]: I1009 13:26:18.964898 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 13:26:18 crc kubenswrapper[4762]: E1009 13:26:18.965414 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k4bwn" podUID="f9a76399-c2ae-487b-a52c-f0e271fb1d20" Oct 09 13:26:18 crc kubenswrapper[4762]: E1009 13:26:18.965526 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 13:26:19 crc kubenswrapper[4762]: I1009 13:26:19.027124 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:19 crc kubenswrapper[4762]: I1009 13:26:19.027223 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:19 crc kubenswrapper[4762]: I1009 13:26:19.027246 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:19 crc kubenswrapper[4762]: I1009 13:26:19.027278 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:19 crc kubenswrapper[4762]: I1009 13:26:19.027299 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:19Z","lastTransitionTime":"2025-10-09T13:26:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:19 crc kubenswrapper[4762]: I1009 13:26:19.130666 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:19 crc kubenswrapper[4762]: I1009 13:26:19.130741 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:19 crc kubenswrapper[4762]: I1009 13:26:19.130769 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:19 crc kubenswrapper[4762]: I1009 13:26:19.130798 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:19 crc kubenswrapper[4762]: I1009 13:26:19.130820 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:19Z","lastTransitionTime":"2025-10-09T13:26:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:19 crc kubenswrapper[4762]: I1009 13:26:19.233120 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:19 crc kubenswrapper[4762]: I1009 13:26:19.233181 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:19 crc kubenswrapper[4762]: I1009 13:26:19.233204 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:19 crc kubenswrapper[4762]: I1009 13:26:19.233233 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:19 crc kubenswrapper[4762]: I1009 13:26:19.233255 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:19Z","lastTransitionTime":"2025-10-09T13:26:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:19 crc kubenswrapper[4762]: I1009 13:26:19.337154 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:19 crc kubenswrapper[4762]: I1009 13:26:19.337257 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:19 crc kubenswrapper[4762]: I1009 13:26:19.337283 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:19 crc kubenswrapper[4762]: I1009 13:26:19.337308 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:19 crc kubenswrapper[4762]: I1009 13:26:19.337327 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:19Z","lastTransitionTime":"2025-10-09T13:26:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:19 crc kubenswrapper[4762]: I1009 13:26:19.440148 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:19 crc kubenswrapper[4762]: I1009 13:26:19.440249 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:19 crc kubenswrapper[4762]: I1009 13:26:19.440274 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:19 crc kubenswrapper[4762]: I1009 13:26:19.440303 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:19 crc kubenswrapper[4762]: I1009 13:26:19.440324 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:19Z","lastTransitionTime":"2025-10-09T13:26:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:19 crc kubenswrapper[4762]: I1009 13:26:19.542984 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:19 crc kubenswrapper[4762]: I1009 13:26:19.543057 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:19 crc kubenswrapper[4762]: I1009 13:26:19.543067 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:19 crc kubenswrapper[4762]: I1009 13:26:19.543091 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:19 crc kubenswrapper[4762]: I1009 13:26:19.543101 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:19Z","lastTransitionTime":"2025-10-09T13:26:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:19 crc kubenswrapper[4762]: I1009 13:26:19.645323 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:19 crc kubenswrapper[4762]: I1009 13:26:19.645384 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:19 crc kubenswrapper[4762]: I1009 13:26:19.645406 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:19 crc kubenswrapper[4762]: I1009 13:26:19.645436 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:19 crc kubenswrapper[4762]: I1009 13:26:19.645457 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:19Z","lastTransitionTime":"2025-10-09T13:26:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:19 crc kubenswrapper[4762]: I1009 13:26:19.748493 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:19 crc kubenswrapper[4762]: I1009 13:26:19.749061 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:19 crc kubenswrapper[4762]: I1009 13:26:19.749291 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:19 crc kubenswrapper[4762]: I1009 13:26:19.749476 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:19 crc kubenswrapper[4762]: I1009 13:26:19.749625 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:19Z","lastTransitionTime":"2025-10-09T13:26:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:19 crc kubenswrapper[4762]: I1009 13:26:19.853444 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:19 crc kubenswrapper[4762]: I1009 13:26:19.853506 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:19 crc kubenswrapper[4762]: I1009 13:26:19.853529 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:19 crc kubenswrapper[4762]: I1009 13:26:19.853556 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:19 crc kubenswrapper[4762]: I1009 13:26:19.853580 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:19Z","lastTransitionTime":"2025-10-09T13:26:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:19 crc kubenswrapper[4762]: I1009 13:26:19.956481 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:19 crc kubenswrapper[4762]: I1009 13:26:19.956542 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:19 crc kubenswrapper[4762]: I1009 13:26:19.956574 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:19 crc kubenswrapper[4762]: I1009 13:26:19.956614 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:19 crc kubenswrapper[4762]: I1009 13:26:19.956686 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:19Z","lastTransitionTime":"2025-10-09T13:26:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:19 crc kubenswrapper[4762]: I1009 13:26:19.966168 4762 scope.go:117] "RemoveContainer" containerID="3a5b3dbf7adcc609a6fbc414eb3eba302ecf6355c195221fa03ebe90a8b4eb84" Oct 09 13:26:20 crc kubenswrapper[4762]: I1009 13:26:20.058980 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:20 crc kubenswrapper[4762]: I1009 13:26:20.059275 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:20 crc kubenswrapper[4762]: I1009 13:26:20.059284 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:20 crc kubenswrapper[4762]: I1009 13:26:20.059298 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:20 crc kubenswrapper[4762]: I1009 13:26:20.059306 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:20Z","lastTransitionTime":"2025-10-09T13:26:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:20 crc kubenswrapper[4762]: I1009 13:26:20.162028 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:20 crc kubenswrapper[4762]: I1009 13:26:20.162066 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:20 crc kubenswrapper[4762]: I1009 13:26:20.162082 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:20 crc kubenswrapper[4762]: I1009 13:26:20.162102 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:20 crc kubenswrapper[4762]: I1009 13:26:20.162118 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:20Z","lastTransitionTime":"2025-10-09T13:26:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:20 crc kubenswrapper[4762]: I1009 13:26:20.264768 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:20 crc kubenswrapper[4762]: I1009 13:26:20.264814 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:20 crc kubenswrapper[4762]: I1009 13:26:20.264827 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:20 crc kubenswrapper[4762]: I1009 13:26:20.264846 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:20 crc kubenswrapper[4762]: I1009 13:26:20.264861 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:20Z","lastTransitionTime":"2025-10-09T13:26:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:20 crc kubenswrapper[4762]: I1009 13:26:20.319208 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-jl67g_92662de9-9784-432a-92d2-a668f815e8fd/ovnkube-controller/1.log" Oct 09 13:26:20 crc kubenswrapper[4762]: I1009 13:26:20.322349 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" event={"ID":"92662de9-9784-432a-92d2-a668f815e8fd","Type":"ContainerStarted","Data":"1dc4404ab9e73409695946479f2738c0f0af702da13af2d1995bf63ef0e493d0"} Oct 09 13:26:20 crc kubenswrapper[4762]: I1009 13:26:20.322991 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" Oct 09 13:26:20 crc kubenswrapper[4762]: I1009 13:26:20.340376 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:20Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:20 crc kubenswrapper[4762]: I1009 13:26:20.364846 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d2fc9df0c1cc14b4b9f8caff51e87059aeffaa2daeeb271d55585f7297d003c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4194b9211ce31f24383b3cabd274bfb9afef0c56583b802cb2c934ba81b05c43\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:20Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:20 crc kubenswrapper[4762]: I1009 13:26:20.366790 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:20 crc kubenswrapper[4762]: I1009 13:26:20.366831 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:20 crc kubenswrapper[4762]: I1009 13:26:20.366844 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:20 crc kubenswrapper[4762]: I1009 13:26:20.366898 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:20 crc kubenswrapper[4762]: I1009 13:26:20.366914 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:20Z","lastTransitionTime":"2025-10-09T13:26:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:20 crc kubenswrapper[4762]: I1009 13:26:20.386948 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"366049a3-acf6-488c-9f93-4557528d6d14\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbaae79b08d94e58f88c25cf641c2c24edc8f8ed5d5ffbf5fd3c68b24246a964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://be56bae2e58091d7381288b22608ea1d9ff05c002d923b3dc62b87fe4d4dfdc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5v6hv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:20Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:20 crc kubenswrapper[4762]: I1009 13:26:20.398332 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-k4bwn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9a76399-c2ae-487b-a52c-f0e271fb1d20\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vrjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vrjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:58Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-k4bwn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:20Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:20 crc kubenswrapper[4762]: I1009 13:26:20.409443 4762 status_manager.go:875] 
"Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dd0d2d4c-667f-43da-8074-b6e14823b755\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78a7e23eb6d5024d626963a06cf5790fcd6c7c17c82c823b2650c55273e427fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f807722a8b6059afed30f7f1fd32bcc168b8bf9d5eee02d74a42ab70ae5ff048\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1706f7f512083d1da015da3c7cd09c6aa4d497b83f8dfcd4ce0e8e966aa00b37\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd9c69d0be5e859f
86da1745bdf82f003681f064e2580bfd454e6ba875bdcb61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:25Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:20Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:20 crc kubenswrapper[4762]: I1009 13:26:20.419727 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b59b56cd547bd0e2f72ad712b04651a4ae65cf3a0df7865c0b0c16478261a06a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:20Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:20 crc kubenswrapper[4762]: I1009 13:26:20.431018 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:20Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:20 crc kubenswrapper[4762]: I1009 13:26:20.448980 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"92662de9-9784-432a-92d2-a668f815e8fd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c409df5880861cef6885822a19dc9bbe481342a849c18ac11c85a60fcee0f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9dbd3d536f2ff0e46947e1516b3b9def208d490f5e62bbde5bebf37690d26ac0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a6b9ddcf6f9632e0ab1ac7f145c90d4c1e404b44f6e4fdc547fa42a4736448d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1fe15b6fa2a4089c0ef0b19180a44b570bf28aeb719e8fb5c960c16f3bc3ee5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://443d1d116c58f5d8b2c5fc9051baf914244cb0776b1f912d11fe4316a0ec0567\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a6b026d474235bbb7b31530b4628a10c35b22baf4ee49759f28a9beb8177989\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1dc4404ab9e73409695946479f2738c0f0af702da13af2d1995bf63ef0e493d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a5b3dbf7adcc609a6fbc414eb3eba302ecf6355c195221fa03ebe90a8b4eb84\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T13:25:56Z\\\",\\\"message\\\":\\\"81 6162 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:56Z is after 2025-08-24T17:21:41Z]\\\\nI1009 13:25:56.084172 6162 services_controller.go:451] Built service openshift-ingress/router-internal-default cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-ingress/router-internal-default_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-ingress/router-internal-default\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, 
Rules:[]serv\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:55Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:26:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f5be977653547c33b4d2d5184688120b32866045e3b18a08be1c7c406d6b498\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\"
:[{\\\"containerID\\\":\\\"cri-o://bd03e6b064630a1cd71d2e88a4e99d513b30d0f225516ce8030cba879fe2422e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd03e6b064630a1cd71d2e88a4e99d513b30d0f225516ce8030cba879fe2422e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jl67g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:20Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:20 crc kubenswrapper[4762]: I1009 13:26:20.459738 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"804a251f-1e3f-4b9e-af10-eefa332e6e98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb1fed96810d93e59d24d69fb8a2eb9974e8a5e524465daea3ff2cca16f7226c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7ad35bed43586dfec502056ecb5226049a7fb25461c2774fb5377102fd2ce85d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d3cc3e07b5fc9069f7faa521fa94efde90d9c79940876a93849c44a14327e56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a492a7a956d54533afbd3869796642235d0c8ca621c550c14eeab5e988fc4364\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a492a7a956d54533afbd3869796642235d0c8ca621c550c14eeab5e988fc4364\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:25Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:20Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:20 crc kubenswrapper[4762]: I1009 13:26:20.469646 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:20 crc kubenswrapper[4762]: I1009 13:26:20.469686 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:20 crc kubenswrapper[4762]: I1009 13:26:20.469698 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:20 crc kubenswrapper[4762]: I1009 13:26:20.469716 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:20 crc kubenswrapper[4762]: I1009 13:26:20.469728 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:20Z","lastTransitionTime":"2025-10-09T13:26:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:20 crc kubenswrapper[4762]: I1009 13:26:20.471947 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42c46f24d4579b9ef6d5f7a351830fc24872a571e6bc26a163bb5ace1e688037\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:20Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:20 crc kubenswrapper[4762]: I1009 13:26:20.481431 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2vkbh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"263d57f9-b10b-4ce1-adad-774600b977d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://732b66ff58c48b0703e0fd4585768652035af6797f66b586fc6f17ef3937d9d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gmcr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2vkbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:20Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:20 crc kubenswrapper[4762]: I1009 13:26:20.497488 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-9wtqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c847aae6-277a-45dc-86d0-9b175f7e8177\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbc232c96b60c8678588d4902c8dfbf6fc0b30f8af768295c963aad3a9f4d644\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2kljt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-9wtqb\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:20Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:20 crc kubenswrapper[4762]: I1009 13:26:20.510287 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fx92z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf0dada3-5765-4a2e-b28a-f9291c2d6428\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6368fc75a63ebf7915390457c69a6a9b77e19726ba182437f5c616bea12245f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xpd4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aeff516f9d81e48c8bd350da51ebffc8f6f031f06100a5264cb7aca04674f79f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xpd4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\
\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fx92z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:20Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:20 crc kubenswrapper[4762]: I1009 13:26:20.524543 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7bd1ecbd-1492-4e6a-87e8-1c913e084d9d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9ff1efe69d256b491a039e5f35442c087ce3b52fc7abf98b338e24c3e020b99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb2071dd369674ca2de7de56dd1250c763b8733d72889b60eff864774dc3d81b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e44b6f2021a1a4ccd714f86443c7cc235b9d77cd455e68f7e042281ff0917569\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-oper
ator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f33b9070a56fe51d2f39d9d509fc8cea2fada696703209c911b75f5c8f53e96d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9179f90a1a9a3c70467429b0471320ccf51b67f27c4d28d22ebc477cedab17d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1009 13:25:38.564754 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 13:25:38.572923 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2393231961/tls.crt::/tmp/serving-cert-2393231961/tls.key\\\\\\\"\\\\nI1009 13:25:44.418137 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 13:25:44.425303 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 13:25:44.425330 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 13:25:44.425348 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 13:25:44.425353 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 13:25:44.434300 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1009 13:25:44.434319 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1009 13:25:44.434323 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 13:25:44.434340 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 13:25:44.434344 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 13:25:44.434347 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 13:25:44.434350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 13:25:44.434353 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1009 13:25:44.436492 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:28Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://281873dc615f940d39a13cb0a18a2eb34eb7de3f9773d8845183edeb89d430f5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b0cc5e4351c64f1a4f07f8ec87ea48ddab393d4ac64228e8fbf20d3259fb630\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6b0cc5e4351c64f1a4f07f8ec87ea48ddab393d4ac64228e8fbf20d3259fb630\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:25Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:20Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:20 crc kubenswrapper[4762]: I1009 13:26:20.534715 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bj499" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b164d4fd-19df-4902-971f-5efe403e61e0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ab43a3e93731e2de1e0260a1933cc78188e7b561cb450d645c7b4fc00b691ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqf4b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:51Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bj499\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:20Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:20 crc kubenswrapper[4762]: I1009 13:26:20.547188 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:20Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:20 crc kubenswrapper[4762]: I1009 13:26:20.563377 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-n6lnd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf8b8ba7-96cd-4cdd-9925-94dd98242050\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ff1d6f7e6b424ebf8005fa3d140897100a8e7ed0095e7af05531cf5ad9f69b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\
\\":[{\\\"containerID\\\":\\\"cri-o://38f8c3c9395cbaf4a6426349a070b2d3b4ba4f83af8f5272a33d617f456c2e38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://38f8c3c9395cbaf4a6426349a070b2d3b4ba4f83af8f5272a33d617f456c2e38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a83c8fa2a9b44c19879eaa27ee0aceb5aa4f0c2d70347e497a62fc1ca236807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a83c8fa2a9b44c19879eaa27ee0aceb5aa4f0c2d70347e497a62fc1ca236807\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ece4883665cbaf9a3045d5ddde5584cc4cd6c3acb5a8bb3acbf7eaee644796e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ece4883665cbaf9a3045d5ddde5584cc4cd6c3acb5a8bb3acbf7eaee644796e9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:47Z
\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1aae59e2ff2b1d48f9d2b5d13c30ccc155f0ba8e2657ec99020d6c6a8977495e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1aae59e2ff2b1d48f9d2b5d13c30ccc155f0ba8e2657ec99020d6c6a8977495e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f1ff0c4b2b07617d5ea81b1d88ea74124e4739d667f1ad72eb36dad3d48e7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96f1ff0c4b2b07617d5ea81b1d88ea74124e4739d667f1ad72eb36dad3d48e7f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa9e24a4bd37675e1c6b4f6ba08d60b722add0bbcdf9eb51ace2e3fb35143d0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\"
,\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa9e24a4bd37675e1c6b4f6ba08d60b722add0bbcdf9eb51ace2e3fb35143d0d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-n6lnd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:20Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:20 crc kubenswrapper[4762]: I1009 13:26:20.572471 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:20 crc kubenswrapper[4762]: I1009 13:26:20.572532 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:20 crc kubenswrapper[4762]: I1009 13:26:20.572542 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:20 crc kubenswrapper[4762]: I1009 13:26:20.572560 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:20 crc kubenswrapper[4762]: I1009 13:26:20.572570 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:20Z","lastTransitionTime":"2025-10-09T13:26:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:20 crc kubenswrapper[4762]: I1009 13:26:20.674977 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:20 crc kubenswrapper[4762]: I1009 13:26:20.675046 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:20 crc kubenswrapper[4762]: I1009 13:26:20.675067 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:20 crc kubenswrapper[4762]: I1009 13:26:20.675088 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:20 crc kubenswrapper[4762]: I1009 13:26:20.675103 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:20Z","lastTransitionTime":"2025-10-09T13:26:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:20 crc kubenswrapper[4762]: I1009 13:26:20.778077 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:20 crc kubenswrapper[4762]: I1009 13:26:20.778140 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:20 crc kubenswrapper[4762]: I1009 13:26:20.778156 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:20 crc kubenswrapper[4762]: I1009 13:26:20.778178 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:20 crc kubenswrapper[4762]: I1009 13:26:20.778195 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:20Z","lastTransitionTime":"2025-10-09T13:26:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:20 crc kubenswrapper[4762]: I1009 13:26:20.881616 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:20 crc kubenswrapper[4762]: I1009 13:26:20.881721 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:20 crc kubenswrapper[4762]: I1009 13:26:20.881743 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:20 crc kubenswrapper[4762]: I1009 13:26:20.881771 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:20 crc kubenswrapper[4762]: I1009 13:26:20.881792 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:20Z","lastTransitionTime":"2025-10-09T13:26:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:20 crc kubenswrapper[4762]: I1009 13:26:20.964237 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 13:26:20 crc kubenswrapper[4762]: E1009 13:26:20.964446 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 13:26:20 crc kubenswrapper[4762]: I1009 13:26:20.964267 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 13:26:20 crc kubenswrapper[4762]: E1009 13:26:20.964572 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 13:26:20 crc kubenswrapper[4762]: I1009 13:26:20.965915 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k4bwn" Oct 09 13:26:20 crc kubenswrapper[4762]: I1009 13:26:20.966026 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 13:26:20 crc kubenswrapper[4762]: E1009 13:26:20.966353 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k4bwn" podUID="f9a76399-c2ae-487b-a52c-f0e271fb1d20" Oct 09 13:26:20 crc kubenswrapper[4762]: E1009 13:26:20.966680 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 13:26:20 crc kubenswrapper[4762]: I1009 13:26:20.984918 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:20 crc kubenswrapper[4762]: I1009 13:26:20.984969 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:20 crc kubenswrapper[4762]: I1009 13:26:20.984982 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:20 crc kubenswrapper[4762]: I1009 13:26:20.985004 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:20 crc kubenswrapper[4762]: I1009 13:26:20.985017 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:20Z","lastTransitionTime":"2025-10-09T13:26:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:21 crc kubenswrapper[4762]: I1009 13:26:21.087828 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:21 crc kubenswrapper[4762]: I1009 13:26:21.087874 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:21 crc kubenswrapper[4762]: I1009 13:26:21.087886 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:21 crc kubenswrapper[4762]: I1009 13:26:21.087906 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:21 crc kubenswrapper[4762]: I1009 13:26:21.087920 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:21Z","lastTransitionTime":"2025-10-09T13:26:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:21 crc kubenswrapper[4762]: I1009 13:26:21.191171 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:21 crc kubenswrapper[4762]: I1009 13:26:21.191250 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:21 crc kubenswrapper[4762]: I1009 13:26:21.191276 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:21 crc kubenswrapper[4762]: I1009 13:26:21.191305 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:21 crc kubenswrapper[4762]: I1009 13:26:21.191327 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:21Z","lastTransitionTime":"2025-10-09T13:26:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:21 crc kubenswrapper[4762]: I1009 13:26:21.294530 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:21 crc kubenswrapper[4762]: I1009 13:26:21.294687 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:21 crc kubenswrapper[4762]: I1009 13:26:21.294713 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:21 crc kubenswrapper[4762]: I1009 13:26:21.294746 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:21 crc kubenswrapper[4762]: I1009 13:26:21.294769 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:21Z","lastTransitionTime":"2025-10-09T13:26:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:21 crc kubenswrapper[4762]: I1009 13:26:21.328605 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-jl67g_92662de9-9784-432a-92d2-a668f815e8fd/ovnkube-controller/2.log" Oct 09 13:26:21 crc kubenswrapper[4762]: I1009 13:26:21.329411 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-jl67g_92662de9-9784-432a-92d2-a668f815e8fd/ovnkube-controller/1.log" Oct 09 13:26:21 crc kubenswrapper[4762]: I1009 13:26:21.333283 4762 generic.go:334] "Generic (PLEG): container finished" podID="92662de9-9784-432a-92d2-a668f815e8fd" containerID="1dc4404ab9e73409695946479f2738c0f0af702da13af2d1995bf63ef0e493d0" exitCode=1 Oct 09 13:26:21 crc kubenswrapper[4762]: I1009 13:26:21.333325 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" event={"ID":"92662de9-9784-432a-92d2-a668f815e8fd","Type":"ContainerDied","Data":"1dc4404ab9e73409695946479f2738c0f0af702da13af2d1995bf63ef0e493d0"} Oct 09 13:26:21 crc kubenswrapper[4762]: I1009 13:26:21.333376 4762 scope.go:117] "RemoveContainer" containerID="3a5b3dbf7adcc609a6fbc414eb3eba302ecf6355c195221fa03ebe90a8b4eb84" Oct 09 13:26:21 crc kubenswrapper[4762]: I1009 13:26:21.335438 4762 scope.go:117] "RemoveContainer" containerID="1dc4404ab9e73409695946479f2738c0f0af702da13af2d1995bf63ef0e493d0" Oct 09 13:26:21 crc kubenswrapper[4762]: E1009 13:26:21.335725 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-jl67g_openshift-ovn-kubernetes(92662de9-9784-432a-92d2-a668f815e8fd)\"" pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" podUID="92662de9-9784-432a-92d2-a668f815e8fd" Oct 09 13:26:21 crc kubenswrapper[4762]: I1009 13:26:21.357876 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:21Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:21 crc kubenswrapper[4762]: I1009 13:26:21.375907 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-n6lnd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf8b8ba7-96cd-4cdd-9925-94dd98242050\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ff1d6f7e6b424ebf8005fa3d140897100a8e7ed0095e7af05531cf5ad9f69b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://38f8c3c9395cbaf4a6426349a070b2d3b4ba4f83af8f5272a33d617f456c2e38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"s
tarted\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://38f8c3c9395cbaf4a6426349a070b2d3b4ba4f83af8f5272a33d617f456c2e38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a83c8fa2a9b44c19879eaa27ee0aceb5aa4f0c2d70347e497a62fc1ca236807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a83c8fa2a9b44c19879eaa27ee0aceb5aa4f0c2d70347e497a62fc1ca236807\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ece4883665cbaf9a3045d5ddde5584cc4cd6c3acb5a8bb3acbf7eaee644796e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ece4883665cbaf9a3045d5ddde5584cc4cd6c3acb5a8bb3acbf7eaee644796e9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"
}]},{\\\"containerID\\\":\\\"cri-o://1aae59e2ff2b1d48f9d2b5d13c30ccc155f0ba8e2657ec99020d6c6a8977495e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1aae59e2ff2b1d48f9d2b5d13c30ccc155f0ba8e2657ec99020d6c6a8977495e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f1ff0c4b2b07617d5ea81b1d88ea74124e4739d667f1ad72eb36dad3d48e7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96f1ff0c4b2b07617d5ea81b1d88ea74124e4739d667f1ad72eb36dad3d48e7f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa9e24a4bd37675e1c6b4f6ba08d60b722add0bbcdf9eb51ace2e3fb35143d0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa9e24a4bd37675e1c6b4f6ba08d60b722add0bbcdf9eb51ace2e3fb35143d0d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":
\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-n6lnd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:21Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:21 crc kubenswrapper[4762]: I1009 13:26:21.391231 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bj499" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b164d4fd-19df-4902-971f-5efe403e61e0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ab43a3e93731e2de1e0260a1933cc78188e7b561cb450d645c7b4fc00b691ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqf4b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:51Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bj499\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:21Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:21 crc kubenswrapper[4762]: I1009 
13:26:21.399094 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:21 crc kubenswrapper[4762]: I1009 13:26:21.399121 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:21 crc kubenswrapper[4762]: I1009 13:26:21.399131 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:21 crc kubenswrapper[4762]: I1009 13:26:21.399146 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:21 crc kubenswrapper[4762]: I1009 13:26:21.399157 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:21Z","lastTransitionTime":"2025-10-09T13:26:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:21 crc kubenswrapper[4762]: I1009 13:26:21.407034 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"366049a3-acf6-488c-9f93-4557528d6d14\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbaae79b08d94e58f88c25cf641c2c24edc8f8ed5d5ffbf5fd3c68b24246a964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://be56bae2e58091d7381288b22608ea1d9ff05c002d923b3dc62b87fe4d4dfdc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io
/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5v6hv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:21Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:21 crc kubenswrapper[4762]: I1009 13:26:21.425721 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-k4bwn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9a76399-c2ae-487b-a52c-f0e271fb1d20\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vrjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vrjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:58Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-k4bwn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:21Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:21 crc kubenswrapper[4762]: I1009 13:26:21.443618 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dd0d2d4c-667f-43da-8074-b6e14823b755\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78a7e23eb6d5024d626963a06cf5790fcd6c7c17c82c823b2650c55273e427fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f807722a8b6059afed30f7f1fd32bcc168b8bf9d5eee02d74a42ab70ae5ff048\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1706f7f512083d1da015da3c7cd09c6aa4d497b83f8dfcd4ce0e8e966aa00b37\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd9c69d0be5e859f86da1745bdf82f003681f064e2580bfd454e6ba875bdcb61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:25Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:21Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:21 crc kubenswrapper[4762]: I1009 13:26:21.456284 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:21Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:21 crc kubenswrapper[4762]: I1009 13:26:21.468864 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d2fc9df0c1cc14b4b9f8caff51e87059aeffaa2daeeb271d55585f7297d003c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4194b9211ce31f24383b3cabd274bfb9afef0c56583b802cb2c934ba81b05c43\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:21Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:21 crc kubenswrapper[4762]: I1009 13:26:21.487743 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"92662de9-9784-432a-92d2-a668f815e8fd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c409df5880861cef6885822a19dc9bbe481342a849c18ac11c85a60fcee0f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9dbd3d536f2ff0e46947e1516b3b9def208d490f5e62bbde5bebf37690d26ac0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a6b9ddcf6f9632e0ab1ac7f145c90d4c1e404b44f6e4fdc547fa42a4736448d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1fe15b6fa2a4089c0ef0b19180a44b570bf28aeb719e8fb5c960c16f3bc3ee5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://443d1d116c58f5d8b2c5fc9051baf914244cb0776b1f912d11fe4316a0ec0567\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a6b026d474235bbb7b31530b4628a10c35b22baf4ee49759f28a9beb8177989\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1dc4404ab9e73409695946479f2738c0f0af702d
a13af2d1995bf63ef0e493d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a5b3dbf7adcc609a6fbc414eb3eba302ecf6355c195221fa03ebe90a8b4eb84\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T13:25:56Z\\\",\\\"message\\\":\\\"81 6162 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:25:56Z is after 2025-08-24T17:21:41Z]\\\\nI1009 13:25:56.084172 6162 services_controller.go:451] Built service openshift-ingress/router-internal-default cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-ingress/router-internal-default_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-ingress/router-internal-default\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]serv\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:55Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1dc4404ab9e73409695946479f2738c0f0af702da13af2d1995bf63ef0e493d0\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T13:26:20Z\\\",\\\"message\\\":\\\"ble to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:20Z is after 2025-08-24T17:21:41Z]\\\\nI1009 13:26:20.768193 6440 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-kube-controller-manager-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"3ec9f67e-7758-4707-a6d0-2dc28f28ac37\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", 
ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-kube-controller-manager-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterL\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T13:26:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f5be977653547c33b4d2d5184688120b32866045e3b18a08be1c7c406d6b498\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secret
s/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bd03e6b064630a1cd71d2e88a4e99d513b30d0f225516ce8030cba879fe2422e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd03e6b064630a1cd71d2e88a4e99d513b30d0f225516ce8030cba879fe2422e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jl67g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:21Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:21 crc kubenswrapper[4762]: I1009 13:26:21.498289 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"804a251f-1e3f-4b9e-af10-eefa332e6e98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb1fed96810d93e59d24d69fb8a2eb9974e8a5e524465daea3ff2cca16f7226c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7ad35bed43586dfec502056ecb5226049a7fb25461c2774fb5377102fd2ce85d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d3cc3e07b5fc9069f7faa521fa94efde90d9c79940876a93849c44a14327e56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a492a7a956d54533afbd3869796642235d0c8ca621c550c14eeab5e988fc4364\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a492a7a956d54533afbd3869796642235d0c8ca621c550c14eeab5e988fc4364\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:25Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:21Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:21 crc kubenswrapper[4762]: I1009 13:26:21.502143 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:21 crc kubenswrapper[4762]: I1009 13:26:21.502175 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:21 crc kubenswrapper[4762]: I1009 13:26:21.502187 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:21 crc kubenswrapper[4762]: I1009 13:26:21.502202 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:21 crc kubenswrapper[4762]: I1009 13:26:21.502213 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:21Z","lastTransitionTime":"2025-10-09T13:26:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:21 crc kubenswrapper[4762]: I1009 13:26:21.513879 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:21 crc kubenswrapper[4762]: I1009 13:26:21.514133 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:21 crc kubenswrapper[4762]: I1009 13:26:21.514264 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:21 crc kubenswrapper[4762]: I1009 13:26:21.514471 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:21 crc kubenswrapper[4762]: I1009 13:26:21.514617 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:21Z","lastTransitionTime":"2025-10-09T13:26:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:21 crc kubenswrapper[4762]: I1009 13:26:21.529790 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42c46f24d4579b9ef6d5f7a351830fc24872a571e6bc26a163bb5ace1e688037\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:21Z is after 2025-08-24T17:21:41Z" 
Oct 09 13:26:21 crc kubenswrapper[4762]: E1009 13:26:21.543624 4762 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:21Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:21Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9a48ddb1-1645-4cf1-ba92-96ea5fd03a1b\\\",\\\"systemUUID\\\":\\\"cb0479c9-186e-453b-880a-de1db201ede6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:21Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:21 crc kubenswrapper[4762]: I1009 13:26:21.548683 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:21 crc kubenswrapper[4762]: I1009 13:26:21.548984 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 09 13:26:21 crc kubenswrapper[4762]: I1009 13:26:21.549071 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:21 crc kubenswrapper[4762]: I1009 13:26:21.549156 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:21 crc kubenswrapper[4762]: I1009 13:26:21.549251 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:21Z","lastTransitionTime":"2025-10-09T13:26:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:21 crc kubenswrapper[4762]: I1009 13:26:21.560333 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b59b56cd547bd0e2f72ad712b04651a4ae65cf3a0df7865c0b0c16478261a06a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:21Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:21 crc kubenswrapper[4762]: E1009 13:26:21.564522 4762 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status 
\"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:21Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:21Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae
669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-r
elease-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-
art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9a48ddb1-1645-4cf1-ba92-96ea5fd03a1b\\\",\\\"systemUUID\\\":\\\"cb0479c9-186e-453b-880a-de1db201ede6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:21Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:21 crc kubenswrapper[4762]: I1009 13:26:21.567677 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:21 crc kubenswrapper[4762]: I1009 13:26:21.567793 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:21 crc kubenswrapper[4762]: I1009 13:26:21.567877 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:21 crc kubenswrapper[4762]: I1009 13:26:21.567981 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:21 crc kubenswrapper[4762]: I1009 13:26:21.568059 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:21Z","lastTransitionTime":"2025-10-09T13:26:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:21 crc kubenswrapper[4762]: I1009 13:26:21.571006 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:21Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:21 crc kubenswrapper[4762]: E1009 13:26:21.579139 4762 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory 
available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:21Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:21Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\
"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":45063
7738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9a48ddb1-1645-4cf1-ba92-96ea5fd03a1b\\\",\\\"systemUUID\\\":\\\"cb0479c9-186e-453b-880a-de1db201ede6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:21Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:21 crc kubenswrapper[4762]: I1009 13:26:21.581562 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fx92z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf0dada3-5765-4a2e-b28a-f9291c2d6428\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6368fc75a63ebf7915390457c69a6a9b77e19726ba182437f5c616bea12245f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xpd4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aeff516f9d81e48c8bd350da51ebffc8f6f031f06100a5264cb7aca04674f79f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"
name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xpd4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fx92z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:21Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:21 crc kubenswrapper[4762]: I1009 13:26:21.582229 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:21 crc kubenswrapper[4762]: I1009 13:26:21.582258 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:21 crc kubenswrapper[4762]: I1009 13:26:21.582267 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:21 crc kubenswrapper[4762]: I1009 13:26:21.582281 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:21 crc kubenswrapper[4762]: I1009 13:26:21.582311 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:21Z","lastTransitionTime":"2025-10-09T13:26:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:21 crc kubenswrapper[4762]: E1009 13:26:21.594734 4762 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:21Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:21Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9a48ddb1-1645-4cf1-ba92-96ea5fd03a1b\\\",\\\"systemUUID\\\":\\\"cb0479c9-186e-453b-880a-de1db201ede6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:21Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:21 crc kubenswrapper[4762]: I1009 13:26:21.595997 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7bd1ecbd-1492-4e6a-87e8-1c913e084d9d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9ff1efe69d256b491a039e5f35442c087ce3b52fc7abf98b338e24c3e020b99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb2071dd369674ca2de7de56dd1250c763b8733d72889b60eff864774dc3d81b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e44b6f2021a1a4ccd714f86443c7cc235b9d77cd455e68f7e042281ff0917569\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f33b9070a56fe51d2f39d9d509fc8cea2fada696703209c911b75f5c8f53e96d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9179f90a1a9a3c70467429b0471320ccf51b67f27c4d28d22ebc477cedab17d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1009 13:25:38.564754 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 13:25:38.572923 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2393231961/tls.crt::/tmp/serving-cert-2393231961/tls.key\\\\\\\"\\\\nI1009 13:25:44.418137 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 13:25:44.425303 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 13:25:44.425330 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 13:25:44.425348 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 13:25:44.425353 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 13:25:44.434300 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1009 13:25:44.434319 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1009 13:25:44.434323 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 13:25:44.434340 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 13:25:44.434344 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 13:25:44.434347 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 13:25:44.434350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 13:25:44.434353 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1009 13:25:44.436492 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:28Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://281873dc615f940d39a13cb0a18a2eb34eb7de3f9773d8845183edeb89d430f5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b0cc5e4351c64f1a4f07f8ec87ea48ddab393d4ac64228e8fbf20d3259fb630\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6b0cc5e4351c64f1a4f07f8ec87ea48ddab393d4ac64228e8fbf20d3259fb630\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:25Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:21Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:21 crc kubenswrapper[4762]: I1009 13:26:21.598391 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:21 crc kubenswrapper[4762]: I1009 13:26:21.598448 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:21 crc kubenswrapper[4762]: I1009 13:26:21.598458 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:21 crc kubenswrapper[4762]: I1009 13:26:21.598473 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:21 crc kubenswrapper[4762]: I1009 13:26:21.598483 4762 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:21Z","lastTransitionTime":"2025-10-09T13:26:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:21 crc kubenswrapper[4762]: I1009 13:26:21.606437 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2vkbh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"263d57f9-b10b-4ce1-adad-774600b977d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://732b66ff58c48b0703e0fd4585768652035af6797f66b586fc6f17ef3937d9d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gmcr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2vkbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:21Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:21 crc kubenswrapper[4762]: E1009 13:26:21.610978 4762 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status 
\"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:21Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:21Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae
669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-r
elease-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-
art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9a48ddb1-1645-4cf1-ba92-96ea5fd03a1b\\\",\\\"systemUUID\\\":\\\"cb0479c9-186e-453b-880a-de1db201ede6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:21Z is after 2025-08-24T17:21:41Z"
Oct 09 13:26:21 crc kubenswrapper[4762]: E1009 13:26:21.611141 4762 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count"
Oct 09 13:26:21 crc kubenswrapper[4762]: I1009 13:26:21.612507 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 09 13:26:21 crc kubenswrapper[4762]: I1009 13:26:21.612537 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 09 13:26:21 crc kubenswrapper[4762]: I1009 13:26:21.612545 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 09 13:26:21 crc kubenswrapper[4762]: I1009 13:26:21.612557 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 09 13:26:21 crc kubenswrapper[4762]: I1009 13:26:21.612567 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:21Z","lastTransitionTime":"2025-10-09T13:26:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:21 crc kubenswrapper[4762]: I1009 13:26:21.620046 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-9wtqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c847aae6-277a-45dc-86d0-9b175f7e8177\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbc232c96b60c8678588d4902c8dfbf6fc0b30f8af768295c963aad3a9f4d644\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2kljt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-9wtqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:21Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:21 crc kubenswrapper[4762]: I1009 13:26:21.716220 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:21 crc kubenswrapper[4762]: I1009 13:26:21.716288 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:21 crc kubenswrapper[4762]: I1009 13:26:21.716313 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:21 crc kubenswrapper[4762]: I1009 13:26:21.716340 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:21 crc kubenswrapper[4762]: I1009 13:26:21.716362 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:21Z","lastTransitionTime":"2025-10-09T13:26:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:21 crc kubenswrapper[4762]: I1009 13:26:21.819134 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:21 crc kubenswrapper[4762]: I1009 13:26:21.819196 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:21 crc kubenswrapper[4762]: I1009 13:26:21.819213 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:21 crc kubenswrapper[4762]: I1009 13:26:21.819237 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:21 crc kubenswrapper[4762]: I1009 13:26:21.819254 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:21Z","lastTransitionTime":"2025-10-09T13:26:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Oct 09 13:26:21 crc kubenswrapper[4762]: I1009 13:26:21.922439 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 09 13:26:21 crc kubenswrapper[4762]: I1009 13:26:21.922504 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 09 13:26:21 crc kubenswrapper[4762]: I1009 13:26:21.922521 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 09 13:26:21 crc kubenswrapper[4762]: I1009 13:26:21.922545 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 09 13:26:21 crc kubenswrapper[4762]: I1009 13:26:21.922562 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:21Z","lastTransitionTime":"2025-10-09T13:26:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 09 13:26:22 crc kubenswrapper[4762]: I1009 13:26:22.025264 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 09 13:26:22 crc kubenswrapper[4762]: I1009 13:26:22.025290 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 09 13:26:22 crc kubenswrapper[4762]: I1009 13:26:22.025300 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 09 13:26:22 crc kubenswrapper[4762]: I1009 13:26:22.025314 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 09 13:26:22 crc kubenswrapper[4762]: I1009 13:26:22.025324 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:22Z","lastTransitionTime":"2025-10-09T13:26:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 09 13:26:22 crc kubenswrapper[4762]: I1009 13:26:22.127794 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 09 13:26:22 crc kubenswrapper[4762]: I1009 13:26:22.127848 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 09 13:26:22 crc kubenswrapper[4762]: I1009 13:26:22.127867 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 09 13:26:22 crc kubenswrapper[4762]: I1009 13:26:22.127891 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 09 13:26:22 crc kubenswrapper[4762]: I1009 13:26:22.127910 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:22Z","lastTransitionTime":"2025-10-09T13:26:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Oct 09 13:26:22 crc kubenswrapper[4762]: I1009 13:26:22.230320 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 09 13:26:22 crc kubenswrapper[4762]: I1009 13:26:22.230348 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 09 13:26:22 crc kubenswrapper[4762]: I1009 13:26:22.230358 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 09 13:26:22 crc kubenswrapper[4762]: I1009 13:26:22.230373 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 09 13:26:22 crc kubenswrapper[4762]: I1009 13:26:22.230382 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:22Z","lastTransitionTime":"2025-10-09T13:26:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 09 13:26:22 crc kubenswrapper[4762]: I1009 13:26:22.333872 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 09 13:26:22 crc kubenswrapper[4762]: I1009 13:26:22.333919 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 09 13:26:22 crc kubenswrapper[4762]: I1009 13:26:22.333952 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 09 13:26:22 crc kubenswrapper[4762]: I1009 13:26:22.333973 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 09 13:26:22 crc kubenswrapper[4762]: I1009 13:26:22.333988 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:22Z","lastTransitionTime":"2025-10-09T13:26:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:22 crc kubenswrapper[4762]: I1009 13:26:22.340048 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-jl67g_92662de9-9784-432a-92d2-a668f815e8fd/ovnkube-controller/2.log" Oct 09 13:26:22 crc kubenswrapper[4762]: I1009 13:26:22.346335 4762 scope.go:117] "RemoveContainer" containerID="1dc4404ab9e73409695946479f2738c0f0af702da13af2d1995bf63ef0e493d0" Oct 09 13:26:22 crc kubenswrapper[4762]: E1009 13:26:22.346602 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-jl67g_openshift-ovn-kubernetes(92662de9-9784-432a-92d2-a668f815e8fd)\"" pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" podUID="92662de9-9784-432a-92d2-a668f815e8fd" Oct 09 13:26:22 crc kubenswrapper[4762]: I1009 13:26:22.368111 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d2fc9df0c1cc14b4b9f8caff51e87059aeffaa2daeeb271d55585f7297d003c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4194b9211ce31f24383b3cabd274bfb9afef0c56583b802cb2c934ba81b05c43\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identi
ty-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:22Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:22 crc kubenswrapper[4762]: I1009 13:26:22.386497 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"366049a3-acf6-488c-9f93-4557528d6d14\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbaae79b08d94e58f88c25cf641c2c24edc8f8ed5d5ffbf5fd3c68b24246a964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://be56bae2e58091d7381288b22608ea1d9ff05c002d923b3dc62b87fe4d4dfdc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\
"kube-api-access-4hhqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5v6hv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:22Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:22 crc kubenswrapper[4762]: I1009 13:26:22.404045 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-k4bwn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9a76399-c2ae-487b-a52c-f0e271fb1d20\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vrjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vrjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:58Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-k4bwn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:22Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:22 crc kubenswrapper[4762]: I1009 13:26:22.425828 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dd0d2d4c-667f-43da-8074-b6e14823b755\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78a7e23eb6d5024d626963a06cf5790fcd6c7c17c82c823b2650c55273e427fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f807722a8b6059afed30f7f1fd32bcc168b8bf9d5eee02d74a42ab70ae5ff048\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1706f7f512083d1da015da3c7cd09c6aa4d497b83f8dfcd4ce0e8e966aa00b37\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd9c69d0be5e859f86da1745bdf82f003681f064e2580bfd454e6ba875bdcb61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:25Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:22Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:22 crc kubenswrapper[4762]: I1009 13:26:22.437047 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:22 crc kubenswrapper[4762]: I1009 13:26:22.437092 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:22 crc kubenswrapper[4762]: I1009 13:26:22.437109 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:22 crc kubenswrapper[4762]: I1009 13:26:22.437131 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:22 crc kubenswrapper[4762]: I1009 13:26:22.437148 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:22Z","lastTransitionTime":"2025-10-09T13:26:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:22 crc kubenswrapper[4762]: I1009 13:26:22.445767 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:22Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:22 crc kubenswrapper[4762]: I1009 13:26:22.465463 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:22Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:22 crc kubenswrapper[4762]: I1009 13:26:22.490125 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"92662de9-9784-432a-92d2-a668f815e8fd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c409df5880861cef6885822a19dc9bbe481342a849c18ac11c85a60fcee0f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9dbd3d536f2ff0e46947e1516b3b9def208d490f5e62bbde5bebf37690d26ac0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a6b9ddcf6f9632e0ab1ac7f145c90d4c1e404b44f6e4fdc547fa42a4736448d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1fe15b6fa2a4089c0ef0b19180a44b570bf28aeb719e8fb5c960c16f3bc3ee5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://443d1d116c58f5d8b2c5fc9051baf914244cb0776b1f912d11fe4316a0ec0567\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a6b026d474235bbb7b31530b4628a10c35b22baf4ee49759f28a9beb8177989\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1dc4404ab9e73409695946479f2738c0f0af702d
a13af2d1995bf63ef0e493d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1dc4404ab9e73409695946479f2738c0f0af702da13af2d1995bf63ef0e493d0\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T13:26:20Z\\\",\\\"message\\\":\\\"ble to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:20Z is after 2025-08-24T17:21:41Z]\\\\nI1009 13:26:20.768193 6440 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-kube-controller-manager-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"3ec9f67e-7758-4707-a6d0-2dc28f28ac37\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-kube-controller-manager-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterL\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T13:26:20Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-jl67g_openshift-ovn-kubernetes(92662de9-9784-432a-92d2-a668f815e8fd)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f5be977653547c33b4d2d5184688120b32866045e3b18a08be1c7c406d6b498\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bd03e6b064630a1cd71d2e88a4e99d513b30d0f225516ce8030cba879fe2422e\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd03e6b064630a1cd71d2e88a4e99d513b30d0f225516ce8030cba879fe2422e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jl67g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:22Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:22 crc kubenswrapper[4762]: I1009 13:26:22.508172 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"804a251f-1e3f-4b9e-af10-eefa332e6e98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb1fed96810d93e59d24d69fb8a2eb9974e8a5e524465daea3ff2cca16f7226c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7ad35bed43586dfec502056ecb5226049a7fb25461c2774fb5377102fd2ce85d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c
97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d3cc3e07b5fc9069f7faa521fa94efde90d9c79940876a93849c44a14327e56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a492a7a956d54533afbd3869796642235d0c8ca621c550c14eeab5e988fc4364\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a492a7a956d54533afbd3869796642235d0c8ca621c550c14eeab5e988fc4364\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:25Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:22Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:22 crc kubenswrapper[4762]: I1009 13:26:22.528068 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42c46f24d4579b9ef6d5f7a351830fc24872a571e6bc26a163bb5ace1e688037\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:22Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:22 crc kubenswrapper[4762]: I1009 13:26:22.541029 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:22 crc kubenswrapper[4762]: I1009 13:26:22.541093 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:22 crc kubenswrapper[4762]: I1009 13:26:22.541107 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:22 crc kubenswrapper[4762]: I1009 13:26:22.541127 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:22 crc kubenswrapper[4762]: I1009 13:26:22.541139 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:22Z","lastTransitionTime":"2025-10-09T13:26:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:22 crc kubenswrapper[4762]: I1009 13:26:22.545930 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b59b56cd547bd0e2f72ad712b04651a4ae65cf3a0df7865c0b0c16478261a06a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:22Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:22 crc kubenswrapper[4762]: I1009 13:26:22.567384 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-9wtqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c847aae6-277a-45dc-86d0-9b175f7e8177\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbc232c96b60c8678588d4902c8dfbf6fc0b30f8af768295c963aad3a9f4d644\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2kljt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-9wtqb\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:22Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:22 crc kubenswrapper[4762]: I1009 13:26:22.586116 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fx92z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf0dada3-5765-4a2e-b28a-f9291c2d6428\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6368fc75a63ebf7915390457c69a6a9b77e19726ba182437f5c616bea12245f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xpd4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aeff516f9d81e48c8bd350da51ebffc8f6f031f06100a5264cb7aca04674f79f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xpd4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\
\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fx92z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:22Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:22 crc kubenswrapper[4762]: I1009 13:26:22.603407 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7bd1ecbd-1492-4e6a-87e8-1c913e084d9d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9ff1efe69d256b491a039e5f35442c087ce3b52fc7abf98b338e24c3e020b99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb2071dd369674ca2de7de56dd1250c763b8733d72889b60eff864774dc3d81b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e44b6f2021a1a4ccd714f86443c7cc235b9d77cd455e68f7e042281ff0917569\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-oper
ator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f33b9070a56fe51d2f39d9d509fc8cea2fada696703209c911b75f5c8f53e96d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9179f90a1a9a3c70467429b0471320ccf51b67f27c4d28d22ebc477cedab17d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1009 13:25:38.564754 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 13:25:38.572923 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2393231961/tls.crt::/tmp/serving-cert-2393231961/tls.key\\\\\\\"\\\\nI1009 13:25:44.418137 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 13:25:44.425303 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 13:25:44.425330 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 13:25:44.425348 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 13:25:44.425353 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 13:25:44.434300 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1009 13:25:44.434319 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1009 13:25:44.434323 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 13:25:44.434340 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 13:25:44.434344 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 13:25:44.434347 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 13:25:44.434350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 13:25:44.434353 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1009 13:25:44.436492 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:28Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://281873dc615f940d39a13cb0a18a2eb34eb7de3f9773d8845183edeb89d430f5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b0cc5e4351c64f1a4f07f8ec87ea48ddab393d4ac64228e8fbf20d3259fb630\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6b0cc5e4351c64f1a4f07f8ec87ea48ddab393d4ac64228e8fbf20d3259fb630\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:25Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:22Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:22 crc kubenswrapper[4762]: I1009 13:26:22.615108 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2vkbh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"263d57f9-b10b-4ce1-adad-774600b977d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://732b66ff58c48b0703e0fd4585768652035af6797f66b586fc6f17ef3937d9d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gmcr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2vkbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:22Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:22 crc kubenswrapper[4762]: I1009 13:26:22.627828 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:22Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:22 crc kubenswrapper[4762]: I1009 13:26:22.643675 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:22 crc kubenswrapper[4762]: I1009 13:26:22.643741 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:22 crc kubenswrapper[4762]: I1009 13:26:22.643765 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:22 crc kubenswrapper[4762]: I1009 13:26:22.643789 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:22 crc kubenswrapper[4762]: I1009 13:26:22.643809 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:22Z","lastTransitionTime":"2025-10-09T13:26:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:22 crc kubenswrapper[4762]: I1009 13:26:22.645135 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-n6lnd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf8b8ba7-96cd-4cdd-9925-94dd98242050\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ff1d6f7e6b424ebf8005fa3d140897100a8e7ed0095e7af05531cf5ad9f69b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://38f8c3c9395cbaf4a6426349a070b2d3b4ba4f83af8f5272a33d617f456c2e38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://38f8c3c9395cbaf4a6426349a070b2d3b4ba4f83af8f5272a33d617f456c2e38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a83c8fa2a9b44c19879eaa27ee0aceb5aa4f0c2d70347e497a62fc1ca236807\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a83c8fa2a9b44c19879eaa27ee0aceb5aa4f0c2d70347e497a62fc1ca236807\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ece4883665cbaf9a3045d5ddde5584cc4cd6c3acb5a8bb3acbf7eaee644796e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ece4883665cbaf9a3045d5ddde5584cc4cd6c3acb5a8bb3acbf7eaee644796e9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1aae59e2ff2b1d48f9d2b5d13c30ccc155f0ba8e2657ec99020d6c6a8977495e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1aae59e2ff2b1d48f9d2b5d13c30ccc155f0ba8e2657ec99020d6c6a8977495e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f1ff0c4b2b07617d5ea81b1d88ea74124e4739d667f1ad72eb36dad3d48e7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96f1ff0c4b2b07617d5ea81b1d88ea74124e4739d667f1ad72eb36dad3d48e7f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa9e24a4bd37675e1c6b4f6ba08d60b722add0bbcdf9eb51ace2e3fb35143d0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa9e24a4bd37675e1c6b4f6ba08d60b722add0bbcdf9eb51ace2e3fb35143d0d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-n6lnd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:22Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:22 crc kubenswrapper[4762]: I1009 13:26:22.654945 4762 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-image-registry/node-ca-bj499" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b164d4fd-19df-4902-971f-5efe403e61e0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ab43a3e93731e2de1e0260a1933cc78188e7b561cb450d645c7b4fc00b691ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqf4b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:51Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bj499\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:22Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:22 crc kubenswrapper[4762]: I1009 13:26:22.746704 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:22 crc kubenswrapper[4762]: I1009 13:26:22.746768 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:22 crc kubenswrapper[4762]: I1009 13:26:22.746785 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:22 crc kubenswrapper[4762]: I1009 13:26:22.746811 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:22 crc kubenswrapper[4762]: I1009 13:26:22.746829 4762 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:22Z","lastTransitionTime":"2025-10-09T13:26:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:22 crc kubenswrapper[4762]: I1009 13:26:22.849836 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:22 crc kubenswrapper[4762]: I1009 13:26:22.849878 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:22 crc kubenswrapper[4762]: I1009 13:26:22.849898 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:22 crc kubenswrapper[4762]: I1009 13:26:22.849921 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:22 crc kubenswrapper[4762]: I1009 13:26:22.849937 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:22Z","lastTransitionTime":"2025-10-09T13:26:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:22 crc kubenswrapper[4762]: I1009 13:26:22.953207 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:22 crc kubenswrapper[4762]: I1009 13:26:22.953266 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:22 crc kubenswrapper[4762]: I1009 13:26:22.953282 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:22 crc kubenswrapper[4762]: I1009 13:26:22.953304 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:22 crc kubenswrapper[4762]: I1009 13:26:22.953317 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:22Z","lastTransitionTime":"2025-10-09T13:26:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:22 crc kubenswrapper[4762]: I1009 13:26:22.964733 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 13:26:22 crc kubenswrapper[4762]: I1009 13:26:22.964791 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 13:26:22 crc kubenswrapper[4762]: I1009 13:26:22.964758 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-k4bwn" Oct 09 13:26:22 crc kubenswrapper[4762]: E1009 13:26:22.964934 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 13:26:22 crc kubenswrapper[4762]: I1009 13:26:22.964972 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 13:26:22 crc kubenswrapper[4762]: E1009 13:26:22.965102 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 13:26:22 crc kubenswrapper[4762]: E1009 13:26:22.965159 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 13:26:22 crc kubenswrapper[4762]: E1009 13:26:22.965241 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k4bwn" podUID="f9a76399-c2ae-487b-a52c-f0e271fb1d20" Oct 09 13:26:23 crc kubenswrapper[4762]: I1009 13:26:23.055733 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:23 crc kubenswrapper[4762]: I1009 13:26:23.055794 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:23 crc kubenswrapper[4762]: I1009 13:26:23.055811 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:23 crc kubenswrapper[4762]: I1009 13:26:23.055843 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:23 crc kubenswrapper[4762]: I1009 13:26:23.055878 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:23Z","lastTransitionTime":"2025-10-09T13:26:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:23 crc kubenswrapper[4762]: I1009 13:26:23.158452 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:23 crc kubenswrapper[4762]: I1009 13:26:23.158502 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:23 crc kubenswrapper[4762]: I1009 13:26:23.158523 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:23 crc kubenswrapper[4762]: I1009 13:26:23.158550 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:23 crc kubenswrapper[4762]: I1009 13:26:23.158570 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:23Z","lastTransitionTime":"2025-10-09T13:26:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:23 crc kubenswrapper[4762]: I1009 13:26:23.260879 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:23 crc kubenswrapper[4762]: I1009 13:26:23.260911 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:23 crc kubenswrapper[4762]: I1009 13:26:23.260919 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:23 crc kubenswrapper[4762]: I1009 13:26:23.260933 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:23 crc kubenswrapper[4762]: I1009 13:26:23.260944 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:23Z","lastTransitionTime":"2025-10-09T13:26:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:23 crc kubenswrapper[4762]: I1009 13:26:23.364349 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:23 crc kubenswrapper[4762]: I1009 13:26:23.364392 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:23 crc kubenswrapper[4762]: I1009 13:26:23.364408 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:23 crc kubenswrapper[4762]: I1009 13:26:23.364428 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:23 crc kubenswrapper[4762]: I1009 13:26:23.364441 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:23Z","lastTransitionTime":"2025-10-09T13:26:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:23 crc kubenswrapper[4762]: I1009 13:26:23.466686 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:23 crc kubenswrapper[4762]: I1009 13:26:23.466715 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:23 crc kubenswrapper[4762]: I1009 13:26:23.466724 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:23 crc kubenswrapper[4762]: I1009 13:26:23.466738 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:23 crc kubenswrapper[4762]: I1009 13:26:23.466748 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:23Z","lastTransitionTime":"2025-10-09T13:26:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:23 crc kubenswrapper[4762]: I1009 13:26:23.569782 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:23 crc kubenswrapper[4762]: I1009 13:26:23.569858 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:23 crc kubenswrapper[4762]: I1009 13:26:23.569883 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:23 crc kubenswrapper[4762]: I1009 13:26:23.569913 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:23 crc kubenswrapper[4762]: I1009 13:26:23.569936 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:23Z","lastTransitionTime":"2025-10-09T13:26:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:23 crc kubenswrapper[4762]: I1009 13:26:23.673283 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:23 crc kubenswrapper[4762]: I1009 13:26:23.673392 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:23 crc kubenswrapper[4762]: I1009 13:26:23.673403 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:23 crc kubenswrapper[4762]: I1009 13:26:23.673418 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:23 crc kubenswrapper[4762]: I1009 13:26:23.673429 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:23Z","lastTransitionTime":"2025-10-09T13:26:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:23 crc kubenswrapper[4762]: I1009 13:26:23.777211 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:23 crc kubenswrapper[4762]: I1009 13:26:23.777322 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:23 crc kubenswrapper[4762]: I1009 13:26:23.777348 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:23 crc kubenswrapper[4762]: I1009 13:26:23.777376 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:23 crc kubenswrapper[4762]: I1009 13:26:23.777393 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:23Z","lastTransitionTime":"2025-10-09T13:26:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:23 crc kubenswrapper[4762]: I1009 13:26:23.879227 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:23 crc kubenswrapper[4762]: I1009 13:26:23.879257 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:23 crc kubenswrapper[4762]: I1009 13:26:23.879267 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:23 crc kubenswrapper[4762]: I1009 13:26:23.879281 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:23 crc kubenswrapper[4762]: I1009 13:26:23.879293 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:23Z","lastTransitionTime":"2025-10-09T13:26:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:23 crc kubenswrapper[4762]: I1009 13:26:23.981969 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:23 crc kubenswrapper[4762]: I1009 13:26:23.982026 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:23 crc kubenswrapper[4762]: I1009 13:26:23.982046 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:23 crc kubenswrapper[4762]: I1009 13:26:23.982071 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:23 crc kubenswrapper[4762]: I1009 13:26:23.982089 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:23Z","lastTransitionTime":"2025-10-09T13:26:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:24 crc kubenswrapper[4762]: I1009 13:26:24.083868 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:24 crc kubenswrapper[4762]: I1009 13:26:24.083944 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:24 crc kubenswrapper[4762]: I1009 13:26:24.083969 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:24 crc kubenswrapper[4762]: I1009 13:26:24.084001 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:24 crc kubenswrapper[4762]: I1009 13:26:24.084026 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:24Z","lastTransitionTime":"2025-10-09T13:26:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:24 crc kubenswrapper[4762]: I1009 13:26:24.186524 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:24 crc kubenswrapper[4762]: I1009 13:26:24.186657 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:24 crc kubenswrapper[4762]: I1009 13:26:24.186676 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:24 crc kubenswrapper[4762]: I1009 13:26:24.186714 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:24 crc kubenswrapper[4762]: I1009 13:26:24.186729 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:24Z","lastTransitionTime":"2025-10-09T13:26:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:24 crc kubenswrapper[4762]: I1009 13:26:24.289843 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:24 crc kubenswrapper[4762]: I1009 13:26:24.289880 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:24 crc kubenswrapper[4762]: I1009 13:26:24.289892 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:24 crc kubenswrapper[4762]: I1009 13:26:24.289907 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:24 crc kubenswrapper[4762]: I1009 13:26:24.289918 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:24Z","lastTransitionTime":"2025-10-09T13:26:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:24 crc kubenswrapper[4762]: I1009 13:26:24.392831 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:24 crc kubenswrapper[4762]: I1009 13:26:24.393133 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:24 crc kubenswrapper[4762]: I1009 13:26:24.393290 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:24 crc kubenswrapper[4762]: I1009 13:26:24.393436 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:24 crc kubenswrapper[4762]: I1009 13:26:24.393564 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:24Z","lastTransitionTime":"2025-10-09T13:26:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:24 crc kubenswrapper[4762]: I1009 13:26:24.496978 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:24 crc kubenswrapper[4762]: I1009 13:26:24.497039 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:24 crc kubenswrapper[4762]: I1009 13:26:24.497061 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:24 crc kubenswrapper[4762]: I1009 13:26:24.497090 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:24 crc kubenswrapper[4762]: I1009 13:26:24.497121 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:24Z","lastTransitionTime":"2025-10-09T13:26:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:24 crc kubenswrapper[4762]: I1009 13:26:24.600052 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:24 crc kubenswrapper[4762]: I1009 13:26:24.600104 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:24 crc kubenswrapper[4762]: I1009 13:26:24.600123 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:24 crc kubenswrapper[4762]: I1009 13:26:24.600154 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:24 crc kubenswrapper[4762]: I1009 13:26:24.600172 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:24Z","lastTransitionTime":"2025-10-09T13:26:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:24 crc kubenswrapper[4762]: I1009 13:26:24.703589 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:24 crc kubenswrapper[4762]: I1009 13:26:24.703695 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:24 crc kubenswrapper[4762]: I1009 13:26:24.703720 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:24 crc kubenswrapper[4762]: I1009 13:26:24.703748 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:24 crc kubenswrapper[4762]: I1009 13:26:24.703940 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:24Z","lastTransitionTime":"2025-10-09T13:26:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:24 crc kubenswrapper[4762]: I1009 13:26:24.806957 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:24 crc kubenswrapper[4762]: I1009 13:26:24.807038 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:24 crc kubenswrapper[4762]: I1009 13:26:24.807074 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:24 crc kubenswrapper[4762]: I1009 13:26:24.807103 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:24 crc kubenswrapper[4762]: I1009 13:26:24.807123 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:24Z","lastTransitionTime":"2025-10-09T13:26:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:24 crc kubenswrapper[4762]: I1009 13:26:24.910113 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:24 crc kubenswrapper[4762]: I1009 13:26:24.910193 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:24 crc kubenswrapper[4762]: I1009 13:26:24.910218 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:24 crc kubenswrapper[4762]: I1009 13:26:24.910251 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:24 crc kubenswrapper[4762]: I1009 13:26:24.910277 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:24Z","lastTransitionTime":"2025-10-09T13:26:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:24 crc kubenswrapper[4762]: I1009 13:26:24.964337 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 13:26:24 crc kubenswrapper[4762]: I1009 13:26:24.964387 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 13:26:24 crc kubenswrapper[4762]: I1009 13:26:24.964461 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k4bwn" Oct 09 13:26:24 crc kubenswrapper[4762]: I1009 13:26:24.964756 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 13:26:24 crc kubenswrapper[4762]: E1009 13:26:24.965372 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 13:26:24 crc kubenswrapper[4762]: E1009 13:26:24.965482 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 13:26:24 crc kubenswrapper[4762]: E1009 13:26:24.965617 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k4bwn" podUID="f9a76399-c2ae-487b-a52c-f0e271fb1d20" Oct 09 13:26:24 crc kubenswrapper[4762]: E1009 13:26:24.965803 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 13:26:24 crc kubenswrapper[4762]: I1009 13:26:24.989660 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42c46f24d4579b9ef6d5f7a351830fc24872a571e6bc26a163bb5ace1e688037\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:24Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:25 crc kubenswrapper[4762]: I1009 13:26:25.009350 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b59b56cd547bd0e2f72ad712b04651a4ae65cf3a0df7865c0b0c16478261a06a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:25Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:25 crc kubenswrapper[4762]: I1009 13:26:25.014032 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:25 crc kubenswrapper[4762]: I1009 13:26:25.014239 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:25 crc kubenswrapper[4762]: I1009 13:26:25.014461 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:25 crc kubenswrapper[4762]: I1009 13:26:25.014747 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:25 crc kubenswrapper[4762]: I1009 13:26:25.014979 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:25Z","lastTransitionTime":"2025-10-09T13:26:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:25 crc kubenswrapper[4762]: I1009 13:26:25.029548 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:25Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:25 crc kubenswrapper[4762]: I1009 13:26:25.060057 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"92662de9-9784-432a-92d2-a668f815e8fd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c409df5880861cef6885822a19dc9bbe481342a849c18ac11c85a60fcee0f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9dbd3d536f2ff0e46947e1516b3b9def208d490f5e62bbde5bebf37690d26ac0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a6b9ddcf6f9632e0ab1ac7f145c90d4c1e404b44f6e4fdc547fa42a4736448d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1fe15b6fa2a4089c0ef0b19180a44b570bf28aeb719e8fb5c960c16f3bc3ee5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://443d1d116c58f5d8b2c5fc9051baf914244cb0776b1f912d11fe4316a0ec0567\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a6b026d474235bbb7b31530b4628a10c35b22baf4ee49759f28a9beb8177989\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-s
ocket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1dc4404ab9e73409695946479f2738c0f0af702da13af2d1995bf63ef0e493d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1dc4404ab9e73409695946479f2738c0f0af702da13af2d1995bf63ef0e493d0\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T13:26:20Z\\\",\\\"message\\\":\\\"ble to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:20Z is after 2025-08-24T17:21:41Z]\\\\nI1009 13:26:20.768193 6440 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-kube-controller-manager-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"3ec9f67e-7758-4707-a6d0-2dc28f28ac37\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-kube-controller-manager-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterL\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T13:26:20Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-jl67g_openshift-ovn-kubernetes(92662de9-9784-432a-92d2-a668f815e8fd)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f5be977653547c33b4d2d5184688120b32866045e3b18a08be1c7c406d6b498\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bd03e6b064630a1cd71d2e88a4e99d513b30d0f225516ce8030cba879fe2422e\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd03e6b064630a1cd71d2e88a4e99d513b30d0f225516ce8030cba879fe2422e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jl67g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:25Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:25 crc kubenswrapper[4762]: I1009 13:26:25.080153 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"804a251f-1e3f-4b9e-af10-eefa332e6e98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb1fed96810d93e59d24d69fb8a2eb9974e8a5e524465daea3ff2cca16f7226c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7ad35bed43586dfec502056ecb5226049a7fb25461c2774fb5377102fd2ce85d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c
97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d3cc3e07b5fc9069f7faa521fa94efde90d9c79940876a93849c44a14327e56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a492a7a956d54533afbd3869796642235d0c8ca621c550c14eeab5e988fc4364\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a492a7a956d54533afbd3869796642235d0c8ca621c550c14eeab5e988fc4364\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:25Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:25Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:25 crc kubenswrapper[4762]: I1009 13:26:25.094393 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2vkbh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"263d57f9-b10b-4ce1-adad-774600b977d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://732b66ff58c48b0703e0fd4585768652035af6797f66b586fc6f17ef3937d9d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gmcr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2vkbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:25Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:25 crc kubenswrapper[4762]: I1009 13:26:25.113820 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-9wtqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c847aae6-277a-45dc-86d0-9b175f7e8177\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbc232c96b60c8678588d4902c8dfbf6fc0b30f8af768295c963aad3a9f4d644\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2kljt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-9wtqb\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:25Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:25 crc kubenswrapper[4762]: I1009 13:26:25.118619 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:25 crc kubenswrapper[4762]: I1009 13:26:25.118990 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:25 crc kubenswrapper[4762]: I1009 13:26:25.119068 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:25 crc kubenswrapper[4762]: I1009 13:26:25.119142 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:25 crc kubenswrapper[4762]: I1009 13:26:25.119232 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:25Z","lastTransitionTime":"2025-10-09T13:26:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:25 crc kubenswrapper[4762]: I1009 13:26:25.130359 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fx92z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf0dada3-5765-4a2e-b28a-f9291c2d6428\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6368fc75a63ebf7915390457c69a6a9b77e19726ba182437f5c616bea12245f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xpd4\\\",\\\"readOnly\\\":t
rue,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aeff516f9d81e48c8bd350da51ebffc8f6f031f06100a5264cb7aca04674f79f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xpd4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fx92z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:25Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:25 crc kubenswrapper[4762]: I1009 13:26:25.150117 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7bd1ecbd-1492-4e6a-87e8-1c913e084d9d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9ff1efe69d256b491a039e5f35442c087ce3b52fc7abf98b338e24c3e020b99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb2071dd369674ca2de7de56dd1250c763b8733d72889b60eff864774dc3d81b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e44b6f2021a1a4ccd714f86443c7cc235b9d77cd455e68f7e042281ff0917569\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f33b9070a56fe51d2f39d9d509fc8cea2fada696703209c911b75f5c8f53e96d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9179f90a1a9a3c70467429b0471320ccf51b67f27c4d28d22ebc477cedab17d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1009 13:25:38.564754 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 13:25:38.572923 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2393231961/tls.crt::/tmp/serving-cert-2393231961/tls.key\\\\\\\"\\\\nI1009 13:25:44.418137 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 13:25:44.425303 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 13:25:44.425330 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 13:25:44.425348 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 13:25:44.425353 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 13:25:44.434300 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1009 13:25:44.434319 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1009 13:25:44.434323 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 13:25:44.434340 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 13:25:44.434344 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 13:25:44.434347 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 13:25:44.434350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 13:25:44.434353 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1009 13:25:44.436492 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:28Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://281873dc615f940d39a13cb0a18a2eb34eb7de3f9773d8845183edeb89d430f5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b0cc5e4351c64f1a4f07f8ec87ea48ddab393d4ac64228e8fbf20d3259fb630\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6b0cc5e4351c64f1a4f07f8ec87ea48ddab393d4ac64228e8fbf20d3259fb630\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:25Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:25Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:25 crc kubenswrapper[4762]: I1009 13:26:25.170565 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-n6lnd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf8b8ba7-96cd-4cdd-9925-94dd98242050\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ff1d6f7e6b424ebf8005fa3d140897100a8e7ed0095e7af05531cf5ad9f69b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://38f8c3c9395cbaf4a6426349a070b2d3b4ba4f83af8f5272a33d617f456c2e38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://38f8c3c9395cbaf4a6426349a070b2d3b4ba4f83af8f5272a33d617f456c2e38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a83c8fa2a9b44c19879eaa27ee0aceb5aa4f0c2d70347e497a62fc1ca236807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a83c8fa2a9b44c19879eaa27ee0aceb5aa4f0c2d70347e497a62fc1ca236807\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ece4883665cbaf9a3045d5ddde5584cc4cd6c3acb5a8bb3acbf7eaee644796e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ece4883665cbaf9a3045d5ddde5584cc4cd6c3acb5a8bb3acbf7eaee644796e9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1aae59e2ff2b1d48f9d2b5d13c30ccc155f0ba8e2657ec99020d6c6a8977495e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1aae59e2ff2b1d48f9d2b5d13c30ccc155f0ba8e2657ec99020d6c6a8977495e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f1ff0c4b2b07617d5ea81b1d88ea74124e4739d667f1ad72eb36dad3d48e7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96f1ff0c4b2b07617d5ea81b1d88ea74124e4739d667f1ad72eb36dad3d48e7f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa9e24a4bd37675e1c6b4f6ba08d60b722add0bbcdf9eb51ace2e3fb35143d0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa9e24a4bd37675e1c6b4f6ba08d60b722add0bbcdf9eb51ace2e3fb35143d0d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-n6lnd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:25Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:25 crc kubenswrapper[4762]: I1009 13:26:25.188613 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bj499" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b164d4fd-19df-4902-971f-5efe403e61e0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ab43a3e93731e2de1e0260a1933cc78188e7b561cb450d645c7b4fc00b691ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqf4b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:51Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bj499\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:25Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:25 crc kubenswrapper[4762]: I1009 13:26:25.205459 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:25Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:25 crc kubenswrapper[4762]: I1009 13:26:25.223346 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:25Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:25 crc kubenswrapper[4762]: I1009 13:26:25.230044 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:25 crc kubenswrapper[4762]: I1009 13:26:25.230084 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:25 crc kubenswrapper[4762]: I1009 13:26:25.230096 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:25 crc kubenswrapper[4762]: I1009 13:26:25.230116 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:25 crc kubenswrapper[4762]: I1009 13:26:25.230128 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:25Z","lastTransitionTime":"2025-10-09T13:26:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:25 crc kubenswrapper[4762]: I1009 13:26:25.241510 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d2fc9df0c1cc14b4b9f8caff51e87059aeffaa2daeeb271d55585f7297d003c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4194b9211ce31f24383b3cabd274bfb9afef0c56583b802cb2c934ba81b05c43\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:25Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:25 crc kubenswrapper[4762]: I1009 13:26:25.256070 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"366049a3-acf6-488c-9f93-4557528d6d14\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbaae79b08d94e58f88c25cf641c2c24edc8f8ed5d5ffbf5fd3c68b24246a964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://be56bae2e58091d7381288b22608ea1d9ff05c002d923b3dc62b87fe4d4dfdc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5v6hv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:25Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:25 crc kubenswrapper[4762]: I1009 13:26:25.269699 4762 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/network-metrics-daemon-k4bwn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9a76399-c2ae-487b-a52c-f0e271fb1d20\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vrjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vrjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:58Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-k4bwn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:25Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:25 crc kubenswrapper[4762]: I1009 13:26:25.287785 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dd0d2d4c-667f-43da-8074-b6e14823b755\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78a7e23eb6d5024d626963a06cf5790fcd6c7c17c82c823b2650c55273e427fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f807722a8b6059afed30f7f1fd32bcc168b8bf9d5eee02d74a42ab70ae5ff048\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1706f7f512083d1da015da3c7cd09c6aa4d497b83f8dfcd4ce0e8e966aa00b37\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd9c69d0be5e859f86da1745bdf82f003681f064e2580bfd454e6ba875bdcb61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:25Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:25Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:25 crc kubenswrapper[4762]: I1009 13:26:25.332617 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:25 crc kubenswrapper[4762]: I1009 13:26:25.332689 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:25 crc kubenswrapper[4762]: I1009 13:26:25.332705 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:25 crc kubenswrapper[4762]: I1009 13:26:25.332727 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:25 crc kubenswrapper[4762]: I1009 13:26:25.332742 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:25Z","lastTransitionTime":"2025-10-09T13:26:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Oct 09 13:26:26 crc kubenswrapper[4762]: I1009 13:26:26.964809 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k4bwn"
Oct 09 13:26:26 crc kubenswrapper[4762]: E1009 13:26:26.964939 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k4bwn" podUID="f9a76399-c2ae-487b-a52c-f0e271fb1d20"
Oct 09 13:26:26 crc kubenswrapper[4762]: I1009 13:26:26.965026 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 09 13:26:26 crc kubenswrapper[4762]: I1009 13:26:26.965100 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 09 13:26:26 crc kubenswrapper[4762]: I1009 13:26:26.965044 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 09 13:26:26 crc kubenswrapper[4762]: E1009 13:26:26.965234 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 09 13:26:26 crc kubenswrapper[4762]: E1009 13:26:26.965299 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 09 13:26:26 crc kubenswrapper[4762]: E1009 13:26:26.965357 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Has your network provider started?"} Oct 09 13:26:28 crc kubenswrapper[4762]: I1009 13:26:28.420458 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:28 crc kubenswrapper[4762]: I1009 13:26:28.420493 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:28 crc kubenswrapper[4762]: I1009 13:26:28.420502 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:28 crc kubenswrapper[4762]: I1009 13:26:28.420516 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:28 crc kubenswrapper[4762]: I1009 13:26:28.420526 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:28Z","lastTransitionTime":"2025-10-09T13:26:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:28 crc kubenswrapper[4762]: I1009 13:26:28.523945 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:28 crc kubenswrapper[4762]: I1009 13:26:28.523993 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:28 crc kubenswrapper[4762]: I1009 13:26:28.524010 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:28 crc kubenswrapper[4762]: I1009 13:26:28.524032 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:28 crc kubenswrapper[4762]: I1009 13:26:28.524050 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:28Z","lastTransitionTime":"2025-10-09T13:26:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:28 crc kubenswrapper[4762]: I1009 13:26:28.626735 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:28 crc kubenswrapper[4762]: I1009 13:26:28.627212 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:28 crc kubenswrapper[4762]: I1009 13:26:28.627355 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:28 crc kubenswrapper[4762]: I1009 13:26:28.627577 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:28 crc kubenswrapper[4762]: I1009 13:26:28.627744 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:28Z","lastTransitionTime":"2025-10-09T13:26:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:28 crc kubenswrapper[4762]: I1009 13:26:28.731090 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:28 crc kubenswrapper[4762]: I1009 13:26:28.731162 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:28 crc kubenswrapper[4762]: I1009 13:26:28.731181 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:28 crc kubenswrapper[4762]: I1009 13:26:28.731207 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:28 crc kubenswrapper[4762]: I1009 13:26:28.731225 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:28Z","lastTransitionTime":"2025-10-09T13:26:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:28 crc kubenswrapper[4762]: I1009 13:26:28.834151 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:28 crc kubenswrapper[4762]: I1009 13:26:28.834381 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:28 crc kubenswrapper[4762]: I1009 13:26:28.834460 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:28 crc kubenswrapper[4762]: I1009 13:26:28.834545 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:28 crc kubenswrapper[4762]: I1009 13:26:28.834666 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:28Z","lastTransitionTime":"2025-10-09T13:26:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:28 crc kubenswrapper[4762]: I1009 13:26:28.936623 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:28 crc kubenswrapper[4762]: I1009 13:26:28.936681 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:28 crc kubenswrapper[4762]: I1009 13:26:28.936693 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:28 crc kubenswrapper[4762]: I1009 13:26:28.936711 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:28 crc kubenswrapper[4762]: I1009 13:26:28.936724 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:28Z","lastTransitionTime":"2025-10-09T13:26:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Oct 09 13:26:28 crc kubenswrapper[4762]: I1009 13:26:28.964647 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 09 13:26:28 crc kubenswrapper[4762]: I1009 13:26:28.964621 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 09 13:26:28 crc kubenswrapper[4762]: I1009 13:26:28.964715 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k4bwn"
Oct 09 13:26:28 crc kubenswrapper[4762]: E1009 13:26:28.964817 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 09 13:26:28 crc kubenswrapper[4762]: I1009 13:26:28.964981 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 09 13:26:28 crc kubenswrapper[4762]: E1009 13:26:28.965035 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 09 13:26:28 crc kubenswrapper[4762]: E1009 13:26:28.965157 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k4bwn" podUID="f9a76399-c2ae-487b-a52c-f0e271fb1d20"
Oct 09 13:26:28 crc kubenswrapper[4762]: E1009 13:26:28.965436 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 09 13:26:30 crc kubenswrapper[4762]: I1009 13:26:30.641118 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f9a76399-c2ae-487b-a52c-f0e271fb1d20-metrics-certs\") pod \"network-metrics-daemon-k4bwn\" (UID: \"f9a76399-c2ae-487b-a52c-f0e271fb1d20\") " pod="openshift-multus/network-metrics-daemon-k4bwn"
Oct 09 13:26:30 crc kubenswrapper[4762]: E1009 13:26:30.641310 4762 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Oct 09 13:26:30 crc kubenswrapper[4762]: E1009 13:26:30.641376 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f9a76399-c2ae-487b-a52c-f0e271fb1d20-metrics-certs podName:f9a76399-c2ae-487b-a52c-f0e271fb1d20 nodeName:}" failed. No retries permitted until 2025-10-09 13:27:02.641358315 +0000 UTC m=+98.415149354 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/f9a76399-c2ae-487b-a52c-f0e271fb1d20-metrics-certs") pod "network-metrics-daemon-k4bwn" (UID: "f9a76399-c2ae-487b-a52c-f0e271fb1d20") : object "openshift-multus"/"metrics-daemon-secret" not registered
Oct 09 13:26:30 crc kubenswrapper[4762]: I1009 13:26:30.964692 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 09 13:26:30 crc kubenswrapper[4762]: I1009 13:26:30.964761 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k4bwn"
Oct 09 13:26:30 crc kubenswrapper[4762]: I1009 13:26:30.964761 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 09 13:26:30 crc kubenswrapper[4762]: I1009 13:26:30.964692 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 09 13:26:30 crc kubenswrapper[4762]: E1009 13:26:30.964862 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 09 13:26:30 crc kubenswrapper[4762]: E1009 13:26:30.964957 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k4bwn" podUID="f9a76399-c2ae-487b-a52c-f0e271fb1d20"
Oct 09 13:26:30 crc kubenswrapper[4762]: E1009 13:26:30.965028 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 09 13:26:30 crc kubenswrapper[4762]: E1009 13:26:30.965044 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 09 13:26:31 crc kubenswrapper[4762]: I1009 13:26:31.834554 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 09 13:26:31 crc kubenswrapper[4762]: I1009 13:26:31.834607 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 09 13:26:31 crc kubenswrapper[4762]: I1009 13:26:31.834617 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 09 13:26:31 crc kubenswrapper[4762]: I1009 13:26:31.834691 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 09 13:26:31 crc kubenswrapper[4762]: I1009 13:26:31.834712 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:31Z","lastTransitionTime":"2025-10-09T13:26:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:31 crc kubenswrapper[4762]: E1009 13:26:31.849451 4762 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:31Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:31Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:31Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:31Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9a48ddb1-1645-4cf1-ba92-96ea5fd03a1b\\\",\\\"systemUUID\\\":\\\"cb0479c9-186e-453b-880a-de1db201ede6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:31Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:31 crc kubenswrapper[4762]: I1009 13:26:31.853233 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:31 crc kubenswrapper[4762]: I1009 13:26:31.853287 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 09 13:26:31 crc kubenswrapper[4762]: I1009 13:26:31.853300 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:31 crc kubenswrapper[4762]: I1009 13:26:31.853318 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:31 crc kubenswrapper[4762]: I1009 13:26:31.853329 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:31Z","lastTransitionTime":"2025-10-09T13:26:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:31 crc kubenswrapper[4762]: E1009 13:26:31.868184 4762 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:31Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:31Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:31Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:31Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9a48ddb1-1645-4cf1-ba92-96ea5fd03a1b\\\",\\\"systemUUID\\\":\\\"cb0479c9-186e-453b-880a-de1db201ede6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:31Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:31 crc kubenswrapper[4762]: I1009 13:26:31.871334 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:31 crc kubenswrapper[4762]: I1009 13:26:31.871377 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 09 13:26:31 crc kubenswrapper[4762]: I1009 13:26:31.871389 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:31 crc kubenswrapper[4762]: I1009 13:26:31.871408 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:31 crc kubenswrapper[4762]: I1009 13:26:31.871421 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:31Z","lastTransitionTime":"2025-10-09T13:26:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:31 crc kubenswrapper[4762]: E1009 13:26:31.883758 4762 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:31Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:31Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:31Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:31Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9a48ddb1-1645-4cf1-ba92-96ea5fd03a1b\\\",\\\"systemUUID\\\":\\\"cb0479c9-186e-453b-880a-de1db201ede6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:31Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:31 crc kubenswrapper[4762]: I1009 13:26:31.887835 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:31 crc kubenswrapper[4762]: I1009 13:26:31.887881 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 09 13:26:31 crc kubenswrapper[4762]: I1009 13:26:31.887893 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:31 crc kubenswrapper[4762]: I1009 13:26:31.887915 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:31 crc kubenswrapper[4762]: I1009 13:26:31.887931 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:31Z","lastTransitionTime":"2025-10-09T13:26:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:31 crc kubenswrapper[4762]: E1009 13:26:31.901083 4762 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:31Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:31Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:31Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:31Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9a48ddb1-1645-4cf1-ba92-96ea5fd03a1b\\\",\\\"systemUUID\\\":\\\"cb0479c9-186e-453b-880a-de1db201ede6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:31Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:31 crc kubenswrapper[4762]: I1009 13:26:31.905909 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:31 crc kubenswrapper[4762]: I1009 13:26:31.905949 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 09 13:26:31 crc kubenswrapper[4762]: I1009 13:26:31.905958 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:31 crc kubenswrapper[4762]: I1009 13:26:31.905974 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:31 crc kubenswrapper[4762]: I1009 13:26:31.905984 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:31Z","lastTransitionTime":"2025-10-09T13:26:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:31 crc kubenswrapper[4762]: E1009 13:26:31.920466 4762 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:31Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:31Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:31Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:31Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9a48ddb1-1645-4cf1-ba92-96ea5fd03a1b\\\",\\\"systemUUID\\\":\\\"cb0479c9-186e-453b-880a-de1db201ede6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:31Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:31 crc kubenswrapper[4762]: E1009 13:26:31.920983 4762 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 09 13:26:31 crc kubenswrapper[4762]: I1009 13:26:31.922707 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 09 13:26:31 crc kubenswrapper[4762]: I1009 13:26:31.922817 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:31 crc kubenswrapper[4762]: I1009 13:26:31.922898 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:31 crc kubenswrapper[4762]: I1009 13:26:31.922981 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:31 crc kubenswrapper[4762]: I1009 13:26:31.923081 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:31Z","lastTransitionTime":"2025-10-09T13:26:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:32 crc kubenswrapper[4762]: I1009 13:26:32.026260 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:32 crc kubenswrapper[4762]: I1009 13:26:32.026295 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:32 crc kubenswrapper[4762]: I1009 13:26:32.026303 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:32 crc kubenswrapper[4762]: I1009 13:26:32.026317 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:32 crc kubenswrapper[4762]: I1009 13:26:32.026326 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:32Z","lastTransitionTime":"2025-10-09T13:26:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:32 crc kubenswrapper[4762]: I1009 13:26:32.129014 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:32 crc kubenswrapper[4762]: I1009 13:26:32.129061 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:32 crc kubenswrapper[4762]: I1009 13:26:32.129072 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:32 crc kubenswrapper[4762]: I1009 13:26:32.129087 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:32 crc kubenswrapper[4762]: I1009 13:26:32.129101 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:32Z","lastTransitionTime":"2025-10-09T13:26:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:32 crc kubenswrapper[4762]: I1009 13:26:32.231835 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:32 crc kubenswrapper[4762]: I1009 13:26:32.232075 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:32 crc kubenswrapper[4762]: I1009 13:26:32.232185 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:32 crc kubenswrapper[4762]: I1009 13:26:32.232269 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:32 crc kubenswrapper[4762]: I1009 13:26:32.232349 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:32Z","lastTransitionTime":"2025-10-09T13:26:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:32 crc kubenswrapper[4762]: I1009 13:26:32.334163 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:32 crc kubenswrapper[4762]: I1009 13:26:32.334207 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:32 crc kubenswrapper[4762]: I1009 13:26:32.334216 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:32 crc kubenswrapper[4762]: I1009 13:26:32.334230 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:32 crc kubenswrapper[4762]: I1009 13:26:32.334240 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:32Z","lastTransitionTime":"2025-10-09T13:26:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:32 crc kubenswrapper[4762]: I1009 13:26:32.376922 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-9wtqb_c847aae6-277a-45dc-86d0-9b175f7e8177/kube-multus/0.log" Oct 09 13:26:32 crc kubenswrapper[4762]: I1009 13:26:32.376970 4762 generic.go:334] "Generic (PLEG): container finished" podID="c847aae6-277a-45dc-86d0-9b175f7e8177" containerID="cbc232c96b60c8678588d4902c8dfbf6fc0b30f8af768295c963aad3a9f4d644" exitCode=1 Oct 09 13:26:32 crc kubenswrapper[4762]: I1009 13:26:32.376995 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-9wtqb" event={"ID":"c847aae6-277a-45dc-86d0-9b175f7e8177","Type":"ContainerDied","Data":"cbc232c96b60c8678588d4902c8dfbf6fc0b30f8af768295c963aad3a9f4d644"} Oct 09 13:26:32 crc kubenswrapper[4762]: I1009 13:26:32.377354 4762 scope.go:117] "RemoveContainer" containerID="cbc232c96b60c8678588d4902c8dfbf6fc0b30f8af768295c963aad3a9f4d644" Oct 09 13:26:32 crc kubenswrapper[4762]: I1009 13:26:32.388552 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bj499" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b164d4fd-19df-4902-971f-5efe403e61e0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ab43a3e93731e2de1e0260a1933cc78188e7b561cb450d645c7b4fc00b691ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqf4b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:51Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bj499\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:32Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:32 crc kubenswrapper[4762]: I1009 13:26:32.405061 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:32Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:32 crc kubenswrapper[4762]: I1009 13:26:32.418791 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-n6lnd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf8b8ba7-96cd-4cdd-9925-94dd98242050\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ff1d6f7e6b424ebf8005fa3d140897100a8e7ed0095e7af05531cf5ad9f69b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://38f8c3c9395cbaf4a6426349a070b2d3b4ba4f83af8f5272a33d617f456c2e38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://38f8c3c9395cbaf4a6426349a070b2d3b4ba4f83af8f5272a33d617f456c2e38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a83c8fa2a9b44c19879eaa27ee0aceb5aa4f0c2d70347e497a62fc1ca236807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a83c8fa2a9b44c19879eaa27ee0aceb5aa4f0c2d70347e497a62fc1ca236807\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ece4883665cbaf9a3045d5ddde5584cc4cd6c3acb5a8bb3acbf7eaee644796e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ece4883665cbaf9a3045d5ddde5584cc4cd6c3acb5a8bb3acbf7eaee644796e9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1aae59e2ff2b1d48f9d2b5d13c30ccc155f0ba8e2657ec99020d6c6a8977495e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1aae59e2ff2b1d48f9d2b5d13c30ccc155f0ba8e2657ec99020d6c6a8977495e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f1ff0c4b2b07617d5ea81b1d88ea74124e4739d667f1ad72eb36dad3d48e7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96f1ff0c4b2b07617d5ea81b1d88ea74124e4739d667f1ad72eb36dad3d48e7f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa9e24a4bd37675e1c6b4f6ba08d60b722add0bbcdf9eb51ace2e3fb35143d0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa9e24a4bd37675e1c6b4f6ba08d60b722add0bbcdf9eb51ace2e3fb35143d0d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-n6lnd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:32Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:32 crc kubenswrapper[4762]: I1009 13:26:32.433583 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:32Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:32 crc kubenswrapper[4762]: I1009 13:26:32.438284 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:32 crc kubenswrapper[4762]: I1009 13:26:32.438323 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:32 crc kubenswrapper[4762]: I1009 13:26:32.438333 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:32 crc kubenswrapper[4762]: I1009 13:26:32.438348 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:32 crc kubenswrapper[4762]: I1009 13:26:32.438362 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:32Z","lastTransitionTime":"2025-10-09T13:26:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:32 crc kubenswrapper[4762]: I1009 13:26:32.450555 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d2fc9df0c1cc14b4b9f8caff51e87059aeffaa2daeeb271d55585f7297d003c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4194b9211ce31f24383b3cabd274bfb9afef0c56583b802cb2c934ba81b05c43\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:32Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:32 crc kubenswrapper[4762]: I1009 13:26:32.462048 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"366049a3-acf6-488c-9f93-4557528d6d14\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbaae79b08d94e58f88c25cf641c2c24edc8f8ed5d5ffbf5fd3c68b24246a964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://be56bae2e58091d7381288b22608ea1d9ff05c002d923b3dc62b87fe4d4dfdc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5v6hv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:32Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:32 crc kubenswrapper[4762]: I1009 13:26:32.473185 4762 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/network-metrics-daemon-k4bwn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9a76399-c2ae-487b-a52c-f0e271fb1d20\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vrjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vrjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:58Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-k4bwn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:32Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:32 crc kubenswrapper[4762]: I1009 13:26:32.486151 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dd0d2d4c-667f-43da-8074-b6e14823b755\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78a7e23eb6d5024d626963a06cf5790fcd6c7c17c82c823b2650c55273e427fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f807722a8b6059afed30f7f1fd32bcc168b8bf9d5eee02d74a42ab70ae5ff048\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1706f7f512083d1da015da3c7cd09c6aa4d497b83f8dfcd4ce0e8e966aa00b37\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd9c69d0be5e859f86da1745bdf82f003681f064e2580bfd454e6ba875bdcb61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:25Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:32Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:32 crc kubenswrapper[4762]: I1009 13:26:32.498158 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b59b56cd547bd0e2f72ad712b04651a4ae65cf3a0df7865c0b0c16478261a06a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2025-10-09T13:26:32Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:32 crc kubenswrapper[4762]: I1009 13:26:32.512971 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:32Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:32 crc kubenswrapper[4762]: I1009 13:26:32.536003 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"92662de9-9784-432a-92d2-a668f815e8fd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c409df5880861cef6885822a19dc9bbe481342a849c18ac11c85a60fcee0f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9dbd3d536f2ff0e46947e1516b3b9def208d490f5e62bbde5bebf37690d26ac0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a6b9ddcf6f9632e0ab1ac7f145c90d4c1e404b44f6e4fdc547fa42a4736448d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1fe15b6fa2a4089c0ef0b19180a44b570bf28aeb719e8fb5c960c16f3bc3ee5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://443d1d116c58f5d8b2c5fc9051baf914244cb0776b1f912d11fe4316a0ec0567\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a6b026d474235bbb7b31530b4628a10c35b22baf4ee49759f28a9beb8177989\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1dc4404ab9e73409695946479f2738c0f0af702da13af2d1995bf63ef0e493d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1dc4404ab9e73409695946479f2738c0f0af702da13af2d1995bf63ef0e493d0\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T13:26:20Z\\\",\\\"message\\\":\\\"ble to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:20Z is after 2025-08-24T17:21:41Z]\\\\nI1009 13:26:20.768193 6440 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-kube-controller-manager-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"3ec9f67e-7758-4707-a6d0-2dc28f28ac37\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-kube-controller-manager-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterL\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T13:26:20Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-jl67g_openshift-ovn-kubernetes(92662de9-9784-432a-92d2-a668f815e8fd)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f5be977653547c33b4d2d5184688120b32866045e3b18a08be1c7c406d6b498\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bd03e6b064630a1cd71d2e88a4e99d513b30d0f225516ce8030cba879fe2422e\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd03e6b064630a1cd71d2e88a4e99d513b30d0f225516ce8030cba879fe2422e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jl67g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:32Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:32 crc kubenswrapper[4762]: I1009 13:26:32.540681 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:32 crc kubenswrapper[4762]: I1009 13:26:32.540812 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:32 crc kubenswrapper[4762]: I1009 13:26:32.540912 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:32 crc kubenswrapper[4762]: I1009 13:26:32.541000 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:32 crc kubenswrapper[4762]: I1009 13:26:32.541120 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:32Z","lastTransitionTime":"2025-10-09T13:26:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:32 crc kubenswrapper[4762]: I1009 13:26:32.549780 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"804a251f-1e3f-4b9e-af10-eefa332e6e98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb1fed96810d93e59d24d69fb8a2eb9974e8a5e524465daea3ff2cca16f7226c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7ad35bed43586dfec502056ecb5226049a7fb25461c2774fb5377102fd2ce85d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d3cc3e07b5fc9069f7faa521fa94efde90d9c79940876a93849c44a14327e56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a492a7a956d54533afbd3869796642235d0c8ca621c550c14eeab5e988fc4364\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a492a7a956d54533afbd3869796642235d0c8ca621c550c14eeab5e988fc4364\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:25Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:32Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:32 crc kubenswrapper[4762]: I1009 13:26:32.563411 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42c46f24d4579b9ef6d5f7a351830fc24872a571e6bc26a163bb5ace1e688037\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling 
webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:32Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:32 crc kubenswrapper[4762]: I1009 13:26:32.574981 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2vkbh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"263d57f9-b10b-4ce1-adad-774600b977d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://732b66ff58c48b0703e0fd4585768652035af6797f66b586fc6f17ef3937d9d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gmcr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2vkbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:32Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:32 crc kubenswrapper[4762]: I1009 13:26:32.587315 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-9wtqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c847aae6-277a-45dc-86d0-9b175f7e8177\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbc232c96b60c8678588d4902c8dfbf6fc0b30f8af768295c963aad3a9f4d644\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbc232c96b60c8678588d4902c8dfbf6fc0b30f8af768295c963aad3a9f4d644\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T13:26:32Z\\\",\\\"message\\\":\\\"2025-10-09T13:25:46+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_cd21724e-0215-496f-b682-23c2705fa0b1\\\\n2025-10-09T13:25:46+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_cd21724e-0215-496f-b682-23c2705fa0b1 to /host/opt/cni/bin/\\\\n2025-10-09T13:25:47Z [verbose] multus-daemon started\\\\n2025-10-09T13:25:47Z [verbose] Readiness Indicator file check\\\\n2025-10-09T13:26:32Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2kljt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-9wtqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:32Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:32 crc kubenswrapper[4762]: I1009 13:26:32.597717 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fx92z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf0dada3-5765-4a2e-b28a-f9291c2d6428\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6368fc75a63ebf7915390457c69a6a9b77e19726ba182437f5c616bea12245f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xpd4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aeff516f9d81e48c8bd350da51ebffc8f6f031f06100a5264cb7aca04674f79f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xpd4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fx92z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:32Z is after 2025-08-24T17:21:41Z" Oct 09 
13:26:32 crc kubenswrapper[4762]: I1009 13:26:32.610190 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7bd1ecbd-1492-4e6a-87e8-1c913e084d9d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9ff1efe69d256b491a039e5f35442c087ce3b52fc7abf98b338e24c3e020b99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb2071dd369674ca2de7de56dd1250c763b8733d72889b60eff864774dc3d81b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e44b6f2021a1a4ccd714f86443c7cc235b9d77cd455e68f7e042281ff0917569\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\
\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f33b9070a56fe51d2f39d9d509fc8cea2fada696703209c911b75f5c8f53e96d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9179f90a1a9a3c70467429b0471320ccf51b67f27c4d28d22ebc477cedab17d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1009 13:25:38.564754 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 13:25:38.572923 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2393231961/tls.crt::/tmp/serving-cert-2393231961/tls.key\\\\\\\"\\\\nI1009 13:25:44.418137 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 13:25:44.425303 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 13:25:44.425330 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 13:25:44.425348 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 13:25:44.425353 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 13:25:44.434300 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1009 13:25:44.434319 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1009 13:25:44.434323 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 13:25:44.434340 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 13:25:44.434344 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 13:25:44.434347 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 13:25:44.434350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 13:25:44.434353 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1009 13:25:44.436492 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:28Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://281873dc615f940d39a13cb0a18a2eb34eb7de3f9773d8845183edeb89d430f5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b0cc5e4351c64f1a4f07f8ec87ea48ddab393d4ac64228e8fbf20d3259fb630\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6b0cc5e4351c64f1a4f07f8ec87ea48ddab393d4ac64228e8fbf20d3259fb630\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:25Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:32Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:32 crc kubenswrapper[4762]: I1009 13:26:32.643727 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:32 crc kubenswrapper[4762]: I1009 13:26:32.643781 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:32 crc kubenswrapper[4762]: I1009 13:26:32.643792 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:32 crc kubenswrapper[4762]: I1009 13:26:32.643808 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:32 crc kubenswrapper[4762]: I1009 13:26:32.643820 4762 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:32Z","lastTransitionTime":"2025-10-09T13:26:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:32 crc kubenswrapper[4762]: I1009 13:26:32.746477 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:32 crc kubenswrapper[4762]: I1009 13:26:32.746514 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:32 crc kubenswrapper[4762]: I1009 13:26:32.746523 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:32 crc kubenswrapper[4762]: I1009 13:26:32.746537 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:32 crc kubenswrapper[4762]: I1009 13:26:32.746548 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:32Z","lastTransitionTime":"2025-10-09T13:26:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:32 crc kubenswrapper[4762]: I1009 13:26:32.849643 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:32 crc kubenswrapper[4762]: I1009 13:26:32.849679 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:32 crc kubenswrapper[4762]: I1009 13:26:32.849688 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:32 crc kubenswrapper[4762]: I1009 13:26:32.849704 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:32 crc kubenswrapper[4762]: I1009 13:26:32.849714 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:32Z","lastTransitionTime":"2025-10-09T13:26:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:32 crc kubenswrapper[4762]: I1009 13:26:32.951780 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:32 crc kubenswrapper[4762]: I1009 13:26:32.951814 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:32 crc kubenswrapper[4762]: I1009 13:26:32.951824 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:32 crc kubenswrapper[4762]: I1009 13:26:32.951839 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:32 crc kubenswrapper[4762]: I1009 13:26:32.951850 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:32Z","lastTransitionTime":"2025-10-09T13:26:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:32 crc kubenswrapper[4762]: I1009 13:26:32.964570 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 13:26:32 crc kubenswrapper[4762]: E1009 13:26:32.964804 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 13:26:32 crc kubenswrapper[4762]: I1009 13:26:32.964667 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 13:26:32 crc kubenswrapper[4762]: I1009 13:26:32.964652 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 13:26:32 crc kubenswrapper[4762]: I1009 13:26:32.964847 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k4bwn" Oct 09 13:26:32 crc kubenswrapper[4762]: E1009 13:26:32.965194 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 13:26:32 crc kubenswrapper[4762]: E1009 13:26:32.965271 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 13:26:32 crc kubenswrapper[4762]: E1009 13:26:32.965333 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k4bwn" podUID="f9a76399-c2ae-487b-a52c-f0e271fb1d20" Oct 09 13:26:33 crc kubenswrapper[4762]: I1009 13:26:33.054361 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:33 crc kubenswrapper[4762]: I1009 13:26:33.054422 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:33 crc kubenswrapper[4762]: I1009 13:26:33.054434 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:33 crc kubenswrapper[4762]: I1009 13:26:33.054452 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:33 crc kubenswrapper[4762]: I1009 13:26:33.054469 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:33Z","lastTransitionTime":"2025-10-09T13:26:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:33 crc kubenswrapper[4762]: I1009 13:26:33.157322 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:33 crc kubenswrapper[4762]: I1009 13:26:33.157363 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:33 crc kubenswrapper[4762]: I1009 13:26:33.157372 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:33 crc kubenswrapper[4762]: I1009 13:26:33.157386 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:33 crc kubenswrapper[4762]: I1009 13:26:33.157395 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:33Z","lastTransitionTime":"2025-10-09T13:26:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:33 crc kubenswrapper[4762]: I1009 13:26:33.259756 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:33 crc kubenswrapper[4762]: I1009 13:26:33.259794 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:33 crc kubenswrapper[4762]: I1009 13:26:33.259805 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:33 crc kubenswrapper[4762]: I1009 13:26:33.259820 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:33 crc kubenswrapper[4762]: I1009 13:26:33.259832 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:33Z","lastTransitionTime":"2025-10-09T13:26:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:33 crc kubenswrapper[4762]: I1009 13:26:33.362732 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:33 crc kubenswrapper[4762]: I1009 13:26:33.362775 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:33 crc kubenswrapper[4762]: I1009 13:26:33.362786 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:33 crc kubenswrapper[4762]: I1009 13:26:33.362801 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:33 crc kubenswrapper[4762]: I1009 13:26:33.362812 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:33Z","lastTransitionTime":"2025-10-09T13:26:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:33 crc kubenswrapper[4762]: I1009 13:26:33.382475 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-9wtqb_c847aae6-277a-45dc-86d0-9b175f7e8177/kube-multus/0.log" Oct 09 13:26:33 crc kubenswrapper[4762]: I1009 13:26:33.382561 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-9wtqb" event={"ID":"c847aae6-277a-45dc-86d0-9b175f7e8177","Type":"ContainerStarted","Data":"0f95093a278093400b0692e3f8cbe050c5586fbcf1ad29c7b758613ab6ac76af"} Oct 09 13:26:33 crc kubenswrapper[4762]: I1009 13:26:33.396236 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dd0d2d4c-667f-43da-8074-b6e14823b755\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78a7e23eb6d5024d626963a06cf5790fcd6c7c17c82c823b2650c55273e427fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f807722a8b6059afed30f7f1fd32bcc168b8bf9d5eee02d74a42ab70ae5ff048\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1706f7f512083d1da015da3c7cd09c6aa4d497b83f8dfcd4ce0e8e966aa00b37\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"
imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd9c69d0be5e859f86da1745bdf82f003681f064e2580bfd454e6ba875bdcb61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:25Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:33Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:33 crc kubenswrapper[4762]: I1009 13:26:33.412549 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:33Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:33 crc kubenswrapper[4762]: I1009 13:26:33.424478 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d2fc9df0c1cc14b4b9f8caff51e87059aeffaa2daeeb271d55585f7297d003c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4194b9211ce31f24383b3cabd274bfb9afef0c56583b802cb2c934ba81b05c43\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env
-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:33Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:33 crc kubenswrapper[4762]: I1009 13:26:33.436582 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"366049a3-acf6-488c-9f93-4557528d6d14\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbaae79b08d94e58f88c25cf641c2c24edc8f8ed5d5ffbf5fd3c68b24246a964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://be56bae2e58091d7381288b22608ea1d9ff05c002d923b3dc62b87fe4d4dfdc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var
/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5v6hv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:33Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:33 crc kubenswrapper[4762]: I1009 13:26:33.447604 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-k4bwn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9a76399-c2ae-487b-a52c-f0e271fb1d20\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vrjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vrjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:58Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-k4bwn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:33Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:33 crc kubenswrapper[4762]: I1009 13:26:33.462158 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"804a251f-1e3f-4b9e-af10-eefa332e6e98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb1fed96810d93e59d24d69fb8a2eb9974e8a5e524465daea3ff2cca16f7226c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7ad35bed43586dfec502056ecb5226049a7fb25461c2774fb5377102fd2ce85d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d3cc3e07b5fc9069f7faa521fa94efde90d9c79940876a93849c44a14327e56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a492a7a956d54533afbd3869796642235d0c8ca621c550c14eeab5e988fc4364\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a492a7a956d54533afbd3869796642235d0c8ca621c550c14eeab5e988fc4364\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:25Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:33Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:33 crc kubenswrapper[4762]: I1009 13:26:33.465132 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:33 crc kubenswrapper[4762]: I1009 13:26:33.465165 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:33 crc kubenswrapper[4762]: I1009 13:26:33.465176 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:33 crc kubenswrapper[4762]: I1009 13:26:33.465192 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:33 crc kubenswrapper[4762]: I1009 13:26:33.465202 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:33Z","lastTransitionTime":"2025-10-09T13:26:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:33 crc kubenswrapper[4762]: I1009 13:26:33.475520 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42c46f24d4579b9ef6d5f7a351830fc24872a571e6bc26a163bb5ace1e688037\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:33Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:33 crc kubenswrapper[4762]: I1009 13:26:33.485907 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b59b56cd547bd0e2f72ad712b04651a4ae65cf3a0df7865c0b0c16478261a06a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:33Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:33 crc kubenswrapper[4762]: I1009 13:26:33.497584 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:33Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:33 crc kubenswrapper[4762]: I1009 13:26:33.518930 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"92662de9-9784-432a-92d2-a668f815e8fd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c409df5880861cef6885822a19dc9bbe481342a849c18ac11c85a60fcee0f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9dbd3d536f2ff0e46947e1516b3b9def208d490f5e62bbde5bebf37690d26ac0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a6b9ddcf6f9632e0ab1ac7f145c90d4c1e404b44f6e4fdc547fa42a4736448d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1fe15b6fa2a4089c0ef0b19180a44b570bf28aeb719e8fb5c960c16f3bc3ee5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://443d1d116c58f5d8b2c5fc9051baf914244cb0776b1f912d11fe4316a0ec0567\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a6b026d474235bbb7b31530b4628a10c35b22baf4ee49759f28a9beb8177989\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1dc4404ab9e73409695946479f2738c0f0af702d
a13af2d1995bf63ef0e493d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1dc4404ab9e73409695946479f2738c0f0af702da13af2d1995bf63ef0e493d0\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T13:26:20Z\\\",\\\"message\\\":\\\"ble to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:20Z is after 2025-08-24T17:21:41Z]\\\\nI1009 13:26:20.768193 6440 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-kube-controller-manager-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"3ec9f67e-7758-4707-a6d0-2dc28f28ac37\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-kube-controller-manager-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterL\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T13:26:20Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-jl67g_openshift-ovn-kubernetes(92662de9-9784-432a-92d2-a668f815e8fd)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f5be977653547c33b4d2d5184688120b32866045e3b18a08be1c7c406d6b498\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bd03e6b064630a1cd71d2e88a4e99d513b30d0f225516ce8030cba879fe2422e\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd03e6b064630a1cd71d2e88a4e99d513b30d0f225516ce8030cba879fe2422e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jl67g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:33Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:33 crc kubenswrapper[4762]: I1009 13:26:33.532705 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7bd1ecbd-1492-4e6a-87e8-1c913e084d9d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9ff1efe69d256b491a039e5f35442c087ce3b52fc7abf98b338e24c3e020b99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb2071dd369674ca2de7de56dd1250c763b8733d72889b60eff864774dc3d81b\\\",\\\"i
mage\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e44b6f2021a1a4ccd714f86443c7cc235b9d77cd455e68f7e042281ff0917569\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f33b9070a56fe51d2f39d9d509fc8cea2fada696703209c911b75f5c8f53e96d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9179f90a1a9a3c70467429b0471320ccf51b67f27c4d28d22ebc477cedab17d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1009 13:25:38.564754 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 13:25:38.572923 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2393231961/tls.crt::/tmp/serving-cert-2393231961/tls.key\\\\\\\"\\\\nI1009 13:25:44.418137 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 13:25:44.425303 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 13:25:44.425330 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 13:25:44.425348 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 13:25:44.425353 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 13:25:44.434300 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1009 13:25:44.434319 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1009 13:25:44.434323 1 secure_serving.go:69] Use of insecure cipher 
'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 13:25:44.434340 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 13:25:44.434344 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 13:25:44.434347 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 13:25:44.434350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 13:25:44.434353 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1009 13:25:44.436492 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:28Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://281873dc615f940d39a13cb0a18a2eb34eb7de3f9773d8845183edeb89d430f5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b0cc5e4351c64f1a4f07f8ec87ea48ddab393d4ac64228e8fbf20d3259fb630\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6b0cc5e4351c64f1a4f07f8ec87ea48ddab393d4ac64228e8fbf20d3259fb630\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:25Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:33Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:33 crc kubenswrapper[4762]: I1009 13:26:33.544557 4762 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-dns/node-resolver-2vkbh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"263d57f9-b10b-4ce1-adad-774600b977d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://732b66ff58c48b0703e0fd4585768652035af6797f66b586fc6f17ef3937d9d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gmcr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2vkbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:33Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:33 crc kubenswrapper[4762]: I1009 13:26:33.558863 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-9wtqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c847aae6-277a-45dc-86d0-9b175f7e8177\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f95093a278093400b0692e3f8cbe050c5586fbcf1ad29c7b758613ab6ac76af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbc232c96b60c8678588d4902c8dfbf6fc0b30f8af768295c963aad3a9f4d644\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T13:26:32Z\\\",\\\"message\\\":\\\"2025-10-09T13:25:46+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_cd21724e-0215-496f-b682-23c2705fa0b1\\\\n2025-10-09T13:25:46+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_cd21724e-0215-496f-b682-23c2705fa0b1 to /host/opt/cni/bin/\\\\n2025-10-09T13:25:47Z [verbose] multus-daemon started\\\\n2025-10-09T13:25:47Z [verbose] Readiness Indicator file check\\\\n2025-10-09T13:26:32Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:26:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2kljt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-9wtqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:33Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:33 crc kubenswrapper[4762]: I1009 13:26:33.567404 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:33 crc kubenswrapper[4762]: I1009 13:26:33.567445 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:33 crc kubenswrapper[4762]: I1009 13:26:33.567457 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:33 crc kubenswrapper[4762]: I1009 13:26:33.567474 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:33 crc kubenswrapper[4762]: I1009 13:26:33.567486 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:33Z","lastTransitionTime":"2025-10-09T13:26:33Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:33 crc kubenswrapper[4762]: I1009 13:26:33.570858 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fx92z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf0dada3-5765-4a2e-b28a-f9291c2d6428\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6368fc75a63ebf7915390457c69a6a9b77e19726ba182437f5c616bea12245f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xpd4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aeff516f9d81e48c8bd350da51ebffc8f6f031f06100a5264cb7aca04674f79f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xpd4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-
10-09T13:25:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fx92z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:33Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:33 crc kubenswrapper[4762]: I1009 13:26:33.587386 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:33Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:33 crc kubenswrapper[4762]: I1009 13:26:33.608783 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-n6lnd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf8b8ba7-96cd-4cdd-9925-94dd98242050\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ff1d6f7e6b424ebf8005fa3d140897100a8e7ed0095e7af05531cf5ad9f69b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://38f8c3c9395cbaf4a6426349a070b2d3b4ba4f83af8f5272a33d617f456c2e38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"s
tarted\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://38f8c3c9395cbaf4a6426349a070b2d3b4ba4f83af8f5272a33d617f456c2e38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a83c8fa2a9b44c19879eaa27ee0aceb5aa4f0c2d70347e497a62fc1ca236807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a83c8fa2a9b44c19879eaa27ee0aceb5aa4f0c2d70347e497a62fc1ca236807\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ece4883665cbaf9a3045d5ddde5584cc4cd6c3acb5a8bb3acbf7eaee644796e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ece4883665cbaf9a3045d5ddde5584cc4cd6c3acb5a8bb3acbf7eaee644796e9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"
}]},{\\\"containerID\\\":\\\"cri-o://1aae59e2ff2b1d48f9d2b5d13c30ccc155f0ba8e2657ec99020d6c6a8977495e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1aae59e2ff2b1d48f9d2b5d13c30ccc155f0ba8e2657ec99020d6c6a8977495e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f1ff0c4b2b07617d5ea81b1d88ea74124e4739d667f1ad72eb36dad3d48e7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96f1ff0c4b2b07617d5ea81b1d88ea74124e4739d667f1ad72eb36dad3d48e7f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa9e24a4bd37675e1c6b4f6ba08d60b722add0bbcdf9eb51ace2e3fb35143d0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa9e24a4bd37675e1c6b4f6ba08d60b722add0bbcdf9eb51ace2e3fb35143d0d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":
\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-n6lnd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:33Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:33 crc kubenswrapper[4762]: I1009 13:26:33.622070 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bj499" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b164d4fd-19df-4902-971f-5efe403e61e0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ab43a3e93731e2de1e0260a1933cc78188e7b561cb450d645c7b4fc00b691ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqf4b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:51Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bj499\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:33Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:33 crc kubenswrapper[4762]: I1009 
13:26:33.669570 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:33 crc kubenswrapper[4762]: I1009 13:26:33.669668 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:33 crc kubenswrapper[4762]: I1009 13:26:33.669689 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:33 crc kubenswrapper[4762]: I1009 13:26:33.669712 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:33 crc kubenswrapper[4762]: I1009 13:26:33.669728 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:33Z","lastTransitionTime":"2025-10-09T13:26:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:33 crc kubenswrapper[4762]: I1009 13:26:33.771814 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:33 crc kubenswrapper[4762]: I1009 13:26:33.771860 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:33 crc kubenswrapper[4762]: I1009 13:26:33.771873 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:33 crc kubenswrapper[4762]: I1009 13:26:33.771890 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:33 crc kubenswrapper[4762]: I1009 13:26:33.771902 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:33Z","lastTransitionTime":"2025-10-09T13:26:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:33 crc kubenswrapper[4762]: I1009 13:26:33.874125 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:33 crc kubenswrapper[4762]: I1009 13:26:33.874171 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:33 crc kubenswrapper[4762]: I1009 13:26:33.874179 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:33 crc kubenswrapper[4762]: I1009 13:26:33.874193 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:33 crc kubenswrapper[4762]: I1009 13:26:33.874202 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:33Z","lastTransitionTime":"2025-10-09T13:26:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:33 crc kubenswrapper[4762]: I1009 13:26:33.978518 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:33 crc kubenswrapper[4762]: I1009 13:26:33.978582 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:33 crc kubenswrapper[4762]: I1009 13:26:33.978597 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:33 crc kubenswrapper[4762]: I1009 13:26:33.978619 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:33 crc kubenswrapper[4762]: I1009 13:26:33.978655 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:33Z","lastTransitionTime":"2025-10-09T13:26:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:34 crc kubenswrapper[4762]: I1009 13:26:34.081120 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:34 crc kubenswrapper[4762]: I1009 13:26:34.081167 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:34 crc kubenswrapper[4762]: I1009 13:26:34.081176 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:34 crc kubenswrapper[4762]: I1009 13:26:34.081190 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:34 crc kubenswrapper[4762]: I1009 13:26:34.081202 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:34Z","lastTransitionTime":"2025-10-09T13:26:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:34 crc kubenswrapper[4762]: I1009 13:26:34.183998 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:34 crc kubenswrapper[4762]: I1009 13:26:34.184047 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:34 crc kubenswrapper[4762]: I1009 13:26:34.184059 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:34 crc kubenswrapper[4762]: I1009 13:26:34.184077 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:34 crc kubenswrapper[4762]: I1009 13:26:34.184089 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:34Z","lastTransitionTime":"2025-10-09T13:26:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:34 crc kubenswrapper[4762]: I1009 13:26:34.286519 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:34 crc kubenswrapper[4762]: I1009 13:26:34.286578 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:34 crc kubenswrapper[4762]: I1009 13:26:34.286590 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:34 crc kubenswrapper[4762]: I1009 13:26:34.286610 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:34 crc kubenswrapper[4762]: I1009 13:26:34.286624 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:34Z","lastTransitionTime":"2025-10-09T13:26:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:34 crc kubenswrapper[4762]: I1009 13:26:34.389235 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:34 crc kubenswrapper[4762]: I1009 13:26:34.389279 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:34 crc kubenswrapper[4762]: I1009 13:26:34.389288 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:34 crc kubenswrapper[4762]: I1009 13:26:34.389303 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:34 crc kubenswrapper[4762]: I1009 13:26:34.389313 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:34Z","lastTransitionTime":"2025-10-09T13:26:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:34 crc kubenswrapper[4762]: I1009 13:26:34.491781 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:34 crc kubenswrapper[4762]: I1009 13:26:34.491829 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:34 crc kubenswrapper[4762]: I1009 13:26:34.491846 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:34 crc kubenswrapper[4762]: I1009 13:26:34.491870 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:34 crc kubenswrapper[4762]: I1009 13:26:34.491887 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:34Z","lastTransitionTime":"2025-10-09T13:26:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:34 crc kubenswrapper[4762]: I1009 13:26:34.593752 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:34 crc kubenswrapper[4762]: I1009 13:26:34.593796 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:34 crc kubenswrapper[4762]: I1009 13:26:34.593807 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:34 crc kubenswrapper[4762]: I1009 13:26:34.593823 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:34 crc kubenswrapper[4762]: I1009 13:26:34.593838 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:34Z","lastTransitionTime":"2025-10-09T13:26:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:34 crc kubenswrapper[4762]: I1009 13:26:34.695697 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:34 crc kubenswrapper[4762]: I1009 13:26:34.695736 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:34 crc kubenswrapper[4762]: I1009 13:26:34.695748 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:34 crc kubenswrapper[4762]: I1009 13:26:34.695763 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:34 crc kubenswrapper[4762]: I1009 13:26:34.695775 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:34Z","lastTransitionTime":"2025-10-09T13:26:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:34 crc kubenswrapper[4762]: I1009 13:26:34.797583 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:34 crc kubenswrapper[4762]: I1009 13:26:34.797665 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:34 crc kubenswrapper[4762]: I1009 13:26:34.797680 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:34 crc kubenswrapper[4762]: I1009 13:26:34.797697 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:34 crc kubenswrapper[4762]: I1009 13:26:34.797707 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:34Z","lastTransitionTime":"2025-10-09T13:26:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:34 crc kubenswrapper[4762]: I1009 13:26:34.900142 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:34 crc kubenswrapper[4762]: I1009 13:26:34.900203 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:34 crc kubenswrapper[4762]: I1009 13:26:34.900216 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:34 crc kubenswrapper[4762]: I1009 13:26:34.900234 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:34 crc kubenswrapper[4762]: I1009 13:26:34.900249 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:34Z","lastTransitionTime":"2025-10-09T13:26:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:34 crc kubenswrapper[4762]: I1009 13:26:34.964812 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 13:26:34 crc kubenswrapper[4762]: I1009 13:26:34.964847 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k4bwn" Oct 09 13:26:34 crc kubenswrapper[4762]: I1009 13:26:34.964867 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 13:26:34 crc kubenswrapper[4762]: E1009 13:26:34.965158 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k4bwn" podUID="f9a76399-c2ae-487b-a52c-f0e271fb1d20" Oct 09 13:26:34 crc kubenswrapper[4762]: E1009 13:26:34.964933 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 13:26:34 crc kubenswrapper[4762]: E1009 13:26:34.965275 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 13:26:34 crc kubenswrapper[4762]: I1009 13:26:34.964893 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 13:26:34 crc kubenswrapper[4762]: E1009 13:26:34.965374 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 13:26:34 crc kubenswrapper[4762]: I1009 13:26:34.980326 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"804a251f-1e3f-4b9e-af10-eefa332e6e98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb1fed96810d93e59d24d69fb8a2eb9974e8a5e524465daea3ff2cca16f7226c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7ad35bed43586dfec502056ecb5226049a7fb25461c2774fb5377102fd2ce85d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d3cc3e07b5fc9069f7faa521fa94efde90d9c79940876a93849c44a14327e56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a492a7a956d54533afbd3869796642235d0c8ca621c550c14eeab5e988fc4364\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a492a7a956d54533afbd3869796642235d0c8ca621c550c14eeab5e988fc4364\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:25Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:34Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:34 crc kubenswrapper[4762]: I1009 13:26:34.992007 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42c46f24d4579b9ef6d5f7a351830fc24872a571e6bc26a163bb5ace1e688037\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:34Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:35 crc kubenswrapper[4762]: I1009 13:26:35.003257 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:35 crc kubenswrapper[4762]: I1009 13:26:35.003297 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:35 crc kubenswrapper[4762]: I1009 13:26:35.003308 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:35 crc kubenswrapper[4762]: I1009 13:26:35.003325 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:35 crc kubenswrapper[4762]: I1009 13:26:35.003336 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:35Z","lastTransitionTime":"2025-10-09T13:26:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:35 crc kubenswrapper[4762]: I1009 13:26:35.007656 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b59b56cd547bd0e2f72ad712b04651a4ae65cf3a0df7865c0b0c16478261a06a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:35Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:35 crc kubenswrapper[4762]: I1009 13:26:35.020438 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:35Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:35 crc kubenswrapper[4762]: I1009 13:26:35.040875 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"92662de9-9784-432a-92d2-a668f815e8fd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c409df5880861cef6885822a19dc9bbe481342a849c18ac11c85a60fcee0f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9dbd3d536f2ff0e46947e1516b3b9def208d490f5e62bbde5bebf37690d26ac0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a6b9ddcf6f9632e0ab1ac7f145c90d4c1e404b44f6e4fdc547fa42a4736448d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1fe15b6fa2a4089c0ef0b19180a44b570bf28aeb719e8fb5c960c16f3bc3ee5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://443d1d116c58f5d8b2c5fc9051baf914244cb0776b1f912d11fe4316a0ec0567\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a6b026d474235bbb7b31530b4628a10c35b22baf4ee49759f28a9beb8177989\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1dc4404ab9e73409695946479f2738c0f0af702d
a13af2d1995bf63ef0e493d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1dc4404ab9e73409695946479f2738c0f0af702da13af2d1995bf63ef0e493d0\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T13:26:20Z\\\",\\\"message\\\":\\\"ble to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:20Z is after 2025-08-24T17:21:41Z]\\\\nI1009 13:26:20.768193 6440 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-kube-controller-manager-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"3ec9f67e-7758-4707-a6d0-2dc28f28ac37\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-kube-controller-manager-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterL\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T13:26:20Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-jl67g_openshift-ovn-kubernetes(92662de9-9784-432a-92d2-a668f815e8fd)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f5be977653547c33b4d2d5184688120b32866045e3b18a08be1c7c406d6b498\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bd03e6b064630a1cd71d2e88a4e99d513b30d0f225516ce8030cba879fe2422e\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd03e6b064630a1cd71d2e88a4e99d513b30d0f225516ce8030cba879fe2422e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jl67g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:35Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:35 crc kubenswrapper[4762]: I1009 13:26:35.055076 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7bd1ecbd-1492-4e6a-87e8-1c913e084d9d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9ff1efe69d256b491a039e5f35442c087ce3b52fc7abf98b338e24c3e020b99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb2071dd369674ca2de7de56dd1250c763b8733d72889b60eff864774dc3d81b\\\",\\\"i
mage\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e44b6f2021a1a4ccd714f86443c7cc235b9d77cd455e68f7e042281ff0917569\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f33b9070a56fe51d2f39d9d509fc8cea2fada696703209c911b75f5c8f53e96d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9179f90a1a9a3c70467429b0471320ccf51b67f27c4d28d22ebc477cedab17d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1009 13:25:38.564754 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 13:25:38.572923 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2393231961/tls.crt::/tmp/serving-cert-2393231961/tls.key\\\\\\\"\\\\nI1009 13:25:44.418137 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 13:25:44.425303 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 13:25:44.425330 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 13:25:44.425348 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 13:25:44.425353 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 13:25:44.434300 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1009 13:25:44.434319 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1009 13:25:44.434323 1 secure_serving.go:69] Use of insecure cipher 
'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 13:25:44.434340 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 13:25:44.434344 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 13:25:44.434347 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 13:25:44.434350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 13:25:44.434353 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1009 13:25:44.436492 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:28Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://281873dc615f940d39a13cb0a18a2eb34eb7de3f9773d8845183edeb89d430f5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b0cc5e4351c64f1a4f07f8ec87ea48ddab393d4ac64228e8fbf20d3259fb630\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6b0cc5e4351c64f1a4f07f8ec87ea48ddab393d4ac64228e8fbf20d3259fb630\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:25Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:35Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:35 crc kubenswrapper[4762]: I1009 13:26:35.065815 4762 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-dns/node-resolver-2vkbh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"263d57f9-b10b-4ce1-adad-774600b977d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://732b66ff58c48b0703e0fd4585768652035af6797f66b586fc6f17ef3937d9d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gmcr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2vkbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:35Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:35 crc kubenswrapper[4762]: I1009 13:26:35.079612 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-9wtqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c847aae6-277a-45dc-86d0-9b175f7e8177\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f95093a278093400b0692e3f8cbe050c5586fbcf1ad29c7b758613ab6ac76af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbc232c96b60c8678588d4902c8dfbf6fc0b30f8af768295c963aad3a9f4d644\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T13:26:32Z\\\",\\\"message\\\":\\\"2025-10-09T13:25:46+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_cd21724e-0215-496f-b682-23c2705fa0b1\\\\n2025-10-09T13:25:46+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_cd21724e-0215-496f-b682-23c2705fa0b1 to /host/opt/cni/bin/\\\\n2025-10-09T13:25:47Z [verbose] multus-daemon started\\\\n2025-10-09T13:25:47Z [verbose] Readiness Indicator file check\\\\n2025-10-09T13:26:32Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:26:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2kljt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-9wtqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:35Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:35 crc kubenswrapper[4762]: I1009 13:26:35.091406 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fx92z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf0dada3-5765-4a2e-b28a-f9291c2d6428\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6368fc75a63ebf7915390457c69a6a9b77e19726ba182437f5c616bea12245f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xpd4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aeff516f9d81e48c8bd350da51ebffc8f6f031f06100a5264cb7aca04674f79f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xpd4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fx92z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:35Z is after 2025-08-24T17:21:41Z" Oct 09 
13:26:35 crc kubenswrapper[4762]: I1009 13:26:35.103827 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:35Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:35 crc kubenswrapper[4762]: I1009 13:26:35.105203 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:35 crc kubenswrapper[4762]: I1009 13:26:35.105279 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:35 crc kubenswrapper[4762]: I1009 13:26:35.105407 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:35 crc kubenswrapper[4762]: I1009 13:26:35.105426 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:35 crc kubenswrapper[4762]: I1009 13:26:35.105438 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:35Z","lastTransitionTime":"2025-10-09T13:26:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:35 crc kubenswrapper[4762]: I1009 13:26:35.118719 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-n6lnd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf8b8ba7-96cd-4cdd-9925-94dd98242050\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ff1d6f7e6b424ebf8005fa3d140897100a8e7ed0095e7af05531cf5ad9f69b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://38f8c3c9395cbaf4a6426349a070b2d3b4ba4f83af8f5272a33d617f456c2e38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://38f8c3c9395cbaf4a6426349a070b2d3b4ba4f83af8f5272a33d617f456c2e38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a83c8fa2a9b44c19879eaa27ee0aceb5aa4f0c2d70347e497a62fc1ca236807\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a83c8fa2a9b44c19879eaa27ee0aceb5aa4f0c2d70347e497a62fc1ca236807\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ece4883665cbaf9a3045d5ddde5584cc4cd6c3acb5a8bb3acbf7eaee644796e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ece4883665cbaf9a3045d5ddde5584cc4cd6c3acb5a8bb3acbf7eaee644796e9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1aae59e2ff2b1d48f9d2b5d13c30ccc155f0ba8e2657ec99020d6c6a8977495e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1aae59e2ff2b1d48f9d2b5d13c30ccc155f0ba8e2657ec99020d6c6a8977495e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f1ff0c4b2b07617d5ea81b1d88ea74124e4739d667f1ad72eb36dad3d48e7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96f1ff0c4b2b07617d5ea81b1d88ea74124e4739d667f1ad72eb36dad3d48e7f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa9e24a4bd37675e1c6b4f6ba08d60b722add0bbcdf9eb51ace2e3fb35143d0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa9e24a4bd37675e1c6b4f6ba08d60b722add0bbcdf9eb51ace2e3fb35143d0d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-n6lnd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:35Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:35 crc kubenswrapper[4762]: I1009 13:26:35.128009 4762 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-image-registry/node-ca-bj499" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b164d4fd-19df-4902-971f-5efe403e61e0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ab43a3e93731e2de1e0260a1933cc78188e7b561cb450d645c7b4fc00b691ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqf4b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:51Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bj499\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:35Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:35 crc kubenswrapper[4762]: I1009 13:26:35.141762 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dd0d2d4c-667f-43da-8074-b6e14823b755\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78a7e23eb6d5024d626963a06cf5790fcd6c7c17c82c823b2650c55273e427fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f807722a8b6059afed30f7f1fd32bcc168b8bf9d5eee02d74a42ab70ae5ff048\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1706f7f512083d1da015da3c7cd09c6aa4d497b83f8dfcd4ce0e8e966aa00b37\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd9c69d0be5e859f86da1745bdf82f003681f064e2580bfd454e6ba875bdcb61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:25Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:35Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:35 crc kubenswrapper[4762]: I1009 13:26:35.154841 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:35Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:35 crc kubenswrapper[4762]: I1009 13:26:35.167401 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d2fc9df0c1cc14b4b9f8caff51e87059aeffaa2daeeb271d55585f7297d003c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4194b9211ce31f24383b3cabd274bfb9afef0c56583b802cb2c934ba81b05c43\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:35Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:35 crc kubenswrapper[4762]: I1009 13:26:35.180755 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"366049a3-acf6-488c-9f93-4557528d6d14\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbaae79b08d94e58f88c25cf641c2c24edc8f8ed5d5ffbf5fd3c68b24246a964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://be56bae2e58091d7381288b22608ea1d9ff05c002d923b3dc62b87fe4d4dfdc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5v6hv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:35Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:35 crc kubenswrapper[4762]: I1009 13:26:35.190989 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-k4bwn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9a76399-c2ae-487b-a52c-f0e271fb1d20\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vrjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vrjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:58Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-k4bwn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:35Z is after 2025-08-24T17:21:41Z"
Oct 09 13:26:35 crc kubenswrapper[4762]: I1009 13:26:35.207721 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 09 13:26:35 crc kubenswrapper[4762]: I1009 13:26:35.207755 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 09 13:26:35 crc kubenswrapper[4762]: I1009 13:26:35.207765 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 09 13:26:35 crc kubenswrapper[4762]: I1009 13:26:35.207780 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 09 13:26:35 crc kubenswrapper[4762]: I1009 13:26:35.207791 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:35Z","lastTransitionTime":"2025-10-09T13:26:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
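Every status patch above is rejected for the same reason: the apiserver cannot verify the network-node-identity webhook's serving certificate, because the current time (2025-10-09T13:26:35Z) falls outside the certificate's validity window (notAfter 2025-08-24T17:21:41Z). Below is a minimal Python sketch of that x509 validity-window comparison, using only the two timestamps taken verbatim from the records above; the real check happens in Go's crypto/x509 during the TLS handshake, not in a script like this.

    from datetime import datetime, timezone

    # Timestamps copied verbatim from the log records above.
    now = datetime(2025, 10, 9, 13, 26, 35, tzinfo=timezone.utc)        # current time
    not_after = datetime(2025, 8, 24, 17, 21, 41, tzinfo=timezone.utc)  # cert notAfter

    # Simplified x509 validity check: a certificate is valid only while
    # notBefore <= now <= notAfter. Here now > notAfter, so verification
    # fails with "certificate has expired or is not yet valid".
    if now > not_after:
        print(f"x509: certificate has expired: current time "
              f"{now:%Y-%m-%dT%H:%M:%SZ} is after {not_after:%Y-%m-%dT%H:%M:%SZ}")

Until that certificate is rotated, every webhook-gated status patch should keep failing identically, which is why the same error repeats below for unrelated pods.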
Oct 09 13:26:35 crc kubenswrapper[4762]: I1009 13:26:35.311238 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 09 13:26:35 crc kubenswrapper[4762]: I1009 13:26:35.311270 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 09 13:26:35 crc kubenswrapper[4762]: I1009 13:26:35.311278 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 09 13:26:35 crc kubenswrapper[4762]: I1009 13:26:35.311290 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 09 13:26:35 crc kubenswrapper[4762]: I1009 13:26:35.311299 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:35Z","lastTransitionTime":"2025-10-09T13:26:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 09 13:26:35 crc kubenswrapper[4762]: I1009 13:26:35.414287 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 09 13:26:35 crc kubenswrapper[4762]: I1009 13:26:35.414347 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 09 13:26:35 crc kubenswrapper[4762]: I1009 13:26:35.414367 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 09 13:26:35 crc kubenswrapper[4762]: I1009 13:26:35.414392 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 09 13:26:35 crc kubenswrapper[4762]: I1009 13:26:35.414412 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:35Z","lastTransitionTime":"2025-10-09T13:26:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 09 13:26:35 crc kubenswrapper[4762]: I1009 13:26:35.517715 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 09 13:26:35 crc kubenswrapper[4762]: I1009 13:26:35.518002 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 09 13:26:35 crc kubenswrapper[4762]: I1009 13:26:35.518020 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 09 13:26:35 crc kubenswrapper[4762]: I1009 13:26:35.518041 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 09 13:26:35 crc kubenswrapper[4762]: I1009 13:26:35.518059 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:35Z","lastTransitionTime":"2025-10-09T13:26:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:35 crc kubenswrapper[4762]: I1009 13:26:35.620368 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:35 crc kubenswrapper[4762]: I1009 13:26:35.620416 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:35 crc kubenswrapper[4762]: I1009 13:26:35.620427 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:35 crc kubenswrapper[4762]: I1009 13:26:35.620443 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:35 crc kubenswrapper[4762]: I1009 13:26:35.620453 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:35Z","lastTransitionTime":"2025-10-09T13:26:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:35 crc kubenswrapper[4762]: I1009 13:26:35.723265 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:35 crc kubenswrapper[4762]: I1009 13:26:35.723308 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:35 crc kubenswrapper[4762]: I1009 13:26:35.723319 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:35 crc kubenswrapper[4762]: I1009 13:26:35.723335 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:35 crc kubenswrapper[4762]: I1009 13:26:35.723349 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:35Z","lastTransitionTime":"2025-10-09T13:26:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:35 crc kubenswrapper[4762]: I1009 13:26:35.826172 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:35 crc kubenswrapper[4762]: I1009 13:26:35.826204 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:35 crc kubenswrapper[4762]: I1009 13:26:35.826212 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:35 crc kubenswrapper[4762]: I1009 13:26:35.826224 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:35 crc kubenswrapper[4762]: I1009 13:26:35.826232 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:35Z","lastTransitionTime":"2025-10-09T13:26:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Oct 09 13:26:35 crc kubenswrapper[4762]: I1009 13:26:35.929934 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 09 13:26:35 crc kubenswrapper[4762]: I1009 13:26:35.929986 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 09 13:26:35 crc kubenswrapper[4762]: I1009 13:26:35.929998 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 09 13:26:35 crc kubenswrapper[4762]: I1009 13:26:35.930015 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 09 13:26:35 crc kubenswrapper[4762]: I1009 13:26:35.930054 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:35Z","lastTransitionTime":"2025-10-09T13:26:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 09 13:26:35 crc kubenswrapper[4762]: I1009 13:26:35.965842 4762 scope.go:117] "RemoveContainer" containerID="1dc4404ab9e73409695946479f2738c0f0af702da13af2d1995bf63ef0e493d0"
Oct 09 13:26:35 crc kubenswrapper[4762]: E1009 13:26:35.966103 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-jl67g_openshift-ovn-kubernetes(92662de9-9784-432a-92d2-a668f815e8fd)\"" pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" podUID="92662de9-9784-432a-92d2-a668f815e8fd"
Oct 09 13:26:36 crc kubenswrapper[4762]: I1009 13:26:36.033519 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 09 13:26:36 crc kubenswrapper[4762]: I1009 13:26:36.033614 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 09 13:26:36 crc kubenswrapper[4762]: I1009 13:26:36.033696 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 09 13:26:36 crc kubenswrapper[4762]: I1009 13:26:36.033817 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 09 13:26:36 crc kubenswrapper[4762]: I1009 13:26:36.033862 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:36Z","lastTransitionTime":"2025-10-09T13:26:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
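The RemoveContainer/CrashLoopBackOff pair above shows the kubelet's restart backoff at work: ovnkube-controller has crashed, and its next restart attempt is delayed by 20s. Kubernetes documents this backoff as starting at 10s and doubling per restart up to a five-minute cap (resetting after 10 minutes of clean running); the sketch below reproduces that schedule, so "back-off 20s" corresponds to the second restart. This is an illustration of the documented schedule, not the kubelet's actual Go implementation.

    # Hypothetical sketch of the kubelet's documented crash-loop restart
    # backoff: 10s initial delay, doubled after each crash, capped at 5 min.
    def crashloop_delays(restarts: int, base_s: int = 10, cap_s: int = 300):
        delay = base_s
        for _ in range(restarts):
            yield min(delay, cap_s)
            delay *= 2

    # "back-off 20s" in the record above is the second entry in this schedule.
    print(list(crashloop_delays(6)))  # -> [10, 20, 40, 80, 160, 300]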
Oct 09 13:26:36 crc kubenswrapper[4762]: I1009 13:26:36.136420 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 09 13:26:36 crc kubenswrapper[4762]: I1009 13:26:36.136474 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 09 13:26:36 crc kubenswrapper[4762]: I1009 13:26:36.136485 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 09 13:26:36 crc kubenswrapper[4762]: I1009 13:26:36.136502 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 09 13:26:36 crc kubenswrapper[4762]: I1009 13:26:36.136513 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:36Z","lastTransitionTime":"2025-10-09T13:26:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 09 13:26:36 crc kubenswrapper[4762]: I1009 13:26:36.239032 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 09 13:26:36 crc kubenswrapper[4762]: I1009 13:26:36.239092 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 09 13:26:36 crc kubenswrapper[4762]: I1009 13:26:36.239106 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 09 13:26:36 crc kubenswrapper[4762]: I1009 13:26:36.239131 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 09 13:26:36 crc kubenswrapper[4762]: I1009 13:26:36.239146 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:36Z","lastTransitionTime":"2025-10-09T13:26:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 09 13:26:36 crc kubenswrapper[4762]: I1009 13:26:36.342146 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 09 13:26:36 crc kubenswrapper[4762]: I1009 13:26:36.342855 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 09 13:26:36 crc kubenswrapper[4762]: I1009 13:26:36.342882 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 09 13:26:36 crc kubenswrapper[4762]: I1009 13:26:36.342904 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 09 13:26:36 crc kubenswrapper[4762]: I1009 13:26:36.342921 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:36Z","lastTransitionTime":"2025-10-09T13:26:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:36 crc kubenswrapper[4762]: I1009 13:26:36.444912 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:36 crc kubenswrapper[4762]: I1009 13:26:36.444961 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:36 crc kubenswrapper[4762]: I1009 13:26:36.444970 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:36 crc kubenswrapper[4762]: I1009 13:26:36.444988 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:36 crc kubenswrapper[4762]: I1009 13:26:36.444999 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:36Z","lastTransitionTime":"2025-10-09T13:26:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:36 crc kubenswrapper[4762]: I1009 13:26:36.548781 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:36 crc kubenswrapper[4762]: I1009 13:26:36.548837 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:36 crc kubenswrapper[4762]: I1009 13:26:36.548855 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:36 crc kubenswrapper[4762]: I1009 13:26:36.548878 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:36 crc kubenswrapper[4762]: I1009 13:26:36.548894 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:36Z","lastTransitionTime":"2025-10-09T13:26:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:36 crc kubenswrapper[4762]: I1009 13:26:36.651453 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:36 crc kubenswrapper[4762]: I1009 13:26:36.651533 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:36 crc kubenswrapper[4762]: I1009 13:26:36.651559 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:36 crc kubenswrapper[4762]: I1009 13:26:36.651589 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:36 crc kubenswrapper[4762]: I1009 13:26:36.651611 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:36Z","lastTransitionTime":"2025-10-09T13:26:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:36 crc kubenswrapper[4762]: I1009 13:26:36.754456 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:36 crc kubenswrapper[4762]: I1009 13:26:36.754485 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:36 crc kubenswrapper[4762]: I1009 13:26:36.754493 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:36 crc kubenswrapper[4762]: I1009 13:26:36.754506 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:36 crc kubenswrapper[4762]: I1009 13:26:36.754515 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:36Z","lastTransitionTime":"2025-10-09T13:26:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:36 crc kubenswrapper[4762]: I1009 13:26:36.857103 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:36 crc kubenswrapper[4762]: I1009 13:26:36.857152 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:36 crc kubenswrapper[4762]: I1009 13:26:36.857163 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:36 crc kubenswrapper[4762]: I1009 13:26:36.857178 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:36 crc kubenswrapper[4762]: I1009 13:26:36.857189 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:36Z","lastTransitionTime":"2025-10-09T13:26:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:36 crc kubenswrapper[4762]: I1009 13:26:36.960002 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:36 crc kubenswrapper[4762]: I1009 13:26:36.960054 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:36 crc kubenswrapper[4762]: I1009 13:26:36.960064 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:36 crc kubenswrapper[4762]: I1009 13:26:36.960077 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:36 crc kubenswrapper[4762]: I1009 13:26:36.960086 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:36Z","lastTransitionTime":"2025-10-09T13:26:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Oct 09 13:26:36 crc kubenswrapper[4762]: I1009 13:26:36.964311 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 09 13:26:36 crc kubenswrapper[4762]: E1009 13:26:36.964457 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 09 13:26:36 crc kubenswrapper[4762]: I1009 13:26:36.964490 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k4bwn"
Oct 09 13:26:36 crc kubenswrapper[4762]: I1009 13:26:36.964519 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 09 13:26:36 crc kubenswrapper[4762]: E1009 13:26:36.964672 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k4bwn" podUID="f9a76399-c2ae-487b-a52c-f0e271fb1d20"
Oct 09 13:26:36 crc kubenswrapper[4762]: I1009 13:26:36.964530 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 09 13:26:36 crc kubenswrapper[4762]: E1009 13:26:36.964787 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 09 13:26:36 crc kubenswrapper[4762]: E1009 13:26:36.964856 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
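All four "No sandbox for pod can be found" pods above are blocked on one condition: the runtime reports NetworkReady=false because /etc/kubernetes/cni/net.d/ contains no CNI configuration yet (OVN-Kubernetes writes it once ovnkube-node comes up, and that container is itself in CrashLoopBackOff above). Below is a rough Python analogue of the readiness test the runtime performs via libcni; the directory path comes from the log, while the function name and extension list are illustrative assumptions.

    import os

    # CNI config directory named in the records above.
    CNI_CONF_DIR = "/etc/kubernetes/cni/net.d"

    def cni_config_present(conf_dir: str = CNI_CONF_DIR) -> bool:
        """Approximation of the runtime's network-readiness probe:
        any .conf/.conflist/.json file in the CNI conf dir counts."""
        try:
            return any(
                name.endswith((".conf", ".conflist", ".json"))
                for name in os.listdir(conf_dir)
            )
        except FileNotFoundError:
            return False

    if not cni_config_present():
        print("NetworkReady=false: no CNI configuration file in", CNI_CONF_DIR)

Once ovnkube-node stays up long enough to drop its config file into that directory, the Ready condition should flip and the queued sandboxes can be created.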
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 13:26:37 crc kubenswrapper[4762]: I1009 13:26:37.062233 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:37 crc kubenswrapper[4762]: I1009 13:26:37.062266 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:37 crc kubenswrapper[4762]: I1009 13:26:37.062274 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:37 crc kubenswrapper[4762]: I1009 13:26:37.062287 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:37 crc kubenswrapper[4762]: I1009 13:26:37.062295 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:37Z","lastTransitionTime":"2025-10-09T13:26:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:37 crc kubenswrapper[4762]: I1009 13:26:37.165084 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:37 crc kubenswrapper[4762]: I1009 13:26:37.165162 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:37 crc kubenswrapper[4762]: I1009 13:26:37.165175 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:37 crc kubenswrapper[4762]: I1009 13:26:37.165191 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:37 crc kubenswrapper[4762]: I1009 13:26:37.165201 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:37Z","lastTransitionTime":"2025-10-09T13:26:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:37 crc kubenswrapper[4762]: I1009 13:26:37.268230 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:37 crc kubenswrapper[4762]: I1009 13:26:37.268292 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:37 crc kubenswrapper[4762]: I1009 13:26:37.268312 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:37 crc kubenswrapper[4762]: I1009 13:26:37.268336 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:37 crc kubenswrapper[4762]: I1009 13:26:37.268354 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:37Z","lastTransitionTime":"2025-10-09T13:26:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:37 crc kubenswrapper[4762]: I1009 13:26:37.371011 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:37 crc kubenswrapper[4762]: I1009 13:26:37.371094 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:37 crc kubenswrapper[4762]: I1009 13:26:37.371128 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:37 crc kubenswrapper[4762]: I1009 13:26:37.371156 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:37 crc kubenswrapper[4762]: I1009 13:26:37.371177 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:37Z","lastTransitionTime":"2025-10-09T13:26:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:37 crc kubenswrapper[4762]: I1009 13:26:37.473604 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:37 crc kubenswrapper[4762]: I1009 13:26:37.473695 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:37 crc kubenswrapper[4762]: I1009 13:26:37.473711 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:37 crc kubenswrapper[4762]: I1009 13:26:37.473735 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:37 crc kubenswrapper[4762]: I1009 13:26:37.473753 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:37Z","lastTransitionTime":"2025-10-09T13:26:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:37 crc kubenswrapper[4762]: I1009 13:26:37.576202 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:37 crc kubenswrapper[4762]: I1009 13:26:37.576258 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:37 crc kubenswrapper[4762]: I1009 13:26:37.576275 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:37 crc kubenswrapper[4762]: I1009 13:26:37.576305 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:37 crc kubenswrapper[4762]: I1009 13:26:37.576327 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:37Z","lastTransitionTime":"2025-10-09T13:26:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:37 crc kubenswrapper[4762]: I1009 13:26:37.678408 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:37 crc kubenswrapper[4762]: I1009 13:26:37.678450 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:37 crc kubenswrapper[4762]: I1009 13:26:37.678459 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:37 crc kubenswrapper[4762]: I1009 13:26:37.678473 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:37 crc kubenswrapper[4762]: I1009 13:26:37.678483 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:37Z","lastTransitionTime":"2025-10-09T13:26:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:37 crc kubenswrapper[4762]: I1009 13:26:37.781373 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:37 crc kubenswrapper[4762]: I1009 13:26:37.781431 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:37 crc kubenswrapper[4762]: I1009 13:26:37.781443 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:37 crc kubenswrapper[4762]: I1009 13:26:37.781464 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:37 crc kubenswrapper[4762]: I1009 13:26:37.781477 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:37Z","lastTransitionTime":"2025-10-09T13:26:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:37 crc kubenswrapper[4762]: I1009 13:26:37.884070 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:37 crc kubenswrapper[4762]: I1009 13:26:37.884124 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:37 crc kubenswrapper[4762]: I1009 13:26:37.884142 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:37 crc kubenswrapper[4762]: I1009 13:26:37.884164 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:37 crc kubenswrapper[4762]: I1009 13:26:37.884179 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:37Z","lastTransitionTime":"2025-10-09T13:26:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:37 crc kubenswrapper[4762]: I1009 13:26:37.986237 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:37 crc kubenswrapper[4762]: I1009 13:26:37.986930 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:37 crc kubenswrapper[4762]: I1009 13:26:37.986954 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:37 crc kubenswrapper[4762]: I1009 13:26:37.986973 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:37 crc kubenswrapper[4762]: I1009 13:26:37.986986 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:37Z","lastTransitionTime":"2025-10-09T13:26:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:38 crc kubenswrapper[4762]: I1009 13:26:38.089811 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:38 crc kubenswrapper[4762]: I1009 13:26:38.089844 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:38 crc kubenswrapper[4762]: I1009 13:26:38.089853 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:38 crc kubenswrapper[4762]: I1009 13:26:38.089868 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:38 crc kubenswrapper[4762]: I1009 13:26:38.089877 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:38Z","lastTransitionTime":"2025-10-09T13:26:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:38 crc kubenswrapper[4762]: I1009 13:26:38.192329 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:38 crc kubenswrapper[4762]: I1009 13:26:38.192373 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:38 crc kubenswrapper[4762]: I1009 13:26:38.192382 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:38 crc kubenswrapper[4762]: I1009 13:26:38.192396 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:38 crc kubenswrapper[4762]: I1009 13:26:38.192406 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:38Z","lastTransitionTime":"2025-10-09T13:26:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:38 crc kubenswrapper[4762]: I1009 13:26:38.295028 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:38 crc kubenswrapper[4762]: I1009 13:26:38.295056 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:38 crc kubenswrapper[4762]: I1009 13:26:38.295064 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:38 crc kubenswrapper[4762]: I1009 13:26:38.295077 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:38 crc kubenswrapper[4762]: I1009 13:26:38.295085 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:38Z","lastTransitionTime":"2025-10-09T13:26:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:38 crc kubenswrapper[4762]: I1009 13:26:38.397574 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:38 crc kubenswrapper[4762]: I1009 13:26:38.397622 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:38 crc kubenswrapper[4762]: I1009 13:26:38.397659 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:38 crc kubenswrapper[4762]: I1009 13:26:38.397680 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:38 crc kubenswrapper[4762]: I1009 13:26:38.397695 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:38Z","lastTransitionTime":"2025-10-09T13:26:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:38 crc kubenswrapper[4762]: I1009 13:26:38.500453 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:38 crc kubenswrapper[4762]: I1009 13:26:38.500508 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:38 crc kubenswrapper[4762]: I1009 13:26:38.500525 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:38 crc kubenswrapper[4762]: I1009 13:26:38.500546 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:38 crc kubenswrapper[4762]: I1009 13:26:38.500650 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:38Z","lastTransitionTime":"2025-10-09T13:26:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:38 crc kubenswrapper[4762]: I1009 13:26:38.602468 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:38 crc kubenswrapper[4762]: I1009 13:26:38.602523 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:38 crc kubenswrapper[4762]: I1009 13:26:38.602538 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:38 crc kubenswrapper[4762]: I1009 13:26:38.602562 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:38 crc kubenswrapper[4762]: I1009 13:26:38.602578 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:38Z","lastTransitionTime":"2025-10-09T13:26:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:38 crc kubenswrapper[4762]: I1009 13:26:38.704779 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:38 crc kubenswrapper[4762]: I1009 13:26:38.704835 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:38 crc kubenswrapper[4762]: I1009 13:26:38.704850 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:38 crc kubenswrapper[4762]: I1009 13:26:38.704866 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:38 crc kubenswrapper[4762]: I1009 13:26:38.704882 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:38Z","lastTransitionTime":"2025-10-09T13:26:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:38 crc kubenswrapper[4762]: I1009 13:26:38.808380 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:38 crc kubenswrapper[4762]: I1009 13:26:38.808443 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:38 crc kubenswrapper[4762]: I1009 13:26:38.808453 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:38 crc kubenswrapper[4762]: I1009 13:26:38.808469 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:38 crc kubenswrapper[4762]: I1009 13:26:38.808481 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:38Z","lastTransitionTime":"2025-10-09T13:26:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:38 crc kubenswrapper[4762]: I1009 13:26:38.910898 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:38 crc kubenswrapper[4762]: I1009 13:26:38.910939 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:38 crc kubenswrapper[4762]: I1009 13:26:38.910950 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:38 crc kubenswrapper[4762]: I1009 13:26:38.910974 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:38 crc kubenswrapper[4762]: I1009 13:26:38.910985 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:38Z","lastTransitionTime":"2025-10-09T13:26:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:38 crc kubenswrapper[4762]: I1009 13:26:38.964803 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 13:26:38 crc kubenswrapper[4762]: E1009 13:26:38.964931 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 13:26:38 crc kubenswrapper[4762]: I1009 13:26:38.964804 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 13:26:38 crc kubenswrapper[4762]: I1009 13:26:38.964961 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-k4bwn" Oct 09 13:26:38 crc kubenswrapper[4762]: I1009 13:26:38.964798 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 13:26:38 crc kubenswrapper[4762]: E1009 13:26:38.965024 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 13:26:38 crc kubenswrapper[4762]: E1009 13:26:38.965074 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 13:26:38 crc kubenswrapper[4762]: E1009 13:26:38.965122 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k4bwn" podUID="f9a76399-c2ae-487b-a52c-f0e271fb1d20" Oct 09 13:26:39 crc kubenswrapper[4762]: I1009 13:26:39.013881 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:39 crc kubenswrapper[4762]: I1009 13:26:39.013947 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:39 crc kubenswrapper[4762]: I1009 13:26:39.013970 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:39 crc kubenswrapper[4762]: I1009 13:26:39.013997 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:39 crc kubenswrapper[4762]: I1009 13:26:39.014018 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:39Z","lastTransitionTime":"2025-10-09T13:26:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Oct 09 13:26:39 crc kubenswrapper[4762]: I1009 13:26:39.116439 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 09 13:26:39 crc kubenswrapper[4762]: I1009 13:26:39.116500 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 09 13:26:39 crc kubenswrapper[4762]: I1009 13:26:39.116518 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 09 13:26:39 crc kubenswrapper[4762]: I1009 13:26:39.116542 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 09 13:26:39 crc kubenswrapper[4762]: I1009 13:26:39.116557 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:39Z","lastTransitionTime":"2025-10-09T13:26:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
[the same five-entry cycle (NodeHasSufficientMemory, NodeHasNoDiskPressure, NodeHasSufficientPID, NodeNotReady, "Node became not ready") repeats at roughly 100 ms intervals from 13:26:39.218 through 13:26:40.866]
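The NodeNotReady condition above is driven entirely by the CNI check: the kubelet keeps reporting NetworkReady=false until a CNI configuration file appears in /etc/kubernetes/cni/net.d/. A minimal sketch of that readiness test, assuming the common libcni convention that .conf, .conflist, and .json files count as configurations; this is illustrative Go, not the kubelet's actual implementation:

// cnicheck.go - sketch of the readiness test implied by the log above:
// the node's network stays unready until at least one CNI config file
// exists in the configuration directory.
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func cniConfigPresent(dir string) (bool, error) {
	entries, err := os.ReadDir(dir)
	if err != nil {
		// A missing directory is the same symptom: the network
		// provider has not written any configuration yet.
		return false, err
	}
	for _, e := range entries {
		if e.IsDir() {
			continue
		}
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json": // assumed libcni extensions
			return true, nil
		}
	}
	return false, nil
}

func main() {
	ok, err := cniConfigPresent("/etc/kubernetes/cni/net.d")
	if err != nil || !ok {
		fmt.Println("NetworkReady=false: no CNI configuration file found")
		return
	}
	fmt.Println("NetworkReady=true")
}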
Oct 09 13:26:40 crc kubenswrapper[4762]: I1009 13:26:40.964618 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 09 13:26:40 crc kubenswrapper[4762]: I1009 13:26:40.964745 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 09 13:26:40 crc kubenswrapper[4762]: E1009 13:26:40.964770 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 09 13:26:40 crc kubenswrapper[4762]: I1009 13:26:40.964941 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k4bwn"
Oct 09 13:26:40 crc kubenswrapper[4762]: I1009 13:26:40.964938 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 09 13:26:40 crc kubenswrapper[4762]: E1009 13:26:40.964992 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k4bwn" podUID="f9a76399-c2ae-487b-a52c-f0e271fb1d20"
Oct 09 13:26:40 crc kubenswrapper[4762]: E1009 13:26:40.964933 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 09 13:26:40 crc kubenswrapper[4762]: E1009 13:26:40.965163 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
[the five-entry node-status cycle resumes at 13:26:40.969 and repeats at roughly 100 ms intervals through 13:26:41.899]
Oct 09 13:26:41 crc kubenswrapper[4762]: I1009 13:26:41.949547 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 09 13:26:41 crc kubenswrapper[4762]: I1009 13:26:41.949844 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 09 13:26:41 crc kubenswrapper[4762]: I1009 13:26:41.949924 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 09 13:26:41 crc kubenswrapper[4762]: I1009 13:26:41.950006 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 09 13:26:41 crc kubenswrapper[4762]: I1009 13:26:41.950094 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:41Z","lastTransitionTime":"2025-10-09T13:26:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
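The "Error syncing pod, skipping" entries above identify the pods blocked on sandbox creation while the network plugin is down (host-network pods are unaffected, since they do not need CNI). A hypothetical triage helper, assuming only the pod= and podUID= fields visible in these lines, that lists the blocked pods from a saved kubelet log; this is illustrative, not part of any OpenShift tooling:

// podsyncgrep.go - scan a kubelet log on stdin for "Error syncing pod"
// entries and print each affected pod once, with its UID.
package main

import (
	"bufio"
	"fmt"
	"os"
	"regexp"
)

var podRe = regexp.MustCompile(`"Error syncing pod, skipping".*?pod="([^"]+)" podUID="([^"]+)"`)

func main() {
	seen := map[string]string{}
	sc := bufio.NewScanner(os.Stdin)
	sc.Buffer(make([]byte, 0, 1024*1024), 1024*1024) // kubelet lines can be very long
	for sc.Scan() {
		if m := podRe.FindStringSubmatch(sc.Text()); m != nil {
			seen[m[1]] = m[2] // dedupe repeated sync failures per pod
		}
	}
	for pod, uid := range seen {
		fmt.Printf("%s (podUID %s)\n", pod, uid)
	}
}

Fed this excerpt on stdin, it would report network-check-target-xd92c, network-check-source-55646444c4-trplf, network-metrics-daemon-k4bwn, and networking-console-plugin-85b44fc459-gdk6g with their pod UIDs.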
Oct 09 13:26:41 crc kubenswrapper[4762]: E1009 13:26:41.964381 4762 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:41Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:41Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9a48ddb1-1645-4cf1-ba92-96ea5fd03a1b\\\",\\\"systemUUID\\\":\\\"cb0479c9-186e-453b-880a-de1db201ede6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:41Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:41 crc kubenswrapper[4762]: I1009 13:26:41.968205 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:41 crc kubenswrapper[4762]: I1009 13:26:41.968451 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 09 13:26:41 crc kubenswrapper[4762]: I1009 13:26:41.968611 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:41 crc kubenswrapper[4762]: I1009 13:26:41.968917 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:41 crc kubenswrapper[4762]: I1009 13:26:41.969064 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:41Z","lastTransitionTime":"2025-10-09T13:26:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:41 crc kubenswrapper[4762]: E1009 13:26:41.982785 4762 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:41Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:41Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9a48ddb1-1645-4cf1-ba92-96ea5fd03a1b\\\",\\\"systemUUID\\\":\\\"cb0479c9-186e-453b-880a-de1db201ede6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:41Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:41 crc kubenswrapper[4762]: I1009 13:26:41.988216 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:41 crc kubenswrapper[4762]: I1009 13:26:41.988299 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 09 13:26:41 crc kubenswrapper[4762]: I1009 13:26:41.988318 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:41 crc kubenswrapper[4762]: I1009 13:26:41.988342 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:41 crc kubenswrapper[4762]: I1009 13:26:41.988355 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:41Z","lastTransitionTime":"2025-10-09T13:26:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:42 crc kubenswrapper[4762]: E1009 13:26:42.005242 4762 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:41Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:41Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9a48ddb1-1645-4cf1-ba92-96ea5fd03a1b\\\",\\\"systemUUID\\\":\\\"cb0479c9-186e-453b-880a-de1db201ede6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:42Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:42 crc kubenswrapper[4762]: I1009 13:26:42.010368 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:42 crc kubenswrapper[4762]: I1009 13:26:42.010418 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 09 13:26:42 crc kubenswrapper[4762]: I1009 13:26:42.010436 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:42 crc kubenswrapper[4762]: I1009 13:26:42.010458 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:42 crc kubenswrapper[4762]: I1009 13:26:42.010472 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:42Z","lastTransitionTime":"2025-10-09T13:26:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:42 crc kubenswrapper[4762]: E1009 13:26:42.025163 4762 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:42Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:42Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9a48ddb1-1645-4cf1-ba92-96ea5fd03a1b\\\",\\\"systemUUID\\\":\\\"cb0479c9-186e-453b-880a-de1db201ede6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:42Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:42 crc kubenswrapper[4762]: I1009 13:26:42.029074 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:42 crc kubenswrapper[4762]: I1009 13:26:42.029158 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 09 13:26:42 crc kubenswrapper[4762]: I1009 13:26:42.029182 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:42 crc kubenswrapper[4762]: I1009 13:26:42.029216 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:42 crc kubenswrapper[4762]: I1009 13:26:42.029240 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:42Z","lastTransitionTime":"2025-10-09T13:26:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:42 crc kubenswrapper[4762]: E1009 13:26:42.044758 4762 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:42Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:42Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9a48ddb1-1645-4cf1-ba92-96ea5fd03a1b\\\",\\\"systemUUID\\\":\\\"cb0479c9-186e-453b-880a-de1db201ede6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:42Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:42 crc kubenswrapper[4762]: E1009 13:26:42.044906 4762 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 09 13:26:42 crc kubenswrapper[4762]: I1009 13:26:42.046703 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 09 13:26:42 crc kubenswrapper[4762]: I1009 13:26:42.046739 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:42 crc kubenswrapper[4762]: I1009 13:26:42.046750 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:42 crc kubenswrapper[4762]: I1009 13:26:42.046768 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:42 crc kubenswrapper[4762]: I1009 13:26:42.046780 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:42Z","lastTransitionTime":"2025-10-09T13:26:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:42 crc kubenswrapper[4762]: I1009 13:26:42.149275 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:42 crc kubenswrapper[4762]: I1009 13:26:42.149570 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:42 crc kubenswrapper[4762]: I1009 13:26:42.149717 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:42 crc kubenswrapper[4762]: I1009 13:26:42.149847 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:42 crc kubenswrapper[4762]: I1009 13:26:42.149972 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:42Z","lastTransitionTime":"2025-10-09T13:26:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:42 crc kubenswrapper[4762]: I1009 13:26:42.252694 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:42 crc kubenswrapper[4762]: I1009 13:26:42.253020 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:42 crc kubenswrapper[4762]: I1009 13:26:42.253121 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:42 crc kubenswrapper[4762]: I1009 13:26:42.253219 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:42 crc kubenswrapper[4762]: I1009 13:26:42.253302 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:42Z","lastTransitionTime":"2025-10-09T13:26:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:42 crc kubenswrapper[4762]: I1009 13:26:42.356913 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:42 crc kubenswrapper[4762]: I1009 13:26:42.357008 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:42 crc kubenswrapper[4762]: I1009 13:26:42.357038 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:42 crc kubenswrapper[4762]: I1009 13:26:42.357075 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:42 crc kubenswrapper[4762]: I1009 13:26:42.357098 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:42Z","lastTransitionTime":"2025-10-09T13:26:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:42 crc kubenswrapper[4762]: I1009 13:26:42.462762 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:42 crc kubenswrapper[4762]: I1009 13:26:42.462826 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:42 crc kubenswrapper[4762]: I1009 13:26:42.462845 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:42 crc kubenswrapper[4762]: I1009 13:26:42.462871 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:42 crc kubenswrapper[4762]: I1009 13:26:42.462898 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:42Z","lastTransitionTime":"2025-10-09T13:26:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:42 crc kubenswrapper[4762]: I1009 13:26:42.565193 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:42 crc kubenswrapper[4762]: I1009 13:26:42.565236 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:42 crc kubenswrapper[4762]: I1009 13:26:42.565246 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:42 crc kubenswrapper[4762]: I1009 13:26:42.565261 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:42 crc kubenswrapper[4762]: I1009 13:26:42.565271 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:42Z","lastTransitionTime":"2025-10-09T13:26:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:42 crc kubenswrapper[4762]: I1009 13:26:42.668145 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:42 crc kubenswrapper[4762]: I1009 13:26:42.668200 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:42 crc kubenswrapper[4762]: I1009 13:26:42.668217 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:42 crc kubenswrapper[4762]: I1009 13:26:42.668240 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:42 crc kubenswrapper[4762]: I1009 13:26:42.668263 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:42Z","lastTransitionTime":"2025-10-09T13:26:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:42 crc kubenswrapper[4762]: I1009 13:26:42.770901 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:42 crc kubenswrapper[4762]: I1009 13:26:42.770943 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:42 crc kubenswrapper[4762]: I1009 13:26:42.770955 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:42 crc kubenswrapper[4762]: I1009 13:26:42.770972 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:42 crc kubenswrapper[4762]: I1009 13:26:42.770982 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:42Z","lastTransitionTime":"2025-10-09T13:26:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:42 crc kubenswrapper[4762]: I1009 13:26:42.874016 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:42 crc kubenswrapper[4762]: I1009 13:26:42.874056 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:42 crc kubenswrapper[4762]: I1009 13:26:42.874067 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:42 crc kubenswrapper[4762]: I1009 13:26:42.874083 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:42 crc kubenswrapper[4762]: I1009 13:26:42.874094 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:42Z","lastTransitionTime":"2025-10-09T13:26:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:42 crc kubenswrapper[4762]: I1009 13:26:42.965479 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 13:26:42 crc kubenswrapper[4762]: I1009 13:26:42.965518 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k4bwn" Oct 09 13:26:42 crc kubenswrapper[4762]: E1009 13:26:42.966090 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 13:26:42 crc kubenswrapper[4762]: I1009 13:26:42.965751 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 13:26:42 crc kubenswrapper[4762]: E1009 13:26:42.966420 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 13:26:42 crc kubenswrapper[4762]: I1009 13:26:42.965599 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 13:26:42 crc kubenswrapper[4762]: E1009 13:26:42.966792 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 13:26:42 crc kubenswrapper[4762]: E1009 13:26:42.966154 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-k4bwn" podUID="f9a76399-c2ae-487b-a52c-f0e271fb1d20" Oct 09 13:26:42 crc kubenswrapper[4762]: I1009 13:26:42.976459 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:42 crc kubenswrapper[4762]: I1009 13:26:42.976500 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:42 crc kubenswrapper[4762]: I1009 13:26:42.976512 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:42 crc kubenswrapper[4762]: I1009 13:26:42.976717 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:42 crc kubenswrapper[4762]: I1009 13:26:42.976734 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:42Z","lastTransitionTime":"2025-10-09T13:26:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:43 crc kubenswrapper[4762]: I1009 13:26:43.079373 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:43 crc kubenswrapper[4762]: I1009 13:26:43.079412 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:43 crc kubenswrapper[4762]: I1009 13:26:43.079423 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:43 crc kubenswrapper[4762]: I1009 13:26:43.079439 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:43 crc kubenswrapper[4762]: I1009 13:26:43.079450 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:43Z","lastTransitionTime":"2025-10-09T13:26:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:43 crc kubenswrapper[4762]: I1009 13:26:43.182545 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:43 crc kubenswrapper[4762]: I1009 13:26:43.182608 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:43 crc kubenswrapper[4762]: I1009 13:26:43.182626 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:43 crc kubenswrapper[4762]: I1009 13:26:43.182698 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:43 crc kubenswrapper[4762]: I1009 13:26:43.182719 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:43Z","lastTransitionTime":"2025-10-09T13:26:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:43 crc kubenswrapper[4762]: I1009 13:26:43.285387 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:43 crc kubenswrapper[4762]: I1009 13:26:43.285423 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:43 crc kubenswrapper[4762]: I1009 13:26:43.285431 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:43 crc kubenswrapper[4762]: I1009 13:26:43.285446 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:43 crc kubenswrapper[4762]: I1009 13:26:43.285456 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:43Z","lastTransitionTime":"2025-10-09T13:26:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:43 crc kubenswrapper[4762]: I1009 13:26:43.387389 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:43 crc kubenswrapper[4762]: I1009 13:26:43.387441 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:43 crc kubenswrapper[4762]: I1009 13:26:43.387452 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:43 crc kubenswrapper[4762]: I1009 13:26:43.387466 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:43 crc kubenswrapper[4762]: I1009 13:26:43.387478 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:43Z","lastTransitionTime":"2025-10-09T13:26:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:44 crc kubenswrapper[4762]: I1009 13:26:44.964249 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 13:26:44 crc kubenswrapper[4762]: E1009 13:26:44.964620 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 13:26:44 crc kubenswrapper[4762]: I1009 13:26:44.964525 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k4bwn" Oct 09 13:26:44 crc kubenswrapper[4762]: E1009 13:26:44.964875 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k4bwn" podUID="f9a76399-c2ae-487b-a52c-f0e271fb1d20" Oct 09 13:26:44 crc kubenswrapper[4762]: I1009 13:26:44.964741 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 13:26:44 crc kubenswrapper[4762]: E1009 13:26:44.965061 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 13:26:44 crc kubenswrapper[4762]: I1009 13:26:44.964490 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 13:26:44 crc kubenswrapper[4762]: E1009 13:26:44.965259 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 13:26:44 crc kubenswrapper[4762]: I1009 13:26:44.987337 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:44Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:45 crc kubenswrapper[4762]: I1009 13:26:45.008147 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d2fc9df0c1cc14b4b9f8caff51e87059aeffaa2daeeb271d55585f7297d003c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4194b9211ce31f24383b3cabd274bfb9afef0c56583b802cb2c934ba81b05c43\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:45Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:45 crc kubenswrapper[4762]: I1009 13:26:45.021309 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"366049a3-acf6-488c-9f93-4557528d6d14\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbaae79b08d94e58f88c25cf641c2c24edc8f8ed5d5ffbf5fd3c68b24246a964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://be56bae2e58091d7381288b22608ea1d9ff05c002d923b3dc62b87fe4d4dfdc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5v6hv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:45Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:45 crc kubenswrapper[4762]: I1009 13:26:45.032663 4762 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/network-metrics-daemon-k4bwn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9a76399-c2ae-487b-a52c-f0e271fb1d20\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vrjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vrjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:58Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-k4bwn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:45Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:45 crc kubenswrapper[4762]: I1009 13:26:45.035124 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:45 crc kubenswrapper[4762]: I1009 13:26:45.035166 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:45 crc kubenswrapper[4762]: I1009 
13:26:45.035196 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:45 crc kubenswrapper[4762]: I1009 13:26:45.035212 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:45 crc kubenswrapper[4762]: I1009 13:26:45.035222 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:45Z","lastTransitionTime":"2025-10-09T13:26:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:45 crc kubenswrapper[4762]: I1009 13:26:45.046898 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dd0d2d4c-667f-43da-8074-b6e14823b755\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78a7e23eb6d5024d626963a06cf5790fcd6c7c17c82c823b2650c55273e427fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f807722a8b6059afed30f7f1fd32bcc168b8bf9d5eee02d74a42ab70ae5ff048\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://17
06f7f512083d1da015da3c7cd09c6aa4d497b83f8dfcd4ce0e8e966aa00b37\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd9c69d0be5e859f86da1745bdf82f003681f064e2580bfd454e6ba875bdcb61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:25Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:45Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:45 crc kubenswrapper[4762]: I1009 13:26:45.061319 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42c46f24d4579b9ef6d5f7a351830fc24872a571e6bc26a163bb5ace1e688037\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:45Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:45 crc kubenswrapper[4762]: I1009 13:26:45.076814 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b59b56cd547bd0e2f72ad712b04651a4ae65cf3a0df7865c0b0c16478261a06a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:45Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:45 crc kubenswrapper[4762]: I1009 13:26:45.089576 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:45Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:45 crc kubenswrapper[4762]: I1009 13:26:45.115725 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"92662de9-9784-432a-92d2-a668f815e8fd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c409df5880861cef6885822a19dc9bbe481342a849c18ac11c85a60fcee0f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9dbd3d536f2ff0e46947e1516b3b9def208d490f5e62bbde5bebf37690d26ac0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a6b9ddcf6f9632e0ab1ac7f145c90d4c1e404b44f6e4fdc547fa42a4736448d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1fe15b6fa2a4089c0ef0b19180a44b570bf28aeb719e8fb5c960c16f3bc3ee5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://443d1d116c58f5d8b2c5fc9051baf914244cb0776b1f912d11fe4316a0ec0567\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a6b026d474235bbb7b31530b4628a10c35b22baf4ee49759f28a9beb8177989\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1dc4404ab9e73409695946479f2738c0f0af702d
a13af2d1995bf63ef0e493d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1dc4404ab9e73409695946479f2738c0f0af702da13af2d1995bf63ef0e493d0\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T13:26:20Z\\\",\\\"message\\\":\\\"ble to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:20Z is after 2025-08-24T17:21:41Z]\\\\nI1009 13:26:20.768193 6440 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-kube-controller-manager-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"3ec9f67e-7758-4707-a6d0-2dc28f28ac37\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-kube-controller-manager-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterL\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T13:26:20Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-jl67g_openshift-ovn-kubernetes(92662de9-9784-432a-92d2-a668f815e8fd)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f5be977653547c33b4d2d5184688120b32866045e3b18a08be1c7c406d6b498\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bd03e6b064630a1cd71d2e88a4e99d513b30d0f225516ce8030cba879fe2422e\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd03e6b064630a1cd71d2e88a4e99d513b30d0f225516ce8030cba879fe2422e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jl67g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:45Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:45 crc kubenswrapper[4762]: I1009 13:26:45.133233 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"804a251f-1e3f-4b9e-af10-eefa332e6e98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb1fed96810d93e59d24d69fb8a2eb9974e8a5e524465daea3ff2cca16f7226c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7ad35bed43586dfec502056ecb5226049a7fb25461c2774fb5377102fd2ce85d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c
97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d3cc3e07b5fc9069f7faa521fa94efde90d9c79940876a93849c44a14327e56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a492a7a956d54533afbd3869796642235d0c8ca621c550c14eeab5e988fc4364\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a492a7a956d54533afbd3869796642235d0c8ca621c550c14eeab5e988fc4364\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:25Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:45Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:45 crc kubenswrapper[4762]: I1009 13:26:45.138068 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:45 crc kubenswrapper[4762]: I1009 13:26:45.138204 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:45 crc kubenswrapper[4762]: I1009 13:26:45.138293 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:45 crc kubenswrapper[4762]: I1009 13:26:45.138384 4762 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:45 crc kubenswrapper[4762]: I1009 13:26:45.138549 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:45Z","lastTransitionTime":"2025-10-09T13:26:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:45 crc kubenswrapper[4762]: I1009 13:26:45.144866 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2vkbh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"263d57f9-b10b-4ce1-adad-774600b977d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://732b66ff58c48b0703e0fd4585768652035af6797f66b586fc6f17ef3937d9d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gmcr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2vkbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:45Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:45 crc kubenswrapper[4762]: I1009 13:26:45.158539 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-9wtqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c847aae6-277a-45dc-86d0-9b175f7e8177\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f95093a278093400b0692e3f8cbe050c5586fbcf1ad29c7b758613ab6ac76af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbc232c96b60c8678588d4902c8dfbf6fc0b30f8af768295c963aad3a9f4d644\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T13:26:32Z\\\",\\\"message\\\":\\\"2025-10-09T13:25:46+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_cd21724e-0215-496f-b682-23c2705fa0b1\\\\n2025-10-09T13:25:46+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_cd21724e-0215-496f-b682-23c2705fa0b1 to /host/opt/cni/bin/\\\\n2025-10-09T13:25:47Z [verbose] multus-daemon started\\\\n2025-10-09T13:25:47Z [verbose] Readiness Indicator file check\\\\n2025-10-09T13:26:32Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:26:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2kljt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-9wtqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:45Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:45 crc kubenswrapper[4762]: I1009 13:26:45.171937 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fx92z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf0dada3-5765-4a2e-b28a-f9291c2d6428\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6368fc75a63ebf7915390457c69a6a9b77e19726ba182437f5c616bea12245f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xpd4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aeff516f9d81e48c8bd350da51ebffc8f6f031f06100a5264cb7aca04674f79f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xpd4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fx92z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:45Z is after 2025-08-24T17:21:41Z" Oct 09 
13:26:45 crc kubenswrapper[4762]: I1009 13:26:45.186422 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7bd1ecbd-1492-4e6a-87e8-1c913e084d9d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9ff1efe69d256b491a039e5f35442c087ce3b52fc7abf98b338e24c3e020b99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb2071dd369674ca2de7de56dd1250c763b8733d72889b60eff864774dc3d81b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e44b6f2021a1a4ccd714f86443c7cc235b9d77cd455e68f7e042281ff0917569\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\
\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f33b9070a56fe51d2f39d9d509fc8cea2fada696703209c911b75f5c8f53e96d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9179f90a1a9a3c70467429b0471320ccf51b67f27c4d28d22ebc477cedab17d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1009 13:25:38.564754 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 13:25:38.572923 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2393231961/tls.crt::/tmp/serving-cert-2393231961/tls.key\\\\\\\"\\\\nI1009 13:25:44.418137 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 13:25:44.425303 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 13:25:44.425330 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 13:25:44.425348 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 13:25:44.425353 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 13:25:44.434300 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1009 13:25:44.434319 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1009 13:25:44.434323 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 13:25:44.434340 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 13:25:44.434344 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 13:25:44.434347 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 13:25:44.434350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 13:25:44.434353 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1009 13:25:44.436492 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:28Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://281873dc615f940d39a13cb0a18a2eb34eb7de3f9773d8845183edeb89d430f5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b0cc5e4351c64f1a4f07f8ec87ea48ddab393d4ac64228e8fbf20d3259fb630\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6b0cc5e4351c64f1a4f07f8ec87ea48ddab393d4ac64228e8fbf20d3259fb630\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:25Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:45Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:45 crc kubenswrapper[4762]: I1009 13:26:45.202936 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-n6lnd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf8b8ba7-96cd-4cdd-9925-94dd98242050\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ff1d6f7e6b424ebf8005fa3d140897100a8e7ed0095e7af05531cf5ad9f69b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://38f8c3c9395cbaf4a6426349a070b2d3b4ba4f83af8f5272a33d617f456c2e38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://38f8c3c9395cbaf4a6426349a070b2d3b4ba4f83af8f5272a33d617f456c2e38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a83c8fa2a9b44c19879eaa27ee0aceb5aa4f0c2d70347e497a62fc1ca236807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a83c8fa2a9b44c19879eaa27ee0aceb5aa4f0c2d70347e497a62fc1ca236807\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ece4883665cbaf9a3045d5ddde5584cc4cd6c3acb5a8bb3acbf7eaee644796e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ece4883665cbaf9a3045d5ddde5584cc4cd6c3acb5a8bb3acbf7eaee644796e9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1aae59e2ff2b1d48f9d2b5d13c30ccc155f0ba8e2657ec99020d6c6a8977495e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1aae59e2ff2b1d48f9d2b5d13c30ccc155f0ba8e2657ec99020d6c6a8977495e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f1ff0c4b2b07617d5ea81b1d88ea74124e4739d667f1ad72eb36dad3d48e7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96f1ff0c4b2b07617d5ea81b1d88ea74124e4739d667f1ad72eb36dad3d48e7f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa9e24a4bd37675e1c6b4f6ba08d60b722add0bbcdf9eb51ace2e3fb35143d0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa9e24a4bd37675e1c6b4f6ba08d60b722add0bbcdf9eb51ace2e3fb35143d0d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-n6lnd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:45Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:45 crc kubenswrapper[4762]: I1009 13:26:45.214502 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bj499" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b164d4fd-19df-4902-971f-5efe403e61e0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ab43a3e93731e2de1e0260a1933cc78188e7b561cb450d645c7b4fc00b691ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqf4b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:51Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bj499\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:45Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:45 crc kubenswrapper[4762]: I1009 13:26:45.226728 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:45Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:45 crc kubenswrapper[4762]: I1009 13:26:45.241380 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:45 crc kubenswrapper[4762]: I1009 13:26:45.241421 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:45 crc kubenswrapper[4762]: I1009 13:26:45.241431 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:45 crc kubenswrapper[4762]: I1009 13:26:45.241446 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:45 crc kubenswrapper[4762]: I1009 13:26:45.241457 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:45Z","lastTransitionTime":"2025-10-09T13:26:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:45 crc kubenswrapper[4762]: I1009 13:26:45.344409 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:45 crc kubenswrapper[4762]: I1009 13:26:45.344460 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:45 crc kubenswrapper[4762]: I1009 13:26:45.344471 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:45 crc kubenswrapper[4762]: I1009 13:26:45.344492 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:45 crc kubenswrapper[4762]: I1009 13:26:45.344504 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:45Z","lastTransitionTime":"2025-10-09T13:26:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:45 crc kubenswrapper[4762]: I1009 13:26:45.447379 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:45 crc kubenswrapper[4762]: I1009 13:26:45.447427 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:45 crc kubenswrapper[4762]: I1009 13:26:45.447437 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:45 crc kubenswrapper[4762]: I1009 13:26:45.447451 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:45 crc kubenswrapper[4762]: I1009 13:26:45.447462 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:45Z","lastTransitionTime":"2025-10-09T13:26:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:45 crc kubenswrapper[4762]: I1009 13:26:45.550309 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:45 crc kubenswrapper[4762]: I1009 13:26:45.550351 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:45 crc kubenswrapper[4762]: I1009 13:26:45.550361 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:45 crc kubenswrapper[4762]: I1009 13:26:45.550379 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:45 crc kubenswrapper[4762]: I1009 13:26:45.550390 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:45Z","lastTransitionTime":"2025-10-09T13:26:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:45 crc kubenswrapper[4762]: I1009 13:26:45.654690 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:45 crc kubenswrapper[4762]: I1009 13:26:45.654757 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:45 crc kubenswrapper[4762]: I1009 13:26:45.654776 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:45 crc kubenswrapper[4762]: I1009 13:26:45.654801 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:45 crc kubenswrapper[4762]: I1009 13:26:45.654819 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:45Z","lastTransitionTime":"2025-10-09T13:26:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:45 crc kubenswrapper[4762]: I1009 13:26:45.757718 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:45 crc kubenswrapper[4762]: I1009 13:26:45.757822 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:45 crc kubenswrapper[4762]: I1009 13:26:45.757865 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:45 crc kubenswrapper[4762]: I1009 13:26:45.757907 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:45 crc kubenswrapper[4762]: I1009 13:26:45.757934 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:45Z","lastTransitionTime":"2025-10-09T13:26:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:45 crc kubenswrapper[4762]: I1009 13:26:45.861290 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:45 crc kubenswrapper[4762]: I1009 13:26:45.861395 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:45 crc kubenswrapper[4762]: I1009 13:26:45.861416 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:45 crc kubenswrapper[4762]: I1009 13:26:45.861440 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:45 crc kubenswrapper[4762]: I1009 13:26:45.861501 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:45Z","lastTransitionTime":"2025-10-09T13:26:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:45 crc kubenswrapper[4762]: I1009 13:26:45.965199 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:45 crc kubenswrapper[4762]: I1009 13:26:45.965251 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:45 crc kubenswrapper[4762]: I1009 13:26:45.965269 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:45 crc kubenswrapper[4762]: I1009 13:26:45.965290 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:45 crc kubenswrapper[4762]: I1009 13:26:45.965308 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:45Z","lastTransitionTime":"2025-10-09T13:26:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:46 crc kubenswrapper[4762]: I1009 13:26:46.068055 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:46 crc kubenswrapper[4762]: I1009 13:26:46.068125 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:46 crc kubenswrapper[4762]: I1009 13:26:46.068148 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:46 crc kubenswrapper[4762]: I1009 13:26:46.068174 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:46 crc kubenswrapper[4762]: I1009 13:26:46.068195 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:46Z","lastTransitionTime":"2025-10-09T13:26:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:46 crc kubenswrapper[4762]: I1009 13:26:46.170549 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:46 crc kubenswrapper[4762]: I1009 13:26:46.170746 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:46 crc kubenswrapper[4762]: I1009 13:26:46.170773 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:46 crc kubenswrapper[4762]: I1009 13:26:46.170799 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:46 crc kubenswrapper[4762]: I1009 13:26:46.170815 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:46Z","lastTransitionTime":"2025-10-09T13:26:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Oct 09 13:26:46 crc kubenswrapper[4762]: I1009 13:26:46.964706 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 09 13:26:46 crc kubenswrapper[4762]: I1009 13:26:46.964762 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 09 13:26:46 crc kubenswrapper[4762]: I1009 13:26:46.964837 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 09 13:26:46 crc kubenswrapper[4762]: E1009 13:26:46.965027 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 09 13:26:46 crc kubenswrapper[4762]: I1009 13:26:46.965095 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k4bwn"
Oct 09 13:26:46 crc kubenswrapper[4762]: E1009 13:26:46.965241 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 09 13:26:46 crc kubenswrapper[4762]: E1009 13:26:46.965362 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k4bwn" podUID="f9a76399-c2ae-487b-a52c-f0e271fb1d20"
Oct 09 13:26:46 crc kubenswrapper[4762]: E1009 13:26:46.965456 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
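When working with a capture like this, it helps to bucket entries by their klog source location (for example kubelet_node_status.go:724) to see which messages dominate. A hypothetical helper, assuming the severity+MMDD+time+pid+file:line header format seen in these lines:

package main

import (
	"bufio"
	"fmt"
	"os"
	"regexp"
)

// klogHeader matches the klog prefix inside each kubenswrapper entry:
// severity letter, MMDD, wall-clock time, pid, then file:line].
var klogHeader = regexp.MustCompile(`([IWEF])(\d{4}) (\d{2}:\d{2}:\d{2}\.\d+) +(\d+) ([\w./-]+:\d+)\]`)

func main() {
	counts := map[string]int{}
	sc := bufio.NewScanner(os.Stdin)
	sc.Buffer(make([]byte, 0, 1024*1024), 1024*1024) // entries can be very long
	for sc.Scan() {
		if m := klogHeader.FindStringSubmatch(sc.Text()); m != nil {
			counts[m[5]]++ // bucket by source location, e.g. kubelet_node_status.go:724
		}
	}
	for loc, n := range counts {
		fmt.Printf("%6d %s\n", n, loc)
	}
}

Fed this log on stdin, it would show the node-status heartbeat entries dwarfing everything else.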
Oct 09 13:26:47 crc kubenswrapper[4762]: I1009 13:26:47.966726 4762 scope.go:117] "RemoveContainer" containerID="1dc4404ab9e73409695946479f2738c0f0af702da13af2d1995bf63ef0e493d0"
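Every status patch in the entries that follow fails with the same x509 complaint: the webhook serving certificate's NotAfter (2025-08-24T17:21:41Z) is earlier than the node's clock (2025-10-09). The underlying check is a plain validity-window comparison; a standalone sketch using crypto/x509 (the message text only approximates Go's real x509 error):

package main

import (
	"crypto/x509"
	"encoding/pem"
	"fmt"
	"log"
	"os"
	"time"
)

// checkValidity reproduces the clock comparison behind the
// "certificate has expired or is not yet valid" message.
func checkValidity(cert *x509.Certificate, now time.Time) error {
	if now.Before(cert.NotBefore) || now.After(cert.NotAfter) {
		return fmt.Errorf("certificate has expired or is not yet valid: current time %s is after %s",
			now.UTC().Format(time.RFC3339), cert.NotAfter.UTC().Format(time.RFC3339))
	}
	return nil
}

func main() {
	pemBytes, err := os.ReadFile(os.Args[1]) // path to a PEM-encoded certificate
	if err != nil {
		log.Fatal(err)
	}
	block, _ := pem.Decode(pemBytes)
	if block == nil {
		log.Fatal("no PEM block found")
	}
	cert, err := x509.ParseCertificate(block.Bytes)
	if err != nil {
		log.Fatal(err)
	}
	if err := checkValidity(cert, time.Now()); err != nil {
		log.Fatal(err)
	}
	fmt.Println("certificate is within its validity window")
}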
Oct 09 13:26:48 crc kubenswrapper[4762]: I1009 13:26:48.431405 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-jl67g_92662de9-9784-432a-92d2-a668f815e8fd/ovnkube-controller/2.log"
Oct 09 13:26:48 crc kubenswrapper[4762]: I1009 13:26:48.435345 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" event={"ID":"92662de9-9784-432a-92d2-a668f815e8fd","Type":"ContainerStarted","Data":"25146c86fc625c912a6b7aab957ed31039aeede7d329cce8f28f0dd01f5ff39c"}
Oct 09 13:26:48 crc kubenswrapper[4762]: I1009 13:26:48.436076 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-jl67g"
Oct 09 13:26:48 crc kubenswrapper[4762]: I1009 13:26:48.443303 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 09 13:26:48 crc kubenswrapper[4762]: I1009 13:26:48.443336 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 09 13:26:48 crc kubenswrapper[4762]: I1009 13:26:48.443345 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 09 13:26:48 crc kubenswrapper[4762]: I1009 13:26:48.443358 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 09 13:26:48 crc kubenswrapper[4762]: I1009 13:26:48.443369 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:48Z","lastTransitionTime":"2025-10-09T13:26:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 09 13:26:48 crc kubenswrapper[4762]: I1009 13:26:48.457030 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:48Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:48 crc kubenswrapper[4762]: I1009 13:26:48.475245 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d2fc9df0c1cc14b4b9f8caff51e87059aeffaa2daeeb271d55585f7297d003c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4194b9211ce31f24383b3cabd274bfb9afef0c56583b802cb2c934ba81b05c43\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:48Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:48 crc kubenswrapper[4762]: I1009 13:26:48.492876 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"366049a3-acf6-488c-9f93-4557528d6d14\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbaae79b08d94e58f88c25cf641c2c24edc8f8ed5d5ffbf5fd3c68b24246a964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://be56bae2e58091d7381288b22608ea1d9ff05c002d923b3dc62b87fe4d4dfdc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5v6hv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:48Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:48 crc kubenswrapper[4762]: I1009 13:26:48.506139 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-k4bwn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9a76399-c2ae-487b-a52c-f0e271fb1d20\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vrjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vrjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:58Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-k4bwn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:48Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:48 crc kubenswrapper[4762]: I1009 13:26:48.525179 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dd0d2d4c-667f-43da-8074-b6e14823b755\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78a7e23eb6d5024d626963a06cf5790fcd6c7c17c82c823b2650c55273e427fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f807722a8b6059afed30f7f1fd32bcc168b8bf9d5eee02d74a42ab70ae5ff048\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1706f7f512083d1da015da3c7cd09c6aa4d497b83f8dfcd4ce0e8e966aa00b37\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd9c69d0be5e859f86da1745bdf82f003681f064e2580bfd454e6ba875bdcb61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:25Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:48Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:48 crc kubenswrapper[4762]: I1009 13:26:48.538785 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42c46f24d4579b9ef6d5f7a351830fc24872a571e6bc26a163bb5ace1e688037\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:48Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:48 crc kubenswrapper[4762]: I1009 13:26:48.549024 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:48 crc kubenswrapper[4762]: I1009 13:26:48.549086 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:48 crc kubenswrapper[4762]: I1009 13:26:48.549103 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:48 crc kubenswrapper[4762]: I1009 13:26:48.549128 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:48 crc kubenswrapper[4762]: I1009 13:26:48.549146 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:48Z","lastTransitionTime":"2025-10-09T13:26:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:48 crc kubenswrapper[4762]: I1009 13:26:48.554257 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b59b56cd547bd0e2f72ad712b04651a4ae65cf3a0df7865c0b0c16478261a06a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: 
current time 2025-10-09T13:26:48Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:48 crc kubenswrapper[4762]: I1009 13:26:48.569415 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:48Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:48 crc kubenswrapper[4762]: I1009 13:26:48.591391 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"92662de9-9784-432a-92d2-a668f815e8fd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c409df5880861cef6885822a19dc9bbe481342a849c18ac11c85a60fcee0f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9dbd3d536f2ff0e46947e1516b3b9def208d490f5e62bbde5bebf37690d26ac0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a6b9ddcf6f9632e0ab1ac7f145c90d4c1e404b44f6e4fdc547fa42a4736448d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1fe15b6fa2a4089c0ef0b19180a44b570bf28aeb719e8fb5c960c16f3bc3ee5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://443d1d116c58f5d8b2c5fc9051baf914244cb0776b1f912d11fe4316a0ec0567\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a6b026d474235bbb7b31530b4628a10c35b22baf4ee49759f28a9beb8177989\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://25146c86fc625c912a6b7aab957ed31039aeede7d329cce8f28f0dd01f5ff39c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1dc4404ab9e73409695946479f2738c0f0af702da13af2d1995bf63ef0e493d0\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T13:26:20Z\\\",\\\"message\\\":\\\"ble to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:20Z is after 2025-08-24T17:21:41Z]\\\\nI1009 13:26:20.768193 6440 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-kube-controller-manager-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"3ec9f67e-7758-4707-a6d0-2dc28f28ac37\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-kube-controller-manager-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, 
Groups:[]string{\\\\\\\"clusterL\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T13:26:20Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:26:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f5be977653547c33b4d2d5184688120b32866045e3b18a08be1c7c406d6b498\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initC
ontainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bd03e6b064630a1cd71d2e88a4e99d513b30d0f225516ce8030cba879fe2422e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd03e6b064630a1cd71d2e88a4e99d513b30d0f225516ce8030cba879fe2422e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jl67g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:48Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:48 crc kubenswrapper[4762]: I1009 13:26:48.608964 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"804a251f-1e3f-4b9e-af10-eefa332e6e98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb1fed96810d93e59d24d69fb8a2eb9974e8a5e524465daea3ff2cca16f7226c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7ad35bed43586dfec502056ecb5226049a7fb25461c2774fb5377102fd2ce85d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d3cc3e07b5fc9069f7faa521fa94efde90d9c79940876a93849c44a14327e56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a492a7a956d54533afbd3869796642235d0c8ca621c550c14eeab5e988fc4364\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a492a7a956d54533afbd3869796642235d0c8ca621c550c14eeab5e988fc4364\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:25Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:48Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:48 crc kubenswrapper[4762]: I1009 13:26:48.620168 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2vkbh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"263d57f9-b10b-4ce1-adad-774600b977d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://732b66ff58c48b0703e0fd4585768652035af6797f66b586fc6f17ef3937d9d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gmcr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod 
\"openshift-dns\"/\"node-resolver-2vkbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:48Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:48 crc kubenswrapper[4762]: I1009 13:26:48.642909 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-9wtqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c847aae6-277a-45dc-86d0-9b175f7e8177\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f95093a278093400b0692e3f8cbe050c5586fbcf1ad29c7b758613ab6ac76af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbc232c96b60c8678588d4902c8dfbf6fc0b30f8af768295c963aad3a9f4d644\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T13:26:32Z\\\",\\\"message\\\":\\\"2025-10-09T13:25:46+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_cd21724e-0215-496f-b682-23c2705fa0b1\\\\n2025-10-09T13:25:46+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_cd21724e-0215-496f-b682-23c2705fa0b1 to /host/opt/cni/bin/\\\\n2025-10-09T13:25:47Z [verbose] multus-daemon started\\\\n2025-10-09T13:25:47Z [verbose] Readiness Indicator file check\\\\n2025-10-09T13:26:32Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:26:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2kljt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-9wtqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:48Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:48 crc kubenswrapper[4762]: I1009 13:26:48.651448 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:48 crc kubenswrapper[4762]: I1009 13:26:48.651488 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:48 crc kubenswrapper[4762]: I1009 13:26:48.651502 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:48 crc kubenswrapper[4762]: I1009 13:26:48.651522 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:48 crc kubenswrapper[4762]: I1009 13:26:48.651538 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:48Z","lastTransitionTime":"2025-10-09T13:26:48Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:48 crc kubenswrapper[4762]: I1009 13:26:48.654962 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fx92z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf0dada3-5765-4a2e-b28a-f9291c2d6428\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6368fc75a63ebf7915390457c69a6a9b77e19726ba182437f5c616bea12245f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xpd4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aeff516f9d81e48c8bd350da51ebffc8f6f031f06100a5264cb7aca04674f79f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xpd4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-
10-09T13:25:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fx92z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:48Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:48 crc kubenswrapper[4762]: I1009 13:26:48.667873 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7bd1ecbd-1492-4e6a-87e8-1c913e084d9d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9ff1efe69d256b491a039e5f35442c087ce3b52fc7abf98b338e24c3e020b99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb2071dd369674ca2de7de56dd1250c763b8733d72889b60eff864774dc3d81b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e44b6f2021a1a4ccd714f86443c7cc235b9d77cd455e68f7e042281ff0917569\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea
3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f33b9070a56fe51d2f39d9d509fc8cea2fada696703209c911b75f5c8f53e96d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9179f90a1a9a3c70467429b0471320ccf51b67f27c4d28d22ebc477cedab17d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1009 13:25:38.564754 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 13:25:38.572923 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2393231961/tls.crt::/tmp/serving-cert-2393231961/tls.key\\\\\\\"\\\\nI1009 13:25:44.418137 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 13:25:44.425303 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 13:25:44.425330 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 13:25:44.425348 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 13:25:44.425353 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 13:25:44.434300 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1009 13:25:44.434319 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1009 13:25:44.434323 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 13:25:44.434340 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 13:25:44.434344 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 13:25:44.434347 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 13:25:44.434350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 13:25:44.434353 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1009 13:25:44.436492 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:28Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://281873dc615f940d39a13cb0a18a2eb34eb7de3f9773d8845183edeb89d430f5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b0cc5e4351c64f1a4f07f8ec87ea48ddab393d4ac64228e8fbf20d3259fb630\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6b0cc5e4351c64f1a4f07f8ec87ea48ddab393d4ac64228e8fbf20d3259fb630\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:25Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:48Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:48 crc kubenswrapper[4762]: I1009 13:26:48.682589 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-n6lnd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf8b8ba7-96cd-4cdd-9925-94dd98242050\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ff1d6f7e6b424ebf8005fa3d140897100a8e7ed0095e7af05531cf5ad9f69b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://38f8c3c9395cbaf4a6426349a070b2d3b4ba4f83af8f5272a33d617f456c2e38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://38f8c3c9395cbaf4a6426349a070b2d3b4ba4f83af8f5272a33d617f456c2e38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a83c8fa2a9b44c19879eaa27ee0aceb5aa4f0c2d70347e497a62fc1ca236807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a83c8fa2a9b44c19879eaa27ee0aceb5aa4f0c2d70347e497a62fc1ca236807\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ece4883665cbaf9a3045d5ddde5584cc4cd6c3acb5a8bb3acbf7eaee644796e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ece4883665cbaf9a3045d5ddde5584cc4cd6c3acb5a8bb3acbf7eaee644796e9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1aae59e2ff2b1d48f9d2b5d13c30ccc155f0ba8e2657ec99020d6c6a8977495e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1aae59e2ff2b1d48f9d2b5d13c30ccc155f0ba8e2657ec99020d6c6a8977495e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f1ff0c4b2b07617d5ea81b1d88ea74124e4739d667f1ad72eb36dad3d48e7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96f1ff0c4b2b07617d5ea81b1d88ea74124e4739d667f1ad72eb36dad3d48e7f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa9e24a4bd37675e1c6b4f6ba08d60b722add0bbcdf9eb51ace2e3fb35143d0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa9e24a4bd37675e1c6b4f6ba08d60b722add0bbcdf9eb51ace2e3fb35143d0d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-n6lnd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:48Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:48 crc kubenswrapper[4762]: I1009 13:26:48.695864 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bj499" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b164d4fd-19df-4902-971f-5efe403e61e0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ab43a3e93731e2de1e0260a1933cc78188e7b561cb450d645c7b4fc00b691ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqf4b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:51Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bj499\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:48Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:48 crc kubenswrapper[4762]: I1009 13:26:48.711346 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:48Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:48 crc kubenswrapper[4762]: I1009 13:26:48.755447 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:48 crc kubenswrapper[4762]: I1009 13:26:48.755492 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:48 crc kubenswrapper[4762]: I1009 13:26:48.755507 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:48 crc kubenswrapper[4762]: I1009 13:26:48.755525 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:48 crc kubenswrapper[4762]: I1009 13:26:48.755536 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:48Z","lastTransitionTime":"2025-10-09T13:26:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Oct 09 13:26:48 crc kubenswrapper[4762]: I1009 13:26:48.821730 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 09 13:26:48 crc kubenswrapper[4762]: I1009 13:26:48.821856 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 09 13:26:48 crc kubenswrapper[4762]: I1009 13:26:48.821907 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 09 13:26:48 crc kubenswrapper[4762]: E1009 13:26:48.822059 4762 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Oct 09 13:26:48 crc kubenswrapper[4762]: E1009 13:26:48.822123 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-09 13:27:52.822108006 +0000 UTC m=+148.595899045 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Oct 09 13:26:48 crc kubenswrapper[4762]: E1009 13:26:48.822347 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 13:27:52.822340532 +0000 UTC m=+148.596131571 (durationBeforeRetry 1m4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 09 13:26:48 crc kubenswrapper[4762]: E1009 13:26:48.822423 4762 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Oct 09 13:26:48 crc kubenswrapper[4762]: E1009 13:26:48.822525 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-09 13:27:52.822514217 +0000 UTC m=+148.596305256 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
Oct 09 13:26:48 crc kubenswrapper[4762]: I1009 13:26:48.858511 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 09 13:26:48 crc kubenswrapper[4762]: I1009 13:26:48.859583 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 09 13:26:48 crc kubenswrapper[4762]: I1009 13:26:48.859764 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 09 13:26:48 crc kubenswrapper[4762]: I1009 13:26:48.859923 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 09 13:26:48 crc kubenswrapper[4762]: I1009 13:26:48.860017 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:48Z","lastTransitionTime":"2025-10-09T13:26:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 09 13:26:48 crc kubenswrapper[4762]: I1009 13:26:48.922975 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 09 13:26:48 crc kubenswrapper[4762]: I1009 13:26:48.923343 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 09 13:26:48 crc kubenswrapper[4762]: E1009 13:26:48.923138 4762 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Oct 09 13:26:48 crc kubenswrapper[4762]: E1009 13:26:48.923666 4762 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Oct 09 13:26:48 crc kubenswrapper[4762]: E1009 13:26:48.923769 4762 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Oct 09 13:26:48 crc kubenswrapper[4762]: E1009 13:26:48.923928 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-09 13:27:52.923902841 +0000 UTC m=+148.697693890 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Oct 09 13:26:48 crc kubenswrapper[4762]: E1009 13:26:48.923503 4762 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Oct 09 13:26:48 crc kubenswrapper[4762]: E1009 13:26:48.924178 4762 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Oct 09 13:26:48 crc kubenswrapper[4762]: E1009 13:26:48.924286 4762 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Oct 09 13:26:48 crc kubenswrapper[4762]: E1009 13:26:48.924447 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-09 13:27:52.924430856 +0000 UTC m=+148.698221905 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Oct 09 13:26:48 crc kubenswrapper[4762]: I1009 13:26:48.963390 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 09 13:26:48 crc kubenswrapper[4762]: I1009 13:26:48.963473 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 09 13:26:48 crc kubenswrapper[4762]: I1009 13:26:48.963498 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 09 13:26:48 crc kubenswrapper[4762]: I1009 13:26:48.963528 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 09 13:26:48 crc kubenswrapper[4762]: I1009 13:26:48.963551 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:48Z","lastTransitionTime":"2025-10-09T13:26:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 09 13:26:48 crc kubenswrapper[4762]: I1009 13:26:48.964063 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 09 13:26:48 crc kubenswrapper[4762]: I1009 13:26:48.964134 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k4bwn"
Oct 09 13:26:48 crc kubenswrapper[4762]: E1009 13:26:48.964159 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 09 13:26:48 crc kubenswrapper[4762]: I1009 13:26:48.964366 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 09 13:26:48 crc kubenswrapper[4762]: I1009 13:26:48.964357 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 09 13:26:48 crc kubenswrapper[4762]: E1009 13:26:48.964357 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k4bwn" podUID="f9a76399-c2ae-487b-a52c-f0e271fb1d20"
Oct 09 13:26:48 crc kubenswrapper[4762]: E1009 13:26:48.964700 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 09 13:26:48 crc kubenswrapper[4762]: E1009 13:26:48.964822 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 09 13:26:48 crc kubenswrapper[4762]: I1009 13:26:48.975113 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"]
Oct 09 13:26:49 crc kubenswrapper[4762]: I1009 13:26:49.066205 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 09 13:26:49 crc kubenswrapper[4762]: I1009 13:26:49.066241 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 09 13:26:49 crc kubenswrapper[4762]: I1009 13:26:49.066252 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 09 13:26:49 crc kubenswrapper[4762]: I1009 13:26:49.066268 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 09 13:26:49 crc kubenswrapper[4762]: I1009 13:26:49.066278 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:49Z","lastTransitionTime":"2025-10-09T13:26:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 09 13:26:49 crc kubenswrapper[4762]: I1009 13:26:49.169143 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 09 13:26:49 crc kubenswrapper[4762]: I1009 13:26:49.169184 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 09 13:26:49 crc kubenswrapper[4762]: I1009 13:26:49.169195 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 09 13:26:49 crc kubenswrapper[4762]: I1009 13:26:49.169211 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 09 13:26:49 crc kubenswrapper[4762]: I1009 13:26:49.169251 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:49Z","lastTransitionTime":"2025-10-09T13:26:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 09 13:26:49 crc kubenswrapper[4762]: I1009 13:26:49.272552 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 09 13:26:49 crc kubenswrapper[4762]: I1009 13:26:49.272889 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 09 13:26:49 crc kubenswrapper[4762]: I1009 13:26:49.272956 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 09 13:26:49 crc kubenswrapper[4762]: I1009 13:26:49.273320 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 09 13:26:49 crc kubenswrapper[4762]: I1009 13:26:49.273396 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:49Z","lastTransitionTime":"2025-10-09T13:26:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 09 13:26:49 crc kubenswrapper[4762]: I1009 13:26:49.375919 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 09 13:26:49 crc kubenswrapper[4762]: I1009 13:26:49.375968 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 09 13:26:49 crc kubenswrapper[4762]: I1009 13:26:49.375985 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 09 13:26:49 crc kubenswrapper[4762]: I1009 13:26:49.376009 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 09 13:26:49 crc kubenswrapper[4762]: I1009 13:26:49.376025 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:49Z","lastTransitionTime":"2025-10-09T13:26:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 09 13:26:49 crc kubenswrapper[4762]: I1009 13:26:49.478457 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 09 13:26:49 crc kubenswrapper[4762]: I1009 13:26:49.478495 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 09 13:26:49 crc kubenswrapper[4762]: I1009 13:26:49.478504 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 09 13:26:49 crc kubenswrapper[4762]: I1009 13:26:49.478518 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 09 13:26:49 crc kubenswrapper[4762]: I1009 13:26:49.478528 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:49Z","lastTransitionTime":"2025-10-09T13:26:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 09 13:26:49 crc kubenswrapper[4762]: I1009 13:26:49.581335 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 09 13:26:49 crc kubenswrapper[4762]: I1009 13:26:49.581397 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 09 13:26:49 crc kubenswrapper[4762]: I1009 13:26:49.581418 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 09 13:26:49 crc kubenswrapper[4762]: I1009 13:26:49.581443 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 09 13:26:49 crc kubenswrapper[4762]: I1009 13:26:49.581461 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:49Z","lastTransitionTime":"2025-10-09T13:26:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 09 13:26:49 crc kubenswrapper[4762]: I1009 13:26:49.684734 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 09 13:26:49 crc kubenswrapper[4762]: I1009 13:26:49.684810 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 09 13:26:49 crc kubenswrapper[4762]: I1009 13:26:49.684849 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 09 13:26:49 crc kubenswrapper[4762]: I1009 13:26:49.684886 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 09 13:26:49 crc kubenswrapper[4762]: I1009 13:26:49.684909 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:49Z","lastTransitionTime":"2025-10-09T13:26:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 09 13:26:49 crc kubenswrapper[4762]: I1009 13:26:49.788493 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 09 13:26:49 crc kubenswrapper[4762]: I1009 13:26:49.788550 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 09 13:26:49 crc kubenswrapper[4762]: I1009 13:26:49.788568 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 09 13:26:49 crc kubenswrapper[4762]: I1009 13:26:49.788591 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 09 13:26:49 crc kubenswrapper[4762]: I1009 13:26:49.788608 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:49Z","lastTransitionTime":"2025-10-09T13:26:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 09 13:26:49 crc kubenswrapper[4762]: I1009 13:26:49.892664 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 09 13:26:49 crc kubenswrapper[4762]: I1009 13:26:49.892711 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 09 13:26:49 crc kubenswrapper[4762]: I1009 13:26:49.892725 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 09 13:26:49 crc kubenswrapper[4762]: I1009 13:26:49.892744 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 09 13:26:49 crc kubenswrapper[4762]: I1009 13:26:49.892759 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:49Z","lastTransitionTime":"2025-10-09T13:26:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 09 13:26:49 crc kubenswrapper[4762]: I1009 13:26:49.995388 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 09 13:26:49 crc kubenswrapper[4762]: I1009 13:26:49.995440 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 09 13:26:49 crc kubenswrapper[4762]: I1009 13:26:49.995453 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 09 13:26:49 crc kubenswrapper[4762]: I1009 13:26:49.995473 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 09 13:26:49 crc kubenswrapper[4762]: I1009 13:26:49.995484 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:49Z","lastTransitionTime":"2025-10-09T13:26:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 09 13:26:50 crc kubenswrapper[4762]: I1009 13:26:50.098872 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 09 13:26:50 crc kubenswrapper[4762]: I1009 13:26:50.098953 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 09 13:26:50 crc kubenswrapper[4762]: I1009 13:26:50.098969 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 09 13:26:50 crc kubenswrapper[4762]: I1009 13:26:50.099000 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 09 13:26:50 crc kubenswrapper[4762]: I1009 13:26:50.099022 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:50Z","lastTransitionTime":"2025-10-09T13:26:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 09 13:26:50 crc kubenswrapper[4762]: I1009 13:26:50.202346 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 09 13:26:50 crc kubenswrapper[4762]: I1009 13:26:50.202405 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 09 13:26:50 crc kubenswrapper[4762]: I1009 13:26:50.202422 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 09 13:26:50 crc kubenswrapper[4762]: I1009 13:26:50.202445 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 09 13:26:50 crc kubenswrapper[4762]: I1009 13:26:50.202462 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:50Z","lastTransitionTime":"2025-10-09T13:26:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 09 13:26:50 crc kubenswrapper[4762]: I1009 13:26:50.306027 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 09 13:26:50 crc kubenswrapper[4762]: I1009 13:26:50.306092 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 09 13:26:50 crc kubenswrapper[4762]: I1009 13:26:50.306114 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 09 13:26:50 crc kubenswrapper[4762]: I1009 13:26:50.306144 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 09 13:26:50 crc kubenswrapper[4762]: I1009 13:26:50.306168 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:50Z","lastTransitionTime":"2025-10-09T13:26:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 09 13:26:50 crc kubenswrapper[4762]: I1009 13:26:50.408579 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 09 13:26:50 crc kubenswrapper[4762]: I1009 13:26:50.408697 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 09 13:26:50 crc kubenswrapper[4762]: I1009 13:26:50.408719 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 09 13:26:50 crc kubenswrapper[4762]: I1009 13:26:50.408742 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 09 13:26:50 crc kubenswrapper[4762]: I1009 13:26:50.408760 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:50Z","lastTransitionTime":"2025-10-09T13:26:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 09 13:26:50 crc kubenswrapper[4762]: I1009 13:26:50.444834 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-jl67g_92662de9-9784-432a-92d2-a668f815e8fd/ovnkube-controller/3.log"
Oct 09 13:26:50 crc kubenswrapper[4762]: I1009 13:26:50.445783 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-jl67g_92662de9-9784-432a-92d2-a668f815e8fd/ovnkube-controller/2.log"
Oct 09 13:26:50 crc kubenswrapper[4762]: I1009 13:26:50.450001 4762 generic.go:334] "Generic (PLEG): container finished" podID="92662de9-9784-432a-92d2-a668f815e8fd" containerID="25146c86fc625c912a6b7aab957ed31039aeede7d329cce8f28f0dd01f5ff39c" exitCode=1
Oct 09 13:26:50 crc kubenswrapper[4762]: I1009 13:26:50.450041 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" event={"ID":"92662de9-9784-432a-92d2-a668f815e8fd","Type":"ContainerDied","Data":"25146c86fc625c912a6b7aab957ed31039aeede7d329cce8f28f0dd01f5ff39c"}
Oct 09 13:26:50 crc kubenswrapper[4762]: I1009 13:26:50.450082 4762 scope.go:117] "RemoveContainer" containerID="1dc4404ab9e73409695946479f2738c0f0af702da13af2d1995bf63ef0e493d0"
Oct 09 13:26:50 crc kubenswrapper[4762]: I1009 13:26:50.452381 4762 scope.go:117] "RemoveContainer" containerID="25146c86fc625c912a6b7aab957ed31039aeede7d329cce8f28f0dd01f5ff39c"
Oct 09 13:26:50 crc kubenswrapper[4762]: E1009 13:26:50.452749 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-jl67g_openshift-ovn-kubernetes(92662de9-9784-432a-92d2-a668f815e8fd)\"" pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" podUID="92662de9-9784-432a-92d2-a668f815e8fd"
Oct 09 13:26:50 crc kubenswrapper[4762]: I1009 13:26:50.478495 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:50Z is after 2025-08-24T17:21:41Z"
Oct 09 13:26:50 crc kubenswrapper[4762]: I1009 13:26:50.494260 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-n6lnd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf8b8ba7-96cd-4cdd-9925-94dd98242050\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ff1d6f7e6b424ebf8005fa3d140897100a8e7ed0095e7af05531cf5ad9f69b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://38f8c3c9395cbaf4a6426349a070b2d3b4ba4f83af8f5272a33d617f456c2e38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://38f8c3c9395cbaf4a6426349a070b2d3b4ba4f83af8f5272a33d617f456c2e38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a83c8fa2a9b44c19879eaa27ee0aceb5aa4f0c2d70347e497a62fc1ca236807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a83c8fa2a9b44c19879eaa27ee0aceb5aa4f0c2d70347e497a62fc1ca236807\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ece4883665cbaf9a3045d5ddde5584cc4cd6c3acb5a8bb3acbf7eaee644796e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ece4883665cbaf9a3045d5ddde5584cc4cd6c3acb5a8bb3acbf7eaee644796e9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1aae59e2ff2b1d48f9d2b5d13c30ccc155f0ba8e2657ec99020d6c6a8977495e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1aae59e2ff2b1d48f9d2b5d13c30ccc155f0ba8e2657ec99020d6c6a8977495e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f1ff0c4b2b07617d5ea81b1d88ea74124e4739d667f1ad72eb36dad3d48e7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96f1ff0c4b2b07617d5ea81b1d88ea74124e4739d667f1ad72eb36dad3d48e7f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa9e24a4bd37675e1c6b4f6ba08d60b722add0bbcdf9eb51ace2e3fb35143d0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa9e24a4bd37675e1c6b4f6ba08d60b722add0bbcdf9eb51ace2e3fb35143d0d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-n6lnd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:50Z is after 2025-08-24T17:21:41Z"
Oct 09 13:26:50 crc kubenswrapper[4762]: I1009 13:26:50.509095 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bj499" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b164d4fd-19df-4902-971f-5efe403e61e0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ab43a3e93731e2de1e0260a1933cc78188e7b561cb450d645c7b4fc00b691ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqf4b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:51Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bj499\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:50Z is after 2025-08-24T17:21:41Z"
Oct 09 13:26:50 crc kubenswrapper[4762]: I1009 13:26:50.510844 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 09 13:26:50 crc kubenswrapper[4762]: I1009 13:26:50.510994 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 09 13:26:50 crc kubenswrapper[4762]: I1009 13:26:50.511119 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 09 13:26:50 crc kubenswrapper[4762]: I1009 13:26:50.511226 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 09 13:26:50 crc kubenswrapper[4762]: I1009 13:26:50.511326 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:50Z","lastTransitionTime":"2025-10-09T13:26:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 09 13:26:50 crc kubenswrapper[4762]: I1009 13:26:50.534260 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dd0d2d4c-667f-43da-8074-b6e14823b755\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78a7e23eb6d5024d626963a06cf5790fcd6c7c17c82c823b2650c55273e427fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f807722a8b6059afed30f7f1fd32bcc168b8bf9d5eee02d74a42ab70ae5ff048\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1706f7f512083d1da015da3c7cd09c6aa4d497b83f8dfcd4ce0e8e966aa00b37\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd9c69d0be5e859f86da1745bdf82f003681f064e2580bfd454e6ba875bdcb61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:25Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:50Z is after 2025-08-24T17:21:41Z"
Oct 09 13:26:50 crc kubenswrapper[4762]: I1009 13:26:50.556262 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:50Z is after 2025-08-24T17:21:41Z"
Oct 09 13:26:50 crc kubenswrapper[4762]: I1009 13:26:50.571922 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d2fc9df0c1cc14b4b9f8caff51e87059aeffaa2daeeb271d55585f7297d003c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4194b9211ce31f24383b3cabd274bfb9afef0c56583b802cb2c934ba81b05c43\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:50Z is after 2025-08-24T17:21:41Z"
Oct 09 13:26:50 crc kubenswrapper[4762]: I1009 13:26:50.587866 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"366049a3-acf6-488c-9f93-4557528d6d14\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbaae79b08d94e58f88c25cf641c2c24edc8f8ed5d5ffbf5fd3c68b24246a964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://be56bae2e58091d7381288b22608ea1d9ff05c002d923b3dc62b87fe4d4dfdc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5v6hv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:50Z is after 2025-08-24T17:21:41Z"
Oct 09 13:26:50 crc kubenswrapper[4762]: I1009 13:26:50.601507 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-k4bwn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9a76399-c2ae-487b-a52c-f0e271fb1d20\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vrjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vrjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:58Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-k4bwn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:50Z is after 2025-08-24T17:21:41Z"
Oct 09 13:26:50 crc kubenswrapper[4762]: I1009 13:26:50.614422 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 09 13:26:50 crc kubenswrapper[4762]: I1009 13:26:50.614457 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 09 13:26:50 crc kubenswrapper[4762]: I1009
13:26:50.614466 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:50 crc kubenswrapper[4762]: I1009 13:26:50.614481 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:50 crc kubenswrapper[4762]: I1009 13:26:50.614490 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:50Z","lastTransitionTime":"2025-10-09T13:26:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:50 crc kubenswrapper[4762]: I1009 13:26:50.617753 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"804a251f-1e3f-4b9e-af10-eefa332e6e98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb1fed96810d93e59d24d69fb8a2eb9974e8a5e524465daea3ff2cca16f7226c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7ad35bed43586dfec502056ecb5226049a7fb25461c2774fb5377102fd2ce85d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d3cc3e07b5fc9069f
7faa521fa94efde90d9c79940876a93849c44a14327e56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a492a7a956d54533afbd3869796642235d0c8ca621c550c14eeab5e988fc4364\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a492a7a956d54533afbd3869796642235d0c8ca621c550c14eeab5e988fc4364\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:25Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:50Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:50 crc kubenswrapper[4762]: I1009 13:26:50.632192 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42c46f24d4579b9ef6d5f7a351830fc24872a571e6bc26a163bb5ace1e688037\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:50Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:50 crc kubenswrapper[4762]: I1009 13:26:50.650048 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b59b56cd547bd0e2f72ad712b04651a4ae65cf3a0df7865c0b0c16478261a06a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:50Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:50 crc kubenswrapper[4762]: I1009 13:26:50.663180 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:50Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:50 crc kubenswrapper[4762]: I1009 13:26:50.684475 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"92662de9-9784-432a-92d2-a668f815e8fd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c409df5880861cef6885822a19dc9bbe481342a849c18ac11c85a60fcee0f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9dbd3d536f2ff0e46947e1516b3b9def208d490f5e62bbde5bebf37690d26ac0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a6b9ddcf6f9632e0ab1ac7f145c90d4c1e404b44f6e4fdc547fa42a4736448d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1fe15b6fa2a4089c0ef0b19180a44b570bf28aeb719e8fb5c960c16f3bc3ee5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://443d1d116c58f5d8b2c5fc9051baf914244cb0776b1f912d11fe4316a0ec0567\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a6b026d474235bbb7b31530b4628a10c35b22baf4ee49759f28a9beb8177989\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://25146c86fc625c912a6b7aab957ed31039aeede7
d329cce8f28f0dd01f5ff39c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1dc4404ab9e73409695946479f2738c0f0af702da13af2d1995bf63ef0e493d0\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T13:26:20Z\\\",\\\"message\\\":\\\"ble to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:20Z is after 2025-08-24T17:21:41Z]\\\\nI1009 13:26:20.768193 6440 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-kube-controller-manager-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"3ec9f67e-7758-4707-a6d0-2dc28f28ac37\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-kube-controller-manager-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterL\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T13:26:20Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://25146c86fc625c912a6b7aab957ed31039aeede7d329cce8f28f0dd01f5ff39c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T13:26:49Z\\\",\\\"message\\\":\\\"ft-marketplace/marketplace-operator-metrics LB template configs for network=default: []services.lbConfig(nil)\\\\nF1009 13:26:49.799608 6832 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:49Z is after 2025-08-24T17:21:41Z]\\\\nI1009 13:26:49.798820 6832 obj_retry.go:303] Retry object setup: *v1.Pod 
openshift-kube-controller-manager/kube-controller-manager-crc\\\\nI1009 13:26:49.799620 6832 services_controller.go:451] Built service openshift-marketplace/marketplace-operator-metrics cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-marketplace/marketplace-operator-metrics_TCP_cluster\\\\\\\", UUI\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T13:26:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f5be977653547c33b4d2d5184688120b32866045e3b18a08be1c7c406d6b498\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnl
y\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bd03e6b064630a1cd71d2e88a4e99d513b30d0f225516ce8030cba879fe2422e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd03e6b064630a1cd71d2e88a4e99d513b30d0f225516ce8030cba879fe2422e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jl67g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:50Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:50 crc kubenswrapper[4762]: I1009 13:26:50.697603 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7bd1ecbd-1492-4e6a-87e8-1c913e084d9d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9ff1efe69d256b491a039e5f35442c087ce3b52fc7abf98b338e24c3e020b99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb2071dd369674ca2de7de56dd1250c763b8733d72889b60eff864774dc3d81b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e44b6f2021a1a4ccd714f86443c7cc235b9d77cd455e68f7e042281ff0917569\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f33b9070a56fe51d2f39d9d509fc8cea2fada696703209c911b75f5c8f53e96d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9179f90a1a9a3c70467429b0471320ccf51b67f27c4d28d22ebc477cedab17d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1009 13:25:38.564754 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 13:25:38.572923 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2393231961/tls.crt::/tmp/serving-cert-2393231961/tls.key\\\\\\\"\\\\nI1009 13:25:44.418137 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 13:25:44.425303 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 13:25:44.425330 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 13:25:44.425348 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 13:25:44.425353 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 13:25:44.434300 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1009 13:25:44.434319 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1009 13:25:44.434323 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 13:25:44.434340 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 13:25:44.434344 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 13:25:44.434347 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 13:25:44.434350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 13:25:44.434353 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1009 13:25:44.436492 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:28Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://281873dc615f940d39a13cb0a18a2eb34eb7de3f9773d8845183edeb89d430f5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b0cc5e4351c64f1a4f07f8ec87ea48ddab393d4ac64228e8fbf20d3259fb630\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6b0cc5e4351c64f1a4f07f8ec87ea48ddab393d4ac64228e8fbf20d3259fb630\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:25Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:50Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:50 crc kubenswrapper[4762]: I1009 13:26:50.707023 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"55e33db5-e82b-4449-9a01-265a7d9b74c3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d5e61326d740880d50b889eb69f101fcc0e40a557ec6d8e76ce770ec7d456fff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21b9ead59eaf920882c1503023924b621376395c684e0c7490490b77abfc18aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://21b9ead59eaf920882c1503023924b621376395c684e0c7490490b77abfc18aa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:50Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:50 crc kubenswrapper[4762]: I1009 13:26:50.715566 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2vkbh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"263d57f9-b10b-4ce1-adad-774600b977d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://732b66ff58c48b0703e0fd4585768652035af6797f66b586fc6f17ef3937d9d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gmcr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2vkbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:50Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:50 crc kubenswrapper[4762]: I1009 13:26:50.716907 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:50 crc kubenswrapper[4762]: I1009 13:26:50.716964 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:50 crc kubenswrapper[4762]: I1009 13:26:50.716983 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:50 crc kubenswrapper[4762]: I1009 13:26:50.717014 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:50 crc kubenswrapper[4762]: I1009 13:26:50.717044 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:50Z","lastTransitionTime":"2025-10-09T13:26:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:50 crc kubenswrapper[4762]: I1009 13:26:50.733264 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-9wtqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c847aae6-277a-45dc-86d0-9b175f7e8177\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f95093a278093400b0692e3f8cbe050c5586fbcf1ad29c7b758613ab6ac76af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbc232c96b60c8678588d4902c8dfbf6fc0b30f8af768295c963aad3a9f4d644\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T13:26:32Z\\\",\\\"message\\\":\\\"2025-10-09T13:25:46+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_cd21724e-0215-496f-b682-23c2705fa0b1\\\\n2025-10-09T13:25:46+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_cd21724e-0215-496f-b682-23c2705fa0b1 to /host/opt/cni/bin/\\\\n2025-10-09T13:25:47Z [verbose] multus-daemon started\\\\n2025-10-09T13:25:47Z [verbose] Readiness Indicator file check\\\\n2025-10-09T13:26:32Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:26:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2kljt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-9wtqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:50Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:50 crc kubenswrapper[4762]: I1009 13:26:50.744709 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fx92z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf0dada3-5765-4a2e-b28a-f9291c2d6428\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6368fc75a63ebf7915390457c69a6a9b77e19726ba182437f5c616bea12245f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xpd4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aeff516f9d81e48c8bd350da51ebffc8f6f031f06100a5264cb7aca04674f79f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xpd4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fx92z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:50Z is after 2025-08-24T17:21:41Z" Oct 09 
13:26:50 crc kubenswrapper[4762]: I1009 13:26:50.820402 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 09 13:26:50 crc kubenswrapper[4762]: I1009 13:26:50.820457 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 09 13:26:50 crc kubenswrapper[4762]: I1009 13:26:50.820467 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 09 13:26:50 crc kubenswrapper[4762]: I1009 13:26:50.820481 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 09 13:26:50 crc kubenswrapper[4762]: I1009 13:26:50.820491 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:50Z","lastTransitionTime":"2025-10-09T13:26:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 09 13:26:50 crc kubenswrapper[4762]: I1009 13:26:50.922390 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 09 13:26:50 crc kubenswrapper[4762]: I1009 13:26:50.922428 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 09 13:26:50 crc kubenswrapper[4762]: I1009 13:26:50.922437 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 09 13:26:50 crc kubenswrapper[4762]: I1009 13:26:50.922451 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 09 13:26:50 crc kubenswrapper[4762]: I1009 13:26:50.922460 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:50Z","lastTransitionTime":"2025-10-09T13:26:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 09 13:26:50 crc kubenswrapper[4762]: I1009 13:26:50.964443 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k4bwn"
Oct 09 13:26:50 crc kubenswrapper[4762]: E1009 13:26:50.964561 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k4bwn" podUID="f9a76399-c2ae-487b-a52c-f0e271fb1d20"
Oct 09 13:26:50 crc kubenswrapper[4762]: I1009 13:26:50.964623 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 09 13:26:50 crc kubenswrapper[4762]: I1009 13:26:50.964845 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 09 13:26:50 crc kubenswrapper[4762]: E1009 13:26:50.964837 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 09 13:26:50 crc kubenswrapper[4762]: I1009 13:26:50.964969 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 09 13:26:50 crc kubenswrapper[4762]: E1009 13:26:50.965050 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 09 13:26:50 crc kubenswrapper[4762]: E1009 13:26:50.965142 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 09 13:26:51 crc kubenswrapper[4762]: I1009 13:26:51.025970 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 09 13:26:51 crc kubenswrapper[4762]: I1009 13:26:51.026027 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 09 13:26:51 crc kubenswrapper[4762]: I1009 13:26:51.026046 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 09 13:26:51 crc kubenswrapper[4762]: I1009 13:26:51.026078 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 09 13:26:51 crc kubenswrapper[4762]: I1009 13:26:51.026098 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:51Z","lastTransitionTime":"2025-10-09T13:26:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 09 13:26:51 crc kubenswrapper[4762]: I1009 13:26:51.128727 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 09 13:26:51 crc kubenswrapper[4762]: I1009 13:26:51.128765 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 09 13:26:51 crc kubenswrapper[4762]: I1009 13:26:51.128775 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 09 13:26:51 crc kubenswrapper[4762]: I1009 13:26:51.128789 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 09 13:26:51 crc kubenswrapper[4762]: I1009 13:26:51.128820 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:51Z","lastTransitionTime":"2025-10-09T13:26:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 09 13:26:51 crc kubenswrapper[4762]: I1009 13:26:51.231396 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 09 13:26:51 crc kubenswrapper[4762]: I1009 13:26:51.231430 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 09 13:26:51 crc kubenswrapper[4762]: I1009 13:26:51.231438 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 09 13:26:51 crc kubenswrapper[4762]: I1009 13:26:51.231452 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 09 13:26:51 crc kubenswrapper[4762]: I1009 13:26:51.231462 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:51Z","lastTransitionTime":"2025-10-09T13:26:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 09 13:26:51 crc kubenswrapper[4762]: I1009 13:26:51.334284 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 09 13:26:51 crc kubenswrapper[4762]: I1009 13:26:51.334323 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 09 13:26:51 crc kubenswrapper[4762]: I1009 13:26:51.334333 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 09 13:26:51 crc kubenswrapper[4762]: I1009 13:26:51.334346 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 09 13:26:51 crc kubenswrapper[4762]: I1009 13:26:51.334357 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:51Z","lastTransitionTime":"2025-10-09T13:26:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 09 13:26:51 crc kubenswrapper[4762]: I1009 13:26:51.436809 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 09 13:26:51 crc kubenswrapper[4762]: I1009 13:26:51.436869 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 09 13:26:51 crc kubenswrapper[4762]: I1009 13:26:51.436878 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 09 13:26:51 crc kubenswrapper[4762]: I1009 13:26:51.436896 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 09 13:26:51 crc kubenswrapper[4762]: I1009 13:26:51.436908 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:51Z","lastTransitionTime":"2025-10-09T13:26:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 09 13:26:51 crc kubenswrapper[4762]: I1009 13:26:51.454863 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-jl67g_92662de9-9784-432a-92d2-a668f815e8fd/ovnkube-controller/3.log"
Oct 09 13:26:51 crc kubenswrapper[4762]: I1009 13:26:51.540232 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 09 13:26:51 crc kubenswrapper[4762]: I1009 13:26:51.540296 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 09 13:26:51 crc kubenswrapper[4762]: I1009 13:26:51.540305 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 09 13:26:51 crc kubenswrapper[4762]: I1009 13:26:51.540340 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 09 13:26:51 crc kubenswrapper[4762]: I1009 13:26:51.540353 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:51Z","lastTransitionTime":"2025-10-09T13:26:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 09 13:26:51 crc kubenswrapper[4762]: I1009 13:26:51.642687 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 09 13:26:51 crc kubenswrapper[4762]: I1009 13:26:51.643035 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 09 13:26:51 crc kubenswrapper[4762]: I1009 13:26:51.643047 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 09 13:26:51 crc kubenswrapper[4762]: I1009 13:26:51.643065 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 09 13:26:51 crc kubenswrapper[4762]: I1009 13:26:51.643078 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:51Z","lastTransitionTime":"2025-10-09T13:26:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 09 13:26:51 crc kubenswrapper[4762]: I1009 13:26:51.745861 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 09 13:26:51 crc kubenswrapper[4762]: I1009 13:26:51.745902 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 09 13:26:51 crc kubenswrapper[4762]: I1009 13:26:51.745914 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 09 13:26:51 crc kubenswrapper[4762]: I1009 13:26:51.745930 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 09 13:26:51 crc kubenswrapper[4762]: I1009 13:26:51.745939 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:51Z","lastTransitionTime":"2025-10-09T13:26:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 09 13:26:51 crc kubenswrapper[4762]: I1009 13:26:51.848655 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 09 13:26:51 crc kubenswrapper[4762]: I1009 13:26:51.848687 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 09 13:26:51 crc kubenswrapper[4762]: I1009 13:26:51.848696 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 09 13:26:51 crc kubenswrapper[4762]: I1009 13:26:51.848710 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 09 13:26:51 crc kubenswrapper[4762]: I1009 13:26:51.848726 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:51Z","lastTransitionTime":"2025-10-09T13:26:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 09 13:26:51 crc kubenswrapper[4762]: I1009 13:26:51.951442 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 09 13:26:51 crc kubenswrapper[4762]: I1009 13:26:51.951475 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 09 13:26:51 crc kubenswrapper[4762]: I1009 13:26:51.951485 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 09 13:26:51 crc kubenswrapper[4762]: I1009 13:26:51.951502 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 09 13:26:51 crc kubenswrapper[4762]: I1009 13:26:51.951514 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:51Z","lastTransitionTime":"2025-10-09T13:26:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 09 13:26:52 crc kubenswrapper[4762]: I1009 13:26:52.054376 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 09 13:26:52 crc kubenswrapper[4762]: I1009 13:26:52.054430 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 09 13:26:52 crc kubenswrapper[4762]: I1009 13:26:52.054447 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 09 13:26:52 crc kubenswrapper[4762]: I1009 13:26:52.055005 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 09 13:26:52 crc kubenswrapper[4762]: I1009 13:26:52.055034 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:52Z","lastTransitionTime":"2025-10-09T13:26:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 09 13:26:52 crc kubenswrapper[4762]: I1009 13:26:52.155369 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 09 13:26:52 crc kubenswrapper[4762]: I1009 13:26:52.155437 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 09 13:26:52 crc kubenswrapper[4762]: I1009 13:26:52.155447 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 09 13:26:52 crc kubenswrapper[4762]: I1009 13:26:52.155463 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 09 13:26:52 crc kubenswrapper[4762]: I1009 13:26:52.155474 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:52Z","lastTransitionTime":"2025-10-09T13:26:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:52 crc kubenswrapper[4762]: E1009 13:26:52.173585 4762 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:52Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:52Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9a48ddb1-1645-4cf1-ba92-96ea5fd03a1b\\\",\\\"systemUUID\\\":\\\"cb0479c9-186e-453b-880a-de1db201ede6\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:52Z is after 
2025-08-24T17:21:41Z" Oct 09 13:26:52 crc kubenswrapper[4762]: I1009 13:26:52.177962 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:52 crc kubenswrapper[4762]: I1009 13:26:52.178002 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:52 crc kubenswrapper[4762]: I1009 13:26:52.178011 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:52 crc kubenswrapper[4762]: I1009 13:26:52.178028 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:52 crc kubenswrapper[4762]: I1009 13:26:52.178038 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:52Z","lastTransitionTime":"2025-10-09T13:26:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:52 crc kubenswrapper[4762]: E1009 13:26:52.195059 4762 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:52Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:52Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9a48ddb1-1645-4cf1-ba92-96ea5fd03a1b\\\",\\\"systemUUID\\\":\\\"cb0479c9-186e-453b-880a-de1db201ede6\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:52Z is after 
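Every one of the webhook failures in this log names the same endpoint (127.0.0.1:9743) and the same expiry (2025-08-24T17:21:41Z), so the serving certificate itself is the thing to confirm. A minimal sketch of how one might read that certificate's validity window from the node, assuming the endpoint from the log is reachable; InsecureSkipVerify is used only so the handshake completes far enough to expose the expired leaf (illustrative, not part of the captured log):

package main

import (
	"crypto/tls"
	"fmt"
	"log"
	"time"
)

func main() {
	// Endpoint taken from the "failed calling webhook" errors above.
	conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{InsecureSkipVerify: true})
	if err != nil {
		log.Fatal(err)
	}
	defer conn.Close()

	// The leaf certificate carries the NotAfter that the kubelet's
	// verifier is rejecting.
	leaf := conn.ConnectionState().PeerCertificates[0]
	fmt.Printf("subject=%s notBefore=%s notAfter=%s expired=%v\n",
		leaf.Subject, leaf.NotBefore.Format(time.RFC3339),
		leaf.NotAfter.Format(time.RFC3339), time.Now().After(leaf.NotAfter))
}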
2025-08-24T17:21:41Z" Oct 09 13:26:52 crc kubenswrapper[4762]: I1009 13:26:52.199669 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:52 crc kubenswrapper[4762]: I1009 13:26:52.199702 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:52 crc kubenswrapper[4762]: I1009 13:26:52.199713 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:52 crc kubenswrapper[4762]: I1009 13:26:52.199730 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:52 crc kubenswrapper[4762]: I1009 13:26:52.199744 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:52Z","lastTransitionTime":"2025-10-09T13:26:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:52 crc kubenswrapper[4762]: E1009 13:26:52.215245 4762 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:52Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:26:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:52Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9a48ddb1-1645-4cf1-ba92-96ea5fd03a1b\\\",\\\"systemUUID\\\":\\\"cb0479c9-186e-453b-880a-de1db201ede6\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:52Z is after 
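The back-to-back "Error updating node status, will retry" entries at 13:26:52.173585, .195059, .215245 (and .233445 below) are consistent with the kubelet attempting the status patch several times in a tight loop within one sync before giving up. A rough sketch of that retry pattern, where the constant name, retry count, and helper names are assumptions for illustration rather than anything taken from this log:

package main

import (
	"errors"
	"fmt"
)

// nodeStatusUpdateRetry mirrors a fixed per-sync retry budget; the
// value 5 is an assumption for this sketch.
const nodeStatusUpdateRetry = 5

func updateNodeStatus(patch func() error) error {
	for i := 0; i < nodeStatusUpdateRetry; i++ {
		if err := patch(); err != nil {
			// Corresponds to the repeated "will retry" entries above.
			fmt.Printf("Error updating node status, will retry: %v\n", err)
			continue
		}
		return nil
	}
	return errors.New("update node status exceeds retry count")
}

func main() {
	// Stand-in for the PATCH that the expired webhook certificate rejects.
	err := updateNodeStatus(func() error {
		return errors.New("x509: certificate has expired or is not yet valid")
	})
	fmt.Println(err)
}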
2025-08-24T17:21:41Z"
Oct 09 13:26:52 crc kubenswrapper[4762]: I1009 13:26:52.219230 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 09 13:26:52 crc kubenswrapper[4762]: I1009 13:26:52.219297 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 09 13:26:52 crc kubenswrapper[4762]: I1009 13:26:52.219318 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 09 13:26:52 crc kubenswrapper[4762]: I1009 13:26:52.219342 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 09 13:26:52 crc kubenswrapper[4762]: I1009 13:26:52.219360 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:52Z","lastTransitionTime":"2025-10-09T13:26:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 09 13:26:52 crc kubenswrapper[4762]: E1009 13:26:52.257800 4762 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count"
Oct 09 13:26:52 crc kubenswrapper[4762]: I1009 13:26:52.260048 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 09 13:26:52 crc kubenswrapper[4762]: I1009 13:26:52.260178 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 09 13:26:52 crc kubenswrapper[4762]: I1009 13:26:52.260198 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 09 13:26:52 crc kubenswrapper[4762]: I1009 13:26:52.260226 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 09 13:26:52 crc kubenswrapper[4762]: I1009 13:26:52.260244 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:52Z","lastTransitionTime":"2025-10-09T13:26:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 09 13:26:52 crc kubenswrapper[4762]: I1009 13:26:52.964708 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 09 13:26:52 crc kubenswrapper[4762]: I1009 13:26:52.964758 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 09 13:26:52 crc kubenswrapper[4762]: I1009 13:26:52.964790 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 09 13:26:52 crc kubenswrapper[4762]: I1009 13:26:52.964851 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k4bwn"
Oct 09 13:26:52 crc kubenswrapper[4762]: E1009 13:26:52.964985 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 09 13:26:52 crc kubenswrapper[4762]: E1009 13:26:52.965127 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 09 13:26:52 crc kubenswrapper[4762]: E1009 13:26:52.965270 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 09 13:26:52 crc kubenswrapper[4762]: E1009 13:26:52.965371 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k4bwn" podUID="f9a76399-c2ae-487b-a52c-f0e271fb1d20"
Oct 09 13:26:54 crc kubenswrapper[4762]: I1009 13:26:54.936002 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 09 13:26:54 crc kubenswrapper[4762]: I1009 13:26:54.936069 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 09 13:26:54 crc kubenswrapper[4762]: I1009 13:26:54.936079 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 09 13:26:54 crc kubenswrapper[4762]: I1009 13:26:54.936096 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 09 13:26:54 crc kubenswrapper[4762]: I1009 13:26:54.936106 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:54Z","lastTransitionTime":"2025-10-09T13:26:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 09 13:26:54 crc kubenswrapper[4762]: I1009 13:26:54.964841 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k4bwn"
Oct 09 13:26:54 crc kubenswrapper[4762]: I1009 13:26:54.964892 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 09 13:26:54 crc kubenswrapper[4762]: I1009 13:26:54.965045 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 09 13:26:54 crc kubenswrapper[4762]: E1009 13:26:54.965048 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k4bwn" podUID="f9a76399-c2ae-487b-a52c-f0e271fb1d20"
Oct 09 13:26:54 crc kubenswrapper[4762]: I1009 13:26:54.965116 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 09 13:26:54 crc kubenswrapper[4762]: E1009 13:26:54.965227 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 09 13:26:54 crc kubenswrapper[4762]: E1009 13:26:54.965307 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 13:26:54 crc kubenswrapper[4762]: E1009 13:26:54.965374 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 13:26:54 crc kubenswrapper[4762]: I1009 13:26:54.978063 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-k4bwn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9a76399-c2ae-487b-a52c-f0e271fb1d20\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vrjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vrjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:58Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-k4bwn\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:54Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:54 crc kubenswrapper[4762]: I1009 13:26:54.990494 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dd0d2d4c-667f-43da-8074-b6e14823b755\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78a7e23eb6d5024d626963a06cf5790fcd6c7c17c82c823b2650c55273e427fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f807722a8b6059afed30f7f1fd32bcc168b8bf9d5eee02d74a42ab70ae5ff048\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1706f7f512083d1da015da3c7cd09c6aa4d497b83f8dfcd4ce0e8e966aa00b37\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":t
rue,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd9c69d0be5e859f86da1745bdf82f003681f064e2580bfd454e6ba875bdcb61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:25Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:54Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:55 crc kubenswrapper[4762]: I1009 13:26:55.009273 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:55Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:55 crc kubenswrapper[4762]: I1009 13:26:55.027159 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d2fc9df0c1cc14b4b9f8caff51e87059aeffaa2daeeb271d55585f7297d003c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4194b9211ce31f24383b3cabd274bfb9afef0c56583b802cb2c934ba81b05c43\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:55Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:55 crc kubenswrapper[4762]: I1009 13:26:55.039004 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:55 crc kubenswrapper[4762]: I1009 13:26:55.039062 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:55 crc kubenswrapper[4762]: I1009 13:26:55.039077 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:55 crc kubenswrapper[4762]: I1009 13:26:55.039098 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:55 crc kubenswrapper[4762]: I1009 13:26:55.039113 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:55Z","lastTransitionTime":"2025-10-09T13:26:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:55 crc kubenswrapper[4762]: I1009 13:26:55.040227 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"366049a3-acf6-488c-9f93-4557528d6d14\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbaae79b08d94e58f88c25cf641c2c24edc8f8ed5d5ffbf5fd3c68b24246a964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://be56bae2e58091d7381288b22608ea1d9ff05c002d923b3dc62b87fe4d4dfdc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5v6hv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:55Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:55 crc kubenswrapper[4762]: I1009 13:26:55.053926 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"804a251f-1e3f-4b9e-af10-eefa332e6e98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb1fed96810d93e59d24d69fb8a2eb9974e8a5e524465daea3ff2cca16f7226c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7ad35bed43586dfec502056ecb5226049a7fb25461c2774fb5377102fd2ce85d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d3cc3e07b5fc9069f7faa521fa94efde90d9c79940876a93849c44a14327e56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\"
:\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a492a7a956d54533afbd3869796642235d0c8ca621c550c14eeab5e988fc4364\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a492a7a956d54533afbd3869796642235d0c8ca621c550c14eeab5e988fc4364\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:25Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:55Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:55 crc kubenswrapper[4762]: I1009 13:26:55.075794 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42c46f24d4579b9ef6d5f7a351830fc24872a571e6bc26a163bb5ace1e688037\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:55Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:55 crc kubenswrapper[4762]: I1009 13:26:55.089698 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b59b56cd547bd0e2f72ad712b04651a4ae65cf3a0df7865c0b0c16478261a06a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:55Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:55 crc kubenswrapper[4762]: I1009 13:26:55.104095 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:55Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:55 crc kubenswrapper[4762]: I1009 13:26:55.131596 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"92662de9-9784-432a-92d2-a668f815e8fd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c409df5880861cef6885822a19dc9bbe481342a849c18ac11c85a60fcee0f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9dbd3d536f2ff0e46947e1516b3b9def208d490f5e62bbde5bebf37690d26ac0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a6b9ddcf6f9632e0ab1ac7f145c90d4c1e404b44f6e4fdc547fa42a4736448d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1fe15b6fa2a4089c0ef0b19180a44b570bf28aeb719e8fb5c960c16f3bc3ee5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://443d1d116c58f5d8b2c5fc9051baf914244cb0776b1f912d11fe4316a0ec0567\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a6b026d474235bbb7b31530b4628a10c35b22baf4ee49759f28a9beb8177989\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://25146c86fc625c912a6b7aab957ed31039aeede7
d329cce8f28f0dd01f5ff39c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1dc4404ab9e73409695946479f2738c0f0af702da13af2d1995bf63ef0e493d0\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T13:26:20Z\\\",\\\"message\\\":\\\"ble to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:20Z is after 2025-08-24T17:21:41Z]\\\\nI1009 13:26:20.768193 6440 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-kube-controller-manager-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"3ec9f67e-7758-4707-a6d0-2dc28f28ac37\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-kube-controller-manager-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterL\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T13:26:20Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://25146c86fc625c912a6b7aab957ed31039aeede7d329cce8f28f0dd01f5ff39c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T13:26:49Z\\\",\\\"message\\\":\\\"ft-marketplace/marketplace-operator-metrics LB template configs for network=default: []services.lbConfig(nil)\\\\nF1009 13:26:49.799608 6832 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:49Z is after 2025-08-24T17:21:41Z]\\\\nI1009 13:26:49.798820 6832 obj_retry.go:303] Retry object setup: *v1.Pod 
openshift-kube-controller-manager/kube-controller-manager-crc\\\\nI1009 13:26:49.799620 6832 services_controller.go:451] Built service openshift-marketplace/marketplace-operator-metrics cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-marketplace/marketplace-operator-metrics_TCP_cluster\\\\\\\", UUI\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T13:26:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f5be977653547c33b4d2d5184688120b32866045e3b18a08be1c7c406d6b498\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnl
y\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bd03e6b064630a1cd71d2e88a4e99d513b30d0f225516ce8030cba879fe2422e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd03e6b064630a1cd71d2e88a4e99d513b30d0f225516ce8030cba879fe2422e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jl67g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:55Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:55 crc kubenswrapper[4762]: I1009 13:26:55.141678 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:55 crc kubenswrapper[4762]: I1009 13:26:55.141712 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:55 crc kubenswrapper[4762]: I1009 13:26:55.141721 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:55 crc kubenswrapper[4762]: I1009 13:26:55.141734 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:55 crc kubenswrapper[4762]: I1009 13:26:55.141744 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:55Z","lastTransitionTime":"2025-10-09T13:26:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:55 crc kubenswrapper[4762]: I1009 13:26:55.149475 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7bd1ecbd-1492-4e6a-87e8-1c913e084d9d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9ff1efe69d256b491a039e5f35442c087ce3b52fc7abf98b338e24c3e020b99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb2071dd369674ca2de7de56dd1250c763b8733d72889b60eff864774dc3d81b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e44b6f2021a1a4ccd714f86443c7cc235b9d77cd455e68f7e042281ff0917569\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f33b9070a56fe51d2f39d9d509fc8cea2fada696703209c911b75f5c8f53e96d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9179f90a1a9a3c70467429b0471320ccf51b67f27c4d28d22ebc477cedab17d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1009 13:25:38.564754 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 13:25:38.572923 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2393231961/tls.crt::/tmp/serving-cert-2393231961/tls.key\\\\\\\"\\\\nI1009 13:25:44.418137 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 13:25:44.425303 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 13:25:44.425330 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 13:25:44.425348 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 13:25:44.425353 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 13:25:44.434300 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1009 13:25:44.434319 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1009 13:25:44.434323 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 13:25:44.434340 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 13:25:44.434344 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 13:25:44.434347 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 13:25:44.434350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 13:25:44.434353 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1009 13:25:44.436492 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:28Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://281873dc615f940d39a13cb0a18a2eb34eb7de3f9773d8845183edeb89d430f5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b0cc5e4351c64f1a4f07f8ec87ea48ddab393d4ac64228e8fbf20d3259fb630\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6b0cc5e4351c64f1a4f07f8ec87ea48ddab393d4ac64228e8fbf20d3259fb630\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:25Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:55Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:55 crc kubenswrapper[4762]: I1009 13:26:55.160650 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"55e33db5-e82b-4449-9a01-265a7d9b74c3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d5e61326d740880d50b889eb69f101fcc0e40a557ec6d8e76ce770ec7d456fff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21b9ead59eaf920882c1503023924b621376395c684e0c7490490b77abfc18aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://21b9ead59eaf920882c1503023924b621376395c684e0c7490490b77abfc18aa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:55Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:55 crc kubenswrapper[4762]: I1009 13:26:55.172673 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2vkbh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"263d57f9-b10b-4ce1-adad-774600b977d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://732b66ff58c48b0703e0fd4585768652035af6797f66b586fc6f17ef3937d9d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gmcr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2vkbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:55Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:55 crc kubenswrapper[4762]: I1009 13:26:55.188269 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-9wtqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c847aae6-277a-45dc-86d0-9b175f7e8177\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f95093a278093400b0692e3f8cbe050c5586fbcf1ad29c7b758613ab6ac76af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbc232c96b60c8678588d4902c8dfbf6fc0b30f8af768295c963aad3a9f4d644\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T13:26:32Z\\\",\\\"message\\\":\\\"2025-10-09T13:25:46+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_cd21724e-0215-496f-b682-23c2705fa0b1\\\\n2025-10-09T13:25:46+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_cd21724e-0215-496f-b682-23c2705fa0b1 to /host/opt/cni/bin/\\\\n2025-10-09T13:25:47Z [verbose] multus-daemon started\\\\n2025-10-09T13:25:47Z [verbose] Readiness Indicator file check\\\\n2025-10-09T13:26:32Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:26:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2kljt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-9wtqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:55Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:55 crc kubenswrapper[4762]: I1009 13:26:55.200049 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fx92z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf0dada3-5765-4a2e-b28a-f9291c2d6428\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6368fc75a63ebf7915390457c69a6a9b77e19726ba182437f5c616bea12245f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xpd4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aeff516f9d81e48c8bd350da51ebffc8f6f031f06100a5264cb7aca04674f79f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xpd4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fx92z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:55Z is after 2025-08-24T17:21:41Z" Oct 09 
13:26:55 crc kubenswrapper[4762]: I1009 13:26:55.213017 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:55Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:55 crc kubenswrapper[4762]: I1009 13:26:55.227724 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-n6lnd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf8b8ba7-96cd-4cdd-9925-94dd98242050\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ff1d6f7e6b424ebf8005fa3d140897100a8e7ed0095e7af05531cf5ad9f69b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://38f8c3c9395cbaf4a6426349a070b2d3b4ba4f83af8f5272a33d617f456c2e38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://38f8c3c9395cbaf4a6426349a070b2d3b4ba4f83af8f5272a33d617f456c2e38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a83c8fa2a9b44c19879eaa27ee0aceb5aa4f0c2d70347e497a62fc1ca236807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a83c8fa2a9b44c19879eaa27ee0aceb5aa4f0c2d70347e497a62fc1ca236807\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ece4883665cbaf9a3045d5ddde5584cc4cd6c3acb5a8bb3acbf7eaee644796e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ece4883665cbaf9a3045d5ddde5584cc4cd6c3acb5a8bb3acbf7eaee644796e9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1aae59e2ff2b1d48f9d2b5d13c30ccc155f0ba8e2657ec99020d6c6a8977495e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1aae59e2ff2b1d48f9d2b5d13c30ccc155f0ba8e2657ec99020d6c6a8977495e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f1ff0c4b2b07617d5ea81b1d88ea74124e4739d667f1ad72eb36dad3d48e7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96f1ff0c4b2b07617d5ea81b1d88ea74124e4739d667f1ad72eb36dad3d48e7f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa9e24a4bd37675e1c6b4f6ba08d60b722add0bbcdf9eb51ace2e3fb35143d0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa9e24a4bd37675e1c6b4f6ba08d60b722add0bbcdf9eb51ace2e3fb35143d0d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-n6lnd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:55Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:55 crc kubenswrapper[4762]: I1009 13:26:55.238711 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bj499" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b164d4fd-19df-4902-971f-5efe403e61e0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ab43a3e93731e2de1e0260a1933cc78188e7b561cb450d645c7b4fc00b691ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqf4b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:51Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bj499\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:55Z is after 2025-08-24T17:21:41Z" Oct 09 13:26:55 crc kubenswrapper[4762]: I1009 13:26:55.244139 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:55 crc kubenswrapper[4762]: I1009 13:26:55.244176 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:55 crc kubenswrapper[4762]: I1009 13:26:55.244186 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:55 crc kubenswrapper[4762]: I1009 13:26:55.244208 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:55 crc kubenswrapper[4762]: I1009 13:26:55.244218 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:55Z","lastTransitionTime":"2025-10-09T13:26:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:55 crc kubenswrapper[4762]: I1009 13:26:55.346984 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:55 crc kubenswrapper[4762]: I1009 13:26:55.347046 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:55 crc kubenswrapper[4762]: I1009 13:26:55.347058 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:55 crc kubenswrapper[4762]: I1009 13:26:55.347073 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:55 crc kubenswrapper[4762]: I1009 13:26:55.347082 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:55Z","lastTransitionTime":"2025-10-09T13:26:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:55 crc kubenswrapper[4762]: I1009 13:26:55.449499 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:55 crc kubenswrapper[4762]: I1009 13:26:55.449544 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:55 crc kubenswrapper[4762]: I1009 13:26:55.449553 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:55 crc kubenswrapper[4762]: I1009 13:26:55.449568 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:55 crc kubenswrapper[4762]: I1009 13:26:55.449578 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:55Z","lastTransitionTime":"2025-10-09T13:26:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:55 crc kubenswrapper[4762]: I1009 13:26:55.552117 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:55 crc kubenswrapper[4762]: I1009 13:26:55.552174 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:55 crc kubenswrapper[4762]: I1009 13:26:55.552191 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:55 crc kubenswrapper[4762]: I1009 13:26:55.552214 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:55 crc kubenswrapper[4762]: I1009 13:26:55.552233 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:55Z","lastTransitionTime":"2025-10-09T13:26:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:55 crc kubenswrapper[4762]: I1009 13:26:55.655210 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:55 crc kubenswrapper[4762]: I1009 13:26:55.655247 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:55 crc kubenswrapper[4762]: I1009 13:26:55.655257 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:55 crc kubenswrapper[4762]: I1009 13:26:55.655274 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:55 crc kubenswrapper[4762]: I1009 13:26:55.655285 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:55Z","lastTransitionTime":"2025-10-09T13:26:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:55 crc kubenswrapper[4762]: I1009 13:26:55.757768 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:55 crc kubenswrapper[4762]: I1009 13:26:55.757857 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:55 crc kubenswrapper[4762]: I1009 13:26:55.757874 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:55 crc kubenswrapper[4762]: I1009 13:26:55.757895 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:55 crc kubenswrapper[4762]: I1009 13:26:55.757969 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:55Z","lastTransitionTime":"2025-10-09T13:26:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:55 crc kubenswrapper[4762]: I1009 13:26:55.862376 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:55 crc kubenswrapper[4762]: I1009 13:26:55.862438 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:55 crc kubenswrapper[4762]: I1009 13:26:55.862459 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:55 crc kubenswrapper[4762]: I1009 13:26:55.862487 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:55 crc kubenswrapper[4762]: I1009 13:26:55.862507 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:55Z","lastTransitionTime":"2025-10-09T13:26:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:55 crc kubenswrapper[4762]: I1009 13:26:55.965590 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:55 crc kubenswrapper[4762]: I1009 13:26:55.965718 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:55 crc kubenswrapper[4762]: I1009 13:26:55.965749 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:55 crc kubenswrapper[4762]: I1009 13:26:55.965778 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:55 crc kubenswrapper[4762]: I1009 13:26:55.965801 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:55Z","lastTransitionTime":"2025-10-09T13:26:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:55 crc kubenswrapper[4762]: I1009 13:26:55.979181 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Oct 09 13:26:56 crc kubenswrapper[4762]: I1009 13:26:56.068052 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:56 crc kubenswrapper[4762]: I1009 13:26:56.068084 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:56 crc kubenswrapper[4762]: I1009 13:26:56.068094 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:56 crc kubenswrapper[4762]: I1009 13:26:56.068109 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:56 crc kubenswrapper[4762]: I1009 13:26:56.068120 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:56Z","lastTransitionTime":"2025-10-09T13:26:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:56 crc kubenswrapper[4762]: I1009 13:26:56.169963 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:56 crc kubenswrapper[4762]: I1009 13:26:56.170210 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:56 crc kubenswrapper[4762]: I1009 13:26:56.170295 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:56 crc kubenswrapper[4762]: I1009 13:26:56.170457 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:56 crc kubenswrapper[4762]: I1009 13:26:56.170534 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:56Z","lastTransitionTime":"2025-10-09T13:26:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:56 crc kubenswrapper[4762]: I1009 13:26:56.273555 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:56 crc kubenswrapper[4762]: I1009 13:26:56.273883 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:56 crc kubenswrapper[4762]: I1009 13:26:56.274095 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:56 crc kubenswrapper[4762]: I1009 13:26:56.274369 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:56 crc kubenswrapper[4762]: I1009 13:26:56.274460 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:56Z","lastTransitionTime":"2025-10-09T13:26:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:56 crc kubenswrapper[4762]: I1009 13:26:56.376729 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:56 crc kubenswrapper[4762]: I1009 13:26:56.376775 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:56 crc kubenswrapper[4762]: I1009 13:26:56.376785 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:56 crc kubenswrapper[4762]: I1009 13:26:56.376800 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:56 crc kubenswrapper[4762]: I1009 13:26:56.376811 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:56Z","lastTransitionTime":"2025-10-09T13:26:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:56 crc kubenswrapper[4762]: I1009 13:26:56.479330 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:56 crc kubenswrapper[4762]: I1009 13:26:56.479400 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:56 crc kubenswrapper[4762]: I1009 13:26:56.479420 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:56 crc kubenswrapper[4762]: I1009 13:26:56.479451 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:56 crc kubenswrapper[4762]: I1009 13:26:56.479471 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:56Z","lastTransitionTime":"2025-10-09T13:26:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:26:56 crc kubenswrapper[4762]: I1009 13:26:56.582574 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:56 crc kubenswrapper[4762]: I1009 13:26:56.582649 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:56 crc kubenswrapper[4762]: I1009 13:26:56.582666 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:56 crc kubenswrapper[4762]: I1009 13:26:56.582689 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:56 crc kubenswrapper[4762]: I1009 13:26:56.582700 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:56Z","lastTransitionTime":"2025-10-09T13:26:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:56 crc kubenswrapper[4762]: I1009 13:26:56.685136 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:56 crc kubenswrapper[4762]: I1009 13:26:56.685190 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:56 crc kubenswrapper[4762]: I1009 13:26:56.685200 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:56 crc kubenswrapper[4762]: I1009 13:26:56.685218 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:56 crc kubenswrapper[4762]: I1009 13:26:56.685230 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:56Z","lastTransitionTime":"2025-10-09T13:26:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:56 crc kubenswrapper[4762]: I1009 13:26:56.788154 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:56 crc kubenswrapper[4762]: I1009 13:26:56.788231 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:56 crc kubenswrapper[4762]: I1009 13:26:56.788254 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:56 crc kubenswrapper[4762]: I1009 13:26:56.788282 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:56 crc kubenswrapper[4762]: I1009 13:26:56.788302 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:56Z","lastTransitionTime":"2025-10-09T13:26:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Oct 09 13:26:56 crc kubenswrapper[4762]: I1009 13:26:56.964854 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 09 13:26:56 crc kubenswrapper[4762]: E1009 13:26:56.965334 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 09 13:26:56 crc kubenswrapper[4762]: I1009 13:26:56.964947 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 09 13:26:56 crc kubenswrapper[4762]: E1009 13:26:56.965869 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 09 13:26:56 crc kubenswrapper[4762]: I1009 13:26:56.964857 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k4bwn"
Oct 09 13:26:56 crc kubenswrapper[4762]: I1009 13:26:56.964948 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 09 13:26:56 crc kubenswrapper[4762]: E1009 13:26:56.966403 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 09 13:26:56 crc kubenswrapper[4762]: E1009 13:26:56.966272 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k4bwn" podUID="f9a76399-c2ae-487b-a52c-f0e271fb1d20"
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 13:26:56 crc kubenswrapper[4762]: E1009 13:26:56.966272 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k4bwn" podUID="f9a76399-c2ae-487b-a52c-f0e271fb1d20" Oct 09 13:26:56 crc kubenswrapper[4762]: I1009 13:26:56.994305 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:56 crc kubenswrapper[4762]: I1009 13:26:56.994357 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:56 crc kubenswrapper[4762]: I1009 13:26:56.994374 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:56 crc kubenswrapper[4762]: I1009 13:26:56.994398 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:56 crc kubenswrapper[4762]: I1009 13:26:56.994425 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:56Z","lastTransitionTime":"2025-10-09T13:26:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:57 crc kubenswrapper[4762]: I1009 13:26:57.096540 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:57 crc kubenswrapper[4762]: I1009 13:26:57.096583 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:57 crc kubenswrapper[4762]: I1009 13:26:57.096595 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:57 crc kubenswrapper[4762]: I1009 13:26:57.096609 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:57 crc kubenswrapper[4762]: I1009 13:26:57.096691 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:57Z","lastTransitionTime":"2025-10-09T13:26:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Oct 09 13:26:58 crc kubenswrapper[4762]: I1009 13:26:58.964512 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 09 13:26:58 crc kubenswrapper[4762]: I1009 13:26:58.964572 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 09 13:26:58 crc kubenswrapper[4762]: I1009 13:26:58.964576 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k4bwn"
Oct 09 13:26:58 crc kubenswrapper[4762]: E1009 13:26:58.964694 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 09 13:26:58 crc kubenswrapper[4762]: I1009 13:26:58.964724 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 09 13:26:58 crc kubenswrapper[4762]: E1009 13:26:58.964866 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k4bwn" podUID="f9a76399-c2ae-487b-a52c-f0e271fb1d20"
Oct 09 13:26:58 crc kubenswrapper[4762]: E1009 13:26:58.964925 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 09 13:26:58 crc kubenswrapper[4762]: E1009 13:26:58.965021 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 13:26:59 crc kubenswrapper[4762]: I1009 13:26:59.059782 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:59 crc kubenswrapper[4762]: I1009 13:26:59.060141 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:59 crc kubenswrapper[4762]: I1009 13:26:59.060218 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:59 crc kubenswrapper[4762]: I1009 13:26:59.060303 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:59 crc kubenswrapper[4762]: I1009 13:26:59.060379 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:59Z","lastTransitionTime":"2025-10-09T13:26:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:26:59 crc kubenswrapper[4762]: I1009 13:26:59.162749 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:26:59 crc kubenswrapper[4762]: I1009 13:26:59.162789 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:26:59 crc kubenswrapper[4762]: I1009 13:26:59.162802 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:26:59 crc kubenswrapper[4762]: I1009 13:26:59.162821 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:26:59 crc kubenswrapper[4762]: I1009 13:26:59.162832 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:26:59Z","lastTransitionTime":"2025-10-09T13:26:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Oct 09 13:27:00 crc kubenswrapper[4762]: I1009 13:27:00.964980 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 09 13:27:00 crc kubenswrapper[4762]: I1009 13:27:00.965072 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 09 13:27:00 crc kubenswrapper[4762]: I1009 13:27:00.965090 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 09 13:27:00 crc kubenswrapper[4762]: E1009 13:27:00.965209 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 13:27:00 crc kubenswrapper[4762]: E1009 13:27:00.965327 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 13:27:00 crc kubenswrapper[4762]: E1009 13:27:00.965466 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 13:27:00 crc kubenswrapper[4762]: I1009 13:27:00.965737 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k4bwn" Oct 09 13:27:00 crc kubenswrapper[4762]: E1009 13:27:00.966047 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k4bwn" podUID="f9a76399-c2ae-487b-a52c-f0e271fb1d20" Oct 09 13:27:01 crc kubenswrapper[4762]: I1009 13:27:01.017492 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:01 crc kubenswrapper[4762]: I1009 13:27:01.017566 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:01 crc kubenswrapper[4762]: I1009 13:27:01.017591 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:01 crc kubenswrapper[4762]: I1009 13:27:01.017623 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:01 crc kubenswrapper[4762]: I1009 13:27:01.017687 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:01Z","lastTransitionTime":"2025-10-09T13:27:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:27:01 crc kubenswrapper[4762]: I1009 13:27:01.738144 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:01 crc kubenswrapper[4762]: I1009 13:27:01.738203 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:01 crc kubenswrapper[4762]: I1009 13:27:01.738222 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:01 crc kubenswrapper[4762]: I1009 13:27:01.738249 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:01 crc kubenswrapper[4762]: I1009 13:27:01.738273 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:01Z","lastTransitionTime":"2025-10-09T13:27:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:27:01 crc kubenswrapper[4762]: I1009 13:27:01.840917 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:01 crc kubenswrapper[4762]: I1009 13:27:01.840997 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:01 crc kubenswrapper[4762]: I1009 13:27:01.841020 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:01 crc kubenswrapper[4762]: I1009 13:27:01.841052 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:01 crc kubenswrapper[4762]: I1009 13:27:01.841074 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:01Z","lastTransitionTime":"2025-10-09T13:27:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:27:01 crc kubenswrapper[4762]: I1009 13:27:01.944405 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:01 crc kubenswrapper[4762]: I1009 13:27:01.944457 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:01 crc kubenswrapper[4762]: I1009 13:27:01.944474 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:01 crc kubenswrapper[4762]: I1009 13:27:01.944497 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:01 crc kubenswrapper[4762]: I1009 13:27:01.944514 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:01Z","lastTransitionTime":"2025-10-09T13:27:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:27:02 crc kubenswrapper[4762]: I1009 13:27:02.046838 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:02 crc kubenswrapper[4762]: I1009 13:27:02.046930 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:02 crc kubenswrapper[4762]: I1009 13:27:02.046942 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:02 crc kubenswrapper[4762]: I1009 13:27:02.046969 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:02 crc kubenswrapper[4762]: I1009 13:27:02.046984 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:02Z","lastTransitionTime":"2025-10-09T13:27:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:27:02 crc kubenswrapper[4762]: I1009 13:27:02.150356 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:02 crc kubenswrapper[4762]: I1009 13:27:02.150404 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:02 crc kubenswrapper[4762]: I1009 13:27:02.150416 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:02 crc kubenswrapper[4762]: I1009 13:27:02.150435 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:02 crc kubenswrapper[4762]: I1009 13:27:02.150447 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:02Z","lastTransitionTime":"2025-10-09T13:27:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:27:02 crc kubenswrapper[4762]: I1009 13:27:02.253797 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:02 crc kubenswrapper[4762]: I1009 13:27:02.253879 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:02 crc kubenswrapper[4762]: I1009 13:27:02.253901 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:02 crc kubenswrapper[4762]: I1009 13:27:02.253927 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:02 crc kubenswrapper[4762]: I1009 13:27:02.253947 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:02Z","lastTransitionTime":"2025-10-09T13:27:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:27:02 crc kubenswrapper[4762]: I1009 13:27:02.356743 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:02 crc kubenswrapper[4762]: I1009 13:27:02.356788 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:02 crc kubenswrapper[4762]: I1009 13:27:02.356798 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:02 crc kubenswrapper[4762]: I1009 13:27:02.356813 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:02 crc kubenswrapper[4762]: I1009 13:27:02.356825 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:02Z","lastTransitionTime":"2025-10-09T13:27:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:27:02 crc kubenswrapper[4762]: I1009 13:27:02.459993 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:02 crc kubenswrapper[4762]: I1009 13:27:02.460022 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:02 crc kubenswrapper[4762]: I1009 13:27:02.460033 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:02 crc kubenswrapper[4762]: I1009 13:27:02.460047 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:02 crc kubenswrapper[4762]: I1009 13:27:02.460057 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:02Z","lastTransitionTime":"2025-10-09T13:27:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:27:02 crc kubenswrapper[4762]: I1009 13:27:02.469990 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:02 crc kubenswrapper[4762]: I1009 13:27:02.470042 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:02 crc kubenswrapper[4762]: I1009 13:27:02.470061 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:02 crc kubenswrapper[4762]: I1009 13:27:02.470086 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:02 crc kubenswrapper[4762]: I1009 13:27:02.470124 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:02Z","lastTransitionTime":"2025-10-09T13:27:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Oct 09 13:27:02 crc kubenswrapper[4762]: E1009 13:27:02.492462 4762 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:27:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:27:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:27:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:27:02Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:27:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:27:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:27:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:27:02Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9a48ddb1-1645-4cf1-ba92-96ea5fd03a1b\\\",\\\"systemUUID\\\":\\\"cb0479c9-186e-453b-880a-de1db201ede6\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:27:02Z is after 
2025-08-24T17:21:41Z"
Oct 09 13:27:02 crc kubenswrapper[4762]: I1009 13:27:02.554170 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 09 13:27:02 crc kubenswrapper[4762]: I1009 13:27:02.554209 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 09 13:27:02 crc kubenswrapper[4762]: I1009 13:27:02.554223 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 09 13:27:02 crc kubenswrapper[4762]: I1009 13:27:02.554248 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 09 13:27:02 crc kubenswrapper[4762]: I1009 13:27:02.554265 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:02Z","lastTransitionTime":"2025-10-09T13:27:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 09 13:27:02 crc kubenswrapper[4762]: E1009 13:27:02.572167 4762 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:27:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:27:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:27:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:27:02Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:27:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:27:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:27:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:27:02Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9a48ddb1-1645-4cf1-ba92-96ea5fd03a1b\\\",\\\"systemUUID\\\":\\\"cb0479c9-186e-453b-880a-de1db201ede6\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:27:02Z is after 
2025-08-24T17:21:41Z" Oct 09 13:27:02 crc kubenswrapper[4762]: I1009 13:27:02.577825 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:02 crc kubenswrapper[4762]: I1009 13:27:02.577882 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:02 crc kubenswrapper[4762]: I1009 13:27:02.577905 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:02 crc kubenswrapper[4762]: I1009 13:27:02.577932 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:02 crc kubenswrapper[4762]: I1009 13:27:02.577950 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:02Z","lastTransitionTime":"2025-10-09T13:27:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:27:02 crc kubenswrapper[4762]: E1009 13:27:02.596021 4762 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:27:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:27:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:27:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:27:02Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:27:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:27:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T13:27:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T13:27:02Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Oct 09 13:27:02 crc kubenswrapper[4762]: E1009 13:27:02.596250 4762 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count"
Oct 09 13:27:02 crc kubenswrapper[4762]: I1009 13:27:02.598267 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 09 13:27:02 crc kubenswrapper[4762]: I1009 13:27:02.598342 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 09 13:27:02 crc kubenswrapper[4762]: I1009 13:27:02.598353 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 09 13:27:02 crc kubenswrapper[4762]: I1009 13:27:02.598371 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 09 13:27:02 crc kubenswrapper[4762]: I1009 13:27:02.598382 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:02Z","lastTransitionTime":"2025-10-09T13:27:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 09 13:27:02 crc kubenswrapper[4762]: I1009 13:27:02.674136 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f9a76399-c2ae-487b-a52c-f0e271fb1d20-metrics-certs\") pod \"network-metrics-daemon-k4bwn\" (UID: \"f9a76399-c2ae-487b-a52c-f0e271fb1d20\") " pod="openshift-multus/network-metrics-daemon-k4bwn"
Oct 09 13:27:02 crc kubenswrapper[4762]: E1009 13:27:02.674288 4762 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Oct 09 13:27:02 crc kubenswrapper[4762]: E1009 13:27:02.674723 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f9a76399-c2ae-487b-a52c-f0e271fb1d20-metrics-certs podName:f9a76399-c2ae-487b-a52c-f0e271fb1d20 nodeName:}" failed. No retries permitted until 2025-10-09 13:28:06.674692688 +0000 UTC m=+162.448483737 (durationBeforeRetry 1m4s).
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/f9a76399-c2ae-487b-a52c-f0e271fb1d20-metrics-certs") pod "network-metrics-daemon-k4bwn" (UID: "f9a76399-c2ae-487b-a52c-f0e271fb1d20") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 09 13:27:02 crc kubenswrapper[4762]: I1009 13:27:02.701217 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:02 crc kubenswrapper[4762]: I1009 13:27:02.701311 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:02 crc kubenswrapper[4762]: I1009 13:27:02.701328 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:02 crc kubenswrapper[4762]: I1009 13:27:02.701348 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:02 crc kubenswrapper[4762]: I1009 13:27:02.701363 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:02Z","lastTransitionTime":"2025-10-09T13:27:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:27:02 crc kubenswrapper[4762]: I1009 13:27:02.803535 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:02 crc kubenswrapper[4762]: I1009 13:27:02.803834 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:02 crc kubenswrapper[4762]: I1009 13:27:02.803933 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:02 crc kubenswrapper[4762]: I1009 13:27:02.804007 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:02 crc kubenswrapper[4762]: I1009 13:27:02.804072 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:02Z","lastTransitionTime":"2025-10-09T13:27:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
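
[Analysis note] The Ready=False condition repeats on every status sync because the runtime network check keeps finding no CNI configuration in /etc/kubernetes/cni/net.d/; OVN-Kubernetes only writes that file once ovnkube-node is healthy, and the ovnkube-controller container appears in CrashLoopBackOff later in this log. A sketch of the same readiness condition (hypothetical helper; the accepted suffixes are an assumption based on libcni's config extensions):

    import pathlib

    CNI_CONF_DIR = pathlib.Path("/etc/kubernetes/cni/net.d")  # from the log message

    def cni_config_present(conf_dir: pathlib.Path = CNI_CONF_DIR) -> bool:
        """True once the network plugin has written a .conf/.conflist/.json file."""
        if not conf_dir.is_dir():
            return False
        return any(p.suffix in {".conf", ".conflist", ".json"} for p in conf_dir.iterdir())
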
Has your network provider started?"} Oct 09 13:27:02 crc kubenswrapper[4762]: I1009 13:27:02.907092 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:02 crc kubenswrapper[4762]: I1009 13:27:02.907158 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:02 crc kubenswrapper[4762]: I1009 13:27:02.907180 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:02 crc kubenswrapper[4762]: I1009 13:27:02.907210 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:02 crc kubenswrapper[4762]: I1009 13:27:02.907230 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:02Z","lastTransitionTime":"2025-10-09T13:27:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:27:02 crc kubenswrapper[4762]: I1009 13:27:02.965261 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k4bwn" Oct 09 13:27:02 crc kubenswrapper[4762]: I1009 13:27:02.965449 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 13:27:02 crc kubenswrapper[4762]: I1009 13:27:02.965287 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 13:27:02 crc kubenswrapper[4762]: E1009 13:27:02.965573 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k4bwn" podUID="f9a76399-c2ae-487b-a52c-f0e271fb1d20" Oct 09 13:27:02 crc kubenswrapper[4762]: I1009 13:27:02.965861 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 13:27:02 crc kubenswrapper[4762]: E1009 13:27:02.965972 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 13:27:02 crc kubenswrapper[4762]: E1009 13:27:02.966059 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 13:27:02 crc kubenswrapper[4762]: E1009 13:27:02.966482 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 13:27:03 crc kubenswrapper[4762]: I1009 13:27:03.009390 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:03 crc kubenswrapper[4762]: I1009 13:27:03.009446 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:03 crc kubenswrapper[4762]: I1009 13:27:03.009457 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:03 crc kubenswrapper[4762]: I1009 13:27:03.009474 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:03 crc kubenswrapper[4762]: I1009 13:27:03.009488 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:03Z","lastTransitionTime":"2025-10-09T13:27:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:27:03 crc kubenswrapper[4762]: I1009 13:27:03.112041 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:03 crc kubenswrapper[4762]: I1009 13:27:03.112111 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:03 crc kubenswrapper[4762]: I1009 13:27:03.112123 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:03 crc kubenswrapper[4762]: I1009 13:27:03.112141 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:03 crc kubenswrapper[4762]: I1009 13:27:03.112152 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:03Z","lastTransitionTime":"2025-10-09T13:27:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:27:03 crc kubenswrapper[4762]: I1009 13:27:03.214449 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:03 crc kubenswrapper[4762]: I1009 13:27:03.214512 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:03 crc kubenswrapper[4762]: I1009 13:27:03.214521 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:03 crc kubenswrapper[4762]: I1009 13:27:03.214539 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:03 crc kubenswrapper[4762]: I1009 13:27:03.214548 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:03Z","lastTransitionTime":"2025-10-09T13:27:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:27:03 crc kubenswrapper[4762]: I1009 13:27:03.316967 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:03 crc kubenswrapper[4762]: I1009 13:27:03.317488 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:03 crc kubenswrapper[4762]: I1009 13:27:03.317569 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:03 crc kubenswrapper[4762]: I1009 13:27:03.317668 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:03 crc kubenswrapper[4762]: I1009 13:27:03.317747 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:03Z","lastTransitionTime":"2025-10-09T13:27:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:27:03 crc kubenswrapper[4762]: I1009 13:27:03.420747 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:03 crc kubenswrapper[4762]: I1009 13:27:03.420820 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:03 crc kubenswrapper[4762]: I1009 13:27:03.420844 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:03 crc kubenswrapper[4762]: I1009 13:27:03.420875 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:03 crc kubenswrapper[4762]: I1009 13:27:03.420898 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:03Z","lastTransitionTime":"2025-10-09T13:27:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:27:03 crc kubenswrapper[4762]: I1009 13:27:03.523108 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:03 crc kubenswrapper[4762]: I1009 13:27:03.523140 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:03 crc kubenswrapper[4762]: I1009 13:27:03.523174 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:03 crc kubenswrapper[4762]: I1009 13:27:03.523188 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:03 crc kubenswrapper[4762]: I1009 13:27:03.523197 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:03Z","lastTransitionTime":"2025-10-09T13:27:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:27:03 crc kubenswrapper[4762]: I1009 13:27:03.626228 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:03 crc kubenswrapper[4762]: I1009 13:27:03.626290 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:03 crc kubenswrapper[4762]: I1009 13:27:03.626315 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:03 crc kubenswrapper[4762]: I1009 13:27:03.626349 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:03 crc kubenswrapper[4762]: I1009 13:27:03.626370 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:03Z","lastTransitionTime":"2025-10-09T13:27:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:27:03 crc kubenswrapper[4762]: I1009 13:27:03.730271 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:03 crc kubenswrapper[4762]: I1009 13:27:03.730391 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:03 crc kubenswrapper[4762]: I1009 13:27:03.730415 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:03 crc kubenswrapper[4762]: I1009 13:27:03.730447 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:03 crc kubenswrapper[4762]: I1009 13:27:03.730466 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:03Z","lastTransitionTime":"2025-10-09T13:27:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:27:03 crc kubenswrapper[4762]: I1009 13:27:03.833126 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:03 crc kubenswrapper[4762]: I1009 13:27:03.833212 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:03 crc kubenswrapper[4762]: I1009 13:27:03.833234 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:03 crc kubenswrapper[4762]: I1009 13:27:03.833258 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:03 crc kubenswrapper[4762]: I1009 13:27:03.833276 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:03Z","lastTransitionTime":"2025-10-09T13:27:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:27:03 crc kubenswrapper[4762]: I1009 13:27:03.935987 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:03 crc kubenswrapper[4762]: I1009 13:27:03.936038 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:03 crc kubenswrapper[4762]: I1009 13:27:03.936048 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:03 crc kubenswrapper[4762]: I1009 13:27:03.936066 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:03 crc kubenswrapper[4762]: I1009 13:27:03.936080 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:03Z","lastTransitionTime":"2025-10-09T13:27:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:27:04 crc kubenswrapper[4762]: I1009 13:27:04.038949 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:04 crc kubenswrapper[4762]: I1009 13:27:04.038989 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:04 crc kubenswrapper[4762]: I1009 13:27:04.039001 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:04 crc kubenswrapper[4762]: I1009 13:27:04.039014 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:04 crc kubenswrapper[4762]: I1009 13:27:04.039023 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:04Z","lastTransitionTime":"2025-10-09T13:27:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:27:04 crc kubenswrapper[4762]: I1009 13:27:04.141714 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:04 crc kubenswrapper[4762]: I1009 13:27:04.141749 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:04 crc kubenswrapper[4762]: I1009 13:27:04.141762 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:04 crc kubenswrapper[4762]: I1009 13:27:04.141776 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:04 crc kubenswrapper[4762]: I1009 13:27:04.141788 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:04Z","lastTransitionTime":"2025-10-09T13:27:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:27:04 crc kubenswrapper[4762]: I1009 13:27:04.244382 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:04 crc kubenswrapper[4762]: I1009 13:27:04.244444 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:04 crc kubenswrapper[4762]: I1009 13:27:04.244459 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:04 crc kubenswrapper[4762]: I1009 13:27:04.244484 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:04 crc kubenswrapper[4762]: I1009 13:27:04.244502 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:04Z","lastTransitionTime":"2025-10-09T13:27:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:27:04 crc kubenswrapper[4762]: I1009 13:27:04.347571 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:04 crc kubenswrapper[4762]: I1009 13:27:04.347656 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:04 crc kubenswrapper[4762]: I1009 13:27:04.347673 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:04 crc kubenswrapper[4762]: I1009 13:27:04.347698 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:04 crc kubenswrapper[4762]: I1009 13:27:04.347716 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:04Z","lastTransitionTime":"2025-10-09T13:27:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:27:04 crc kubenswrapper[4762]: I1009 13:27:04.451083 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:04 crc kubenswrapper[4762]: I1009 13:27:04.451129 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:04 crc kubenswrapper[4762]: I1009 13:27:04.451139 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:04 crc kubenswrapper[4762]: I1009 13:27:04.451154 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:04 crc kubenswrapper[4762]: I1009 13:27:04.451168 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:04Z","lastTransitionTime":"2025-10-09T13:27:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:27:04 crc kubenswrapper[4762]: I1009 13:27:04.554271 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:04 crc kubenswrapper[4762]: I1009 13:27:04.554343 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:04 crc kubenswrapper[4762]: I1009 13:27:04.554354 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:04 crc kubenswrapper[4762]: I1009 13:27:04.554377 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:04 crc kubenswrapper[4762]: I1009 13:27:04.554394 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:04Z","lastTransitionTime":"2025-10-09T13:27:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:27:04 crc kubenswrapper[4762]: I1009 13:27:04.657884 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:04 crc kubenswrapper[4762]: I1009 13:27:04.658195 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:04 crc kubenswrapper[4762]: I1009 13:27:04.658328 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:04 crc kubenswrapper[4762]: I1009 13:27:04.658428 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:04 crc kubenswrapper[4762]: I1009 13:27:04.658509 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:04Z","lastTransitionTime":"2025-10-09T13:27:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:27:04 crc kubenswrapper[4762]: I1009 13:27:04.761154 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:04 crc kubenswrapper[4762]: I1009 13:27:04.761541 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:04 crc kubenswrapper[4762]: I1009 13:27:04.761795 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:04 crc kubenswrapper[4762]: I1009 13:27:04.762196 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:04 crc kubenswrapper[4762]: I1009 13:27:04.762493 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:04Z","lastTransitionTime":"2025-10-09T13:27:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:27:04 crc kubenswrapper[4762]: I1009 13:27:04.865570 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:04 crc kubenswrapper[4762]: I1009 13:27:04.865648 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:04 crc kubenswrapper[4762]: I1009 13:27:04.865663 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:04 crc kubenswrapper[4762]: I1009 13:27:04.865679 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:04 crc kubenswrapper[4762]: I1009 13:27:04.865691 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:04Z","lastTransitionTime":"2025-10-09T13:27:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:27:04 crc kubenswrapper[4762]: I1009 13:27:04.964095 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 13:27:04 crc kubenswrapper[4762]: E1009 13:27:04.964233 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 13:27:04 crc kubenswrapper[4762]: I1009 13:27:04.964298 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 13:27:04 crc kubenswrapper[4762]: E1009 13:27:04.964458 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 13:27:04 crc kubenswrapper[4762]: I1009 13:27:04.968517 4762 scope.go:117] "RemoveContainer" containerID="25146c86fc625c912a6b7aab957ed31039aeede7d329cce8f28f0dd01f5ff39c" Oct 09 13:27:04 crc kubenswrapper[4762]: I1009 13:27:04.970007 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k4bwn" Oct 09 13:27:04 crc kubenswrapper[4762]: E1009 13:27:04.970360 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-jl67g_openshift-ovn-kubernetes(92662de9-9784-432a-92d2-a668f815e8fd)\"" pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" podUID="92662de9-9784-432a-92d2-a668f815e8fd" Oct 09 13:27:04 crc kubenswrapper[4762]: I1009 13:27:04.970452 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 13:27:04 crc kubenswrapper[4762]: E1009 13:27:04.970447 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k4bwn" podUID="f9a76399-c2ae-487b-a52c-f0e271fb1d20" Oct 09 13:27:04 crc kubenswrapper[4762]: E1009 13:27:04.970583 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 13:27:04 crc kubenswrapper[4762]: I1009 13:27:04.972533 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:04 crc kubenswrapper[4762]: I1009 13:27:04.972588 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:04 crc kubenswrapper[4762]: I1009 13:27:04.972603 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:04 crc kubenswrapper[4762]: I1009 13:27:04.972623 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:04 crc kubenswrapper[4762]: I1009 13:27:04.972658 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:04Z","lastTransitionTime":"2025-10-09T13:27:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:27:04 crc kubenswrapper[4762]: I1009 13:27:04.982385 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-k4bwn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9a76399-c2ae-487b-a52c-f0e271fb1d20\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vrjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vrjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:58Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-k4bwn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:27:04Z is after 2025-08-24T17:21:41Z" Oct 09 13:27:05 crc kubenswrapper[4762]: I1009 13:27:05.000655 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dd0d2d4c-667f-43da-8074-b6e14823b755\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78a7e23eb6d5024d626963a06cf5790fcd6c7c17c82c823b2650c55273e427fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f807722a8b6059afed30f7f1fd32bcc168b8bf9d5eee02d74a42ab70ae5ff048\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1706f7f512083d1da015da3c7cd09c6aa4d497b83f8dfcd4ce0e8e966aa00b37\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd9c69d0be5e859f86da1745bdf82f003681f064e2580bfd454e6ba875bdcb61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:25Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:27:04Z is after 2025-08-24T17:21:41Z" Oct 09 13:27:05 crc kubenswrapper[4762]: I1009 13:27:05.027948 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"168b74aa-f734-410a-b03c-d8931db8bb97\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://025917894eab9a67ab97ec3d33e3e2b9e1313925a893b0e18026e1e0f155e8cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f970257c84754803de27650f6dcdba8432098673157e828a41dfb4659af2454\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07
b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://232dc8813083fa0b44df5f73c0ab49aa56d62eda083f31ebec6c543227e48394\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92eac49b615ef93fb3fa9544df792c0a1f6b175cc385469b69d9e4a176fdca06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://516131bb20004a365fbe66980711fd290d4eb46c4834e139efcda05b39195ffa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9be8d76182b45f0dad1e30345a25c5134d579273a7a2d7fdf757051ad650e5dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\
\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9be8d76182b45f0dad1e30345a25c5134d579273a7a2d7fdf757051ad650e5dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://004fd5e1204b81d04c962a262ed8496bb94c8d3ac524afc6089b019afddd710f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://004fd5e1204b81d04c962a262ed8496bb94c8d3ac524afc6089b019afddd710f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7078aa7267a6d6436d943691ce8993f4427412e4adbf7d769309410f0e3bf71d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7078aa7267a6d6436d943691ce8993f4427412e4adbf7d769309410f0e3bf71d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:25Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:27:05Z is after 2025-08-24T17:21:41Z" Oct 09 13:27:05 crc kubenswrapper[4762]: I1009 13:27:05.047145 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:27:05Z is after 2025-08-24T17:21:41Z" Oct 09 13:27:05 crc kubenswrapper[4762]: I1009 13:27:05.064106 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d2fc9df0c1cc14b4b9f8caff51e87059aeffaa2daeeb271d55585f7297d003c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4194b9211ce31f24383b3cabd274bfb9afef0c56583b802cb2c934ba81b05c43\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:27:05Z is after 2025-08-24T17:21:41Z" Oct 09 13:27:05 crc kubenswrapper[4762]: I1009 13:27:05.074789 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:05 crc kubenswrapper[4762]: I1009 13:27:05.074871 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:05 crc kubenswrapper[4762]: I1009 13:27:05.074889 4762 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Oct 09 13:27:05 crc kubenswrapper[4762]: I1009 13:27:05.074947 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:05 crc kubenswrapper[4762]: I1009 13:27:05.074967 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:05Z","lastTransitionTime":"2025-10-09T13:27:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:27:05 crc kubenswrapper[4762]: I1009 13:27:05.082837 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"366049a3-acf6-488c-9f93-4557528d6d14\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbaae79b08d94e58f88c25cf641c2c24edc8f8ed5d5ffbf5fd3c68b24246a964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://be56bae2e58091d7381288b22608ea1d9ff05c002d923b3dc62b87fe4d4dfdc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\
\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5v6hv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:27:05Z is after 2025-08-24T17:21:41Z" Oct 09 13:27:05 crc kubenswrapper[4762]: I1009 13:27:05.099588 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"804a251f-1e3f-4b9e-af10-eefa332e6e98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb1fed96810d93e59d24d69fb8a2eb9974e8a5e524465daea3ff2cca16f7226c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7ad35bed43586dfec502056ecb5226049a7fb25461c2774fb5377102fd2ce85d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\
":\\\"cri-o://4d3cc3e07b5fc9069f7faa521fa94efde90d9c79940876a93849c44a14327e56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a492a7a956d54533afbd3869796642235d0c8ca621c550c14eeab5e988fc4364\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a492a7a956d54533afbd3869796642235d0c8ca621c550c14eeab5e988fc4364\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:25Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:27:05Z is after 2025-08-24T17:21:41Z" Oct 09 13:27:05 crc kubenswrapper[4762]: I1009 13:27:05.114727 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42c46f24d4579b9ef6d5f7a351830fc24872a571e6bc26a163bb5ace1e688037\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:27:05Z is after 2025-08-24T17:21:41Z" Oct 09 13:27:05 crc kubenswrapper[4762]: I1009 13:27:05.130653 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b59b56cd547bd0e2f72ad712b04651a4ae65cf3a0df7865c0b0c16478261a06a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:27:05Z is after 2025-08-24T17:21:41Z" Oct 09 13:27:05 crc kubenswrapper[4762]: I1009 13:27:05.146293 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:27:05Z is after 2025-08-24T17:21:41Z" Oct 09 13:27:05 crc kubenswrapper[4762]: I1009 13:27:05.166712 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"92662de9-9784-432a-92d2-a668f815e8fd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c409df5880861cef6885822a19dc9bbe481342a849c18ac11c85a60fcee0f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9dbd3d536f2ff0e46947e1516b3b9def208d490f5e62bbde5bebf37690d26ac0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a6b9ddcf6f9632e0ab1ac7f145c90d4c1e404b44f6e4fdc547fa42a4736448d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1fe15b6fa2a4089c0ef0b19180a44b570bf28aeb719e8fb5c960c16f3bc3ee5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://443d1d116c58f5d8b2c5fc9051baf914244cb0776b1f912d11fe4316a0ec0567\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a6b026d474235bbb7b31530b4628a10c35b22baf4ee49759f28a9beb8177989\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://25146c86fc625c912a6b7aab957ed31039aeede7
d329cce8f28f0dd01f5ff39c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1dc4404ab9e73409695946479f2738c0f0af702da13af2d1995bf63ef0e493d0\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T13:26:20Z\\\",\\\"message\\\":\\\"ble to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:20Z is after 2025-08-24T17:21:41Z]\\\\nI1009 13:26:20.768193 6440 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-kube-controller-manager-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"3ec9f67e-7758-4707-a6d0-2dc28f28ac37\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-kube-controller-manager-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterL\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T13:26:20Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://25146c86fc625c912a6b7aab957ed31039aeede7d329cce8f28f0dd01f5ff39c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T13:26:49Z\\\",\\\"message\\\":\\\"ft-marketplace/marketplace-operator-metrics LB template configs for network=default: []services.lbConfig(nil)\\\\nF1009 13:26:49.799608 6832 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:49Z is after 2025-08-24T17:21:41Z]\\\\nI1009 13:26:49.798820 6832 obj_retry.go:303] Retry object setup: *v1.Pod 
openshift-kube-controller-manager/kube-controller-manager-crc\\\\nI1009 13:26:49.799620 6832 services_controller.go:451] Built service openshift-marketplace/marketplace-operator-metrics cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-marketplace/marketplace-operator-metrics_TCP_cluster\\\\\\\", UUI\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T13:26:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f5be977653547c33b4d2d5184688120b32866045e3b18a08be1c7c406d6b498\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnl
y\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bd03e6b064630a1cd71d2e88a4e99d513b30d0f225516ce8030cba879fe2422e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd03e6b064630a1cd71d2e88a4e99d513b30d0f225516ce8030cba879fe2422e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jl67g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:27:05Z is after 2025-08-24T17:21:41Z" Oct 09 13:27:05 crc kubenswrapper[4762]: I1009 13:27:05.177604 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:05 crc kubenswrapper[4762]: I1009 13:27:05.177682 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:05 crc kubenswrapper[4762]: I1009 13:27:05.177692 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:05 crc kubenswrapper[4762]: I1009 13:27:05.177709 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:05 crc kubenswrapper[4762]: I1009 13:27:05.177719 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:05Z","lastTransitionTime":"2025-10-09T13:27:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:27:05 crc kubenswrapper[4762]: I1009 13:27:05.187117 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7bd1ecbd-1492-4e6a-87e8-1c913e084d9d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9ff1efe69d256b491a039e5f35442c087ce3b52fc7abf98b338e24c3e020b99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb2071dd369674ca2de7de56dd1250c763b8733d72889b60eff864774dc3d81b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e44b6f2021a1a4ccd714f86443c7cc235b9d77cd455e68f7e042281ff0917569\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f33b9070a56fe51d2f39d9d509fc8cea2fada696703209c911b75f5c8f53e96d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9179f90a1a9a3c70467429b0471320ccf51b67f27c4d28d22ebc477cedab17d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1009 13:25:38.564754 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 13:25:38.572923 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2393231961/tls.crt::/tmp/serving-cert-2393231961/tls.key\\\\\\\"\\\\nI1009 13:25:44.418137 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 13:25:44.425303 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 13:25:44.425330 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 13:25:44.425348 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 13:25:44.425353 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 13:25:44.434300 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1009 13:25:44.434319 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1009 13:25:44.434323 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 13:25:44.434340 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 13:25:44.434344 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 13:25:44.434347 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 13:25:44.434350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 13:25:44.434353 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1009 13:25:44.436492 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:28Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://281873dc615f940d39a13cb0a18a2eb34eb7de3f9773d8845183edeb89d430f5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b0cc5e4351c64f1a4f07f8ec87ea48ddab393d4ac64228e8fbf20d3259fb630\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6b0cc5e4351c64f1a4f07f8ec87ea48ddab393d4ac64228e8fbf20d3259fb630\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:25Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:27:05Z is after 2025-08-24T17:21:41Z" Oct 09 13:27:05 crc kubenswrapper[4762]: I1009 13:27:05.200621 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"55e33db5-e82b-4449-9a01-265a7d9b74c3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d5e61326d740880d50b889eb69f101fcc0e40a557ec6d8e76ce770ec7d456fff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21b9ead59eaf920882c1503023924b621376395c684e0c7490490b77abfc18aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://21b9ead59eaf920882c1503023924b621376395c684e0c7490490b77abfc18aa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:27:05Z is after 2025-08-24T17:21:41Z" Oct 09 13:27:05 crc kubenswrapper[4762]: I1009 13:27:05.213079 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2vkbh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"263d57f9-b10b-4ce1-adad-774600b977d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://732b66ff58c48b0703e0fd4585768652035af6797f66b586fc6f17ef3937d9d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gmcr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2vkbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:27:05Z is after 2025-08-24T17:21:41Z" Oct 09 13:27:05 crc kubenswrapper[4762]: I1009 13:27:05.228407 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-9wtqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c847aae6-277a-45dc-86d0-9b175f7e8177\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f95093a278093400b0692e3f8cbe050c5586fbcf1ad29c7b758613ab6ac76af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbc232c96b60c8678588d4902c8dfbf6fc0b30f8af768295c963aad3a9f4d644\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T13:26:32Z\\\",\\\"message\\\":\\\"2025-10-09T13:25:46+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_cd21724e-0215-496f-b682-23c2705fa0b1\\\\n2025-10-09T13:25:46+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_cd21724e-0215-496f-b682-23c2705fa0b1 to /host/opt/cni/bin/\\\\n2025-10-09T13:25:47Z [verbose] multus-daemon started\\\\n2025-10-09T13:25:47Z [verbose] Readiness Indicator file check\\\\n2025-10-09T13:26:32Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:26:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2kljt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-9wtqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:27:05Z is after 2025-08-24T17:21:41Z" Oct 09 13:27:05 crc kubenswrapper[4762]: I1009 13:27:05.242520 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fx92z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf0dada3-5765-4a2e-b28a-f9291c2d6428\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6368fc75a63ebf7915390457c69a6a9b77e19726ba182437f5c616bea12245f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xpd4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aeff516f9d81e48c8bd350da51ebffc8f6f031f06100a5264cb7aca04674f79f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xpd4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fx92z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:27:05Z is after 2025-08-24T17:21:41Z" Oct 09 
13:27:05 crc kubenswrapper[4762]: I1009 13:27:05.254967 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:27:05Z is after 2025-08-24T17:21:41Z" Oct 09 13:27:05 crc kubenswrapper[4762]: I1009 13:27:05.274846 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-n6lnd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf8b8ba7-96cd-4cdd-9925-94dd98242050\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ff1d6f7e6b424ebf8005fa3d140897100a8e7ed0095e7af05531cf5ad9f69b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://38f8c3c9395cbaf4a6426349a070b2d3b4ba4f83af8f5272a33d617f456c2e38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://38f8c3c9395cbaf4a6426349a070b2d3b4ba4f83af8f5272a33d617f456c2e38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a83c8fa2a9b44c19879eaa27ee0aceb5aa4f0c2d70347e497a62fc1ca236807\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a83c8fa2a9b44c19879eaa27ee0aceb5aa4f0c2d70347e497a62fc1ca236807\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ece4883665cbaf9a3045d5ddde5584cc4cd6c3acb5a8bb3acbf7eaee644796e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ece4883665cbaf9a3045d5ddde5584cc4cd6c3acb5a8bb3acbf7eaee644796e9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1aae59e2ff2b1d48f9d2b5d13c30ccc155f0ba8e2657ec99020d6c6a8977495e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1aae59e2ff2b1d48f9d2b5d13c30ccc155f0ba8e2657ec99020d6c6a8977495e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f1ff0c4b2b07617d5ea81b1d88ea74124e4739d667f1ad72eb36dad3d48e7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96f1ff0c4b2b07617d5ea81b1d88ea74124e4739d667f1ad72eb36dad3d48e7f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa9e24a4bd37675e1c6b4f6ba08d60b722add0bbcdf9eb51ace2e3fb35143d0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa9e24a4bd37675e1c6b4f6ba08d60b722add0bbcdf9eb51ace2e3fb35143d0d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-n6lnd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:27:05Z is after 2025-08-24T17:21:41Z" Oct 09 13:27:05 crc kubenswrapper[4762]: I1009 13:27:05.279997 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:05 crc kubenswrapper[4762]: I1009 13:27:05.280025 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:05 crc 
kubenswrapper[4762]: I1009 13:27:05.280035 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:05 crc kubenswrapper[4762]: I1009 13:27:05.280052 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:05 crc kubenswrapper[4762]: I1009 13:27:05.280065 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:05Z","lastTransitionTime":"2025-10-09T13:27:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:27:05 crc kubenswrapper[4762]: I1009 13:27:05.285831 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bj499" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b164d4fd-19df-4902-971f-5efe403e61e0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ab43a3e93731e2de1e0260a1933cc78188e7b561cb450d645c7b4fc00b691ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqf4b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:51Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bj499\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:27:05Z is after 2025-08-24T17:21:41Z" Oct 
09 13:27:05 crc kubenswrapper[4762]: I1009 13:27:05.297986 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b59b56cd547bd0e2f72ad712b04651a4ae65cf3a0df7865c0b0c16478261a06a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:27:05Z is after 2025-08-24T17:21:41Z" Oct 09 13:27:05 crc kubenswrapper[4762]: I1009 13:27:05.312122 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:27:05Z is after 2025-08-24T17:21:41Z" Oct 09 13:27:05 crc kubenswrapper[4762]: I1009 13:27:05.333294 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"92662de9-9784-432a-92d2-a668f815e8fd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c409df5880861cef6885822a19dc9bbe481342a849c18ac11c85a60fcee0f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9dbd3d536f2ff0e46947e1516b3b9def208d490f5e62bbde5bebf37690d26ac0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a6b9ddcf6f9632e0ab1ac7f145c90d4c1e404b44f6e4fdc547fa42a4736448d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1fe15b6fa2a4089c0ef0b19180a44b570bf28aeb719e8fb5c960c16f3bc3ee5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://443d1d116c58f5d8b2c5fc9051baf914244cb0776b1f912d11fe4316a0ec0567\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a6b026d474235bbb7b31530b4628a10c35b22baf4ee49759f28a9beb8177989\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://25146c86fc625c912a6b7aab957ed31039aeede7
d329cce8f28f0dd01f5ff39c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://25146c86fc625c912a6b7aab957ed31039aeede7d329cce8f28f0dd01f5ff39c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T13:26:49Z\\\",\\\"message\\\":\\\"ft-marketplace/marketplace-operator-metrics LB template configs for network=default: []services.lbConfig(nil)\\\\nF1009 13:26:49.799608 6832 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:26:49Z is after 2025-08-24T17:21:41Z]\\\\nI1009 13:26:49.798820 6832 obj_retry.go:303] Retry object setup: *v1.Pod openshift-kube-controller-manager/kube-controller-manager-crc\\\\nI1009 13:26:49.799620 6832 services_controller.go:451] Built service openshift-marketplace/marketplace-operator-metrics cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-marketplace/marketplace-operator-metrics_TCP_cluster\\\\\\\", UUI\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T13:26:48Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-jl67g_openshift-ovn-kubernetes(92662de9-9784-432a-92d2-a668f815e8fd)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f5be977653547c33b4d2d5184688120b32866045e3b18a08be1c7c406d6b498\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bd03e6b064630a1cd71d2e88a4e99d513b30d0f225516ce8030cba879fe2422e\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd03e6b064630a1cd71d2e88a4e99d513b30d0f225516ce8030cba879fe2422e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg6s6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jl67g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:27:05Z is after 2025-08-24T17:21:41Z" Oct 09 13:27:05 crc kubenswrapper[4762]: I1009 13:27:05.349218 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"804a251f-1e3f-4b9e-af10-eefa332e6e98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb1fed96810d93e59d24d69fb8a2eb9974e8a5e524465daea3ff2cca16f7226c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7ad35bed43586dfec502056ecb5226049a7fb25461c2774fb5377102fd2ce85d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c
97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d3cc3e07b5fc9069f7faa521fa94efde90d9c79940876a93849c44a14327e56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a492a7a956d54533afbd3869796642235d0c8ca621c550c14eeab5e988fc4364\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a492a7a956d54533afbd3869796642235d0c8ca621c550c14eeab5e988fc4364\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:25Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:27:05Z is after 2025-08-24T17:21:41Z" Oct 09 13:27:05 crc kubenswrapper[4762]: I1009 13:27:05.367161 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42c46f24d4579b9ef6d5f7a351830fc24872a571e6bc26a163bb5ace1e688037\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:27:05Z is after 2025-08-24T17:21:41Z" Oct 09 13:27:05 crc kubenswrapper[4762]: I1009 13:27:05.380655 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2vkbh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"263d57f9-b10b-4ce1-adad-774600b977d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://732b66ff58c48b0703e0fd4585768652035af6797f66b586fc6f17ef3937d9d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gmcr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2vkbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:27:05Z is after 2025-08-24T17:21:41Z" Oct 09 13:27:05 crc kubenswrapper[4762]: I1009 13:27:05.382779 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:05 crc kubenswrapper[4762]: I1009 13:27:05.382838 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:05 crc kubenswrapper[4762]: I1009 13:27:05.382855 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:05 crc kubenswrapper[4762]: I1009 13:27:05.382878 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:05 crc kubenswrapper[4762]: I1009 13:27:05.382891 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:05Z","lastTransitionTime":"2025-10-09T13:27:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:27:05 crc kubenswrapper[4762]: I1009 13:27:05.396308 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-9wtqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c847aae6-277a-45dc-86d0-9b175f7e8177\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:26:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f95093a278093400b0692e3f8cbe050c5586fbcf1ad29c7b758613ab6ac76af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbc232c96b60c8678588d4902c8dfbf6fc0b30f8af768295c963aad3a9f4d644\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T13:26:32Z\\\",\\\"message\\\":\\\"2025-10-09T13:25:46+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_cd21724e-0215-496f-b682-23c2705fa0b1\\\\n2025-10-09T13:25:46+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_cd21724e-0215-496f-b682-23c2705fa0b1 to /host/opt/cni/bin/\\\\n2025-10-09T13:25:47Z [verbose] multus-daemon started\\\\n2025-10-09T13:25:47Z [verbose] Readiness Indicator file check\\\\n2025-10-09T13:26:32Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:26:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2kljt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-9wtqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:27:05Z is after 2025-08-24T17:21:41Z" Oct 09 13:27:05 crc kubenswrapper[4762]: I1009 13:27:05.410400 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fx92z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf0dada3-5765-4a2e-b28a-f9291c2d6428\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6368fc75a63ebf7915390457c69a6a9b77e19726ba182437f5c616bea12245f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xpd4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aeff516f9d81e48c8bd350da51ebffc8f6f031f06100a5264cb7aca04674f79f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8xpd4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fx92z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:27:05Z is after 2025-08-24T17:21:41Z" Oct 09 
13:27:05 crc kubenswrapper[4762]: I1009 13:27:05.430000 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7bd1ecbd-1492-4e6a-87e8-1c913e084d9d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9ff1efe69d256b491a039e5f35442c087ce3b52fc7abf98b338e24c3e020b99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb2071dd369674ca2de7de56dd1250c763b8733d72889b60eff864774dc3d81b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e44b6f2021a1a4ccd714f86443c7cc235b9d77cd455e68f7e042281ff0917569\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\
\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f33b9070a56fe51d2f39d9d509fc8cea2fada696703209c911b75f5c8f53e96d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9179f90a1a9a3c70467429b0471320ccf51b67f27c4d28d22ebc477cedab17d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1009 13:25:38.564754 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 13:25:38.572923 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2393231961/tls.crt::/tmp/serving-cert-2393231961/tls.key\\\\\\\"\\\\nI1009 13:25:44.418137 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 13:25:44.425303 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 13:25:44.425330 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 13:25:44.425348 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 13:25:44.425353 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 13:25:44.434300 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1009 13:25:44.434319 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1009 13:25:44.434323 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 13:25:44.434340 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 13:25:44.434344 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 13:25:44.434347 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 13:25:44.434350 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 13:25:44.434353 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1009 13:25:44.436492 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:28Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://281873dc615f940d39a13cb0a18a2eb34eb7de3f9773d8845183edeb89d430f5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b0cc5e4351c64f1a4f07f8ec87ea48ddab393d4ac64228e8fbf20d3259fb630\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6b0cc5e4351c64f1a4f07f8ec87ea48ddab393d4ac64228e8fbf20d3259fb630\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:25Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:27:05Z is after 2025-08-24T17:21:41Z" Oct 09 13:27:05 crc kubenswrapper[4762]: I1009 13:27:05.445816 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"55e33db5-e82b-4449-9a01-265a7d9b74c3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d5e61326d740880d50b889eb69f101fcc0e40a557ec6d8e76ce770ec7d456fff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21b9ead59eaf920882c1503023924b621376395c684e0c7490490b77abfc18aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://21b9ead59eaf920882c1503023924b621376395c684e0c7490490b77abfc18aa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:27:05Z is after 2025-08-24T17:21:41Z" Oct 09 13:27:05 crc kubenswrapper[4762]: I1009 13:27:05.463010 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bj499" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b164d4fd-19df-4902-971f-5efe403e61e0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ab43a3e93731e2de1e0260a1933cc78188e7b561cb450d645c7b4fc00b691ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqf4b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:51Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bj499\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:27:05Z is after 2025-08-24T17:21:41Z" Oct 09 13:27:05 crc kubenswrapper[4762]: I1009 13:27:05.480201 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:27:05Z is after 2025-08-24T17:21:41Z" Oct 09 13:27:05 crc kubenswrapper[4762]: I1009 13:27:05.485029 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:05 crc kubenswrapper[4762]: I1009 13:27:05.485083 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:05 crc kubenswrapper[4762]: I1009 13:27:05.485097 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:05 crc kubenswrapper[4762]: I1009 13:27:05.485120 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:05 crc kubenswrapper[4762]: I1009 13:27:05.485131 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:05Z","lastTransitionTime":"2025-10-09T13:27:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:27:05 crc kubenswrapper[4762]: I1009 13:27:05.499105 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-n6lnd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf8b8ba7-96cd-4cdd-9925-94dd98242050\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ff1d6f7e6b424ebf8005fa3d140897100a8e7ed0095e7af05531cf5ad9f69b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://38f8c3c9395cbaf4a6426349a070b2d3b4ba4f83af8f5272a33d617f456c2e38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://38f8c3c9395cbaf4a6426349a070b2d3b4ba4f83af8f5272a33d617f456c2e38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a83c8fa2a9b44c19879eaa27ee0aceb5aa4f0c2d70347e497a62fc1ca236807\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a83c8fa2a9b44c19879eaa27ee0aceb5aa4f0c2d70347e497a62fc1ca236807\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ece4883665cbaf9a3045d5ddde5584cc4cd6c3acb5a8bb3acbf7eaee644796e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ece4883665cbaf9a3045d5ddde5584cc4cd6c3acb5a8bb3acbf7eaee644796e9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1aae59e2ff2b1d48f9d2b5d13c30ccc155f0ba8e2657ec99020d6c6a8977495e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1aae59e2ff2b1d48f9d2b5d13c30ccc155f0ba8e2657ec99020d6c6a8977495e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96f1ff0c4b2b07617d5ea81b1d88ea74124e4739d667f1ad72eb36dad3d48e7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96f1ff0c4b2b07617d5ea81b1d88ea74124e4739d667f1ad72eb36dad3d48e7f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa9e24a4bd37675e1c6b4f6ba08d60b722add0bbcdf9eb51ace2e3fb35143d0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa9e24a4bd37675e1c6b4f6ba08d60b722add0bbcdf9eb51ace2e3fb35143d0d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n248\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-n6lnd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:27:05Z is after 2025-08-24T17:21:41Z" Oct 09 13:27:05 crc kubenswrapper[4762]: I1009 13:27:05.528201 4762 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:27:05Z is after 2025-08-24T17:21:41Z" Oct 09 13:27:05 crc kubenswrapper[4762]: I1009 13:27:05.546523 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d2fc9df0c1cc14b4b9f8caff51e87059aeffaa2daeeb271d55585f7297d003c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4194b9211ce31f24383b3cabd274bfb9afef0c56583b802cb2c934ba81b05c43\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:27:05Z is after 2025-08-24T17:21:41Z" Oct 09 13:27:05 crc kubenswrapper[4762]: I1009 13:27:05.561703 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"366049a3-acf6-488c-9f93-4557528d6d14\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbaae79b08d94e58f88c25cf641c2c24edc8f8ed5d5ffbf5fd3c68b24246a964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://be56bae2e58091d7381288b22608ea1d9ff05c002d923b3dc62b87fe4d4dfdc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4hhqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5v6hv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:27:05Z is after 2025-08-24T17:21:41Z" Oct 09 13:27:05 crc kubenswrapper[4762]: I1009 13:27:05.574104 4762 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/network-metrics-daemon-k4bwn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9a76399-c2ae-487b-a52c-f0e271fb1d20\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vrjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2vrjl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:58Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-k4bwn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:27:05Z is after 2025-08-24T17:21:41Z" Oct 09 13:27:05 crc kubenswrapper[4762]: I1009 13:27:05.587122 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:05 crc kubenswrapper[4762]: I1009 13:27:05.587171 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:05 crc kubenswrapper[4762]: I1009 
13:27:05.587183 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:05 crc kubenswrapper[4762]: I1009 13:27:05.587200 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:05 crc kubenswrapper[4762]: I1009 13:27:05.587212 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:05Z","lastTransitionTime":"2025-10-09T13:27:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:27:05 crc kubenswrapper[4762]: I1009 13:27:05.588812 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dd0d2d4c-667f-43da-8074-b6e14823b755\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78a7e23eb6d5024d626963a06cf5790fcd6c7c17c82c823b2650c55273e427fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f807722a8b6059afed30f7f1fd32bcc168b8bf9d5eee02d74a42ab70ae5ff048\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://17
06f7f512083d1da015da3c7cd09c6aa4d497b83f8dfcd4ce0e8e966aa00b37\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd9c69d0be5e859f86da1745bdf82f003681f064e2580bfd454e6ba875bdcb61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:25Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:27:05Z is after 2025-08-24T17:21:41Z" Oct 09 13:27:05 crc kubenswrapper[4762]: I1009 13:27:05.610601 4762 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"168b74aa-f734-410a-b03c-d8931db8bb97\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T13:25:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://025917894eab9a67ab97ec3d33e3e2b9e1313925a893b0e18026e1e0f155e8cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f970257c84754803de27650f6dcdba8432098673157e828a41dfb4659af2454\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://232dc8813083fa0b44df5f73c0ab49aa56d62eda083f31ebec6c543227e48394\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92eac49b615ef93fb3fa9544df792c0a1f6b175
cc385469b69d9e4a176fdca06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://516131bb20004a365fbe66980711fd290d4eb46c4834e139efcda05b39195ffa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T13:25:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9be8d76182b45f0dad1e30345a25c5134d579273a7a2d7fdf757051ad650e5dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9be8d76182b45f0dad1e30345a25c5134d579273a7a2d7fdf757051ad650e5dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://004fd5e1204b81d04c962a262ed8496bb94c8d3ac524afc6089b019afddd710f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://004fd5e1204b81d04c962a262ed8496bb94c8d3ac524afc6089b019afddd710f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:27Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7078aa7267a6d6436d943691ce8993f4427412e4adbf7d769309410f0e3bf71d\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7078aa7267a6d6436d943691ce8993f4427412e4adbf7d769309410f0e3bf71d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T13:25:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T13:25:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T13:25:25Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T13:27:05Z is after 2025-08-24T17:21:41Z" Oct 09 13:27:05 crc kubenswrapper[4762]: I1009 13:27:05.689720 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:05 crc kubenswrapper[4762]: I1009 13:27:05.690071 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:05 crc kubenswrapper[4762]: I1009 13:27:05.690224 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:05 crc kubenswrapper[4762]: I1009 13:27:05.690364 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:05 crc kubenswrapper[4762]: I1009 13:27:05.690501 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:05Z","lastTransitionTime":"2025-10-09T13:27:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Oct 09 13:27:05 crc kubenswrapper[4762]: I1009 13:27:05.793472 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 09 13:27:05 crc kubenswrapper[4762]: I1009 13:27:05.793513 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 09 13:27:05 crc kubenswrapper[4762]: I1009 13:27:05.793523 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 09 13:27:05 crc kubenswrapper[4762]: I1009 13:27:05.793537 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 09 13:27:05 crc kubenswrapper[4762]: I1009 13:27:05.793547 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:05Z","lastTransitionTime":"2025-10-09T13:27:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 09 13:27:05 crc kubenswrapper[4762]: I1009 13:27:05.896289 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 09 13:27:05 crc kubenswrapper[4762]: I1009 13:27:05.896673 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 09 13:27:05 crc kubenswrapper[4762]: I1009 13:27:05.896818 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 09 13:27:05 crc kubenswrapper[4762]: I1009 13:27:05.896957 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 09 13:27:05 crc kubenswrapper[4762]: I1009 13:27:05.897087 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:05Z","lastTransitionTime":"2025-10-09T13:27:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 09 13:27:06 crc kubenswrapper[4762]: I1009 13:27:06.000998 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 09 13:27:06 crc kubenswrapper[4762]: I1009 13:27:06.001082 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 09 13:27:06 crc kubenswrapper[4762]: I1009 13:27:06.001109 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 09 13:27:06 crc kubenswrapper[4762]: I1009 13:27:06.001140 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 09 13:27:06 crc kubenswrapper[4762]: I1009 13:27:06.001165 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:06Z","lastTransitionTime":"2025-10-09T13:27:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:27:06 crc kubenswrapper[4762]: I1009 13:27:06.103561 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:06 crc kubenswrapper[4762]: I1009 13:27:06.103601 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:06 crc kubenswrapper[4762]: I1009 13:27:06.103609 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:06 crc kubenswrapper[4762]: I1009 13:27:06.103623 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:06 crc kubenswrapper[4762]: I1009 13:27:06.103656 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:06Z","lastTransitionTime":"2025-10-09T13:27:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:27:06 crc kubenswrapper[4762]: I1009 13:27:06.206868 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:06 crc kubenswrapper[4762]: I1009 13:27:06.206919 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:06 crc kubenswrapper[4762]: I1009 13:27:06.206937 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:06 crc kubenswrapper[4762]: I1009 13:27:06.206959 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:06 crc kubenswrapper[4762]: I1009 13:27:06.206974 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:06Z","lastTransitionTime":"2025-10-09T13:27:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:27:06 crc kubenswrapper[4762]: I1009 13:27:06.310605 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:06 crc kubenswrapper[4762]: I1009 13:27:06.310720 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:06 crc kubenswrapper[4762]: I1009 13:27:06.310745 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:06 crc kubenswrapper[4762]: I1009 13:27:06.310776 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:06 crc kubenswrapper[4762]: I1009 13:27:06.310798 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:06Z","lastTransitionTime":"2025-10-09T13:27:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:27:06 crc kubenswrapper[4762]: I1009 13:27:06.413321 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:06 crc kubenswrapper[4762]: I1009 13:27:06.413379 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:06 crc kubenswrapper[4762]: I1009 13:27:06.413396 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:06 crc kubenswrapper[4762]: I1009 13:27:06.413418 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:06 crc kubenswrapper[4762]: I1009 13:27:06.413435 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:06Z","lastTransitionTime":"2025-10-09T13:27:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:27:06 crc kubenswrapper[4762]: I1009 13:27:06.515845 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:06 crc kubenswrapper[4762]: I1009 13:27:06.515899 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:06 crc kubenswrapper[4762]: I1009 13:27:06.515910 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:06 crc kubenswrapper[4762]: I1009 13:27:06.515928 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:06 crc kubenswrapper[4762]: I1009 13:27:06.515939 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:06Z","lastTransitionTime":"2025-10-09T13:27:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:27:06 crc kubenswrapper[4762]: I1009 13:27:06.617843 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:06 crc kubenswrapper[4762]: I1009 13:27:06.617883 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:06 crc kubenswrapper[4762]: I1009 13:27:06.617893 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:06 crc kubenswrapper[4762]: I1009 13:27:06.617907 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:06 crc kubenswrapper[4762]: I1009 13:27:06.617918 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:06Z","lastTransitionTime":"2025-10-09T13:27:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:27:06 crc kubenswrapper[4762]: I1009 13:27:06.719981 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:06 crc kubenswrapper[4762]: I1009 13:27:06.720030 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:06 crc kubenswrapper[4762]: I1009 13:27:06.720046 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:06 crc kubenswrapper[4762]: I1009 13:27:06.720066 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:06 crc kubenswrapper[4762]: I1009 13:27:06.720081 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:06Z","lastTransitionTime":"2025-10-09T13:27:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:27:06 crc kubenswrapper[4762]: I1009 13:27:06.823460 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:06 crc kubenswrapper[4762]: I1009 13:27:06.823506 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:06 crc kubenswrapper[4762]: I1009 13:27:06.823521 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:06 crc kubenswrapper[4762]: I1009 13:27:06.823543 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:06 crc kubenswrapper[4762]: I1009 13:27:06.823557 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:06Z","lastTransitionTime":"2025-10-09T13:27:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:27:06 crc kubenswrapper[4762]: I1009 13:27:06.926598 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:06 crc kubenswrapper[4762]: I1009 13:27:06.926722 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:06 crc kubenswrapper[4762]: I1009 13:27:06.926748 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:06 crc kubenswrapper[4762]: I1009 13:27:06.926778 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:06 crc kubenswrapper[4762]: I1009 13:27:06.926800 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:06Z","lastTransitionTime":"2025-10-09T13:27:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:27:06 crc kubenswrapper[4762]: I1009 13:27:06.964211 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 13:27:06 crc kubenswrapper[4762]: I1009 13:27:06.964261 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 13:27:06 crc kubenswrapper[4762]: I1009 13:27:06.964351 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k4bwn" Oct 09 13:27:06 crc kubenswrapper[4762]: E1009 13:27:06.964580 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 13:27:06 crc kubenswrapper[4762]: I1009 13:27:06.964674 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 13:27:06 crc kubenswrapper[4762]: E1009 13:27:06.964789 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k4bwn" podUID="f9a76399-c2ae-487b-a52c-f0e271fb1d20" Oct 09 13:27:06 crc kubenswrapper[4762]: E1009 13:27:06.964900 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 13:27:06 crc kubenswrapper[4762]: E1009 13:27:06.964961 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 13:27:07 crc kubenswrapper[4762]: I1009 13:27:07.029009 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:07 crc kubenswrapper[4762]: I1009 13:27:07.029086 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:07 crc kubenswrapper[4762]: I1009 13:27:07.029109 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:07 crc kubenswrapper[4762]: I1009 13:27:07.029137 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:07 crc kubenswrapper[4762]: I1009 13:27:07.029158 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:07Z","lastTransitionTime":"2025-10-09T13:27:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:27:07 crc kubenswrapper[4762]: I1009 13:27:07.130881 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:07 crc kubenswrapper[4762]: I1009 13:27:07.130915 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:07 crc kubenswrapper[4762]: I1009 13:27:07.130925 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:07 crc kubenswrapper[4762]: I1009 13:27:07.130939 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:07 crc kubenswrapper[4762]: I1009 13:27:07.130950 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:07Z","lastTransitionTime":"2025-10-09T13:27:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:27:07 crc kubenswrapper[4762]: I1009 13:27:07.233231 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:07 crc kubenswrapper[4762]: I1009 13:27:07.233274 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:07 crc kubenswrapper[4762]: I1009 13:27:07.233283 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:07 crc kubenswrapper[4762]: I1009 13:27:07.233300 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:07 crc kubenswrapper[4762]: I1009 13:27:07.233309 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:07Z","lastTransitionTime":"2025-10-09T13:27:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:27:07 crc kubenswrapper[4762]: I1009 13:27:07.336411 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:07 crc kubenswrapper[4762]: I1009 13:27:07.336483 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:07 crc kubenswrapper[4762]: I1009 13:27:07.336505 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:07 crc kubenswrapper[4762]: I1009 13:27:07.336532 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:07 crc kubenswrapper[4762]: I1009 13:27:07.336553 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:07Z","lastTransitionTime":"2025-10-09T13:27:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:27:07 crc kubenswrapper[4762]: I1009 13:27:07.439174 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:07 crc kubenswrapper[4762]: I1009 13:27:07.439226 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:07 crc kubenswrapper[4762]: I1009 13:27:07.439243 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:07 crc kubenswrapper[4762]: I1009 13:27:07.439266 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:07 crc kubenswrapper[4762]: I1009 13:27:07.439283 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:07Z","lastTransitionTime":"2025-10-09T13:27:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:27:07 crc kubenswrapper[4762]: I1009 13:27:07.542266 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:07 crc kubenswrapper[4762]: I1009 13:27:07.542319 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:07 crc kubenswrapper[4762]: I1009 13:27:07.542329 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:07 crc kubenswrapper[4762]: I1009 13:27:07.542345 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:07 crc kubenswrapper[4762]: I1009 13:27:07.542355 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:07Z","lastTransitionTime":"2025-10-09T13:27:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:27:07 crc kubenswrapper[4762]: I1009 13:27:07.645081 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:07 crc kubenswrapper[4762]: I1009 13:27:07.645140 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:07 crc kubenswrapper[4762]: I1009 13:27:07.645157 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:07 crc kubenswrapper[4762]: I1009 13:27:07.645181 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:07 crc kubenswrapper[4762]: I1009 13:27:07.645199 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:07Z","lastTransitionTime":"2025-10-09T13:27:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:27:07 crc kubenswrapper[4762]: I1009 13:27:07.748398 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:07 crc kubenswrapper[4762]: I1009 13:27:07.748759 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:07 crc kubenswrapper[4762]: I1009 13:27:07.748860 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:07 crc kubenswrapper[4762]: I1009 13:27:07.748967 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:07 crc kubenswrapper[4762]: I1009 13:27:07.749050 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:07Z","lastTransitionTime":"2025-10-09T13:27:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:27:07 crc kubenswrapper[4762]: I1009 13:27:07.851383 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:07 crc kubenswrapper[4762]: I1009 13:27:07.851433 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:07 crc kubenswrapper[4762]: I1009 13:27:07.851444 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:07 crc kubenswrapper[4762]: I1009 13:27:07.851460 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:07 crc kubenswrapper[4762]: I1009 13:27:07.851499 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:07Z","lastTransitionTime":"2025-10-09T13:27:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:27:07 crc kubenswrapper[4762]: I1009 13:27:07.955121 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:07 crc kubenswrapper[4762]: I1009 13:27:07.955153 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:07 crc kubenswrapper[4762]: I1009 13:27:07.955163 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:07 crc kubenswrapper[4762]: I1009 13:27:07.955177 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:07 crc kubenswrapper[4762]: I1009 13:27:07.955187 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:07Z","lastTransitionTime":"2025-10-09T13:27:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:27:08 crc kubenswrapper[4762]: I1009 13:27:08.058506 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:08 crc kubenswrapper[4762]: I1009 13:27:08.058550 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:08 crc kubenswrapper[4762]: I1009 13:27:08.058559 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:08 crc kubenswrapper[4762]: I1009 13:27:08.058574 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:08 crc kubenswrapper[4762]: I1009 13:27:08.058584 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:08Z","lastTransitionTime":"2025-10-09T13:27:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:27:08 crc kubenswrapper[4762]: I1009 13:27:08.161025 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:08 crc kubenswrapper[4762]: I1009 13:27:08.161096 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:08 crc kubenswrapper[4762]: I1009 13:27:08.161120 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:08 crc kubenswrapper[4762]: I1009 13:27:08.161151 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:08 crc kubenswrapper[4762]: I1009 13:27:08.161177 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:08Z","lastTransitionTime":"2025-10-09T13:27:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:27:08 crc kubenswrapper[4762]: I1009 13:27:08.264248 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:08 crc kubenswrapper[4762]: I1009 13:27:08.264297 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:08 crc kubenswrapper[4762]: I1009 13:27:08.264313 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:08 crc kubenswrapper[4762]: I1009 13:27:08.264336 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:08 crc kubenswrapper[4762]: I1009 13:27:08.264352 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:08Z","lastTransitionTime":"2025-10-09T13:27:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:27:08 crc kubenswrapper[4762]: I1009 13:27:08.367444 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:08 crc kubenswrapper[4762]: I1009 13:27:08.367505 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:08 crc kubenswrapper[4762]: I1009 13:27:08.367523 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:08 crc kubenswrapper[4762]: I1009 13:27:08.367548 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:08 crc kubenswrapper[4762]: I1009 13:27:08.367570 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:08Z","lastTransitionTime":"2025-10-09T13:27:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:27:08 crc kubenswrapper[4762]: I1009 13:27:08.470159 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:08 crc kubenswrapper[4762]: I1009 13:27:08.470234 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:08 crc kubenswrapper[4762]: I1009 13:27:08.470250 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:08 crc kubenswrapper[4762]: I1009 13:27:08.470267 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:08 crc kubenswrapper[4762]: I1009 13:27:08.470279 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:08Z","lastTransitionTime":"2025-10-09T13:27:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:27:08 crc kubenswrapper[4762]: I1009 13:27:08.572819 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:08 crc kubenswrapper[4762]: I1009 13:27:08.572871 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:08 crc kubenswrapper[4762]: I1009 13:27:08.572891 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:08 crc kubenswrapper[4762]: I1009 13:27:08.572916 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:08 crc kubenswrapper[4762]: I1009 13:27:08.572956 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:08Z","lastTransitionTime":"2025-10-09T13:27:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:27:08 crc kubenswrapper[4762]: I1009 13:27:08.675423 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:08 crc kubenswrapper[4762]: I1009 13:27:08.675461 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:08 crc kubenswrapper[4762]: I1009 13:27:08.675471 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:08 crc kubenswrapper[4762]: I1009 13:27:08.675487 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:08 crc kubenswrapper[4762]: I1009 13:27:08.675496 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:08Z","lastTransitionTime":"2025-10-09T13:27:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:27:08 crc kubenswrapper[4762]: I1009 13:27:08.777911 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:08 crc kubenswrapper[4762]: I1009 13:27:08.777952 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:08 crc kubenswrapper[4762]: I1009 13:27:08.777965 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:08 crc kubenswrapper[4762]: I1009 13:27:08.777982 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:08 crc kubenswrapper[4762]: I1009 13:27:08.777996 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:08Z","lastTransitionTime":"2025-10-09T13:27:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:27:08 crc kubenswrapper[4762]: I1009 13:27:08.881422 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:08 crc kubenswrapper[4762]: I1009 13:27:08.881481 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:08 crc kubenswrapper[4762]: I1009 13:27:08.881499 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:08 crc kubenswrapper[4762]: I1009 13:27:08.881526 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:08 crc kubenswrapper[4762]: I1009 13:27:08.881547 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:08Z","lastTransitionTime":"2025-10-09T13:27:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:27:08 crc kubenswrapper[4762]: I1009 13:27:08.964474 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 13:27:08 crc kubenswrapper[4762]: E1009 13:27:08.964611 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 13:27:08 crc kubenswrapper[4762]: I1009 13:27:08.964706 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 13:27:08 crc kubenswrapper[4762]: E1009 13:27:08.964768 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 13:27:08 crc kubenswrapper[4762]: I1009 13:27:08.965106 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 13:27:08 crc kubenswrapper[4762]: I1009 13:27:08.965201 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k4bwn" Oct 09 13:27:08 crc kubenswrapper[4762]: E1009 13:27:08.965381 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 13:27:08 crc kubenswrapper[4762]: E1009 13:27:08.965862 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k4bwn" podUID="f9a76399-c2ae-487b-a52c-f0e271fb1d20" Oct 09 13:27:08 crc kubenswrapper[4762]: I1009 13:27:08.984080 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:08 crc kubenswrapper[4762]: I1009 13:27:08.984145 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:08 crc kubenswrapper[4762]: I1009 13:27:08.984162 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:08 crc kubenswrapper[4762]: I1009 13:27:08.984188 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:08 crc kubenswrapper[4762]: I1009 13:27:08.984205 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:08Z","lastTransitionTime":"2025-10-09T13:27:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
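The kubelet re-logs the block above on every node-status tick until a CNI config file appears in /etc/kubernetes/cni/net.d/. A minimal standalone sketch of that readiness test, in Go (a hypothetical helper, not the kubelet's actual implementation; only the directory path and error wording are taken from the log):

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

// cniConfDir is the directory the log entries above complain about.
const cniConfDir = "/etc/kubernetes/cni/net.d"

// checkCNIReady reports an error when no CNI network config is present,
// mirroring the logged NetworkReady=false condition. Illustrative only.
func checkCNIReady(dir string) error {
	var found []string
	for _, pat := range []string{"*.conf", "*.conflist", "*.json"} {
		m, err := filepath.Glob(filepath.Join(dir, pat))
		if err != nil {
			return err
		}
		found = append(found, m...)
	}
	if len(found) == 0 {
		return fmt.Errorf("no CNI configuration file in %s/. Has your network provider started?", dir)
	}
	return nil
}

func main() {
	if err := checkCNIReady(cniConfDir); err != nil {
		fmt.Fprintln(os.Stderr, "NetworkReady=false:", err)
		os.Exit(1)
	}
	fmt.Println("NetworkReady=true")
}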
Has your network provider started?"} Oct 09 13:27:09 crc kubenswrapper[4762]: I1009 13:27:09.087153 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:09 crc kubenswrapper[4762]: I1009 13:27:09.087236 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:09 crc kubenswrapper[4762]: I1009 13:27:09.087258 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:09 crc kubenswrapper[4762]: I1009 13:27:09.087288 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:09 crc kubenswrapper[4762]: I1009 13:27:09.087308 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:09Z","lastTransitionTime":"2025-10-09T13:27:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:27:09 crc kubenswrapper[4762]: I1009 13:27:09.189438 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:09 crc kubenswrapper[4762]: I1009 13:27:09.189489 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:09 crc kubenswrapper[4762]: I1009 13:27:09.189500 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:09 crc kubenswrapper[4762]: I1009 13:27:09.189520 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:09 crc kubenswrapper[4762]: I1009 13:27:09.189556 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:09Z","lastTransitionTime":"2025-10-09T13:27:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:27:09 crc kubenswrapper[4762]: I1009 13:27:09.292882 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:09 crc kubenswrapper[4762]: I1009 13:27:09.292960 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:09 crc kubenswrapper[4762]: I1009 13:27:09.293035 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:09 crc kubenswrapper[4762]: I1009 13:27:09.293106 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:09 crc kubenswrapper[4762]: I1009 13:27:09.293119 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:09Z","lastTransitionTime":"2025-10-09T13:27:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:27:09 crc kubenswrapper[4762]: I1009 13:27:09.396083 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:09 crc kubenswrapper[4762]: I1009 13:27:09.396157 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:09 crc kubenswrapper[4762]: I1009 13:27:09.396173 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:09 crc kubenswrapper[4762]: I1009 13:27:09.396197 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:09 crc kubenswrapper[4762]: I1009 13:27:09.396213 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:09Z","lastTransitionTime":"2025-10-09T13:27:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:27:09 crc kubenswrapper[4762]: I1009 13:27:09.498795 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:09 crc kubenswrapper[4762]: I1009 13:27:09.498852 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:09 crc kubenswrapper[4762]: I1009 13:27:09.498871 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:09 crc kubenswrapper[4762]: I1009 13:27:09.498894 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:09 crc kubenswrapper[4762]: I1009 13:27:09.498910 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:09Z","lastTransitionTime":"2025-10-09T13:27:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:27:09 crc kubenswrapper[4762]: I1009 13:27:09.601849 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:09 crc kubenswrapper[4762]: I1009 13:27:09.601929 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:09 crc kubenswrapper[4762]: I1009 13:27:09.601949 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:09 crc kubenswrapper[4762]: I1009 13:27:09.601973 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:09 crc kubenswrapper[4762]: I1009 13:27:09.601990 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:09Z","lastTransitionTime":"2025-10-09T13:27:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:27:09 crc kubenswrapper[4762]: I1009 13:27:09.704409 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:09 crc kubenswrapper[4762]: I1009 13:27:09.704465 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:09 crc kubenswrapper[4762]: I1009 13:27:09.704481 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:09 crc kubenswrapper[4762]: I1009 13:27:09.704504 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:09 crc kubenswrapper[4762]: I1009 13:27:09.704522 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:09Z","lastTransitionTime":"2025-10-09T13:27:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:27:09 crc kubenswrapper[4762]: I1009 13:27:09.807018 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:09 crc kubenswrapper[4762]: I1009 13:27:09.807061 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:09 crc kubenswrapper[4762]: I1009 13:27:09.807072 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:09 crc kubenswrapper[4762]: I1009 13:27:09.807090 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:09 crc kubenswrapper[4762]: I1009 13:27:09.807103 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:09Z","lastTransitionTime":"2025-10-09T13:27:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:27:09 crc kubenswrapper[4762]: I1009 13:27:09.909713 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:09 crc kubenswrapper[4762]: I1009 13:27:09.909754 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:09 crc kubenswrapper[4762]: I1009 13:27:09.909766 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:09 crc kubenswrapper[4762]: I1009 13:27:09.909780 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:09 crc kubenswrapper[4762]: I1009 13:27:09.909792 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:09Z","lastTransitionTime":"2025-10-09T13:27:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:27:10 crc kubenswrapper[4762]: I1009 13:27:10.012303 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:10 crc kubenswrapper[4762]: I1009 13:27:10.012606 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:10 crc kubenswrapper[4762]: I1009 13:27:10.012717 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:10 crc kubenswrapper[4762]: I1009 13:27:10.012808 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:10 crc kubenswrapper[4762]: I1009 13:27:10.012884 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:10Z","lastTransitionTime":"2025-10-09T13:27:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:27:10 crc kubenswrapper[4762]: I1009 13:27:10.115993 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:10 crc kubenswrapper[4762]: I1009 13:27:10.116050 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:10 crc kubenswrapper[4762]: I1009 13:27:10.116067 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:10 crc kubenswrapper[4762]: I1009 13:27:10.116090 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:10 crc kubenswrapper[4762]: I1009 13:27:10.116107 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:10Z","lastTransitionTime":"2025-10-09T13:27:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:27:10 crc kubenswrapper[4762]: I1009 13:27:10.218773 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:10 crc kubenswrapper[4762]: I1009 13:27:10.218809 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:10 crc kubenswrapper[4762]: I1009 13:27:10.218819 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:10 crc kubenswrapper[4762]: I1009 13:27:10.218833 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:10 crc kubenswrapper[4762]: I1009 13:27:10.218845 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:10Z","lastTransitionTime":"2025-10-09T13:27:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:27:10 crc kubenswrapper[4762]: I1009 13:27:10.321394 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:10 crc kubenswrapper[4762]: I1009 13:27:10.321433 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:10 crc kubenswrapper[4762]: I1009 13:27:10.321444 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:10 crc kubenswrapper[4762]: I1009 13:27:10.321457 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:10 crc kubenswrapper[4762]: I1009 13:27:10.321467 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:10Z","lastTransitionTime":"2025-10-09T13:27:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:27:10 crc kubenswrapper[4762]: I1009 13:27:10.423861 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:10 crc kubenswrapper[4762]: I1009 13:27:10.424277 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:10 crc kubenswrapper[4762]: I1009 13:27:10.424477 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:10 crc kubenswrapper[4762]: I1009 13:27:10.424664 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:10 crc kubenswrapper[4762]: I1009 13:27:10.424860 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:10Z","lastTransitionTime":"2025-10-09T13:27:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:27:10 crc kubenswrapper[4762]: I1009 13:27:10.528380 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:10 crc kubenswrapper[4762]: I1009 13:27:10.529227 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:10 crc kubenswrapper[4762]: I1009 13:27:10.529357 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:10 crc kubenswrapper[4762]: I1009 13:27:10.529487 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:10 crc kubenswrapper[4762]: I1009 13:27:10.529620 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:10Z","lastTransitionTime":"2025-10-09T13:27:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:27:10 crc kubenswrapper[4762]: I1009 13:27:10.632357 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:10 crc kubenswrapper[4762]: I1009 13:27:10.632417 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:10 crc kubenswrapper[4762]: I1009 13:27:10.632431 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:10 crc kubenswrapper[4762]: I1009 13:27:10.632445 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:10 crc kubenswrapper[4762]: I1009 13:27:10.632453 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:10Z","lastTransitionTime":"2025-10-09T13:27:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:27:10 crc kubenswrapper[4762]: I1009 13:27:10.735576 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:10 crc kubenswrapper[4762]: I1009 13:27:10.735998 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:10 crc kubenswrapper[4762]: I1009 13:27:10.736240 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:10 crc kubenswrapper[4762]: I1009 13:27:10.736450 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:10 crc kubenswrapper[4762]: I1009 13:27:10.736601 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:10Z","lastTransitionTime":"2025-10-09T13:27:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:27:10 crc kubenswrapper[4762]: I1009 13:27:10.839504 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:10 crc kubenswrapper[4762]: I1009 13:27:10.839559 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:10 crc kubenswrapper[4762]: I1009 13:27:10.839575 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:10 crc kubenswrapper[4762]: I1009 13:27:10.839599 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:10 crc kubenswrapper[4762]: I1009 13:27:10.839616 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:10Z","lastTransitionTime":"2025-10-09T13:27:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:27:10 crc kubenswrapper[4762]: I1009 13:27:10.942610 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:10 crc kubenswrapper[4762]: I1009 13:27:10.942730 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:10 crc kubenswrapper[4762]: I1009 13:27:10.942753 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:10 crc kubenswrapper[4762]: I1009 13:27:10.942782 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:10 crc kubenswrapper[4762]: I1009 13:27:10.942799 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:10Z","lastTransitionTime":"2025-10-09T13:27:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:27:10 crc kubenswrapper[4762]: I1009 13:27:10.964804 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 13:27:10 crc kubenswrapper[4762]: I1009 13:27:10.964939 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 13:27:10 crc kubenswrapper[4762]: E1009 13:27:10.965384 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 13:27:10 crc kubenswrapper[4762]: I1009 13:27:10.965032 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 13:27:10 crc kubenswrapper[4762]: E1009 13:27:10.965469 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 13:27:10 crc kubenswrapper[4762]: I1009 13:27:10.965002 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k4bwn" Oct 09 13:27:10 crc kubenswrapper[4762]: E1009 13:27:10.965551 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-k4bwn" podUID="f9a76399-c2ae-487b-a52c-f0e271fb1d20" Oct 09 13:27:10 crc kubenswrapper[4762]: E1009 13:27:10.966113 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 13:27:11 crc kubenswrapper[4762]: I1009 13:27:11.045325 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:11 crc kubenswrapper[4762]: I1009 13:27:11.045369 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:11 crc kubenswrapper[4762]: I1009 13:27:11.045380 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:11 crc kubenswrapper[4762]: I1009 13:27:11.045396 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:11 crc kubenswrapper[4762]: I1009 13:27:11.045408 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:11Z","lastTransitionTime":"2025-10-09T13:27:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:27:11 crc kubenswrapper[4762]: I1009 13:27:11.147981 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:11 crc kubenswrapper[4762]: I1009 13:27:11.148057 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:11 crc kubenswrapper[4762]: I1009 13:27:11.148081 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:11 crc kubenswrapper[4762]: I1009 13:27:11.148111 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:11 crc kubenswrapper[4762]: I1009 13:27:11.148133 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:11Z","lastTransitionTime":"2025-10-09T13:27:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:27:11 crc kubenswrapper[4762]: I1009 13:27:11.250805 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:11 crc kubenswrapper[4762]: I1009 13:27:11.250846 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:11 crc kubenswrapper[4762]: I1009 13:27:11.250856 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:11 crc kubenswrapper[4762]: I1009 13:27:11.250870 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:11 crc kubenswrapper[4762]: I1009 13:27:11.250882 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:11Z","lastTransitionTime":"2025-10-09T13:27:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:27:11 crc kubenswrapper[4762]: I1009 13:27:11.353697 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:11 crc kubenswrapper[4762]: I1009 13:27:11.353799 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:11 crc kubenswrapper[4762]: I1009 13:27:11.353816 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:11 crc kubenswrapper[4762]: I1009 13:27:11.353839 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:11 crc kubenswrapper[4762]: I1009 13:27:11.353857 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:11Z","lastTransitionTime":"2025-10-09T13:27:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:27:11 crc kubenswrapper[4762]: I1009 13:27:11.456514 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:11 crc kubenswrapper[4762]: I1009 13:27:11.456555 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:11 crc kubenswrapper[4762]: I1009 13:27:11.456564 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:11 crc kubenswrapper[4762]: I1009 13:27:11.456580 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:11 crc kubenswrapper[4762]: I1009 13:27:11.456591 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:11Z","lastTransitionTime":"2025-10-09T13:27:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:27:11 crc kubenswrapper[4762]: I1009 13:27:11.559498 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:11 crc kubenswrapper[4762]: I1009 13:27:11.559565 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:11 crc kubenswrapper[4762]: I1009 13:27:11.559586 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:11 crc kubenswrapper[4762]: I1009 13:27:11.559614 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:11 crc kubenswrapper[4762]: I1009 13:27:11.559662 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:11Z","lastTransitionTime":"2025-10-09T13:27:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:27:11 crc kubenswrapper[4762]: I1009 13:27:11.662033 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:11 crc kubenswrapper[4762]: I1009 13:27:11.662079 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:11 crc kubenswrapper[4762]: I1009 13:27:11.662090 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:11 crc kubenswrapper[4762]: I1009 13:27:11.662103 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:11 crc kubenswrapper[4762]: I1009 13:27:11.662113 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:11Z","lastTransitionTime":"2025-10-09T13:27:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:27:11 crc kubenswrapper[4762]: I1009 13:27:11.765026 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:11 crc kubenswrapper[4762]: I1009 13:27:11.765072 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:11 crc kubenswrapper[4762]: I1009 13:27:11.765084 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:11 crc kubenswrapper[4762]: I1009 13:27:11.765101 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:11 crc kubenswrapper[4762]: I1009 13:27:11.765111 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:11Z","lastTransitionTime":"2025-10-09T13:27:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:27:11 crc kubenswrapper[4762]: I1009 13:27:11.867450 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:11 crc kubenswrapper[4762]: I1009 13:27:11.867491 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:11 crc kubenswrapper[4762]: I1009 13:27:11.867501 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:11 crc kubenswrapper[4762]: I1009 13:27:11.867515 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:11 crc kubenswrapper[4762]: I1009 13:27:11.867525 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:11Z","lastTransitionTime":"2025-10-09T13:27:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:27:11 crc kubenswrapper[4762]: I1009 13:27:11.970121 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:11 crc kubenswrapper[4762]: I1009 13:27:11.970185 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:11 crc kubenswrapper[4762]: I1009 13:27:11.970197 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:11 crc kubenswrapper[4762]: I1009 13:27:11.970210 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:11 crc kubenswrapper[4762]: I1009 13:27:11.970219 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:11Z","lastTransitionTime":"2025-10-09T13:27:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:27:12 crc kubenswrapper[4762]: I1009 13:27:12.073579 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:12 crc kubenswrapper[4762]: I1009 13:27:12.073621 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:12 crc kubenswrapper[4762]: I1009 13:27:12.073655 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:12 crc kubenswrapper[4762]: I1009 13:27:12.073671 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:12 crc kubenswrapper[4762]: I1009 13:27:12.073684 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:12Z","lastTransitionTime":"2025-10-09T13:27:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:27:12 crc kubenswrapper[4762]: I1009 13:27:12.176135 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:12 crc kubenswrapper[4762]: I1009 13:27:12.176173 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:12 crc kubenswrapper[4762]: I1009 13:27:12.176183 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:12 crc kubenswrapper[4762]: I1009 13:27:12.176197 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:12 crc kubenswrapper[4762]: I1009 13:27:12.176213 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:12Z","lastTransitionTime":"2025-10-09T13:27:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:27:12 crc kubenswrapper[4762]: I1009 13:27:12.278481 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:12 crc kubenswrapper[4762]: I1009 13:27:12.278524 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:12 crc kubenswrapper[4762]: I1009 13:27:12.278535 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:12 crc kubenswrapper[4762]: I1009 13:27:12.278550 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:12 crc kubenswrapper[4762]: I1009 13:27:12.278562 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:12Z","lastTransitionTime":"2025-10-09T13:27:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:27:12 crc kubenswrapper[4762]: I1009 13:27:12.380589 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:12 crc kubenswrapper[4762]: I1009 13:27:12.380624 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:12 crc kubenswrapper[4762]: I1009 13:27:12.380645 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:12 crc kubenswrapper[4762]: I1009 13:27:12.380661 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:12 crc kubenswrapper[4762]: I1009 13:27:12.380670 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:12Z","lastTransitionTime":"2025-10-09T13:27:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:27:12 crc kubenswrapper[4762]: I1009 13:27:12.483024 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:12 crc kubenswrapper[4762]: I1009 13:27:12.483279 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:12 crc kubenswrapper[4762]: I1009 13:27:12.483381 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:12 crc kubenswrapper[4762]: I1009 13:27:12.483479 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:12 crc kubenswrapper[4762]: I1009 13:27:12.483557 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:12Z","lastTransitionTime":"2025-10-09T13:27:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:27:12 crc kubenswrapper[4762]: I1009 13:27:12.586552 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:12 crc kubenswrapper[4762]: I1009 13:27:12.587473 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:12 crc kubenswrapper[4762]: I1009 13:27:12.587684 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:12 crc kubenswrapper[4762]: I1009 13:27:12.587872 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:12 crc kubenswrapper[4762]: I1009 13:27:12.588045 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:12Z","lastTransitionTime":"2025-10-09T13:27:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:27:12 crc kubenswrapper[4762]: I1009 13:27:12.691345 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:12 crc kubenswrapper[4762]: I1009 13:27:12.691393 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:12 crc kubenswrapper[4762]: I1009 13:27:12.691405 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:12 crc kubenswrapper[4762]: I1009 13:27:12.691420 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:12 crc kubenswrapper[4762]: I1009 13:27:12.691431 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:12Z","lastTransitionTime":"2025-10-09T13:27:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 13:27:12 crc kubenswrapper[4762]: I1009 13:27:12.794467 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:12 crc kubenswrapper[4762]: I1009 13:27:12.794510 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:12 crc kubenswrapper[4762]: I1009 13:27:12.794520 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:12 crc kubenswrapper[4762]: I1009 13:27:12.794535 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:12 crc kubenswrapper[4762]: I1009 13:27:12.794546 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:12Z","lastTransitionTime":"2025-10-09T13:27:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:27:12 crc kubenswrapper[4762]: I1009 13:27:12.836316 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 13:27:12 crc kubenswrapper[4762]: I1009 13:27:12.836357 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 13:27:12 crc kubenswrapper[4762]: I1009 13:27:12.836368 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 13:27:12 crc kubenswrapper[4762]: I1009 13:27:12.836385 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 13:27:12 crc kubenswrapper[4762]: I1009 13:27:12.836398 4762 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T13:27:12Z","lastTransitionTime":"2025-10-09T13:27:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 13:27:12 crc kubenswrapper[4762]: I1009 13:27:12.885091 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-rhwcn"] Oct 09 13:27:12 crc kubenswrapper[4762]: I1009 13:27:12.886329 4762 util.go:30] "No sandbox for pod can be found. 
Oct 09 13:27:12 crc kubenswrapper[4762]: I1009 13:27:12.890160 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt"
Oct 09 13:27:12 crc kubenswrapper[4762]: I1009 13:27:12.890593 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt"
Oct 09 13:27:12 crc kubenswrapper[4762]: I1009 13:27:12.890755 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert"
Oct 09 13:27:12 crc kubenswrapper[4762]: I1009 13:27:12.890809 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4"
Oct 09 13:27:12 crc kubenswrapper[4762]: I1009 13:27:12.915625 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=88.915608391 podStartE2EDuration="1m28.915608391s" podCreationTimestamp="2025-10-09 13:25:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:27:12.915145937 +0000 UTC m=+108.688936996" watchObservedRunningTime="2025-10-09 13:27:12.915608391 +0000 UTC m=+108.689399430"
Oct 09 13:27:12 crc kubenswrapper[4762]: I1009 13:27:12.946295 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=24.946270711 podStartE2EDuration="24.946270711s" podCreationTimestamp="2025-10-09 13:26:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:27:12.933471326 +0000 UTC m=+108.707262365" watchObservedRunningTime="2025-10-09 13:27:12.946270711 +0000 UTC m=+108.720061750"
Oct 09 13:27:12 crc kubenswrapper[4762]: I1009 13:27:12.946568 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-2vkbh" podStartSLOduration=88.94656366 podStartE2EDuration="1m28.94656366s" podCreationTimestamp="2025-10-09 13:25:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:27:12.944845842 +0000 UTC m=+108.718636881" watchObservedRunningTime="2025-10-09 13:27:12.94656366 +0000 UTC m=+108.720354699"
Oct 09 13:27:12 crc kubenswrapper[4762]: I1009 13:27:12.964832 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 09 13:27:12 crc kubenswrapper[4762]: I1009 13:27:12.964966 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 09 13:27:12 crc kubenswrapper[4762]: E1009 13:27:12.965102 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 13:27:12 crc kubenswrapper[4762]: I1009 13:27:12.965369 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 13:27:12 crc kubenswrapper[4762]: E1009 13:27:12.965446 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 13:27:12 crc kubenswrapper[4762]: E1009 13:27:12.965610 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 13:27:12 crc kubenswrapper[4762]: I1009 13:27:12.965932 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k4bwn" Oct 09 13:27:12 crc kubenswrapper[4762]: E1009 13:27:12.966493 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-k4bwn" podUID="f9a76399-c2ae-487b-a52c-f0e271fb1d20" Oct 09 13:27:12 crc kubenswrapper[4762]: I1009 13:27:12.967073 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-9wtqb" podStartSLOduration=88.967058828 podStartE2EDuration="1m28.967058828s" podCreationTimestamp="2025-10-09 13:25:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:27:12.966932995 +0000 UTC m=+108.740724034" watchObservedRunningTime="2025-10-09 13:27:12.967058828 +0000 UTC m=+108.740849867" Oct 09 13:27:12 crc kubenswrapper[4762]: I1009 13:27:12.985452 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/8e35a0cd-4cde-4fd3-9324-d4ff9cf1d288-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-rhwcn\" (UID: \"8e35a0cd-4cde-4fd3-9324-d4ff9cf1d288\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rhwcn" Oct 09 13:27:12 crc kubenswrapper[4762]: I1009 13:27:12.985536 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/8e35a0cd-4cde-4fd3-9324-d4ff9cf1d288-service-ca\") pod \"cluster-version-operator-5c965bbfc6-rhwcn\" (UID: \"8e35a0cd-4cde-4fd3-9324-d4ff9cf1d288\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rhwcn" Oct 09 13:27:12 crc kubenswrapper[4762]: I1009 13:27:12.985571 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/8e35a0cd-4cde-4fd3-9324-d4ff9cf1d288-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-rhwcn\" (UID: \"8e35a0cd-4cde-4fd3-9324-d4ff9cf1d288\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rhwcn" Oct 09 13:27:12 crc kubenswrapper[4762]: I1009 13:27:12.985619 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8e35a0cd-4cde-4fd3-9324-d4ff9cf1d288-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-rhwcn\" (UID: \"8e35a0cd-4cde-4fd3-9324-d4ff9cf1d288\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rhwcn" Oct 09 13:27:12 crc kubenswrapper[4762]: I1009 13:27:12.985718 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8e35a0cd-4cde-4fd3-9324-d4ff9cf1d288-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-rhwcn\" (UID: \"8e35a0cd-4cde-4fd3-9324-d4ff9cf1d288\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rhwcn" Oct 09 13:27:12 crc kubenswrapper[4762]: I1009 13:27:12.986232 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fx92z" podStartSLOduration=87.98621617 podStartE2EDuration="1m27.98621617s" podCreationTimestamp="2025-10-09 13:25:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:27:12.986030825 +0000 UTC m=+108.759821874" watchObservedRunningTime="2025-10-09 13:27:12.98621617 +0000 UTC m=+108.760007209" Oct 09 13:27:13 crc kubenswrapper[4762]: I1009 
13:27:13.041188 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-n6lnd" podStartSLOduration=89.041165855 podStartE2EDuration="1m29.041165855s" podCreationTimestamp="2025-10-09 13:25:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:27:13.04097393 +0000 UTC m=+108.814764969" watchObservedRunningTime="2025-10-09 13:27:13.041165855 +0000 UTC m=+108.814956894" Oct 09 13:27:13 crc kubenswrapper[4762]: I1009 13:27:13.053654 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-bj499" podStartSLOduration=88.053607891 podStartE2EDuration="1m28.053607891s" podCreationTimestamp="2025-10-09 13:25:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:27:13.053253461 +0000 UTC m=+108.827044500" watchObservedRunningTime="2025-10-09 13:27:13.053607891 +0000 UTC m=+108.827398950" Oct 09 13:27:13 crc kubenswrapper[4762]: I1009 13:27:13.067985 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=89.067962919 podStartE2EDuration="1m29.067962919s" podCreationTimestamp="2025-10-09 13:25:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:27:13.067895487 +0000 UTC m=+108.841686536" watchObservedRunningTime="2025-10-09 13:27:13.067962919 +0000 UTC m=+108.841753958" Oct 09 13:27:13 crc kubenswrapper[4762]: I1009 13:27:13.086984 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/8e35a0cd-4cde-4fd3-9324-d4ff9cf1d288-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-rhwcn\" (UID: \"8e35a0cd-4cde-4fd3-9324-d4ff9cf1d288\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rhwcn" Oct 09 13:27:13 crc kubenswrapper[4762]: I1009 13:27:13.087032 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/8e35a0cd-4cde-4fd3-9324-d4ff9cf1d288-service-ca\") pod \"cluster-version-operator-5c965bbfc6-rhwcn\" (UID: \"8e35a0cd-4cde-4fd3-9324-d4ff9cf1d288\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rhwcn" Oct 09 13:27:13 crc kubenswrapper[4762]: I1009 13:27:13.087051 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/8e35a0cd-4cde-4fd3-9324-d4ff9cf1d288-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-rhwcn\" (UID: \"8e35a0cd-4cde-4fd3-9324-d4ff9cf1d288\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rhwcn" Oct 09 13:27:13 crc kubenswrapper[4762]: I1009 13:27:13.087073 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8e35a0cd-4cde-4fd3-9324-d4ff9cf1d288-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-rhwcn\" (UID: \"8e35a0cd-4cde-4fd3-9324-d4ff9cf1d288\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rhwcn" Oct 09 13:27:13 crc kubenswrapper[4762]: I1009 13:27:13.087106 4762 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8e35a0cd-4cde-4fd3-9324-d4ff9cf1d288-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-rhwcn\" (UID: \"8e35a0cd-4cde-4fd3-9324-d4ff9cf1d288\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rhwcn" Oct 09 13:27:13 crc kubenswrapper[4762]: I1009 13:27:13.087216 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/8e35a0cd-4cde-4fd3-9324-d4ff9cf1d288-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-rhwcn\" (UID: \"8e35a0cd-4cde-4fd3-9324-d4ff9cf1d288\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rhwcn" Oct 09 13:27:13 crc kubenswrapper[4762]: I1009 13:27:13.087897 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/8e35a0cd-4cde-4fd3-9324-d4ff9cf1d288-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-rhwcn\" (UID: \"8e35a0cd-4cde-4fd3-9324-d4ff9cf1d288\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rhwcn" Oct 09 13:27:13 crc kubenswrapper[4762]: I1009 13:27:13.088158 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/8e35a0cd-4cde-4fd3-9324-d4ff9cf1d288-service-ca\") pod \"cluster-version-operator-5c965bbfc6-rhwcn\" (UID: \"8e35a0cd-4cde-4fd3-9324-d4ff9cf1d288\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rhwcn" Oct 09 13:27:13 crc kubenswrapper[4762]: I1009 13:27:13.097479 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8e35a0cd-4cde-4fd3-9324-d4ff9cf1d288-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-rhwcn\" (UID: \"8e35a0cd-4cde-4fd3-9324-d4ff9cf1d288\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rhwcn" Oct 09 13:27:13 crc kubenswrapper[4762]: I1009 13:27:13.099147 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=18.099127945 podStartE2EDuration="18.099127945s" podCreationTimestamp="2025-10-09 13:26:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:27:13.098520998 +0000 UTC m=+108.872312057" watchObservedRunningTime="2025-10-09 13:27:13.099127945 +0000 UTC m=+108.872918994" Oct 09 13:27:13 crc kubenswrapper[4762]: I1009 13:27:13.107607 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8e35a0cd-4cde-4fd3-9324-d4ff9cf1d288-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-rhwcn\" (UID: \"8e35a0cd-4cde-4fd3-9324-d4ff9cf1d288\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rhwcn" Oct 09 13:27:13 crc kubenswrapper[4762]: I1009 13:27:13.136838 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podStartSLOduration=89.136819501 podStartE2EDuration="1m29.136819501s" podCreationTimestamp="2025-10-09 13:25:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:27:13.136331287 +0000 UTC m=+108.910122336" watchObservedRunningTime="2025-10-09 13:27:13.136819501 +0000 
UTC m=+108.910610540" Oct 09 13:27:13 crc kubenswrapper[4762]: I1009 13:27:13.161510 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=61.161480925 podStartE2EDuration="1m1.161480925s" podCreationTimestamp="2025-10-09 13:26:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:27:13.160679403 +0000 UTC m=+108.934470452" watchObservedRunningTime="2025-10-09 13:27:13.161480925 +0000 UTC m=+108.935272004" Oct 09 13:27:13 crc kubenswrapper[4762]: I1009 13:27:13.201182 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rhwcn" Oct 09 13:27:13 crc kubenswrapper[4762]: I1009 13:27:13.527306 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rhwcn" event={"ID":"8e35a0cd-4cde-4fd3-9324-d4ff9cf1d288","Type":"ContainerStarted","Data":"cf723d63c4f0791ae08e7ee281bb64229b214d6a3cdfc42bf4d6c91652d52c31"} Oct 09 13:27:13 crc kubenswrapper[4762]: I1009 13:27:13.527356 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rhwcn" event={"ID":"8e35a0cd-4cde-4fd3-9324-d4ff9cf1d288","Type":"ContainerStarted","Data":"118be7b1f196d6a00e35a54870903e674382381c436ab2327f6602845bed0994"} Oct 09 13:27:14 crc kubenswrapper[4762]: I1009 13:27:14.964886 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 13:27:14 crc kubenswrapper[4762]: I1009 13:27:14.964945 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 13:27:14 crc kubenswrapper[4762]: E1009 13:27:14.965877 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 13:27:14 crc kubenswrapper[4762]: I1009 13:27:14.965971 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 13:27:14 crc kubenswrapper[4762]: I1009 13:27:14.966023 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k4bwn" Oct 09 13:27:14 crc kubenswrapper[4762]: E1009 13:27:14.966168 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 13:27:14 crc kubenswrapper[4762]: E1009 13:27:14.966783 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 13:27:14 crc kubenswrapper[4762]: E1009 13:27:14.966919 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k4bwn" podUID="f9a76399-c2ae-487b-a52c-f0e271fb1d20" Oct 09 13:27:15 crc kubenswrapper[4762]: I1009 13:27:15.966361 4762 scope.go:117] "RemoveContainer" containerID="25146c86fc625c912a6b7aab957ed31039aeede7d329cce8f28f0dd01f5ff39c" Oct 09 13:27:15 crc kubenswrapper[4762]: E1009 13:27:15.966761 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-jl67g_openshift-ovn-kubernetes(92662de9-9784-432a-92d2-a668f815e8fd)\"" pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" podUID="92662de9-9784-432a-92d2-a668f815e8fd" Oct 09 13:27:16 crc kubenswrapper[4762]: I1009 13:27:16.965260 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 13:27:16 crc kubenswrapper[4762]: I1009 13:27:16.965349 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 13:27:16 crc kubenswrapper[4762]: I1009 13:27:16.965397 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 13:27:16 crc kubenswrapper[4762]: E1009 13:27:16.965445 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 13:27:16 crc kubenswrapper[4762]: I1009 13:27:16.965282 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k4bwn" Oct 09 13:27:16 crc kubenswrapper[4762]: E1009 13:27:16.965511 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 13:27:16 crc kubenswrapper[4762]: E1009 13:27:16.965685 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k4bwn" podUID="f9a76399-c2ae-487b-a52c-f0e271fb1d20" Oct 09 13:27:16 crc kubenswrapper[4762]: E1009 13:27:16.965870 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 13:27:18 crc kubenswrapper[4762]: I1009 13:27:18.544081 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-9wtqb_c847aae6-277a-45dc-86d0-9b175f7e8177/kube-multus/1.log" Oct 09 13:27:18 crc kubenswrapper[4762]: I1009 13:27:18.544722 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-9wtqb_c847aae6-277a-45dc-86d0-9b175f7e8177/kube-multus/0.log" Oct 09 13:27:18 crc kubenswrapper[4762]: I1009 13:27:18.544776 4762 generic.go:334] "Generic (PLEG): container finished" podID="c847aae6-277a-45dc-86d0-9b175f7e8177" containerID="0f95093a278093400b0692e3f8cbe050c5586fbcf1ad29c7b758613ab6ac76af" exitCode=1 Oct 09 13:27:18 crc kubenswrapper[4762]: I1009 13:27:18.544808 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-9wtqb" event={"ID":"c847aae6-277a-45dc-86d0-9b175f7e8177","Type":"ContainerDied","Data":"0f95093a278093400b0692e3f8cbe050c5586fbcf1ad29c7b758613ab6ac76af"} Oct 09 13:27:18 crc kubenswrapper[4762]: I1009 13:27:18.544847 4762 scope.go:117] "RemoveContainer" containerID="cbc232c96b60c8678588d4902c8dfbf6fc0b30f8af768295c963aad3a9f4d644" Oct 09 13:27:18 crc kubenswrapper[4762]: I1009 13:27:18.545260 4762 scope.go:117] "RemoveContainer" containerID="0f95093a278093400b0692e3f8cbe050c5586fbcf1ad29c7b758613ab6ac76af" Oct 09 13:27:18 crc kubenswrapper[4762]: E1009 13:27:18.545490 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-9wtqb_openshift-multus(c847aae6-277a-45dc-86d0-9b175f7e8177)\"" pod="openshift-multus/multus-9wtqb" podUID="c847aae6-277a-45dc-86d0-9b175f7e8177" Oct 09 13:27:18 crc kubenswrapper[4762]: I1009 13:27:18.578428 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rhwcn" podStartSLOduration=94.578409 podStartE2EDuration="1m34.578409s" podCreationTimestamp="2025-10-09 13:25:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:27:13.544848957 +0000 UTC m=+109.318639996" watchObservedRunningTime="2025-10-09 13:27:18.578409 +0000 UTC m=+114.352200049" Oct 09 13:27:18 crc kubenswrapper[4762]: I1009 13:27:18.964992 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 13:27:18 crc kubenswrapper[4762]: I1009 13:27:18.965059 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 13:27:18 crc kubenswrapper[4762]: I1009 13:27:18.965082 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k4bwn" Oct 09 13:27:18 crc kubenswrapper[4762]: E1009 13:27:18.965113 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 13:27:18 crc kubenswrapper[4762]: E1009 13:27:18.965254 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 13:27:18 crc kubenswrapper[4762]: I1009 13:27:18.965292 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 13:27:18 crc kubenswrapper[4762]: E1009 13:27:18.965506 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k4bwn" podUID="f9a76399-c2ae-487b-a52c-f0e271fb1d20" Oct 09 13:27:18 crc kubenswrapper[4762]: E1009 13:27:18.965601 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 13:27:19 crc kubenswrapper[4762]: I1009 13:27:19.550749 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-9wtqb_c847aae6-277a-45dc-86d0-9b175f7e8177/kube-multus/1.log" Oct 09 13:27:20 crc kubenswrapper[4762]: I1009 13:27:20.964777 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 13:27:20 crc kubenswrapper[4762]: I1009 13:27:20.964820 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k4bwn" Oct 09 13:27:20 crc kubenswrapper[4762]: I1009 13:27:20.964875 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 13:27:20 crc kubenswrapper[4762]: I1009 13:27:20.964777 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 13:27:20 crc kubenswrapper[4762]: E1009 13:27:20.964906 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 13:27:20 crc kubenswrapper[4762]: E1009 13:27:20.965059 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 13:27:20 crc kubenswrapper[4762]: E1009 13:27:20.965117 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 13:27:20 crc kubenswrapper[4762]: E1009 13:27:20.965186 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k4bwn" podUID="f9a76399-c2ae-487b-a52c-f0e271fb1d20" Oct 09 13:27:22 crc kubenswrapper[4762]: I1009 13:27:22.964926 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 13:27:22 crc kubenswrapper[4762]: I1009 13:27:22.965039 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 13:27:22 crc kubenswrapper[4762]: I1009 13:27:22.965110 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 13:27:22 crc kubenswrapper[4762]: I1009 13:27:22.965358 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k4bwn" Oct 09 13:27:22 crc kubenswrapper[4762]: E1009 13:27:22.965507 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 13:27:22 crc kubenswrapper[4762]: E1009 13:27:22.965680 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 13:27:22 crc kubenswrapper[4762]: E1009 13:27:22.965864 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 13:27:22 crc kubenswrapper[4762]: E1009 13:27:22.965980 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k4bwn" podUID="f9a76399-c2ae-487b-a52c-f0e271fb1d20" Oct 09 13:27:24 crc kubenswrapper[4762]: E1009 13:27:24.954460 4762 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Oct 09 13:27:24 crc kubenswrapper[4762]: I1009 13:27:24.965092 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 13:27:24 crc kubenswrapper[4762]: I1009 13:27:24.967098 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 13:27:24 crc kubenswrapper[4762]: I1009 13:27:24.967132 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 13:27:24 crc kubenswrapper[4762]: I1009 13:27:24.967182 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k4bwn" Oct 09 13:27:24 crc kubenswrapper[4762]: E1009 13:27:24.967198 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 13:27:24 crc kubenswrapper[4762]: E1009 13:27:24.967468 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 13:27:24 crc kubenswrapper[4762]: E1009 13:27:24.967572 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 13:27:24 crc kubenswrapper[4762]: E1009 13:27:24.967817 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k4bwn" podUID="f9a76399-c2ae-487b-a52c-f0e271fb1d20" Oct 09 13:27:25 crc kubenswrapper[4762]: E1009 13:27:25.072211 4762 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Oct 09 13:27:26 crc kubenswrapper[4762]: I1009 13:27:26.964746 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k4bwn" Oct 09 13:27:26 crc kubenswrapper[4762]: E1009 13:27:26.966427 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k4bwn" podUID="f9a76399-c2ae-487b-a52c-f0e271fb1d20" Oct 09 13:27:26 crc kubenswrapper[4762]: I1009 13:27:26.965236 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 13:27:26 crc kubenswrapper[4762]: E1009 13:27:26.966560 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 13:27:26 crc kubenswrapper[4762]: I1009 13:27:26.966171 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 13:27:26 crc kubenswrapper[4762]: E1009 13:27:26.966690 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 13:27:26 crc kubenswrapper[4762]: I1009 13:27:26.965085 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 13:27:26 crc kubenswrapper[4762]: E1009 13:27:26.966832 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 13:27:27 crc kubenswrapper[4762]: I1009 13:27:27.965318 4762 scope.go:117] "RemoveContainer" containerID="25146c86fc625c912a6b7aab957ed31039aeede7d329cce8f28f0dd01f5ff39c" Oct 09 13:27:27 crc kubenswrapper[4762]: E1009 13:27:27.966313 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-jl67g_openshift-ovn-kubernetes(92662de9-9784-432a-92d2-a668f815e8fd)\"" pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" podUID="92662de9-9784-432a-92d2-a668f815e8fd" Oct 09 13:27:28 crc kubenswrapper[4762]: I1009 13:27:28.964783 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k4bwn" Oct 09 13:27:28 crc kubenswrapper[4762]: I1009 13:27:28.964838 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 13:27:28 crc kubenswrapper[4762]: E1009 13:27:28.964924 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k4bwn" podUID="f9a76399-c2ae-487b-a52c-f0e271fb1d20" Oct 09 13:27:28 crc kubenswrapper[4762]: I1009 13:27:28.964961 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 13:27:28 crc kubenswrapper[4762]: E1009 13:27:28.965025 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 13:27:28 crc kubenswrapper[4762]: I1009 13:27:28.965023 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 13:27:28 crc kubenswrapper[4762]: E1009 13:27:28.965208 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 13:27:28 crc kubenswrapper[4762]: E1009 13:27:28.965319 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 13:27:30 crc kubenswrapper[4762]: E1009 13:27:30.073849 4762 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Oct 09 13:27:30 crc kubenswrapper[4762]: I1009 13:27:30.965043 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 13:27:30 crc kubenswrapper[4762]: I1009 13:27:30.965085 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 13:27:30 crc kubenswrapper[4762]: I1009 13:27:30.965174 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k4bwn" Oct 09 13:27:30 crc kubenswrapper[4762]: I1009 13:27:30.965281 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 13:27:30 crc kubenswrapper[4762]: E1009 13:27:30.965272 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 13:27:30 crc kubenswrapper[4762]: E1009 13:27:30.965470 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k4bwn" podUID="f9a76399-c2ae-487b-a52c-f0e271fb1d20" Oct 09 13:27:30 crc kubenswrapper[4762]: E1009 13:27:30.965552 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 13:27:30 crc kubenswrapper[4762]: E1009 13:27:30.965695 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 13:27:32 crc kubenswrapper[4762]: I1009 13:27:32.964252 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 13:27:32 crc kubenswrapper[4762]: I1009 13:27:32.965038 4762 scope.go:117] "RemoveContainer" containerID="0f95093a278093400b0692e3f8cbe050c5586fbcf1ad29c7b758613ab6ac76af" Oct 09 13:27:32 crc kubenswrapper[4762]: I1009 13:27:32.964452 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 13:27:32 crc kubenswrapper[4762]: I1009 13:27:32.965970 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 13:27:32 crc kubenswrapper[4762]: I1009 13:27:32.966248 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k4bwn" Oct 09 13:27:32 crc kubenswrapper[4762]: E1009 13:27:32.966370 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k4bwn" podUID="f9a76399-c2ae-487b-a52c-f0e271fb1d20" Oct 09 13:27:32 crc kubenswrapper[4762]: E1009 13:27:32.966240 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 13:27:32 crc kubenswrapper[4762]: E1009 13:27:32.966473 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 13:27:32 crc kubenswrapper[4762]: E1009 13:27:32.966614 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 13:27:33 crc kubenswrapper[4762]: I1009 13:27:33.599526 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-9wtqb_c847aae6-277a-45dc-86d0-9b175f7e8177/kube-multus/1.log" Oct 09 13:27:33 crc kubenswrapper[4762]: I1009 13:27:33.599689 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-9wtqb" event={"ID":"c847aae6-277a-45dc-86d0-9b175f7e8177","Type":"ContainerStarted","Data":"c4e90ed0cd948905cef0fd260cd724a92fc07f4002a7151e2fb955ec9ae0bb6f"} Oct 09 13:27:34 crc kubenswrapper[4762]: I1009 13:27:34.964425 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 13:27:34 crc kubenswrapper[4762]: E1009 13:27:34.967253 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 13:27:34 crc kubenswrapper[4762]: I1009 13:27:34.967285 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 13:27:34 crc kubenswrapper[4762]: I1009 13:27:34.967287 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 13:27:34 crc kubenswrapper[4762]: I1009 13:27:34.967308 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k4bwn" Oct 09 13:27:34 crc kubenswrapper[4762]: E1009 13:27:34.967464 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 13:27:34 crc kubenswrapper[4762]: E1009 13:27:34.967688 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 13:27:34 crc kubenswrapper[4762]: E1009 13:27:34.967790 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-k4bwn" podUID="f9a76399-c2ae-487b-a52c-f0e271fb1d20" Oct 09 13:27:35 crc kubenswrapper[4762]: E1009 13:27:35.075677 4762 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Oct 09 13:27:36 crc kubenswrapper[4762]: I1009 13:27:36.965160 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 13:27:36 crc kubenswrapper[4762]: I1009 13:27:36.965266 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 13:27:36 crc kubenswrapper[4762]: I1009 13:27:36.965277 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k4bwn" Oct 09 13:27:36 crc kubenswrapper[4762]: E1009 13:27:36.965340 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 13:27:36 crc kubenswrapper[4762]: I1009 13:27:36.965362 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 13:27:36 crc kubenswrapper[4762]: E1009 13:27:36.965478 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 13:27:36 crc kubenswrapper[4762]: E1009 13:27:36.965590 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k4bwn" podUID="f9a76399-c2ae-487b-a52c-f0e271fb1d20" Oct 09 13:27:36 crc kubenswrapper[4762]: E1009 13:27:36.965775 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 13:27:38 crc kubenswrapper[4762]: I1009 13:27:38.964398 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 13:27:38 crc kubenswrapper[4762]: E1009 13:27:38.965186 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 13:27:38 crc kubenswrapper[4762]: I1009 13:27:38.964447 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 13:27:38 crc kubenswrapper[4762]: I1009 13:27:38.964414 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 13:27:38 crc kubenswrapper[4762]: E1009 13:27:38.965405 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 13:27:38 crc kubenswrapper[4762]: I1009 13:27:38.964546 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k4bwn" Oct 09 13:27:38 crc kubenswrapper[4762]: E1009 13:27:38.965702 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 13:27:38 crc kubenswrapper[4762]: E1009 13:27:38.965769 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k4bwn" podUID="f9a76399-c2ae-487b-a52c-f0e271fb1d20" Oct 09 13:27:40 crc kubenswrapper[4762]: E1009 13:27:40.076803 4762 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Oct 09 13:27:40 crc kubenswrapper[4762]: I1009 13:27:40.964588 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 13:27:40 crc kubenswrapper[4762]: I1009 13:27:40.964679 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k4bwn" Oct 09 13:27:40 crc kubenswrapper[4762]: I1009 13:27:40.964694 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 13:27:40 crc kubenswrapper[4762]: I1009 13:27:40.964680 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 13:27:40 crc kubenswrapper[4762]: E1009 13:27:40.964792 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 13:27:40 crc kubenswrapper[4762]: E1009 13:27:40.964919 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 13:27:40 crc kubenswrapper[4762]: E1009 13:27:40.965071 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k4bwn" podUID="f9a76399-c2ae-487b-a52c-f0e271fb1d20" Oct 09 13:27:40 crc kubenswrapper[4762]: E1009 13:27:40.965221 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 13:27:41 crc kubenswrapper[4762]: I1009 13:27:41.965562 4762 scope.go:117] "RemoveContainer" containerID="25146c86fc625c912a6b7aab957ed31039aeede7d329cce8f28f0dd01f5ff39c" Oct 09 13:27:42 crc kubenswrapper[4762]: I1009 13:27:42.631368 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-jl67g_92662de9-9784-432a-92d2-a668f815e8fd/ovnkube-controller/3.log" Oct 09 13:27:42 crc kubenswrapper[4762]: I1009 13:27:42.633419 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" event={"ID":"92662de9-9784-432a-92d2-a668f815e8fd","Type":"ContainerStarted","Data":"8cf0a81a9603a5e2e4e71e6b43b705cc16135ef584be6e1235de4285f9d14d36"} Oct 09 13:27:42 crc kubenswrapper[4762]: I1009 13:27:42.633887 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" Oct 09 13:27:42 crc kubenswrapper[4762]: I1009 13:27:42.663807 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" podStartSLOduration=118.663783925 podStartE2EDuration="1m58.663783925s" podCreationTimestamp="2025-10-09 13:25:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:27:42.662736796 +0000 UTC m=+138.436527845" watchObservedRunningTime="2025-10-09 13:27:42.663783925 +0000 UTC m=+138.437574954" Oct 09 13:27:42 crc kubenswrapper[4762]: I1009 13:27:42.964922 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 13:27:42 crc kubenswrapper[4762]: I1009 13:27:42.964999 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k4bwn" Oct 09 13:27:42 crc kubenswrapper[4762]: I1009 13:27:42.965006 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 13:27:42 crc kubenswrapper[4762]: E1009 13:27:42.965100 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 13:27:42 crc kubenswrapper[4762]: I1009 13:27:42.965219 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 13:27:42 crc kubenswrapper[4762]: E1009 13:27:42.965336 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-k4bwn" podUID="f9a76399-c2ae-487b-a52c-f0e271fb1d20" Oct 09 13:27:42 crc kubenswrapper[4762]: E1009 13:27:42.965398 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 13:27:42 crc kubenswrapper[4762]: E1009 13:27:42.965503 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 13:27:43 crc kubenswrapper[4762]: I1009 13:27:43.096228 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-k4bwn"] Oct 09 13:27:43 crc kubenswrapper[4762]: I1009 13:27:43.636679 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k4bwn" Oct 09 13:27:43 crc kubenswrapper[4762]: E1009 13:27:43.637261 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k4bwn" podUID="f9a76399-c2ae-487b-a52c-f0e271fb1d20" Oct 09 13:27:44 crc kubenswrapper[4762]: I1009 13:27:44.964900 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 13:27:44 crc kubenswrapper[4762]: E1009 13:27:44.967260 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 13:27:44 crc kubenswrapper[4762]: I1009 13:27:44.967311 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 13:27:44 crc kubenswrapper[4762]: I1009 13:27:44.967386 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 13:27:44 crc kubenswrapper[4762]: E1009 13:27:44.967527 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 13:27:44 crc kubenswrapper[4762]: E1009 13:27:44.967787 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 13:27:45 crc kubenswrapper[4762]: E1009 13:27:45.078220 4762 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Oct 09 13:27:45 crc kubenswrapper[4762]: I1009 13:27:45.964315 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k4bwn" Oct 09 13:27:45 crc kubenswrapper[4762]: E1009 13:27:45.964448 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k4bwn" podUID="f9a76399-c2ae-487b-a52c-f0e271fb1d20" Oct 09 13:27:46 crc kubenswrapper[4762]: I1009 13:27:46.964794 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 13:27:46 crc kubenswrapper[4762]: E1009 13:27:46.965887 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 13:27:46 crc kubenswrapper[4762]: I1009 13:27:46.964828 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 13:27:46 crc kubenswrapper[4762]: I1009 13:27:46.964880 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 13:27:46 crc kubenswrapper[4762]: E1009 13:27:46.966145 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 13:27:46 crc kubenswrapper[4762]: E1009 13:27:46.966341 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 13:27:47 crc kubenswrapper[4762]: I1009 13:27:47.964115 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k4bwn" Oct 09 13:27:47 crc kubenswrapper[4762]: E1009 13:27:47.964266 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k4bwn" podUID="f9a76399-c2ae-487b-a52c-f0e271fb1d20" Oct 09 13:27:48 crc kubenswrapper[4762]: I1009 13:27:48.964769 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 13:27:48 crc kubenswrapper[4762]: I1009 13:27:48.964842 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 13:27:48 crc kubenswrapper[4762]: I1009 13:27:48.964960 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 13:27:48 crc kubenswrapper[4762]: E1009 13:27:48.964966 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 13:27:48 crc kubenswrapper[4762]: E1009 13:27:48.965094 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 13:27:48 crc kubenswrapper[4762]: E1009 13:27:48.965259 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 13:27:49 crc kubenswrapper[4762]: I1009 13:27:49.964525 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k4bwn" Oct 09 13:27:49 crc kubenswrapper[4762]: E1009 13:27:49.965353 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-k4bwn" podUID="f9a76399-c2ae-487b-a52c-f0e271fb1d20" Oct 09 13:27:50 crc kubenswrapper[4762]: I1009 13:27:50.964696 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 13:27:50 crc kubenswrapper[4762]: I1009 13:27:50.964870 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 13:27:50 crc kubenswrapper[4762]: I1009 13:27:50.964724 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 13:27:50 crc kubenswrapper[4762]: I1009 13:27:50.967209 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Oct 09 13:27:50 crc kubenswrapper[4762]: I1009 13:27:50.967700 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Oct 09 13:27:50 crc kubenswrapper[4762]: I1009 13:27:50.967527 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Oct 09 13:27:50 crc kubenswrapper[4762]: I1009 13:27:50.968022 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Oct 09 13:27:51 crc kubenswrapper[4762]: I1009 13:27:51.964733 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k4bwn" Oct 09 13:27:51 crc kubenswrapper[4762]: I1009 13:27:51.966985 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Oct 09 13:27:51 crc kubenswrapper[4762]: I1009 13:27:51.967347 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Oct 09 13:27:52 crc kubenswrapper[4762]: I1009 13:27:52.827271 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 13:27:52 crc kubenswrapper[4762]: I1009 13:27:52.827434 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 13:27:52 crc kubenswrapper[4762]: E1009 13:27:52.827496 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 13:29:54.827465279 +0000 UTC m=+270.601256338 (durationBeforeRetry 2m2s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 13:27:52 crc kubenswrapper[4762]: I1009 13:27:52.827550 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 13:27:52 crc kubenswrapper[4762]: I1009 13:27:52.832151 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 13:27:52 crc kubenswrapper[4762]: I1009 13:27:52.834743 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 13:27:52 crc kubenswrapper[4762]: I1009 13:27:52.928882 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 13:27:52 crc kubenswrapper[4762]: I1009 13:27:52.928955 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 13:27:52 crc kubenswrapper[4762]: I1009 13:27:52.932725 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 13:27:52 crc kubenswrapper[4762]: I1009 13:27:52.933404 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.080775 4762 util.go:30] "No sandbox for pod can 
be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.087533 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.094473 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 13:27:53 crc kubenswrapper[4762]: W1009 13:27:53.313001 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3b6479f0_333b_4a96_9adf_2099afdc2447.slice/crio-f46178693fccac536cd1a524de221b7585d9643752a14ac22ddf798d1a55e293 WatchSource:0}: Error finding container f46178693fccac536cd1a524de221b7585d9643752a14ac22ddf798d1a55e293: Status 404 returned error can't find the container with id f46178693fccac536cd1a524de221b7585d9643752a14ac22ddf798d1a55e293 Oct 09 13:27:53 crc kubenswrapper[4762]: W1009 13:27:53.320542 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9d751cbb_f2e2_430d_9754_c882a5e924a5.slice/crio-f0789a7c5cff91c71e818560bcf48753e94d3c255d845e656c8d95044658ddd2 WatchSource:0}: Error finding container f0789a7c5cff91c71e818560bcf48753e94d3c255d845e656c8d95044658ddd2: Status 404 returned error can't find the container with id f0789a7c5cff91c71e818560bcf48753e94d3c255d845e656c8d95044658ddd2 Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.667180 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"6be3040cef37d2e6496ffe78c5a999f612c4cc83109ad0e504f8f8662068e3a6"} Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.667236 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"f46178693fccac536cd1a524de221b7585d9643752a14ac22ddf798d1a55e293"} Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.667426 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.672329 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"873a878fd95a3ed26a27a40dc54f29251ed3e1c17824cb7d628aa32306e16917"} Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.672383 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"d7d896e22348afc11cbc6bc99d3e4445898a0fbbfc33ca5492ee32f184cef75b"} Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.673931 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"a75f9d38641d0eeaabb55038f16f27632e136690f911549ddd25994395ff45f1"} Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 
13:27:53.673963 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"f0789a7c5cff91c71e818560bcf48753e94d3c255d845e656c8d95044658ddd2"} Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.783591 4762 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.815351 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-zxx57"] Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.816035 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-zxx57" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.816443 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-pmdsg"] Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.820439 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.820554 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-pmdsg" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.820703 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.820895 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.821165 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.821934 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.823129 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-c4rs8"] Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.839131 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.839440 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-c4rs8" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.839806 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.840108 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.840322 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.840588 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.840736 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-924rd"] Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.840850 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.841197 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-xzd8z"] Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.841221 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.841432 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-xzd8z" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.841471 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-924rd" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.841691 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.841996 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-8pmjh"] Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.842375 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-pmdsg"] Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.842443 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8pmjh" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.843036 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-l2j7b"] Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.843362 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-l2j7b" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.843835 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-r5hfv"] Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.844193 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-r5hfv" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.844829 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-r4p46"] Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.845151 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-r4p46" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.846595 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-v2xzj"] Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.846972 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-btvjf"] Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.847240 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-btvjf" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.847470 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-v2xzj" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.847529 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-77hfw"] Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.853115 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.853385 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.853601 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.853763 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.853842 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.853910 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.854017 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.854055 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.854149 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.854280 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.854369 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.854494 4762 reflector.go:368] Caches populated for *v1.Secret 
from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.854628 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.854750 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.854833 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.854857 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.854912 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.854980 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.855004 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.855080 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.855264 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.855278 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.854509 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.855450 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.855542 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.855625 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.855725 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.855808 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.855411 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.855941 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.856013 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 
13:27:53.855264 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.856442 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.856544 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.856653 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.856738 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.856838 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.856937 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.857031 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.857122 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.857205 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.856109 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.858041 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-h972j"] Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.858174 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-77hfw" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.858263 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-9dv9p"] Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.858496 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-8zwlj"] Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.858738 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-h972j" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.859083 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-9dv9p" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.859195 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-hjtqj"] Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.859412 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-8zwlj" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.859437 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-d4bsv"] Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.859615 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-hjtqj" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.859692 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-n64hz"] Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.859914 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-hwt8t"] Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.859995 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-d4bsv" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.860157 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-hwt8t" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.860174 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-n64hz" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.864567 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.864852 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.865021 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.865167 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.866605 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-xzppk"] Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.867266 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-xzppk" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.867914 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-qpmbq"] Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.868288 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-qpmbq" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.870671 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.870966 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.871222 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.872480 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vj7lx"] Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.873096 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vj7lx" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.873149 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.873591 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.873950 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-c599r"] Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.873989 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.874755 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-c599r" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.899477 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.906311 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.906544 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.906884 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.907424 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.907565 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.907745 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.908817 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.909867 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.910053 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.910097 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.910324 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.910532 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.910703 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.910867 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.911008 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.911156 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.911251 4762 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.911312 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.911736 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.911771 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.911156 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.911984 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.912072 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.912138 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.912213 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.912237 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.912286 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.913364 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.914281 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.915708 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-8x6nc"] Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.915981 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.916310 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.916493 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-8x6nc" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.916537 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-ff6kr"] Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.917255 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-ff6kr" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.918009 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-8tz5k"] Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.918354 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-8tz5k" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.920072 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.922308 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.922824 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.922954 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.923352 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.923755 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-mbgsr"] Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.924507 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-mbgsr" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.924838 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-59vf6"] Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.925296 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-59vf6" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.927849 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29333595-d52ht"] Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.928432 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-b7276"] Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.928977 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-8x7j8"] Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.929369 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-865sv"] Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.929437 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29333595-d52ht" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.929992 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.931968 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-8x7j8" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.931967 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-b7276" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.932188 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-865sv" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.933415 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-zn6jm"] Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.934217 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-zn6jm" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.938753 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.939037 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-9957f"] Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.940918 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-9957f" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.941448 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/e7318993-282b-4a86-907d-fc810869cc7c-audit-dir\") pod \"apiserver-7bbb656c7d-8pmjh\" (UID: \"e7318993-282b-4a86-907d-fc810869cc7c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8pmjh" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.941490 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/bf4cd740-a799-47d8-9ce6-88bce9afa952-images\") pod \"machine-api-operator-5694c8668f-pmdsg\" (UID: \"bf4cd740-a799-47d8-9ce6-88bce9afa952\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-pmdsg" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.941510 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5be8bc93-cb89-4cc7-822d-739708bab8a9-config\") pod \"controller-manager-879f6c89f-xzd8z\" (UID: \"5be8bc93-cb89-4cc7-822d-739708bab8a9\") " pod="openshift-controller-manager/controller-manager-879f6c89f-xzd8z" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.941567 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/fe7e464e-6acb-478d-8de2-ea89f6e19734-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-hjtqj\" (UID: \"fe7e464e-6acb-478d-8de2-ea89f6e19734\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-hjtqj" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.941585 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-htg29\" (UniqueName: \"kubernetes.io/projected/05d81e4d-1c18-4533-96c4-888a906b2c25-kube-api-access-htg29\") pod \"apiserver-76f77b778f-924rd\" (UID: \"05d81e4d-1c18-4533-96c4-888a906b2c25\") " pod="openshift-apiserver/apiserver-76f77b778f-924rd" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.941651 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8-console-oauth-config\") pod \"console-f9d7485db-r5hfv\" (UID: \"4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8\") " pod="openshift-console/console-f9d7485db-r5hfv" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.941681 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fc3dda6a-2c92-4d44-b2b6-70f89b397af4-config\") pod \"etcd-operator-b45778765-h972j\" (UID: \"fc3dda6a-2c92-4d44-b2b6-70f89b397af4\") " pod="openshift-etcd-operator/etcd-operator-b45778765-h972j" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.941697 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/76be61d0-03ad-4822-8097-2ef2bde86bf1-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-c599r\" (UID: \"76be61d0-03ad-4822-8097-2ef2bde86bf1\") " 
pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-c599r" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.941722 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/d542877b-87f6-4d96-9256-ff58662d9547-srv-cert\") pod \"olm-operator-6b444d44fb-qpmbq\" (UID: \"d542877b-87f6-4d96-9256-ff58662d9547\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-qpmbq" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.941736 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fc3dda6a-2c92-4d44-b2b6-70f89b397af4-serving-cert\") pod \"etcd-operator-b45778765-h972j\" (UID: \"fc3dda6a-2c92-4d44-b2b6-70f89b397af4\") " pod="openshift-etcd-operator/etcd-operator-b45778765-h972j" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.941752 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/be104342-f3c8-4e61-bd52-81e26e001325-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-hwt8t\" (UID: \"be104342-f3c8-4e61-bd52-81e26e001325\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-hwt8t" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.941766 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/03a48f34-2c09-4130-bed4-6e33b080909e-config\") pod \"machine-approver-56656f9798-zxx57\" (UID: \"03a48f34-2c09-4130-bed4-6e33b080909e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-zxx57" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.941779 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8-oauth-serving-cert\") pod \"console-f9d7485db-r5hfv\" (UID: \"4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8\") " pod="openshift-console/console-f9d7485db-r5hfv" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.941792 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/05d81e4d-1c18-4533-96c4-888a906b2c25-image-import-ca\") pod \"apiserver-76f77b778f-924rd\" (UID: \"05d81e4d-1c18-4533-96c4-888a906b2c25\") " pod="openshift-apiserver/apiserver-76f77b778f-924rd" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.941809 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3818a2d1-8618-4b53-b326-afb6cbdfaf38-config\") pod \"openshift-apiserver-operator-796bbdcf4f-btvjf\" (UID: \"3818a2d1-8618-4b53-b326-afb6cbdfaf38\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-btvjf" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.941825 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xmkk7\" (UniqueName: \"kubernetes.io/projected/462d08cc-03ff-4c48-8002-172bc184d6ea-kube-api-access-xmkk7\") pod \"cluster-image-registry-operator-dc59b4c8b-n64hz\" (UID: \"462d08cc-03ff-4c48-8002-172bc184d6ea\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-n64hz" Oct 09 
13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.941844 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/be104342-f3c8-4e61-bd52-81e26e001325-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-hwt8t\" (UID: \"be104342-f3c8-4e61-bd52-81e26e001325\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-hwt8t" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.941858 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5be8bc93-cb89-4cc7-822d-739708bab8a9-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-xzd8z\" (UID: \"5be8bc93-cb89-4cc7-822d-739708bab8a9\") " pod="openshift-controller-manager/controller-manager-879f6c89f-xzd8z" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.941877 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/05d81e4d-1c18-4533-96c4-888a906b2c25-config\") pod \"apiserver-76f77b778f-924rd\" (UID: \"05d81e4d-1c18-4533-96c4-888a906b2c25\") " pod="openshift-apiserver/apiserver-76f77b778f-924rd" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.941899 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7318993-282b-4a86-907d-fc810869cc7c-serving-cert\") pod \"apiserver-7bbb656c7d-8pmjh\" (UID: \"e7318993-282b-4a86-907d-fc810869cc7c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8pmjh" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.941919 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/4d528172-a0ee-4b06-a09c-08eb92b12cee-signing-key\") pod \"service-ca-9c57cc56f-xzppk\" (UID: \"4d528172-a0ee-4b06-a09c-08eb92b12cee\") " pod="openshift-service-ca/service-ca-9c57cc56f-xzppk" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.941942 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/414ea71c-9878-4a8a-825f-3d18ff8460d9-serving-cert\") pod \"route-controller-manager-6576b87f9c-c4rs8\" (UID: \"414ea71c-9878-4a8a-825f-3d18ff8460d9\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-c4rs8" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.941962 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/fc3dda6a-2c92-4d44-b2b6-70f89b397af4-etcd-client\") pod \"etcd-operator-b45778765-h972j\" (UID: \"fc3dda6a-2c92-4d44-b2b6-70f89b397af4\") " pod="openshift-etcd-operator/etcd-operator-b45778765-h972j" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.941982 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hnk84\" (UniqueName: \"kubernetes.io/projected/cb588149-29d3-47af-9359-406e69f9fb33-kube-api-access-hnk84\") pod \"cluster-samples-operator-665b6dd947-v2xzj\" (UID: \"cb588149-29d3-47af-9359-406e69f9fb33\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-v2xzj" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.942001 4762 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/05d81e4d-1c18-4533-96c4-888a906b2c25-etcd-serving-ca\") pod \"apiserver-76f77b778f-924rd\" (UID: \"05d81e4d-1c18-4533-96c4-888a906b2c25\") " pod="openshift-apiserver/apiserver-76f77b778f-924rd" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.942025 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5be8bc93-cb89-4cc7-822d-739708bab8a9-serving-cert\") pod \"controller-manager-879f6c89f-xzd8z\" (UID: \"5be8bc93-cb89-4cc7-822d-739708bab8a9\") " pod="openshift-controller-manager/controller-manager-879f6c89f-xzd8z" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.942045 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/414ea71c-9878-4a8a-825f-3d18ff8460d9-client-ca\") pod \"route-controller-manager-6576b87f9c-c4rs8\" (UID: \"414ea71c-9878-4a8a-825f-3d18ff8460d9\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-c4rs8" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.943361 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8-trusted-ca-bundle\") pod \"console-f9d7485db-r5hfv\" (UID: \"4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8\") " pod="openshift-console/console-f9d7485db-r5hfv" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.943405 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/be104342-f3c8-4e61-bd52-81e26e001325-config\") pod \"kube-apiserver-operator-766d6c64bb-hwt8t\" (UID: \"be104342-f3c8-4e61-bd52-81e26e001325\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-hwt8t" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.943427 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tknm4\" (UniqueName: \"kubernetes.io/projected/414ea71c-9878-4a8a-825f-3d18ff8460d9-kube-api-access-tknm4\") pod \"route-controller-manager-6576b87f9c-c4rs8\" (UID: \"414ea71c-9878-4a8a-825f-3d18ff8460d9\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-c4rs8" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.943442 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8-console-serving-cert\") pod \"console-f9d7485db-r5hfv\" (UID: \"4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8\") " pod="openshift-console/console-f9d7485db-r5hfv" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.943465 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/e7318993-282b-4a86-907d-fc810869cc7c-encryption-config\") pod \"apiserver-7bbb656c7d-8pmjh\" (UID: \"e7318993-282b-4a86-907d-fc810869cc7c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8pmjh" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.943484 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jl7pd\" 
(UniqueName: \"kubernetes.io/projected/d542877b-87f6-4d96-9256-ff58662d9547-kube-api-access-jl7pd\") pod \"olm-operator-6b444d44fb-qpmbq\" (UID: \"d542877b-87f6-4d96-9256-ff58662d9547\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-qpmbq" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.943498 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/462d08cc-03ff-4c48-8002-172bc184d6ea-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-n64hz\" (UID: \"462d08cc-03ff-4c48-8002-172bc184d6ea\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-n64hz" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.943516 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/e7318993-282b-4a86-907d-fc810869cc7c-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-8pmjh\" (UID: \"e7318993-282b-4a86-907d-fc810869cc7c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8pmjh" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.943530 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3818a2d1-8618-4b53-b326-afb6cbdfaf38-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-btvjf\" (UID: \"3818a2d1-8618-4b53-b326-afb6cbdfaf38\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-btvjf" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.943550 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3380c41e-02cc-4bb7-97b4-b3df719ea736-serving-cert\") pod \"authentication-operator-69f744f599-d4bsv\" (UID: \"3380c41e-02cc-4bb7-97b4-b3df719ea736\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-d4bsv" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.943566 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/05d81e4d-1c18-4533-96c4-888a906b2c25-node-pullsecrets\") pod \"apiserver-76f77b778f-924rd\" (UID: \"05d81e4d-1c18-4533-96c4-888a906b2c25\") " pod="openshift-apiserver/apiserver-76f77b778f-924rd" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.943584 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf4cd740-a799-47d8-9ce6-88bce9afa952-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-pmdsg\" (UID: \"bf4cd740-a799-47d8-9ce6-88bce9afa952\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-pmdsg" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.943599 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zcxlq\" (UniqueName: \"kubernetes.io/projected/bf4cd740-a799-47d8-9ce6-88bce9afa952-kube-api-access-zcxlq\") pod \"machine-api-operator-5694c8668f-pmdsg\" (UID: \"bf4cd740-a799-47d8-9ce6-88bce9afa952\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-pmdsg" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.943616 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"kube-api-access-jkmqc\" (UniqueName: \"kubernetes.io/projected/fc3dda6a-2c92-4d44-b2b6-70f89b397af4-kube-api-access-jkmqc\") pod \"etcd-operator-b45778765-h972j\" (UID: \"fc3dda6a-2c92-4d44-b2b6-70f89b397af4\") " pod="openshift-etcd-operator/etcd-operator-b45778765-h972j" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.943644 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/4d528172-a0ee-4b06-a09c-08eb92b12cee-signing-cabundle\") pod \"service-ca-9c57cc56f-xzppk\" (UID: \"4d528172-a0ee-4b06-a09c-08eb92b12cee\") " pod="openshift-service-ca/service-ca-9c57cc56f-xzppk" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.943658 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/05d81e4d-1c18-4533-96c4-888a906b2c25-etcd-client\") pod \"apiserver-76f77b778f-924rd\" (UID: \"05d81e4d-1c18-4533-96c4-888a906b2c25\") " pod="openshift-apiserver/apiserver-76f77b778f-924rd" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.943674 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/cb588149-29d3-47af-9359-406e69f9fb33-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-v2xzj\" (UID: \"cb588149-29d3-47af-9359-406e69f9fb33\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-v2xzj" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.943692 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/462d08cc-03ff-4c48-8002-172bc184d6ea-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-n64hz\" (UID: \"462d08cc-03ff-4c48-8002-172bc184d6ea\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-n64hz" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.943792 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-smk9c\" (UniqueName: \"kubernetes.io/projected/776fba5d-1567-4a5d-a090-b8fbf37a583b-kube-api-access-smk9c\") pod \"console-operator-58897d9998-r4p46\" (UID: \"776fba5d-1567-4a5d-a090-b8fbf37a583b\") " pod="openshift-console-operator/console-operator-58897d9998-r4p46" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.943815 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e245c27d-066f-4422-9cdb-e5b7525b8717-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-9dv9p\" (UID: \"e245c27d-066f-4422-9cdb-e5b7525b8717\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-9dv9p" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.943836 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5cwmf\" (UniqueName: \"kubernetes.io/projected/6241b81b-a379-4339-86df-eb7baf5c1ec0-kube-api-access-5cwmf\") pod \"package-server-manager-789f6589d5-vj7lx\" (UID: \"6241b81b-a379-4339-86df-eb7baf5c1ec0\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vj7lx" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.943859 4762 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6rnwh\" (UniqueName: \"kubernetes.io/projected/3818a2d1-8618-4b53-b326-afb6cbdfaf38-kube-api-access-6rnwh\") pod \"openshift-apiserver-operator-796bbdcf4f-btvjf\" (UID: \"3818a2d1-8618-4b53-b326-afb6cbdfaf38\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-btvjf" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.943881 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hp9rd\" (UniqueName: \"kubernetes.io/projected/03a48f34-2c09-4130-bed4-6e33b080909e-kube-api-access-hp9rd\") pod \"machine-approver-56656f9798-zxx57\" (UID: \"03a48f34-2c09-4130-bed4-6e33b080909e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-zxx57" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.943902 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fe7e464e-6acb-478d-8de2-ea89f6e19734-config\") pod \"kube-controller-manager-operator-78b949d7b-hjtqj\" (UID: \"fe7e464e-6acb-478d-8de2-ea89f6e19734\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-hjtqj" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.943922 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8-service-ca\") pod \"console-f9d7485db-r5hfv\" (UID: \"4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8\") " pod="openshift-console/console-f9d7485db-r5hfv" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.943944 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3380c41e-02cc-4bb7-97b4-b3df719ea736-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-d4bsv\" (UID: \"3380c41e-02cc-4bb7-97b4-b3df719ea736\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-d4bsv" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.943969 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-btl44\" (UniqueName: \"kubernetes.io/projected/e5a0e5db-1ea7-4af8-9f2d-db55d53645ab-kube-api-access-btl44\") pod \"dns-operator-744455d44c-77hfw\" (UID: \"e5a0e5db-1ea7-4af8-9f2d-db55d53645ab\") " pod="openshift-dns-operator/dns-operator-744455d44c-77hfw" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.943991 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jtznr\" (UniqueName: \"kubernetes.io/projected/4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8-kube-api-access-jtznr\") pod \"console-f9d7485db-r5hfv\" (UID: \"4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8\") " pod="openshift-console/console-f9d7485db-r5hfv" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.944011 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/d542877b-87f6-4d96-9256-ff58662d9547-profile-collector-cert\") pod \"olm-operator-6b444d44fb-qpmbq\" (UID: \"d542877b-87f6-4d96-9256-ff58662d9547\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-qpmbq" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.944054 4762 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/05d81e4d-1c18-4533-96c4-888a906b2c25-audit-dir\") pod \"apiserver-76f77b778f-924rd\" (UID: \"05d81e4d-1c18-4533-96c4-888a906b2c25\") " pod="openshift-apiserver/apiserver-76f77b778f-924rd" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.944078 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/03a48f34-2c09-4130-bed4-6e33b080909e-machine-approver-tls\") pod \"machine-approver-56656f9798-zxx57\" (UID: \"03a48f34-2c09-4130-bed4-6e33b080909e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-zxx57" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.944099 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fe7e464e-6acb-478d-8de2-ea89f6e19734-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-hjtqj\" (UID: \"fe7e464e-6acb-478d-8de2-ea89f6e19734\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-hjtqj" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.944128 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/05d81e4d-1c18-4533-96c4-888a906b2c25-trusted-ca-bundle\") pod \"apiserver-76f77b778f-924rd\" (UID: \"05d81e4d-1c18-4533-96c4-888a906b2c25\") " pod="openshift-apiserver/apiserver-76f77b778f-924rd" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.944334 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qq5qs\" (UniqueName: \"kubernetes.io/projected/5be8bc93-cb89-4cc7-822d-739708bab8a9-kube-api-access-qq5qs\") pod \"controller-manager-879f6c89f-xzd8z\" (UID: \"5be8bc93-cb89-4cc7-822d-739708bab8a9\") " pod="openshift-controller-manager/controller-manager-879f6c89f-xzd8z" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.944376 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/776fba5d-1567-4a5d-a090-b8fbf37a583b-trusted-ca\") pod \"console-operator-58897d9998-r4p46\" (UID: \"776fba5d-1567-4a5d-a090-b8fbf37a583b\") " pod="openshift-console-operator/console-operator-58897d9998-r4p46" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.944438 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/05d81e4d-1c18-4533-96c4-888a906b2c25-audit\") pod \"apiserver-76f77b778f-924rd\" (UID: \"05d81e4d-1c18-4533-96c4-888a906b2c25\") " pod="openshift-apiserver/apiserver-76f77b778f-924rd" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.944458 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/e7318993-282b-4a86-907d-fc810869cc7c-etcd-client\") pod \"apiserver-7bbb656c7d-8pmjh\" (UID: \"e7318993-282b-4a86-907d-fc810869cc7c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8pmjh" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.944479 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" 
(UniqueName: \"kubernetes.io/configmap/bf4cd740-a799-47d8-9ce6-88bce9afa952-config\") pod \"machine-api-operator-5694c8668f-pmdsg\" (UID: \"bf4cd740-a799-47d8-9ce6-88bce9afa952\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-pmdsg" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.944498 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5be8bc93-cb89-4cc7-822d-739708bab8a9-client-ca\") pod \"controller-manager-879f6c89f-xzd8z\" (UID: \"5be8bc93-cb89-4cc7-822d-739708bab8a9\") " pod="openshift-controller-manager/controller-manager-879f6c89f-xzd8z" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.944515 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/776fba5d-1567-4a5d-a090-b8fbf37a583b-serving-cert\") pod \"console-operator-58897d9998-r4p46\" (UID: \"776fba5d-1567-4a5d-a090-b8fbf37a583b\") " pod="openshift-console-operator/console-operator-58897d9998-r4p46" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.944538 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rpmxx\" (UniqueName: \"kubernetes.io/projected/9c33b77a-915c-4fbe-b4f0-c7cfd8cb2fd0-kube-api-access-rpmxx\") pod \"openshift-config-operator-7777fb866f-8zwlj\" (UID: \"9c33b77a-915c-4fbe-b4f0-c7cfd8cb2fd0\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-8zwlj" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.944573 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/9c33b77a-915c-4fbe-b4f0-c7cfd8cb2fd0-available-featuregates\") pod \"openshift-config-operator-7777fb866f-8zwlj\" (UID: \"9c33b77a-915c-4fbe-b4f0-c7cfd8cb2fd0\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-8zwlj" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.944593 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/414ea71c-9878-4a8a-825f-3d18ff8460d9-config\") pod \"route-controller-manager-6576b87f9c-c4rs8\" (UID: \"414ea71c-9878-4a8a-825f-3d18ff8460d9\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-c4rs8" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.944615 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/fc3dda6a-2c92-4d44-b2b6-70f89b397af4-etcd-service-ca\") pod \"etcd-operator-b45778765-h972j\" (UID: \"fc3dda6a-2c92-4d44-b2b6-70f89b397af4\") " pod="openshift-etcd-operator/etcd-operator-b45778765-h972j" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.944685 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3380c41e-02cc-4bb7-97b4-b3df719ea736-service-ca-bundle\") pod \"authentication-operator-69f744f599-d4bsv\" (UID: \"3380c41e-02cc-4bb7-97b4-b3df719ea736\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-d4bsv" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.944721 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" 
(UniqueName: \"kubernetes.io/secret/05d81e4d-1c18-4533-96c4-888a906b2c25-encryption-config\") pod \"apiserver-76f77b778f-924rd\" (UID: \"05d81e4d-1c18-4533-96c4-888a906b2c25\") " pod="openshift-apiserver/apiserver-76f77b778f-924rd" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.944740 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/462d08cc-03ff-4c48-8002-172bc184d6ea-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-n64hz\" (UID: \"462d08cc-03ff-4c48-8002-172bc184d6ea\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-n64hz" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.944766 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-knfgj\" (UniqueName: \"kubernetes.io/projected/3380c41e-02cc-4bb7-97b4-b3df719ea736-kube-api-access-knfgj\") pod \"authentication-operator-69f744f599-d4bsv\" (UID: \"3380c41e-02cc-4bb7-97b4-b3df719ea736\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-d4bsv" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.944787 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xzwjq\" (UniqueName: \"kubernetes.io/projected/50b92b8d-158e-4ec4-aaed-a5c83aafeb8b-kube-api-access-xzwjq\") pod \"downloads-7954f5f757-l2j7b\" (UID: \"50b92b8d-158e-4ec4-aaed-a5c83aafeb8b\") " pod="openshift-console/downloads-7954f5f757-l2j7b" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.944819 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8-console-config\") pod \"console-f9d7485db-r5hfv\" (UID: \"4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8\") " pod="openshift-console/console-f9d7485db-r5hfv" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.944839 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/e7318993-282b-4a86-907d-fc810869cc7c-audit-policies\") pod \"apiserver-7bbb656c7d-8pmjh\" (UID: \"e7318993-282b-4a86-907d-fc810869cc7c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8pmjh" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.944855 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3380c41e-02cc-4bb7-97b4-b3df719ea736-config\") pod \"authentication-operator-69f744f599-d4bsv\" (UID: \"3380c41e-02cc-4bb7-97b4-b3df719ea736\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-d4bsv" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.944883 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9c33b77a-915c-4fbe-b4f0-c7cfd8cb2fd0-serving-cert\") pod \"openshift-config-operator-7777fb866f-8zwlj\" (UID: \"9c33b77a-915c-4fbe-b4f0-c7cfd8cb2fd0\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-8zwlj" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.944901 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/fc3dda6a-2c92-4d44-b2b6-70f89b397af4-etcd-ca\") 
pod \"etcd-operator-b45778765-h972j\" (UID: \"fc3dda6a-2c92-4d44-b2b6-70f89b397af4\") " pod="openshift-etcd-operator/etcd-operator-b45778765-h972j" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.944938 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x7sfq\" (UniqueName: \"kubernetes.io/projected/e245c27d-066f-4422-9cdb-e5b7525b8717-kube-api-access-x7sfq\") pod \"openshift-controller-manager-operator-756b6f6bc6-9dv9p\" (UID: \"e245c27d-066f-4422-9cdb-e5b7525b8717\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-9dv9p" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.944965 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/e5a0e5db-1ea7-4af8-9f2d-db55d53645ab-metrics-tls\") pod \"dns-operator-744455d44c-77hfw\" (UID: \"e5a0e5db-1ea7-4af8-9f2d-db55d53645ab\") " pod="openshift-dns-operator/dns-operator-744455d44c-77hfw" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.944999 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ffgjt\" (UniqueName: \"kubernetes.io/projected/76be61d0-03ad-4822-8097-2ef2bde86bf1-kube-api-access-ffgjt\") pod \"control-plane-machine-set-operator-78cbb6b69f-c599r\" (UID: \"76be61d0-03ad-4822-8097-2ef2bde86bf1\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-c599r" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.945118 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e245c27d-066f-4422-9cdb-e5b7525b8717-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-9dv9p\" (UID: \"e245c27d-066f-4422-9cdb-e5b7525b8717\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-9dv9p" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.945192 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/05d81e4d-1c18-4533-96c4-888a906b2c25-serving-cert\") pod \"apiserver-76f77b778f-924rd\" (UID: \"05d81e4d-1c18-4533-96c4-888a906b2c25\") " pod="openshift-apiserver/apiserver-76f77b778f-924rd" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.945225 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/03a48f34-2c09-4130-bed4-6e33b080909e-auth-proxy-config\") pod \"machine-approver-56656f9798-zxx57\" (UID: \"03a48f34-2c09-4130-bed4-6e33b080909e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-zxx57" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.945246 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9bpqs\" (UniqueName: \"kubernetes.io/projected/e7318993-282b-4a86-907d-fc810869cc7c-kube-api-access-9bpqs\") pod \"apiserver-7bbb656c7d-8pmjh\" (UID: \"e7318993-282b-4a86-907d-fc810869cc7c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8pmjh" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.945281 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6cdzs\" (UniqueName: 
\"kubernetes.io/projected/4d528172-a0ee-4b06-a09c-08eb92b12cee-kube-api-access-6cdzs\") pod \"service-ca-9c57cc56f-xzppk\" (UID: \"4d528172-a0ee-4b06-a09c-08eb92b12cee\") " pod="openshift-service-ca/service-ca-9c57cc56f-xzppk" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.945301 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e7318993-282b-4a86-907d-fc810869cc7c-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-8pmjh\" (UID: \"e7318993-282b-4a86-907d-fc810869cc7c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8pmjh" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.945326 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/776fba5d-1567-4a5d-a090-b8fbf37a583b-config\") pod \"console-operator-58897d9998-r4p46\" (UID: \"776fba5d-1567-4a5d-a090-b8fbf37a583b\") " pod="openshift-console-operator/console-operator-58897d9998-r4p46" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.945351 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/6241b81b-a379-4339-86df-eb7baf5c1ec0-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-vj7lx\" (UID: \"6241b81b-a379-4339-86df-eb7baf5c1ec0\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vj7lx" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.949722 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ljrw4"] Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.950407 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-c4rs8"] Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.950512 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ljrw4" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.956610 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-2gj5n"] Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.957321 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-2gj5n" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.957688 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-lxg6x"] Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.958370 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-lxg6x" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.960786 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-n8mp7"] Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.977299 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.980964 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-xzd8z"] Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.981113 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-n8mp7" Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.991912 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-924rd"] Oct 09 13:27:53 crc kubenswrapper[4762]: I1009 13:27:53.997360 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.007318 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.013846 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-6ncgf"] Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.015243 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-6ncgf" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.019986 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.020575 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-8pmjh"] Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.026378 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-l2j7b"] Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.028768 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-btvjf"] Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.032737 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-r4p46"] Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.036784 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-r5hfv"] Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.039774 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-v2xzj"] Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.042831 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-h972j"] Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.044696 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-d4bsv"] Oct 09 13:27:54 crc 
kubenswrapper[4762]: I1009 13:27:54.046299 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-8zwlj"] Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.046707 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/4d528172-a0ee-4b06-a09c-08eb92b12cee-signing-key\") pod \"service-ca-9c57cc56f-xzppk\" (UID: \"4d528172-a0ee-4b06-a09c-08eb92b12cee\") " pod="openshift-service-ca/service-ca-9c57cc56f-xzppk" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.046748 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/414ea71c-9878-4a8a-825f-3d18ff8460d9-serving-cert\") pod \"route-controller-manager-6576b87f9c-c4rs8\" (UID: \"414ea71c-9878-4a8a-825f-3d18ff8460d9\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-c4rs8" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.046772 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/fc3dda6a-2c92-4d44-b2b6-70f89b397af4-etcd-client\") pod \"etcd-operator-b45778765-h972j\" (UID: \"fc3dda6a-2c92-4d44-b2b6-70f89b397af4\") " pod="openshift-etcd-operator/etcd-operator-b45778765-h972j" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.046794 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hnk84\" (UniqueName: \"kubernetes.io/projected/cb588149-29d3-47af-9359-406e69f9fb33-kube-api-access-hnk84\") pod \"cluster-samples-operator-665b6dd947-v2xzj\" (UID: \"cb588149-29d3-47af-9359-406e69f9fb33\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-v2xzj" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.046816 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5be8bc93-cb89-4cc7-822d-739708bab8a9-serving-cert\") pod \"controller-manager-879f6c89f-xzd8z\" (UID: \"5be8bc93-cb89-4cc7-822d-739708bab8a9\") " pod="openshift-controller-manager/controller-manager-879f6c89f-xzd8z" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.046838 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/414ea71c-9878-4a8a-825f-3d18ff8460d9-client-ca\") pod \"route-controller-manager-6576b87f9c-c4rs8\" (UID: \"414ea71c-9878-4a8a-825f-3d18ff8460d9\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-c4rs8" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.046858 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8-trusted-ca-bundle\") pod \"console-f9d7485db-r5hfv\" (UID: \"4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8\") " pod="openshift-console/console-f9d7485db-r5hfv" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.046884 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/05d81e4d-1c18-4533-96c4-888a906b2c25-etcd-serving-ca\") pod \"apiserver-76f77b778f-924rd\" (UID: \"05d81e4d-1c18-4533-96c4-888a906b2c25\") " pod="openshift-apiserver/apiserver-76f77b778f-924rd" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.046904 4762 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/be104342-f3c8-4e61-bd52-81e26e001325-config\") pod \"kube-apiserver-operator-766d6c64bb-hwt8t\" (UID: \"be104342-f3c8-4e61-bd52-81e26e001325\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-hwt8t" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.046924 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tknm4\" (UniqueName: \"kubernetes.io/projected/414ea71c-9878-4a8a-825f-3d18ff8460d9-kube-api-access-tknm4\") pod \"route-controller-manager-6576b87f9c-c4rs8\" (UID: \"414ea71c-9878-4a8a-825f-3d18ff8460d9\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-c4rs8" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.046947 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8-console-serving-cert\") pod \"console-f9d7485db-r5hfv\" (UID: \"4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8\") " pod="openshift-console/console-f9d7485db-r5hfv" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.046969 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/e7318993-282b-4a86-907d-fc810869cc7c-encryption-config\") pod \"apiserver-7bbb656c7d-8pmjh\" (UID: \"e7318993-282b-4a86-907d-fc810869cc7c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8pmjh" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.046991 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jl7pd\" (UniqueName: \"kubernetes.io/projected/d542877b-87f6-4d96-9256-ff58662d9547-kube-api-access-jl7pd\") pod \"olm-operator-6b444d44fb-qpmbq\" (UID: \"d542877b-87f6-4d96-9256-ff58662d9547\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-qpmbq" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.047010 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/462d08cc-03ff-4c48-8002-172bc184d6ea-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-n64hz\" (UID: \"462d08cc-03ff-4c48-8002-172bc184d6ea\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-n64hz" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.047033 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/e7318993-282b-4a86-907d-fc810869cc7c-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-8pmjh\" (UID: \"e7318993-282b-4a86-907d-fc810869cc7c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8pmjh" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.047052 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3818a2d1-8618-4b53-b326-afb6cbdfaf38-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-btvjf\" (UID: \"3818a2d1-8618-4b53-b326-afb6cbdfaf38\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-btvjf" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.047074 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/3380c41e-02cc-4bb7-97b4-b3df719ea736-serving-cert\") pod \"authentication-operator-69f744f599-d4bsv\" (UID: \"3380c41e-02cc-4bb7-97b4-b3df719ea736\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-d4bsv" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.047093 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/05d81e4d-1c18-4533-96c4-888a906b2c25-node-pullsecrets\") pod \"apiserver-76f77b778f-924rd\" (UID: \"05d81e4d-1c18-4533-96c4-888a906b2c25\") " pod="openshift-apiserver/apiserver-76f77b778f-924rd" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.047115 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf4cd740-a799-47d8-9ce6-88bce9afa952-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-pmdsg\" (UID: \"bf4cd740-a799-47d8-9ce6-88bce9afa952\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-pmdsg" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.047136 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zcxlq\" (UniqueName: \"kubernetes.io/projected/bf4cd740-a799-47d8-9ce6-88bce9afa952-kube-api-access-zcxlq\") pod \"machine-api-operator-5694c8668f-pmdsg\" (UID: \"bf4cd740-a799-47d8-9ce6-88bce9afa952\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-pmdsg" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.047158 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jkmqc\" (UniqueName: \"kubernetes.io/projected/fc3dda6a-2c92-4d44-b2b6-70f89b397af4-kube-api-access-jkmqc\") pod \"etcd-operator-b45778765-h972j\" (UID: \"fc3dda6a-2c92-4d44-b2b6-70f89b397af4\") " pod="openshift-etcd-operator/etcd-operator-b45778765-h972j" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.047179 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/4d528172-a0ee-4b06-a09c-08eb92b12cee-signing-cabundle\") pod \"service-ca-9c57cc56f-xzppk\" (UID: \"4d528172-a0ee-4b06-a09c-08eb92b12cee\") " pod="openshift-service-ca/service-ca-9c57cc56f-xzppk" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.047201 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/05d81e4d-1c18-4533-96c4-888a906b2c25-etcd-client\") pod \"apiserver-76f77b778f-924rd\" (UID: \"05d81e4d-1c18-4533-96c4-888a906b2c25\") " pod="openshift-apiserver/apiserver-76f77b778f-924rd" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.047224 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/cb588149-29d3-47af-9359-406e69f9fb33-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-v2xzj\" (UID: \"cb588149-29d3-47af-9359-406e69f9fb33\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-v2xzj" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.047244 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/462d08cc-03ff-4c48-8002-172bc184d6ea-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-n64hz\" (UID: \"462d08cc-03ff-4c48-8002-172bc184d6ea\") " 
pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-n64hz" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.047283 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-smk9c\" (UniqueName: \"kubernetes.io/projected/776fba5d-1567-4a5d-a090-b8fbf37a583b-kube-api-access-smk9c\") pod \"console-operator-58897d9998-r4p46\" (UID: \"776fba5d-1567-4a5d-a090-b8fbf37a583b\") " pod="openshift-console-operator/console-operator-58897d9998-r4p46" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.047312 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e245c27d-066f-4422-9cdb-e5b7525b8717-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-9dv9p\" (UID: \"e245c27d-066f-4422-9cdb-e5b7525b8717\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-9dv9p" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.047342 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5cwmf\" (UniqueName: \"kubernetes.io/projected/6241b81b-a379-4339-86df-eb7baf5c1ec0-kube-api-access-5cwmf\") pod \"package-server-manager-789f6589d5-vj7lx\" (UID: \"6241b81b-a379-4339-86df-eb7baf5c1ec0\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vj7lx" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.047364 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6rnwh\" (UniqueName: \"kubernetes.io/projected/3818a2d1-8618-4b53-b326-afb6cbdfaf38-kube-api-access-6rnwh\") pod \"openshift-apiserver-operator-796bbdcf4f-btvjf\" (UID: \"3818a2d1-8618-4b53-b326-afb6cbdfaf38\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-btvjf" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.047388 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hp9rd\" (UniqueName: \"kubernetes.io/projected/03a48f34-2c09-4130-bed4-6e33b080909e-kube-api-access-hp9rd\") pod \"machine-approver-56656f9798-zxx57\" (UID: \"03a48f34-2c09-4130-bed4-6e33b080909e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-zxx57" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.047412 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fe7e464e-6acb-478d-8de2-ea89f6e19734-config\") pod \"kube-controller-manager-operator-78b949d7b-hjtqj\" (UID: \"fe7e464e-6acb-478d-8de2-ea89f6e19734\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-hjtqj" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.047433 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8-service-ca\") pod \"console-f9d7485db-r5hfv\" (UID: \"4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8\") " pod="openshift-console/console-f9d7485db-r5hfv" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.047456 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-btl44\" (UniqueName: \"kubernetes.io/projected/e5a0e5db-1ea7-4af8-9f2d-db55d53645ab-kube-api-access-btl44\") pod \"dns-operator-744455d44c-77hfw\" (UID: \"e5a0e5db-1ea7-4af8-9f2d-db55d53645ab\") " 
pod="openshift-dns-operator/dns-operator-744455d44c-77hfw" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.047476 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jtznr\" (UniqueName: \"kubernetes.io/projected/4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8-kube-api-access-jtznr\") pod \"console-f9d7485db-r5hfv\" (UID: \"4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8\") " pod="openshift-console/console-f9d7485db-r5hfv" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.047496 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/d542877b-87f6-4d96-9256-ff58662d9547-profile-collector-cert\") pod \"olm-operator-6b444d44fb-qpmbq\" (UID: \"d542877b-87f6-4d96-9256-ff58662d9547\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-qpmbq" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.047516 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3380c41e-02cc-4bb7-97b4-b3df719ea736-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-d4bsv\" (UID: \"3380c41e-02cc-4bb7-97b4-b3df719ea736\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-d4bsv" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.047536 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/05d81e4d-1c18-4533-96c4-888a906b2c25-audit-dir\") pod \"apiserver-76f77b778f-924rd\" (UID: \"05d81e4d-1c18-4533-96c4-888a906b2c25\") " pod="openshift-apiserver/apiserver-76f77b778f-924rd" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.047559 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/03a48f34-2c09-4130-bed4-6e33b080909e-machine-approver-tls\") pod \"machine-approver-56656f9798-zxx57\" (UID: \"03a48f34-2c09-4130-bed4-6e33b080909e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-zxx57" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.047582 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fe7e464e-6acb-478d-8de2-ea89f6e19734-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-hjtqj\" (UID: \"fe7e464e-6acb-478d-8de2-ea89f6e19734\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-hjtqj" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.047606 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/05d81e4d-1c18-4533-96c4-888a906b2c25-trusted-ca-bundle\") pod \"apiserver-76f77b778f-924rd\" (UID: \"05d81e4d-1c18-4533-96c4-888a906b2c25\") " pod="openshift-apiserver/apiserver-76f77b778f-924rd" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.047628 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qq5qs\" (UniqueName: \"kubernetes.io/projected/5be8bc93-cb89-4cc7-822d-739708bab8a9-kube-api-access-qq5qs\") pod \"controller-manager-879f6c89f-xzd8z\" (UID: \"5be8bc93-cb89-4cc7-822d-739708bab8a9\") " pod="openshift-controller-manager/controller-manager-879f6c89f-xzd8z" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.047670 4762 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/776fba5d-1567-4a5d-a090-b8fbf37a583b-trusted-ca\") pod \"console-operator-58897d9998-r4p46\" (UID: \"776fba5d-1567-4a5d-a090-b8fbf37a583b\") " pod="openshift-console-operator/console-operator-58897d9998-r4p46" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.047692 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/05d81e4d-1c18-4533-96c4-888a906b2c25-audit\") pod \"apiserver-76f77b778f-924rd\" (UID: \"05d81e4d-1c18-4533-96c4-888a906b2c25\") " pod="openshift-apiserver/apiserver-76f77b778f-924rd" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.047713 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/e7318993-282b-4a86-907d-fc810869cc7c-etcd-client\") pod \"apiserver-7bbb656c7d-8pmjh\" (UID: \"e7318993-282b-4a86-907d-fc810869cc7c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8pmjh" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.047737 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bf4cd740-a799-47d8-9ce6-88bce9afa952-config\") pod \"machine-api-operator-5694c8668f-pmdsg\" (UID: \"bf4cd740-a799-47d8-9ce6-88bce9afa952\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-pmdsg" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.047758 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5be8bc93-cb89-4cc7-822d-739708bab8a9-client-ca\") pod \"controller-manager-879f6c89f-xzd8z\" (UID: \"5be8bc93-cb89-4cc7-822d-739708bab8a9\") " pod="openshift-controller-manager/controller-manager-879f6c89f-xzd8z" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.047781 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/776fba5d-1567-4a5d-a090-b8fbf37a583b-serving-cert\") pod \"console-operator-58897d9998-r4p46\" (UID: \"776fba5d-1567-4a5d-a090-b8fbf37a583b\") " pod="openshift-console-operator/console-operator-58897d9998-r4p46" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.047804 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rpmxx\" (UniqueName: \"kubernetes.io/projected/9c33b77a-915c-4fbe-b4f0-c7cfd8cb2fd0-kube-api-access-rpmxx\") pod \"openshift-config-operator-7777fb866f-8zwlj\" (UID: \"9c33b77a-915c-4fbe-b4f0-c7cfd8cb2fd0\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-8zwlj" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.047826 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/9c33b77a-915c-4fbe-b4f0-c7cfd8cb2fd0-available-featuregates\") pod \"openshift-config-operator-7777fb866f-8zwlj\" (UID: \"9c33b77a-915c-4fbe-b4f0-c7cfd8cb2fd0\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-8zwlj" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.047847 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/414ea71c-9878-4a8a-825f-3d18ff8460d9-config\") pod \"route-controller-manager-6576b87f9c-c4rs8\" (UID: \"414ea71c-9878-4a8a-825f-3d18ff8460d9\") " 
pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-c4rs8" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.047869 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/fc3dda6a-2c92-4d44-b2b6-70f89b397af4-etcd-service-ca\") pod \"etcd-operator-b45778765-h972j\" (UID: \"fc3dda6a-2c92-4d44-b2b6-70f89b397af4\") " pod="openshift-etcd-operator/etcd-operator-b45778765-h972j" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.047893 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3380c41e-02cc-4bb7-97b4-b3df719ea736-service-ca-bundle\") pod \"authentication-operator-69f744f599-d4bsv\" (UID: \"3380c41e-02cc-4bb7-97b4-b3df719ea736\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-d4bsv" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.047915 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/05d81e4d-1c18-4533-96c4-888a906b2c25-encryption-config\") pod \"apiserver-76f77b778f-924rd\" (UID: \"05d81e4d-1c18-4533-96c4-888a906b2c25\") " pod="openshift-apiserver/apiserver-76f77b778f-924rd" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.047935 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/462d08cc-03ff-4c48-8002-172bc184d6ea-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-n64hz\" (UID: \"462d08cc-03ff-4c48-8002-172bc184d6ea\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-n64hz" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.047959 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-knfgj\" (UniqueName: \"kubernetes.io/projected/3380c41e-02cc-4bb7-97b4-b3df719ea736-kube-api-access-knfgj\") pod \"authentication-operator-69f744f599-d4bsv\" (UID: \"3380c41e-02cc-4bb7-97b4-b3df719ea736\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-d4bsv" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.047980 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xzwjq\" (UniqueName: \"kubernetes.io/projected/50b92b8d-158e-4ec4-aaed-a5c83aafeb8b-kube-api-access-xzwjq\") pod \"downloads-7954f5f757-l2j7b\" (UID: \"50b92b8d-158e-4ec4-aaed-a5c83aafeb8b\") " pod="openshift-console/downloads-7954f5f757-l2j7b" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.048031 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8-console-config\") pod \"console-f9d7485db-r5hfv\" (UID: \"4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8\") " pod="openshift-console/console-f9d7485db-r5hfv" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.048064 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/e7318993-282b-4a86-907d-fc810869cc7c-audit-policies\") pod \"apiserver-7bbb656c7d-8pmjh\" (UID: \"e7318993-282b-4a86-907d-fc810869cc7c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8pmjh" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.048088 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"serving-cert\" (UniqueName: \"kubernetes.io/secret/9c33b77a-915c-4fbe-b4f0-c7cfd8cb2fd0-serving-cert\") pod \"openshift-config-operator-7777fb866f-8zwlj\" (UID: \"9c33b77a-915c-4fbe-b4f0-c7cfd8cb2fd0\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-8zwlj" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.048110 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/fc3dda6a-2c92-4d44-b2b6-70f89b397af4-etcd-ca\") pod \"etcd-operator-b45778765-h972j\" (UID: \"fc3dda6a-2c92-4d44-b2b6-70f89b397af4\") " pod="openshift-etcd-operator/etcd-operator-b45778765-h972j" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.048133 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3380c41e-02cc-4bb7-97b4-b3df719ea736-config\") pod \"authentication-operator-69f744f599-d4bsv\" (UID: \"3380c41e-02cc-4bb7-97b4-b3df719ea736\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-d4bsv" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.048149 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/05d81e4d-1c18-4533-96c4-888a906b2c25-etcd-serving-ca\") pod \"apiserver-76f77b778f-924rd\" (UID: \"05d81e4d-1c18-4533-96c4-888a906b2c25\") " pod="openshift-apiserver/apiserver-76f77b778f-924rd" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.048161 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x7sfq\" (UniqueName: \"kubernetes.io/projected/e245c27d-066f-4422-9cdb-e5b7525b8717-kube-api-access-x7sfq\") pod \"openshift-controller-manager-operator-756b6f6bc6-9dv9p\" (UID: \"e245c27d-066f-4422-9cdb-e5b7525b8717\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-9dv9p" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.048166 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8-trusted-ca-bundle\") pod \"console-f9d7485db-r5hfv\" (UID: \"4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8\") " pod="openshift-console/console-f9d7485db-r5hfv" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.048188 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/e5a0e5db-1ea7-4af8-9f2d-db55d53645ab-metrics-tls\") pod \"dns-operator-744455d44c-77hfw\" (UID: \"e5a0e5db-1ea7-4af8-9f2d-db55d53645ab\") " pod="openshift-dns-operator/dns-operator-744455d44c-77hfw" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.048240 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ffgjt\" (UniqueName: \"kubernetes.io/projected/76be61d0-03ad-4822-8097-2ef2bde86bf1-kube-api-access-ffgjt\") pod \"control-plane-machine-set-operator-78cbb6b69f-c599r\" (UID: \"76be61d0-03ad-4822-8097-2ef2bde86bf1\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-c599r" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.048274 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e245c27d-066f-4422-9cdb-e5b7525b8717-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-9dv9p\" (UID: 
\"e245c27d-066f-4422-9cdb-e5b7525b8717\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-9dv9p" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.048297 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/05d81e4d-1c18-4533-96c4-888a906b2c25-serving-cert\") pod \"apiserver-76f77b778f-924rd\" (UID: \"05d81e4d-1c18-4533-96c4-888a906b2c25\") " pod="openshift-apiserver/apiserver-76f77b778f-924rd" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.048327 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/03a48f34-2c09-4130-bed4-6e33b080909e-auth-proxy-config\") pod \"machine-approver-56656f9798-zxx57\" (UID: \"03a48f34-2c09-4130-bed4-6e33b080909e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-zxx57" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.048355 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6cdzs\" (UniqueName: \"kubernetes.io/projected/4d528172-a0ee-4b06-a09c-08eb92b12cee-kube-api-access-6cdzs\") pod \"service-ca-9c57cc56f-xzppk\" (UID: \"4d528172-a0ee-4b06-a09c-08eb92b12cee\") " pod="openshift-service-ca/service-ca-9c57cc56f-xzppk" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.048376 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e7318993-282b-4a86-907d-fc810869cc7c-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-8pmjh\" (UID: \"e7318993-282b-4a86-907d-fc810869cc7c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8pmjh" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.048398 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9bpqs\" (UniqueName: \"kubernetes.io/projected/e7318993-282b-4a86-907d-fc810869cc7c-kube-api-access-9bpqs\") pod \"apiserver-7bbb656c7d-8pmjh\" (UID: \"e7318993-282b-4a86-907d-fc810869cc7c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8pmjh" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.048422 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/776fba5d-1567-4a5d-a090-b8fbf37a583b-config\") pod \"console-operator-58897d9998-r4p46\" (UID: \"776fba5d-1567-4a5d-a090-b8fbf37a583b\") " pod="openshift-console-operator/console-operator-58897d9998-r4p46" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.048449 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/6241b81b-a379-4339-86df-eb7baf5c1ec0-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-vj7lx\" (UID: \"6241b81b-a379-4339-86df-eb7baf5c1ec0\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vj7lx" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.048488 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/bf4cd740-a799-47d8-9ce6-88bce9afa952-images\") pod \"machine-api-operator-5694c8668f-pmdsg\" (UID: \"bf4cd740-a799-47d8-9ce6-88bce9afa952\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-pmdsg" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.048509 4762 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5be8bc93-cb89-4cc7-822d-739708bab8a9-config\") pod \"controller-manager-879f6c89f-xzd8z\" (UID: \"5be8bc93-cb89-4cc7-822d-739708bab8a9\") " pod="openshift-controller-manager/controller-manager-879f6c89f-xzd8z" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.048533 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/fe7e464e-6acb-478d-8de2-ea89f6e19734-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-hjtqj\" (UID: \"fe7e464e-6acb-478d-8de2-ea89f6e19734\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-hjtqj" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.048556 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-htg29\" (UniqueName: \"kubernetes.io/projected/05d81e4d-1c18-4533-96c4-888a906b2c25-kube-api-access-htg29\") pod \"apiserver-76f77b778f-924rd\" (UID: \"05d81e4d-1c18-4533-96c4-888a906b2c25\") " pod="openshift-apiserver/apiserver-76f77b778f-924rd" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.048577 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/e7318993-282b-4a86-907d-fc810869cc7c-audit-dir\") pod \"apiserver-7bbb656c7d-8pmjh\" (UID: \"e7318993-282b-4a86-907d-fc810869cc7c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8pmjh" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.048598 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8-console-oauth-config\") pod \"console-f9d7485db-r5hfv\" (UID: \"4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8\") " pod="openshift-console/console-f9d7485db-r5hfv" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.048954 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fc3dda6a-2c92-4d44-b2b6-70f89b397af4-config\") pod \"etcd-operator-b45778765-h972j\" (UID: \"fc3dda6a-2c92-4d44-b2b6-70f89b397af4\") " pod="openshift-etcd-operator/etcd-operator-b45778765-h972j" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.048978 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/76be61d0-03ad-4822-8097-2ef2bde86bf1-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-c599r\" (UID: \"76be61d0-03ad-4822-8097-2ef2bde86bf1\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-c599r" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.049005 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6q645\" (UniqueName: \"kubernetes.io/projected/07eb0a4b-0831-4b9c-8883-a624f6a27488-kube-api-access-6q645\") pod \"kube-storage-version-migrator-operator-b67b599dd-mbgsr\" (UID: \"07eb0a4b-0831-4b9c-8883-a624f6a27488\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-mbgsr" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.049032 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" 
(UniqueName: \"kubernetes.io/secret/d542877b-87f6-4d96-9256-ff58662d9547-srv-cert\") pod \"olm-operator-6b444d44fb-qpmbq\" (UID: \"d542877b-87f6-4d96-9256-ff58662d9547\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-qpmbq" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.049061 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fc3dda6a-2c92-4d44-b2b6-70f89b397af4-serving-cert\") pod \"etcd-operator-b45778765-h972j\" (UID: \"fc3dda6a-2c92-4d44-b2b6-70f89b397af4\") " pod="openshift-etcd-operator/etcd-operator-b45778765-h972j" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.049090 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/03a48f34-2c09-4130-bed4-6e33b080909e-config\") pod \"machine-approver-56656f9798-zxx57\" (UID: \"03a48f34-2c09-4130-bed4-6e33b080909e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-zxx57" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.049112 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8-oauth-serving-cert\") pod \"console-f9d7485db-r5hfv\" (UID: \"4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8\") " pod="openshift-console/console-f9d7485db-r5hfv" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.049132 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/05d81e4d-1c18-4533-96c4-888a906b2c25-image-import-ca\") pod \"apiserver-76f77b778f-924rd\" (UID: \"05d81e4d-1c18-4533-96c4-888a906b2c25\") " pod="openshift-apiserver/apiserver-76f77b778f-924rd" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.049151 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3818a2d1-8618-4b53-b326-afb6cbdfaf38-config\") pod \"openshift-apiserver-operator-796bbdcf4f-btvjf\" (UID: \"3818a2d1-8618-4b53-b326-afb6cbdfaf38\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-btvjf" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.049175 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xmkk7\" (UniqueName: \"kubernetes.io/projected/462d08cc-03ff-4c48-8002-172bc184d6ea-kube-api-access-xmkk7\") pod \"cluster-image-registry-operator-dc59b4c8b-n64hz\" (UID: \"462d08cc-03ff-4c48-8002-172bc184d6ea\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-n64hz" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.049196 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/be104342-f3c8-4e61-bd52-81e26e001325-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-hwt8t\" (UID: \"be104342-f3c8-4e61-bd52-81e26e001325\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-hwt8t" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.049219 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/be104342-f3c8-4e61-bd52-81e26e001325-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-hwt8t\" (UID: \"be104342-f3c8-4e61-bd52-81e26e001325\") " 
pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-hwt8t" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.049243 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5be8bc93-cb89-4cc7-822d-739708bab8a9-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-xzd8z\" (UID: \"5be8bc93-cb89-4cc7-822d-739708bab8a9\") " pod="openshift-controller-manager/controller-manager-879f6c89f-xzd8z" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.049265 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/05d81e4d-1c18-4533-96c4-888a906b2c25-config\") pod \"apiserver-76f77b778f-924rd\" (UID: \"05d81e4d-1c18-4533-96c4-888a906b2c25\") " pod="openshift-apiserver/apiserver-76f77b778f-924rd" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.049286 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7318993-282b-4a86-907d-fc810869cc7c-serving-cert\") pod \"apiserver-7bbb656c7d-8pmjh\" (UID: \"e7318993-282b-4a86-907d-fc810869cc7c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8pmjh" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.049317 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/07eb0a4b-0831-4b9c-8883-a624f6a27488-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-mbgsr\" (UID: \"07eb0a4b-0831-4b9c-8883-a624f6a27488\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-mbgsr" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.049341 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/07eb0a4b-0831-4b9c-8883-a624f6a27488-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-mbgsr\" (UID: \"07eb0a4b-0831-4b9c-8883-a624f6a27488\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-mbgsr" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.052499 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-b7276"] Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.052551 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-59vf6"] Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.052563 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-mbgsr"] Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.048640 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/be104342-f3c8-4e61-bd52-81e26e001325-config\") pod \"kube-apiserver-operator-766d6c64bb-hwt8t\" (UID: \"be104342-f3c8-4e61-bd52-81e26e001325\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-hwt8t" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.056295 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-hwt8t"] Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.056341 
4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-c599r"] Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.056352 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-hjtqj"] Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.056730 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/414ea71c-9878-4a8a-825f-3d18ff8460d9-client-ca\") pod \"route-controller-manager-6576b87f9c-c4rs8\" (UID: \"414ea71c-9878-4a8a-825f-3d18ff8460d9\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-c4rs8" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.057516 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e245c27d-066f-4422-9cdb-e5b7525b8717-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-9dv9p\" (UID: \"e245c27d-066f-4422-9cdb-e5b7525b8717\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-9dv9p" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.057765 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5be8bc93-cb89-4cc7-822d-739708bab8a9-serving-cert\") pod \"controller-manager-879f6c89f-xzd8z\" (UID: \"5be8bc93-cb89-4cc7-822d-739708bab8a9\") " pod="openshift-controller-manager/controller-manager-879f6c89f-xzd8z" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.058416 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fc3dda6a-2c92-4d44-b2b6-70f89b397af4-config\") pod \"etcd-operator-b45778765-h972j\" (UID: \"fc3dda6a-2c92-4d44-b2b6-70f89b397af4\") " pod="openshift-etcd-operator/etcd-operator-b45778765-h972j" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.048921 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/e7318993-282b-4a86-907d-fc810869cc7c-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-8pmjh\" (UID: \"e7318993-282b-4a86-907d-fc810869cc7c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8pmjh" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.059782 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/05d81e4d-1c18-4533-96c4-888a906b2c25-node-pullsecrets\") pod \"apiserver-76f77b778f-924rd\" (UID: \"05d81e4d-1c18-4533-96c4-888a906b2c25\") " pod="openshift-apiserver/apiserver-76f77b778f-924rd" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.059810 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-9dv9p"] Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.060484 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/03a48f34-2c09-4130-bed4-6e33b080909e-config\") pod \"machine-approver-56656f9798-zxx57\" (UID: \"03a48f34-2c09-4130-bed4-6e33b080909e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-zxx57" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.061142 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8-oauth-serving-cert\") pod \"console-f9d7485db-r5hfv\" (UID: \"4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8\") " pod="openshift-console/console-f9d7485db-r5hfv" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.061366 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-77hfw"] Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.061542 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e7318993-282b-4a86-907d-fc810869cc7c-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-8pmjh\" (UID: \"e7318993-282b-4a86-907d-fc810869cc7c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8pmjh" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.061894 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/05d81e4d-1c18-4533-96c4-888a906b2c25-image-import-ca\") pod \"apiserver-76f77b778f-924rd\" (UID: \"05d81e4d-1c18-4533-96c4-888a906b2c25\") " pod="openshift-apiserver/apiserver-76f77b778f-924rd" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.062321 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3818a2d1-8618-4b53-b326-afb6cbdfaf38-config\") pod \"openshift-apiserver-operator-796bbdcf4f-btvjf\" (UID: \"3818a2d1-8618-4b53-b326-afb6cbdfaf38\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-btvjf" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.063164 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8-service-ca\") pod \"console-f9d7485db-r5hfv\" (UID: \"4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8\") " pod="openshift-console/console-f9d7485db-r5hfv" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.063804 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/03a48f34-2c09-4130-bed4-6e33b080909e-auth-proxy-config\") pod \"machine-approver-56656f9798-zxx57\" (UID: \"03a48f34-2c09-4130-bed4-6e33b080909e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-zxx57" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.063998 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3818a2d1-8618-4b53-b326-afb6cbdfaf38-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-btvjf\" (UID: \"3818a2d1-8618-4b53-b326-afb6cbdfaf38\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-btvjf" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.064249 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/bf4cd740-a799-47d8-9ce6-88bce9afa952-images\") pod \"machine-api-operator-5694c8668f-pmdsg\" (UID: \"bf4cd740-a799-47d8-9ce6-88bce9afa952\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-pmdsg" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.064646 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fe7e464e-6acb-478d-8de2-ea89f6e19734-config\") pod \"kube-controller-manager-operator-78b949d7b-hjtqj\" (UID: 
\"fe7e464e-6acb-478d-8de2-ea89f6e19734\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-hjtqj" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.065179 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5be8bc93-cb89-4cc7-822d-739708bab8a9-config\") pod \"controller-manager-879f6c89f-xzd8z\" (UID: \"5be8bc93-cb89-4cc7-822d-739708bab8a9\") " pod="openshift-controller-manager/controller-manager-879f6c89f-xzd8z" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.065525 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/e7318993-282b-4a86-907d-fc810869cc7c-audit-dir\") pod \"apiserver-7bbb656c7d-8pmjh\" (UID: \"e7318993-282b-4a86-907d-fc810869cc7c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8pmjh" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.066090 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/776fba5d-1567-4a5d-a090-b8fbf37a583b-config\") pod \"console-operator-58897d9998-r4p46\" (UID: \"776fba5d-1567-4a5d-a090-b8fbf37a583b\") " pod="openshift-console-operator/console-operator-58897d9998-r4p46" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.067632 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.067999 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.068211 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-xzppk"] Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.068896 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8-console-config\") pod \"console-f9d7485db-r5hfv\" (UID: \"4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8\") " pod="openshift-console/console-f9d7485db-r5hfv" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.069281 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/414ea71c-9878-4a8a-825f-3d18ff8460d9-config\") pod \"route-controller-manager-6576b87f9c-c4rs8\" (UID: \"414ea71c-9878-4a8a-825f-3d18ff8460d9\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-c4rs8" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.069353 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/05d81e4d-1c18-4533-96c4-888a906b2c25-audit-dir\") pod \"apiserver-76f77b778f-924rd\" (UID: \"05d81e4d-1c18-4533-96c4-888a906b2c25\") " pod="openshift-apiserver/apiserver-76f77b778f-924rd" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.069489 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/fc3dda6a-2c92-4d44-b2b6-70f89b397af4-etcd-service-ca\") pod \"etcd-operator-b45778765-h972j\" (UID: \"fc3dda6a-2c92-4d44-b2b6-70f89b397af4\") " pod="openshift-etcd-operator/etcd-operator-b45778765-h972j" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.069822 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"etcd-client\" (UniqueName: \"kubernetes.io/secret/fc3dda6a-2c92-4d44-b2b6-70f89b397af4-etcd-client\") pod \"etcd-operator-b45778765-h972j\" (UID: \"fc3dda6a-2c92-4d44-b2b6-70f89b397af4\") " pod="openshift-etcd-operator/etcd-operator-b45778765-h972j" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.070099 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3380c41e-02cc-4bb7-97b4-b3df719ea736-service-ca-bundle\") pod \"authentication-operator-69f744f599-d4bsv\" (UID: \"3380c41e-02cc-4bb7-97b4-b3df719ea736\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-d4bsv" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.070219 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3380c41e-02cc-4bb7-97b4-b3df719ea736-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-d4bsv\" (UID: \"3380c41e-02cc-4bb7-97b4-b3df719ea736\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-d4bsv" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.071085 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fc3dda6a-2c92-4d44-b2b6-70f89b397af4-serving-cert\") pod \"etcd-operator-b45778765-h972j\" (UID: \"fc3dda6a-2c92-4d44-b2b6-70f89b397af4\") " pod="openshift-etcd-operator/etcd-operator-b45778765-h972j" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.072172 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/fc3dda6a-2c92-4d44-b2b6-70f89b397af4-etcd-ca\") pod \"etcd-operator-b45778765-h972j\" (UID: \"fc3dda6a-2c92-4d44-b2b6-70f89b397af4\") " pod="openshift-etcd-operator/etcd-operator-b45778765-h972j" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.072198 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8-console-oauth-config\") pod \"console-f9d7485db-r5hfv\" (UID: \"4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8\") " pod="openshift-console/console-f9d7485db-r5hfv" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.072308 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3380c41e-02cc-4bb7-97b4-b3df719ea736-config\") pod \"authentication-operator-69f744f599-d4bsv\" (UID: \"3380c41e-02cc-4bb7-97b4-b3df719ea736\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-d4bsv" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.072538 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/05d81e4d-1c18-4533-96c4-888a906b2c25-config\") pod \"apiserver-76f77b778f-924rd\" (UID: \"05d81e4d-1c18-4533-96c4-888a906b2c25\") " pod="openshift-apiserver/apiserver-76f77b778f-924rd" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.072586 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-qpmbq"] Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.072612 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-n64hz"] Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.072802 4762 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/05d81e4d-1c18-4533-96c4-888a906b2c25-serving-cert\") pod \"apiserver-76f77b778f-924rd\" (UID: \"05d81e4d-1c18-4533-96c4-888a906b2c25\") " pod="openshift-apiserver/apiserver-76f77b778f-924rd" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.072864 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/e7318993-282b-4a86-907d-fc810869cc7c-audit-policies\") pod \"apiserver-7bbb656c7d-8pmjh\" (UID: \"e7318993-282b-4a86-907d-fc810869cc7c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8pmjh" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.073540 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf4cd740-a799-47d8-9ce6-88bce9afa952-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-pmdsg\" (UID: \"bf4cd740-a799-47d8-9ce6-88bce9afa952\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-pmdsg" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.074315 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/05d81e4d-1c18-4533-96c4-888a906b2c25-trusted-ca-bundle\") pod \"apiserver-76f77b778f-924rd\" (UID: \"05d81e4d-1c18-4533-96c4-888a906b2c25\") " pod="openshift-apiserver/apiserver-76f77b778f-924rd" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.074440 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-8x6nc"] Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.075064 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/776fba5d-1567-4a5d-a090-b8fbf37a583b-trusted-ca\") pod \"console-operator-58897d9998-r4p46\" (UID: \"776fba5d-1567-4a5d-a090-b8fbf37a583b\") " pod="openshift-console-operator/console-operator-58897d9998-r4p46" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.075263 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bf4cd740-a799-47d8-9ce6-88bce9afa952-config\") pod \"machine-api-operator-5694c8668f-pmdsg\" (UID: \"bf4cd740-a799-47d8-9ce6-88bce9afa952\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-pmdsg" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.075557 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5be8bc93-cb89-4cc7-822d-739708bab8a9-client-ca\") pod \"controller-manager-879f6c89f-xzd8z\" (UID: \"5be8bc93-cb89-4cc7-822d-739708bab8a9\") " pod="openshift-controller-manager/controller-manager-879f6c89f-xzd8z" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.075874 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e245c27d-066f-4422-9cdb-e5b7525b8717-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-9dv9p\" (UID: \"e245c27d-066f-4422-9cdb-e5b7525b8717\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-9dv9p" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.077758 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: 
\"kubernetes.io/secret/462d08cc-03ff-4c48-8002-172bc184d6ea-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-n64hz\" (UID: \"462d08cc-03ff-4c48-8002-172bc184d6ea\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-n64hz" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.077920 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-6tzrl"] Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.078071 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/05d81e4d-1c18-4533-96c4-888a906b2c25-etcd-client\") pod \"apiserver-76f77b778f-924rd\" (UID: \"05d81e4d-1c18-4533-96c4-888a906b2c25\") " pod="openshift-apiserver/apiserver-76f77b778f-924rd" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.078149 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/414ea71c-9878-4a8a-825f-3d18ff8460d9-serving-cert\") pod \"route-controller-manager-6576b87f9c-c4rs8\" (UID: \"414ea71c-9878-4a8a-825f-3d18ff8460d9\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-c4rs8" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.078341 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/9c33b77a-915c-4fbe-b4f0-c7cfd8cb2fd0-available-featuregates\") pod \"openshift-config-operator-7777fb866f-8zwlj\" (UID: \"9c33b77a-915c-4fbe-b4f0-c7cfd8cb2fd0\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-8zwlj" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.078544 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/e7318993-282b-4a86-907d-fc810869cc7c-encryption-config\") pod \"apiserver-7bbb656c7d-8pmjh\" (UID: \"e7318993-282b-4a86-907d-fc810869cc7c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8pmjh" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.078833 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/05d81e4d-1c18-4533-96c4-888a906b2c25-audit\") pod \"apiserver-76f77b778f-924rd\" (UID: \"05d81e4d-1c18-4533-96c4-888a906b2c25\") " pod="openshift-apiserver/apiserver-76f77b778f-924rd" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.078884 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/dns-default-6tzrl" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.079162 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3380c41e-02cc-4bb7-97b4-b3df719ea736-serving-cert\") pod \"authentication-operator-69f744f599-d4bsv\" (UID: \"3380c41e-02cc-4bb7-97b4-b3df719ea736\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-d4bsv" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.080007 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/462d08cc-03ff-4c48-8002-172bc184d6ea-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-n64hz\" (UID: \"462d08cc-03ff-4c48-8002-172bc184d6ea\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-n64hz" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.080084 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.080143 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-pmkqc"] Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.080556 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5be8bc93-cb89-4cc7-822d-739708bab8a9-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-xzd8z\" (UID: \"5be8bc93-cb89-4cc7-822d-739708bab8a9\") " pod="openshift-controller-manager/controller-manager-879f6c89f-xzd8z" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.081373 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-pmkqc" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.081772 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/be104342-f3c8-4e61-bd52-81e26e001325-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-hwt8t\" (UID: \"be104342-f3c8-4e61-bd52-81e26e001325\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-hwt8t" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.082041 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/e7318993-282b-4a86-907d-fc810869cc7c-etcd-client\") pod \"apiserver-7bbb656c7d-8pmjh\" (UID: \"e7318993-282b-4a86-907d-fc810869cc7c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8pmjh" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.082593 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fe7e464e-6acb-478d-8de2-ea89f6e19734-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-hjtqj\" (UID: \"fe7e464e-6acb-478d-8de2-ea89f6e19734\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-hjtqj" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.083006 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/e5a0e5db-1ea7-4af8-9f2d-db55d53645ab-metrics-tls\") pod \"dns-operator-744455d44c-77hfw\" (UID: \"e5a0e5db-1ea7-4af8-9f2d-db55d53645ab\") " pod="openshift-dns-operator/dns-operator-744455d44c-77hfw" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 
13:27:54.083037 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-865sv"] Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.084416 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9c33b77a-915c-4fbe-b4f0-c7cfd8cb2fd0-serving-cert\") pod \"openshift-config-operator-7777fb866f-8zwlj\" (UID: \"9c33b77a-915c-4fbe-b4f0-c7cfd8cb2fd0\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-8zwlj" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.084877 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7318993-282b-4a86-907d-fc810869cc7c-serving-cert\") pod \"apiserver-7bbb656c7d-8pmjh\" (UID: \"e7318993-282b-4a86-907d-fc810869cc7c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8pmjh" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.085315 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/05d81e4d-1c18-4533-96c4-888a906b2c25-encryption-config\") pod \"apiserver-76f77b778f-924rd\" (UID: \"05d81e4d-1c18-4533-96c4-888a906b2c25\") " pod="openshift-apiserver/apiserver-76f77b778f-924rd" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.085428 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8-console-serving-cert\") pod \"console-f9d7485db-r5hfv\" (UID: \"4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8\") " pod="openshift-console/console-f9d7485db-r5hfv" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.086096 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/03a48f34-2c09-4130-bed4-6e33b080909e-machine-approver-tls\") pod \"machine-approver-56656f9798-zxx57\" (UID: \"03a48f34-2c09-4130-bed4-6e33b080909e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-zxx57" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.086865 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-ff6kr"] Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.087420 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/776fba5d-1567-4a5d-a090-b8fbf37a583b-serving-cert\") pod \"console-operator-58897d9998-r4p46\" (UID: \"776fba5d-1567-4a5d-a090-b8fbf37a583b\") " pod="openshift-console-operator/console-operator-58897d9998-r4p46" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.089320 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-lxg6x"] Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.091502 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/cb588149-29d3-47af-9359-406e69f9fb33-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-v2xzj\" (UID: \"cb588149-29d3-47af-9359-406e69f9fb33\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-v2xzj" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.092103 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-dns/dns-default-6tzrl"] Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.093167 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/4d528172-a0ee-4b06-a09c-08eb92b12cee-signing-key\") pod \"service-ca-9c57cc56f-xzppk\" (UID: \"4d528172-a0ee-4b06-a09c-08eb92b12cee\") " pod="openshift-service-ca/service-ca-9c57cc56f-xzppk" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.098039 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-2gj5n"] Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.099782 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.101560 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-zn6jm"] Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.103876 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-9957f"] Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.107261 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ljrw4"] Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.109305 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-8x7j8"] Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.110934 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29333595-d52ht"] Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.112548 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vj7lx"] Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.114796 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-pmkqc"] Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.115954 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-sx9r4"] Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.116567 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-canary/ingress-canary-sx9r4" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.117728 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-n8mp7"] Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.118033 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.125355 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-sx9r4"] Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.139132 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.150399 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/4d528172-a0ee-4b06-a09c-08eb92b12cee-signing-cabundle\") pod \"service-ca-9c57cc56f-xzppk\" (UID: \"4d528172-a0ee-4b06-a09c-08eb92b12cee\") " pod="openshift-service-ca/service-ca-9c57cc56f-xzppk" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.150599 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6q645\" (UniqueName: \"kubernetes.io/projected/07eb0a4b-0831-4b9c-8883-a624f6a27488-kube-api-access-6q645\") pod \"kube-storage-version-migrator-operator-b67b599dd-mbgsr\" (UID: \"07eb0a4b-0831-4b9c-8883-a624f6a27488\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-mbgsr" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.150676 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/07eb0a4b-0831-4b9c-8883-a624f6a27488-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-mbgsr\" (UID: \"07eb0a4b-0831-4b9c-8883-a624f6a27488\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-mbgsr" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.150700 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/07eb0a4b-0831-4b9c-8883-a624f6a27488-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-mbgsr\" (UID: \"07eb0a4b-0831-4b9c-8883-a624f6a27488\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-mbgsr" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.158176 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.178891 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.197976 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.201849 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/d542877b-87f6-4d96-9256-ff58662d9547-srv-cert\") pod \"olm-operator-6b444d44fb-qpmbq\" (UID: \"d542877b-87f6-4d96-9256-ff58662d9547\") " 
pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-qpmbq" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.218304 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.227987 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/d542877b-87f6-4d96-9256-ff58662d9547-profile-collector-cert\") pod \"olm-operator-6b444d44fb-qpmbq\" (UID: \"d542877b-87f6-4d96-9256-ff58662d9547\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-qpmbq" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.239277 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.258104 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.263536 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/6241b81b-a379-4339-86df-eb7baf5c1ec0-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-vj7lx\" (UID: \"6241b81b-a379-4339-86df-eb7baf5c1ec0\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vj7lx" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.278085 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.282346 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/76be61d0-03ad-4822-8097-2ef2bde86bf1-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-c599r\" (UID: \"76be61d0-03ad-4822-8097-2ef2bde86bf1\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-c599r" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.317674 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.338225 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.358443 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.378227 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.403892 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.418542 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.439072 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Oct 09 13:27:54 crc 
kubenswrapper[4762]: I1009 13:27:54.458764 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.478696 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.498305 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.518978 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.539164 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.558957 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.579285 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.597680 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.618619 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.624389 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/07eb0a4b-0831-4b9c-8883-a624f6a27488-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-mbgsr\" (UID: \"07eb0a4b-0831-4b9c-8883-a624f6a27488\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-mbgsr" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.639856 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.659309 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.662048 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/07eb0a4b-0831-4b9c-8883-a624f6a27488-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-mbgsr\" (UID: \"07eb0a4b-0831-4b9c-8883-a624f6a27488\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-mbgsr" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.679417 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.698696 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.718393 4762 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.738792 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.758967 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.791802 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.798982 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.817865 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.843999 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.860224 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.879108 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.898172 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.918321 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.936710 4762 request.go:700] Waited for 1.00417134s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/secrets?fieldSelector=metadata.name%3Dv4-0-config-system-session&limit=500&resourceVersion=0 Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.938187 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.958347 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.978194 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Oct 09 13:27:54 crc kubenswrapper[4762]: I1009 13:27:54.997770 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Oct 09 13:27:55 crc kubenswrapper[4762]: I1009 13:27:55.018045 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Oct 09 13:27:55 crc kubenswrapper[4762]: I1009 13:27:55.038715 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Oct 09 13:27:55 crc kubenswrapper[4762]: I1009 13:27:55.058147 
4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Oct 09 13:27:55 crc kubenswrapper[4762]: I1009 13:27:55.077664 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Oct 09 13:27:55 crc kubenswrapper[4762]: I1009 13:27:55.098279 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Oct 09 13:27:55 crc kubenswrapper[4762]: I1009 13:27:55.118206 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Oct 09 13:27:55 crc kubenswrapper[4762]: I1009 13:27:55.137865 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Oct 09 13:27:55 crc kubenswrapper[4762]: I1009 13:27:55.159627 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Oct 09 13:27:55 crc kubenswrapper[4762]: I1009 13:27:55.179354 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Oct 09 13:27:55 crc kubenswrapper[4762]: I1009 13:27:55.198673 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Oct 09 13:27:55 crc kubenswrapper[4762]: I1009 13:27:55.229301 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Oct 09 13:27:55 crc kubenswrapper[4762]: I1009 13:27:55.238581 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Oct 09 13:27:55 crc kubenswrapper[4762]: I1009 13:27:55.259529 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Oct 09 13:27:55 crc kubenswrapper[4762]: I1009 13:27:55.278013 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Oct 09 13:27:55 crc kubenswrapper[4762]: I1009 13:27:55.299937 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Oct 09 13:27:55 crc kubenswrapper[4762]: I1009 13:27:55.319151 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Oct 09 13:27:55 crc kubenswrapper[4762]: I1009 13:27:55.348731 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Oct 09 13:27:55 crc kubenswrapper[4762]: I1009 13:27:55.358759 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Oct 09 13:27:55 crc kubenswrapper[4762]: I1009 13:27:55.379045 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Oct 09 13:27:55 crc kubenswrapper[4762]: I1009 13:27:55.398733 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Oct 09 13:27:55 crc kubenswrapper[4762]: I1009 13:27:55.419545 4762 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-image-registry"/"installation-pull-secrets" Oct 09 13:27:55 crc kubenswrapper[4762]: I1009 13:27:55.439597 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Oct 09 13:27:55 crc kubenswrapper[4762]: I1009 13:27:55.458486 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Oct 09 13:27:55 crc kubenswrapper[4762]: I1009 13:27:55.478364 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Oct 09 13:27:55 crc kubenswrapper[4762]: I1009 13:27:55.498354 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Oct 09 13:27:55 crc kubenswrapper[4762]: I1009 13:27:55.518291 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Oct 09 13:27:55 crc kubenswrapper[4762]: I1009 13:27:55.539234 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Oct 09 13:27:55 crc kubenswrapper[4762]: I1009 13:27:55.559032 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Oct 09 13:27:55 crc kubenswrapper[4762]: I1009 13:27:55.579093 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Oct 09 13:27:55 crc kubenswrapper[4762]: I1009 13:27:55.618494 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Oct 09 13:27:55 crc kubenswrapper[4762]: I1009 13:27:55.639524 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Oct 09 13:27:55 crc kubenswrapper[4762]: I1009 13:27:55.658519 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Oct 09 13:27:55 crc kubenswrapper[4762]: I1009 13:27:55.696597 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tknm4\" (UniqueName: \"kubernetes.io/projected/414ea71c-9878-4a8a-825f-3d18ff8460d9-kube-api-access-tknm4\") pod \"route-controller-manager-6576b87f9c-c4rs8\" (UID: \"414ea71c-9878-4a8a-825f-3d18ff8460d9\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-c4rs8" Oct 09 13:27:55 crc kubenswrapper[4762]: I1009 13:27:55.713327 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-smk9c\" (UniqueName: \"kubernetes.io/projected/776fba5d-1567-4a5d-a090-b8fbf37a583b-kube-api-access-smk9c\") pod \"console-operator-58897d9998-r4p46\" (UID: \"776fba5d-1567-4a5d-a090-b8fbf37a583b\") " pod="openshift-console-operator/console-operator-58897d9998-r4p46" Oct 09 13:27:55 crc kubenswrapper[4762]: I1009 13:27:55.731306 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-c4rs8" Oct 09 13:27:55 crc kubenswrapper[4762]: I1009 13:27:55.732935 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zcxlq\" (UniqueName: \"kubernetes.io/projected/bf4cd740-a799-47d8-9ce6-88bce9afa952-kube-api-access-zcxlq\") pod \"machine-api-operator-5694c8668f-pmdsg\" (UID: \"bf4cd740-a799-47d8-9ce6-88bce9afa952\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-pmdsg" Oct 09 13:27:55 crc kubenswrapper[4762]: I1009 13:27:55.755110 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jkmqc\" (UniqueName: \"kubernetes.io/projected/fc3dda6a-2c92-4d44-b2b6-70f89b397af4-kube-api-access-jkmqc\") pod \"etcd-operator-b45778765-h972j\" (UID: \"fc3dda6a-2c92-4d44-b2b6-70f89b397af4\") " pod="openshift-etcd-operator/etcd-operator-b45778765-h972j" Oct 09 13:27:55 crc kubenswrapper[4762]: I1009 13:27:55.774737 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hnk84\" (UniqueName: \"kubernetes.io/projected/cb588149-29d3-47af-9359-406e69f9fb33-kube-api-access-hnk84\") pod \"cluster-samples-operator-665b6dd947-v2xzj\" (UID: \"cb588149-29d3-47af-9359-406e69f9fb33\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-v2xzj" Oct 09 13:27:55 crc kubenswrapper[4762]: I1009 13:27:55.790984 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ffgjt\" (UniqueName: \"kubernetes.io/projected/76be61d0-03ad-4822-8097-2ef2bde86bf1-kube-api-access-ffgjt\") pod \"control-plane-machine-set-operator-78cbb6b69f-c599r\" (UID: \"76be61d0-03ad-4822-8097-2ef2bde86bf1\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-c599r" Oct 09 13:27:55 crc kubenswrapper[4762]: I1009 13:27:55.811941 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/462d08cc-03ff-4c48-8002-172bc184d6ea-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-n64hz\" (UID: \"462d08cc-03ff-4c48-8002-172bc184d6ea\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-n64hz" Oct 09 13:27:55 crc kubenswrapper[4762]: I1009 13:27:55.840996 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jl7pd\" (UniqueName: \"kubernetes.io/projected/d542877b-87f6-4d96-9256-ff58662d9547-kube-api-access-jl7pd\") pod \"olm-operator-6b444d44fb-qpmbq\" (UID: \"d542877b-87f6-4d96-9256-ff58662d9547\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-qpmbq" Oct 09 13:27:55 crc kubenswrapper[4762]: I1009 13:27:55.853701 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-r4p46" Oct 09 13:27:55 crc kubenswrapper[4762]: I1009 13:27:55.858773 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5cwmf\" (UniqueName: \"kubernetes.io/projected/6241b81b-a379-4339-86df-eb7baf5c1ec0-kube-api-access-5cwmf\") pod \"package-server-manager-789f6589d5-vj7lx\" (UID: \"6241b81b-a379-4339-86df-eb7baf5c1ec0\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vj7lx" Oct 09 13:27:55 crc kubenswrapper[4762]: I1009 13:27:55.874224 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6rnwh\" (UniqueName: \"kubernetes.io/projected/3818a2d1-8618-4b53-b326-afb6cbdfaf38-kube-api-access-6rnwh\") pod \"openshift-apiserver-operator-796bbdcf4f-btvjf\" (UID: \"3818a2d1-8618-4b53-b326-afb6cbdfaf38\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-btvjf" Oct 09 13:27:55 crc kubenswrapper[4762]: I1009 13:27:55.887252 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-c4rs8"] Oct 09 13:27:55 crc kubenswrapper[4762]: I1009 13:27:55.893295 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hp9rd\" (UniqueName: \"kubernetes.io/projected/03a48f34-2c09-4130-bed4-6e33b080909e-kube-api-access-hp9rd\") pod \"machine-approver-56656f9798-zxx57\" (UID: \"03a48f34-2c09-4130-bed4-6e33b080909e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-zxx57" Oct 09 13:27:55 crc kubenswrapper[4762]: W1009 13:27:55.893601 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod414ea71c_9878_4a8a_825f_3d18ff8460d9.slice/crio-fd528e46b687a21b47574fffcd2e91e21b67d3cc31a2f9dbb5d240be3c29e0fc WatchSource:0}: Error finding container fd528e46b687a21b47574fffcd2e91e21b67d3cc31a2f9dbb5d240be3c29e0fc: Status 404 returned error can't find the container with id fd528e46b687a21b47574fffcd2e91e21b67d3cc31a2f9dbb5d240be3c29e0fc Oct 09 13:27:55 crc kubenswrapper[4762]: I1009 13:27:55.910496 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6cdzs\" (UniqueName: \"kubernetes.io/projected/4d528172-a0ee-4b06-a09c-08eb92b12cee-kube-api-access-6cdzs\") pod \"service-ca-9c57cc56f-xzppk\" (UID: \"4d528172-a0ee-4b06-a09c-08eb92b12cee\") " pod="openshift-service-ca/service-ca-9c57cc56f-xzppk" Oct 09 13:27:55 crc kubenswrapper[4762]: I1009 13:27:55.922737 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-v2xzj" Oct 09 13:27:55 crc kubenswrapper[4762]: I1009 13:27:55.923474 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-btvjf" Oct 09 13:27:55 crc kubenswrapper[4762]: I1009 13:27:55.934466 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9bpqs\" (UniqueName: \"kubernetes.io/projected/e7318993-282b-4a86-907d-fc810869cc7c-kube-api-access-9bpqs\") pod \"apiserver-7bbb656c7d-8pmjh\" (UID: \"e7318993-282b-4a86-907d-fc810869cc7c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8pmjh" Oct 09 13:27:55 crc kubenswrapper[4762]: I1009 13:27:55.939971 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-h972j" Oct 09 13:27:55 crc kubenswrapper[4762]: I1009 13:27:55.940066 4762 request.go:700] Waited for 1.877637169s due to client-side throttling, not priority and fairness, request: POST:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-image-registry/serviceaccounts/cluster-image-registry-operator/token Oct 09 13:27:55 crc kubenswrapper[4762]: I1009 13:27:55.956571 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xmkk7\" (UniqueName: \"kubernetes.io/projected/462d08cc-03ff-4c48-8002-172bc184d6ea-kube-api-access-xmkk7\") pod \"cluster-image-registry-operator-dc59b4c8b-n64hz\" (UID: \"462d08cc-03ff-4c48-8002-172bc184d6ea\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-n64hz" Oct 09 13:27:55 crc kubenswrapper[4762]: I1009 13:27:55.965121 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-zxx57" Oct 09 13:27:55 crc kubenswrapper[4762]: I1009 13:27:55.975707 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-btl44\" (UniqueName: \"kubernetes.io/projected/e5a0e5db-1ea7-4af8-9f2d-db55d53645ab-kube-api-access-btl44\") pod \"dns-operator-744455d44c-77hfw\" (UID: \"e5a0e5db-1ea7-4af8-9f2d-db55d53645ab\") " pod="openshift-dns-operator/dns-operator-744455d44c-77hfw" Oct 09 13:27:55 crc kubenswrapper[4762]: I1009 13:27:55.978677 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-n64hz" Oct 09 13:27:55 crc kubenswrapper[4762]: I1009 13:27:55.978765 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-pmdsg" Oct 09 13:27:55 crc kubenswrapper[4762]: I1009 13:27:55.981772 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-xzppk" Oct 09 13:27:55 crc kubenswrapper[4762]: I1009 13:27:55.990917 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-qpmbq" Oct 09 13:27:55 crc kubenswrapper[4762]: I1009 13:27:55.996566 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jtznr\" (UniqueName: \"kubernetes.io/projected/4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8-kube-api-access-jtznr\") pod \"console-f9d7485db-r5hfv\" (UID: \"4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8\") " pod="openshift-console/console-f9d7485db-r5hfv" Oct 09 13:27:55 crc kubenswrapper[4762]: I1009 13:27:55.999443 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vj7lx" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.003978 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-c599r" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.016220 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/fe7e464e-6acb-478d-8de2-ea89f6e19734-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-hjtqj\" (UID: \"fe7e464e-6acb-478d-8de2-ea89f6e19734\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-hjtqj" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.042136 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-r4p46"] Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.044334 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-htg29\" (UniqueName: \"kubernetes.io/projected/05d81e4d-1c18-4533-96c4-888a906b2c25-kube-api-access-htg29\") pod \"apiserver-76f77b778f-924rd\" (UID: \"05d81e4d-1c18-4533-96c4-888a906b2c25\") " pod="openshift-apiserver/apiserver-76f77b778f-924rd" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.056453 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x7sfq\" (UniqueName: \"kubernetes.io/projected/e245c27d-066f-4422-9cdb-e5b7525b8717-kube-api-access-x7sfq\") pod \"openshift-controller-manager-operator-756b6f6bc6-9dv9p\" (UID: \"e245c27d-066f-4422-9cdb-e5b7525b8717\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-9dv9p" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.056803 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-924rd" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.074812 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qq5qs\" (UniqueName: \"kubernetes.io/projected/5be8bc93-cb89-4cc7-822d-739708bab8a9-kube-api-access-qq5qs\") pod \"controller-manager-879f6c89f-xzd8z\" (UID: \"5be8bc93-cb89-4cc7-822d-739708bab8a9\") " pod="openshift-controller-manager/controller-manager-879f6c89f-xzd8z" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.083066 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8pmjh" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.101686 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/be104342-f3c8-4e61-bd52-81e26e001325-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-hwt8t\" (UID: \"be104342-f3c8-4e61-bd52-81e26e001325\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-hwt8t" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.113323 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-v2xzj"] Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.117595 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rpmxx\" (UniqueName: \"kubernetes.io/projected/9c33b77a-915c-4fbe-b4f0-c7cfd8cb2fd0-kube-api-access-rpmxx\") pod \"openshift-config-operator-7777fb866f-8zwlj\" (UID: \"9c33b77a-915c-4fbe-b4f0-c7cfd8cb2fd0\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-8zwlj" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.138265 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-knfgj\" (UniqueName: \"kubernetes.io/projected/3380c41e-02cc-4bb7-97b4-b3df719ea736-kube-api-access-knfgj\") pod \"authentication-operator-69f744f599-d4bsv\" (UID: \"3380c41e-02cc-4bb7-97b4-b3df719ea736\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-d4bsv" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.141091 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-r5hfv" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.158709 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.165604 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xzwjq\" (UniqueName: \"kubernetes.io/projected/50b92b8d-158e-4ec4-aaed-a5c83aafeb8b-kube-api-access-xzwjq\") pod \"downloads-7954f5f757-l2j7b\" (UID: \"50b92b8d-158e-4ec4-aaed-a5c83aafeb8b\") " pod="openshift-console/downloads-7954f5f757-l2j7b" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.178606 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-h972j"] Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.180897 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.197876 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.219103 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Oct 09 13:27:56 crc kubenswrapper[4762]: W1009 13:27:56.227768 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfc3dda6a_2c92_4d44_b2b6_70f89b397af4.slice/crio-1c794f73fbfb9247db1c923f9db48ed587ec4975994b0a123a662cf14a04c539 WatchSource:0}: Error finding container 1c794f73fbfb9247db1c923f9db48ed587ec4975994b0a123a662cf14a04c539: Status 404 returned error can't find the container with id 
1c794f73fbfb9247db1c923f9db48ed587ec4975994b0a123a662cf14a04c539 Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.228128 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-77hfw" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.239997 4762 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.244872 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-9dv9p" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.252233 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-8zwlj" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.261007 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-hjtqj" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.261102 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.268380 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-d4bsv" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.278915 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.298779 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.316181 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-hwt8t" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.318141 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.346190 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.346627 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-xzd8z" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.384388 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6q645\" (UniqueName: \"kubernetes.io/projected/07eb0a4b-0831-4b9c-8883-a624f6a27488-kube-api-access-6q645\") pod \"kube-storage-version-migrator-operator-b67b599dd-mbgsr\" (UID: \"07eb0a4b-0831-4b9c-8883-a624f6a27488\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-mbgsr" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.407091 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/downloads-7954f5f757-l2j7b" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.462178 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-qpmbq"] Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.469887 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-btvjf"] Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.493285 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/55edcb39-8b18-4aea-84b8-2e0b332835e1-serving-cert\") pod \"service-ca-operator-777779d784-865sv\" (UID: \"55edcb39-8b18-4aea-84b8-2e0b332835e1\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-865sv" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.493324 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/10df5c6e-5eb3-4ba8-b98d-805b82bd6ca5-default-certificate\") pod \"router-default-5444994796-8tz5k\" (UID: \"10df5c6e-5eb3-4ba8-b98d-805b82bd6ca5\") " pod="openshift-ingress/router-default-5444994796-8tz5k" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.493364 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-99jx2\" (UniqueName: \"kubernetes.io/projected/9a65c59f-b10d-4c4c-9890-a74abb1395ee-kube-api-access-99jx2\") pod \"multus-admission-controller-857f4d67dd-n8mp7\" (UID: \"9a65c59f-b10d-4c4c-9890-a74abb1395ee\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-n8mp7" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.493387 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/43ee5885-188a-4435-a40f-dfb077d01a84-apiservice-cert\") pod \"packageserver-d55dfcdfc-ljrw4\" (UID: \"43ee5885-188a-4435-a40f-dfb077d01a84\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ljrw4" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.493424 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-26kh8\" (UniqueName: \"kubernetes.io/projected/d78ceb4a-7433-44e2-a874-37b69473fc54-kube-api-access-26kh8\") pod \"oauth-openshift-558db77b4-8x7j8\" (UID: \"d78ceb4a-7433-44e2-a874-37b69473fc54\") " pod="openshift-authentication/oauth-openshift-558db77b4-8x7j8" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.493473 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/43ee5885-188a-4435-a40f-dfb077d01a84-webhook-cert\") pod \"packageserver-d55dfcdfc-ljrw4\" (UID: \"43ee5885-188a-4435-a40f-dfb077d01a84\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ljrw4" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.493518 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/55edcb39-8b18-4aea-84b8-2e0b332835e1-config\") pod \"service-ca-operator-777779d784-865sv\" (UID: \"55edcb39-8b18-4aea-84b8-2e0b332835e1\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-865sv" Oct 09 13:27:56 crc 
kubenswrapper[4762]: I1009 13:27:56.493540 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/913065b9-29bd-4c9a-8d6e-e319bf91efbd-bound-sa-token\") pod \"image-registry-697d97f7c8-2gj5n\" (UID: \"913065b9-29bd-4c9a-8d6e-e319bf91efbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-2gj5n" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.493588 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b32c6cfb-bf88-4de9-a939-44e1b67b882d-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-lxg6x\" (UID: \"b32c6cfb-bf88-4de9-a939-44e1b67b882d\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-lxg6x" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.493608 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/913065b9-29bd-4c9a-8d6e-e319bf91efbd-registry-tls\") pod \"image-registry-697d97f7c8-2gj5n\" (UID: \"913065b9-29bd-4c9a-8d6e-e319bf91efbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-2gj5n" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.493630 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/d78ceb4a-7433-44e2-a874-37b69473fc54-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-8x7j8\" (UID: \"d78ceb4a-7433-44e2-a874-37b69473fc54\") " pod="openshift-authentication/oauth-openshift-558db77b4-8x7j8" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.498788 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/10df5c6e-5eb3-4ba8-b98d-805b82bd6ca5-stats-auth\") pod \"router-default-5444994796-8tz5k\" (UID: \"10df5c6e-5eb3-4ba8-b98d-805b82bd6ca5\") " pod="openshift-ingress/router-default-5444994796-8tz5k" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.498841 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mvbns\" (UniqueName: \"kubernetes.io/projected/3b828691-c7c6-4899-b524-d867e7f151a4-kube-api-access-mvbns\") pod \"collect-profiles-29333595-d52ht\" (UID: \"3b828691-c7c6-4899-b524-d867e7f151a4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333595-d52ht" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.498870 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b32c6cfb-bf88-4de9-a939-44e1b67b882d-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-lxg6x\" (UID: \"b32c6cfb-bf88-4de9-a939-44e1b67b882d\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-lxg6x" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.498890 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/d78ceb4a-7433-44e2-a874-37b69473fc54-audit-dir\") pod \"oauth-openshift-558db77b4-8x7j8\" (UID: \"d78ceb4a-7433-44e2-a874-37b69473fc54\") " pod="openshift-authentication/oauth-openshift-558db77b4-8x7j8" Oct 09 13:27:56 crc 
kubenswrapper[4762]: I1009 13:27:56.498985 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/ae26c8ab-99c5-456f-b029-e56e684bf502-profile-collector-cert\") pod \"catalog-operator-68c6474976-59vf6\" (UID: \"ae26c8ab-99c5-456f-b029-e56e684bf502\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-59vf6" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.499005 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/e54d2423-97ff-4c1e-9f04-786495eace4a-certs\") pod \"machine-config-server-6ncgf\" (UID: \"e54d2423-97ff-4c1e-9f04-786495eace4a\") " pod="openshift-machine-config-operator/machine-config-server-6ncgf" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.499022 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/10df5c6e-5eb3-4ba8-b98d-805b82bd6ca5-service-ca-bundle\") pod \"router-default-5444994796-8tz5k\" (UID: \"10df5c6e-5eb3-4ba8-b98d-805b82bd6ca5\") " pod="openshift-ingress/router-default-5444994796-8tz5k" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.499048 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/592376f8-472a-4014-bd15-b08b04ac15e8-auth-proxy-config\") pod \"machine-config-operator-74547568cd-b7276\" (UID: \"592376f8-472a-4014-bd15-b08b04ac15e8\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-b7276" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.499086 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/d78ceb4a-7433-44e2-a874-37b69473fc54-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-8x7j8\" (UID: \"d78ceb4a-7433-44e2-a874-37b69473fc54\") " pod="openshift-authentication/oauth-openshift-558db77b4-8x7j8" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.499158 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/d78ceb4a-7433-44e2-a874-37b69473fc54-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-8x7j8\" (UID: \"d78ceb4a-7433-44e2-a874-37b69473fc54\") " pod="openshift-authentication/oauth-openshift-558db77b4-8x7j8" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.499176 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/d78ceb4a-7433-44e2-a874-37b69473fc54-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-8x7j8\" (UID: \"d78ceb4a-7433-44e2-a874-37b69473fc54\") " pod="openshift-authentication/oauth-openshift-558db77b4-8x7j8" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.499249 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3b828691-c7c6-4899-b524-d867e7f151a4-config-volume\") pod \"collect-profiles-29333595-d52ht\" (UID: \"3b828691-c7c6-4899-b524-d867e7f151a4\") " 
pod="openshift-operator-lifecycle-manager/collect-profiles-29333595-d52ht" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.499275 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/fe403e6f-4f08-4263-962d-377d0989c0d7-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-9957f\" (UID: \"fe403e6f-4f08-4263-962d-377d0989c0d7\") " pod="openshift-marketplace/marketplace-operator-79b997595-9957f" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.499302 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/913065b9-29bd-4c9a-8d6e-e319bf91efbd-registry-certificates\") pod \"image-registry-697d97f7c8-2gj5n\" (UID: \"913065b9-29bd-4c9a-8d6e-e319bf91efbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-2gj5n" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.499356 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/d78ceb4a-7433-44e2-a874-37b69473fc54-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-8x7j8\" (UID: \"d78ceb4a-7433-44e2-a874-37b69473fc54\") " pod="openshift-authentication/oauth-openshift-558db77b4-8x7j8" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.499383 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/913065b9-29bd-4c9a-8d6e-e319bf91efbd-ca-trust-extracted\") pod \"image-registry-697d97f7c8-2gj5n\" (UID: \"913065b9-29bd-4c9a-8d6e-e319bf91efbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-2gj5n" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.499398 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/fe403e6f-4f08-4263-962d-377d0989c0d7-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-9957f\" (UID: \"fe403e6f-4f08-4263-962d-377d0989c0d7\") " pod="openshift-marketplace/marketplace-operator-79b997595-9957f" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.499452 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/d78ceb4a-7433-44e2-a874-37b69473fc54-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-8x7j8\" (UID: \"d78ceb4a-7433-44e2-a874-37b69473fc54\") " pod="openshift-authentication/oauth-openshift-558db77b4-8x7j8" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.499489 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/d78ceb4a-7433-44e2-a874-37b69473fc54-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-8x7j8\" (UID: \"d78ceb4a-7433-44e2-a874-37b69473fc54\") " pod="openshift-authentication/oauth-openshift-558db77b4-8x7j8" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.499505 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zhmz2\" (UniqueName: \"kubernetes.io/projected/10df5c6e-5eb3-4ba8-b98d-805b82bd6ca5-kube-api-access-zhmz2\") pod 
\"router-default-5444994796-8tz5k\" (UID: \"10df5c6e-5eb3-4ba8-b98d-805b82bd6ca5\") " pod="openshift-ingress/router-default-5444994796-8tz5k" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.499537 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/43ee5885-188a-4435-a40f-dfb077d01a84-tmpfs\") pod \"packageserver-d55dfcdfc-ljrw4\" (UID: \"43ee5885-188a-4435-a40f-dfb077d01a84\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ljrw4" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.499593 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/d78ceb4a-7433-44e2-a874-37b69473fc54-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-8x7j8\" (UID: \"d78ceb4a-7433-44e2-a874-37b69473fc54\") " pod="openshift-authentication/oauth-openshift-558db77b4-8x7j8" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.499618 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/592376f8-472a-4014-bd15-b08b04ac15e8-images\") pod \"machine-config-operator-74547568cd-b7276\" (UID: \"592376f8-472a-4014-bd15-b08b04ac15e8\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-b7276" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.499669 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/592376f8-472a-4014-bd15-b08b04ac15e8-proxy-tls\") pod \"machine-config-operator-74547568cd-b7276\" (UID: \"592376f8-472a-4014-bd15-b08b04ac15e8\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-b7276" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.499691 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b32c6cfb-bf88-4de9-a939-44e1b67b882d-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-lxg6x\" (UID: \"b32c6cfb-bf88-4de9-a939-44e1b67b882d\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-lxg6x" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.499709 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/d78ceb4a-7433-44e2-a874-37b69473fc54-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-8x7j8\" (UID: \"d78ceb4a-7433-44e2-a874-37b69473fc54\") " pod="openshift-authentication/oauth-openshift-558db77b4-8x7j8" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.499746 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2f64ae43-a8e0-4e6c-919d-842572e8b107-trusted-ca\") pod \"ingress-operator-5b745b69d9-8x6nc\" (UID: \"2f64ae43-a8e0-4e6c-919d-842572e8b107\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-8x6nc" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.499796 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/913065b9-29bd-4c9a-8d6e-e319bf91efbd-trusted-ca\") pod 
\"image-registry-697d97f7c8-2gj5n\" (UID: \"913065b9-29bd-4c9a-8d6e-e319bf91efbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-2gj5n" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.499810 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8jgvf\" (UniqueName: \"kubernetes.io/projected/43ee5885-188a-4435-a40f-dfb077d01a84-kube-api-access-8jgvf\") pod \"packageserver-d55dfcdfc-ljrw4\" (UID: \"43ee5885-188a-4435-a40f-dfb077d01a84\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ljrw4" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.499839 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s8vrg\" (UniqueName: \"kubernetes.io/projected/fe403e6f-4f08-4263-962d-377d0989c0d7-kube-api-access-s8vrg\") pod \"marketplace-operator-79b997595-9957f\" (UID: \"fe403e6f-4f08-4263-962d-377d0989c0d7\") " pod="openshift-marketplace/marketplace-operator-79b997595-9957f" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.499880 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sb258\" (UniqueName: \"kubernetes.io/projected/2f64ae43-a8e0-4e6c-919d-842572e8b107-kube-api-access-sb258\") pod \"ingress-operator-5b745b69d9-8x6nc\" (UID: \"2f64ae43-a8e0-4e6c-919d-842572e8b107\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-8x6nc" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.499958 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8gbr6\" (UniqueName: \"kubernetes.io/projected/ae26c8ab-99c5-456f-b029-e56e684bf502-kube-api-access-8gbr6\") pod \"catalog-operator-68c6474976-59vf6\" (UID: \"ae26c8ab-99c5-456f-b029-e56e684bf502\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-59vf6" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.500006 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/2f4c75ed-fc02-4492-832b-36a064fe8b26-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-ff6kr\" (UID: \"2f4c75ed-fc02-4492-832b-36a064fe8b26\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-ff6kr" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.500032 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/913065b9-29bd-4c9a-8d6e-e319bf91efbd-installation-pull-secrets\") pod \"image-registry-697d97f7c8-2gj5n\" (UID: \"913065b9-29bd-4c9a-8d6e-e319bf91efbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-2gj5n" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.500977 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/2f64ae43-a8e0-4e6c-919d-842572e8b107-metrics-tls\") pod \"ingress-operator-5b745b69d9-8x6nc\" (UID: \"2f64ae43-a8e0-4e6c-919d-842572e8b107\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-8x6nc" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.501019 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: 
\"kubernetes.io/secret/ae26c8ab-99c5-456f-b029-e56e684bf502-srv-cert\") pod \"catalog-operator-68c6474976-59vf6\" (UID: \"ae26c8ab-99c5-456f-b029-e56e684bf502\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-59vf6" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.501108 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/d78ceb4a-7433-44e2-a874-37b69473fc54-audit-policies\") pod \"oauth-openshift-558db77b4-8x7j8\" (UID: \"d78ceb4a-7433-44e2-a874-37b69473fc54\") " pod="openshift-authentication/oauth-openshift-558db77b4-8x7j8" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.501142 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/e54d2423-97ff-4c1e-9f04-786495eace4a-node-bootstrap-token\") pod \"machine-config-server-6ncgf\" (UID: \"e54d2423-97ff-4c1e-9f04-786495eace4a\") " pod="openshift-machine-config-operator/machine-config-server-6ncgf" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.501168 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3b828691-c7c6-4899-b524-d867e7f151a4-secret-volume\") pod \"collect-profiles-29333595-d52ht\" (UID: \"3b828691-c7c6-4899-b524-d867e7f151a4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333595-d52ht" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.501211 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/d78ceb4a-7433-44e2-a874-37b69473fc54-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-8x7j8\" (UID: \"d78ceb4a-7433-44e2-a874-37b69473fc54\") " pod="openshift-authentication/oauth-openshift-558db77b4-8x7j8" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.501346 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gjqgq\" (UniqueName: \"kubernetes.io/projected/913065b9-29bd-4c9a-8d6e-e319bf91efbd-kube-api-access-gjqgq\") pod \"image-registry-697d97f7c8-2gj5n\" (UID: \"913065b9-29bd-4c9a-8d6e-e319bf91efbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-2gj5n" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.501375 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7b9w9\" (UniqueName: \"kubernetes.io/projected/6889890c-fe1b-47c6-9e3f-e5f23d41a1a3-kube-api-access-7b9w9\") pod \"migrator-59844c95c7-zn6jm\" (UID: \"6889890c-fe1b-47c6-9e3f-e5f23d41a1a3\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-zn6jm" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.501397 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/2f64ae43-a8e0-4e6c-919d-842572e8b107-bound-sa-token\") pod \"ingress-operator-5b745b69d9-8x6nc\" (UID: \"2f64ae43-a8e0-4e6c-919d-842572e8b107\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-8x6nc" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.501420 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: 
\"kubernetes.io/secret/2f4c75ed-fc02-4492-832b-36a064fe8b26-proxy-tls\") pod \"machine-config-controller-84d6567774-ff6kr\" (UID: \"2f4c75ed-fc02-4492-832b-36a064fe8b26\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-ff6kr" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.504226 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2gj5n\" (UID: \"913065b9-29bd-4c9a-8d6e-e319bf91efbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-2gj5n" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.504280 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xnqdt\" (UniqueName: \"kubernetes.io/projected/e54d2423-97ff-4c1e-9f04-786495eace4a-kube-api-access-xnqdt\") pod \"machine-config-server-6ncgf\" (UID: \"e54d2423-97ff-4c1e-9f04-786495eace4a\") " pod="openshift-machine-config-operator/machine-config-server-6ncgf" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.504390 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7m47r\" (UniqueName: \"kubernetes.io/projected/55edcb39-8b18-4aea-84b8-2e0b332835e1-kube-api-access-7m47r\") pod \"service-ca-operator-777779d784-865sv\" (UID: \"55edcb39-8b18-4aea-84b8-2e0b332835e1\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-865sv" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.504420 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d78ceb4a-7433-44e2-a874-37b69473fc54-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-8x7j8\" (UID: \"d78ceb4a-7433-44e2-a874-37b69473fc54\") " pod="openshift-authentication/oauth-openshift-558db77b4-8x7j8" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.504448 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bj2fl\" (UniqueName: \"kubernetes.io/projected/2f4c75ed-fc02-4492-832b-36a064fe8b26-kube-api-access-bj2fl\") pod \"machine-config-controller-84d6567774-ff6kr\" (UID: \"2f4c75ed-fc02-4492-832b-36a064fe8b26\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-ff6kr" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.504506 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/9a65c59f-b10d-4c4c-9890-a74abb1395ee-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-n8mp7\" (UID: \"9a65c59f-b10d-4c4c-9890-a74abb1395ee\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-n8mp7" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.504531 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/10df5c6e-5eb3-4ba8-b98d-805b82bd6ca5-metrics-certs\") pod \"router-default-5444994796-8tz5k\" (UID: \"10df5c6e-5eb3-4ba8-b98d-805b82bd6ca5\") " pod="openshift-ingress/router-default-5444994796-8tz5k" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.504588 4762 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cjrpw\" (UniqueName: \"kubernetes.io/projected/592376f8-472a-4014-bd15-b08b04ac15e8-kube-api-access-cjrpw\") pod \"machine-config-operator-74547568cd-b7276\" (UID: \"592376f8-472a-4014-bd15-b08b04ac15e8\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-b7276" Oct 09 13:27:56 crc kubenswrapper[4762]: E1009 13:27:56.505826 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 13:27:57.005806317 +0000 UTC m=+152.779597446 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2gj5n" (UID: "913065b9-29bd-4c9a-8d6e-e319bf91efbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 13:27:56 crc kubenswrapper[4762]: W1009 13:27:56.513301 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd542877b_87f6_4d96_9256_ff58662d9547.slice/crio-3536ad1d2bda259064ed69d1e08884815c014563242ad29122f34b3cea1c64b4 WatchSource:0}: Error finding container 3536ad1d2bda259064ed69d1e08884815c014563242ad29122f34b3cea1c64b4: Status 404 returned error can't find the container with id 3536ad1d2bda259064ed69d1e08884815c014563242ad29122f34b3cea1c64b4 Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.605079 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.605310 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/d78ceb4a-7433-44e2-a874-37b69473fc54-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-8x7j8\" (UID: \"d78ceb4a-7433-44e2-a874-37b69473fc54\") " pod="openshift-authentication/oauth-openshift-558db77b4-8x7j8" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.610868 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/913065b9-29bd-4c9a-8d6e-e319bf91efbd-ca-trust-extracted\") pod \"image-registry-697d97f7c8-2gj5n\" (UID: \"913065b9-29bd-4c9a-8d6e-e319bf91efbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-2gj5n" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.611865 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/913065b9-29bd-4c9a-8d6e-e319bf91efbd-ca-trust-extracted\") pod \"image-registry-697d97f7c8-2gj5n\" (UID: \"913065b9-29bd-4c9a-8d6e-e319bf91efbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-2gj5n" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.611932 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/fe403e6f-4f08-4263-962d-377d0989c0d7-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-9957f\" (UID: \"fe403e6f-4f08-4263-962d-377d0989c0d7\") " pod="openshift-marketplace/marketplace-operator-79b997595-9957f" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.611960 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/d78ceb4a-7433-44e2-a874-37b69473fc54-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-8x7j8\" (UID: \"d78ceb4a-7433-44e2-a874-37b69473fc54\") " pod="openshift-authentication/oauth-openshift-558db77b4-8x7j8" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.611981 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/410067d0-d751-4783-a663-e429c72c805c-mountpoint-dir\") pod \"csi-hostpathplugin-pmkqc\" (UID: \"410067d0-d751-4783-a663-e429c72c805c\") " pod="hostpath-provisioner/csi-hostpathplugin-pmkqc" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.612001 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/410067d0-d751-4783-a663-e429c72c805c-csi-data-dir\") pod \"csi-hostpathplugin-pmkqc\" (UID: \"410067d0-d751-4783-a663-e429c72c805c\") " pod="hostpath-provisioner/csi-hostpathplugin-pmkqc" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.612057 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/d78ceb4a-7433-44e2-a874-37b69473fc54-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-8x7j8\" (UID: \"d78ceb4a-7433-44e2-a874-37b69473fc54\") " pod="openshift-authentication/oauth-openshift-558db77b4-8x7j8" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.612076 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zhmz2\" (UniqueName: \"kubernetes.io/projected/10df5c6e-5eb3-4ba8-b98d-805b82bd6ca5-kube-api-access-zhmz2\") pod \"router-default-5444994796-8tz5k\" (UID: \"10df5c6e-5eb3-4ba8-b98d-805b82bd6ca5\") " pod="openshift-ingress/router-default-5444994796-8tz5k" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.612095 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/43ee5885-188a-4435-a40f-dfb077d01a84-tmpfs\") pod \"packageserver-d55dfcdfc-ljrw4\" (UID: \"43ee5885-188a-4435-a40f-dfb077d01a84\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ljrw4" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.612128 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/84933f66-f8be-458d-87b4-b49a4dbe0dc3-config-volume\") pod \"dns-default-6tzrl\" (UID: \"84933f66-f8be-458d-87b4-b49a4dbe0dc3\") " pod="openshift-dns/dns-default-6tzrl" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.612148 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/d78ceb4a-7433-44e2-a874-37b69473fc54-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-8x7j8\" (UID: 
\"d78ceb4a-7433-44e2-a874-37b69473fc54\") " pod="openshift-authentication/oauth-openshift-558db77b4-8x7j8" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.612165 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/592376f8-472a-4014-bd15-b08b04ac15e8-images\") pod \"machine-config-operator-74547568cd-b7276\" (UID: \"592376f8-472a-4014-bd15-b08b04ac15e8\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-b7276" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.612208 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/592376f8-472a-4014-bd15-b08b04ac15e8-proxy-tls\") pod \"machine-config-operator-74547568cd-b7276\" (UID: \"592376f8-472a-4014-bd15-b08b04ac15e8\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-b7276" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.612226 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b32c6cfb-bf88-4de9-a939-44e1b67b882d-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-lxg6x\" (UID: \"b32c6cfb-bf88-4de9-a939-44e1b67b882d\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-lxg6x" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.612243 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/d78ceb4a-7433-44e2-a874-37b69473fc54-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-8x7j8\" (UID: \"d78ceb4a-7433-44e2-a874-37b69473fc54\") " pod="openshift-authentication/oauth-openshift-558db77b4-8x7j8" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.612265 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2f64ae43-a8e0-4e6c-919d-842572e8b107-trusted-ca\") pod \"ingress-operator-5b745b69d9-8x6nc\" (UID: \"2f64ae43-a8e0-4e6c-919d-842572e8b107\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-8x6nc" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.612287 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/410067d0-d751-4783-a663-e429c72c805c-socket-dir\") pod \"csi-hostpathplugin-pmkqc\" (UID: \"410067d0-d751-4783-a663-e429c72c805c\") " pod="hostpath-provisioner/csi-hostpathplugin-pmkqc" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.612304 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/913065b9-29bd-4c9a-8d6e-e319bf91efbd-trusted-ca\") pod \"image-registry-697d97f7c8-2gj5n\" (UID: \"913065b9-29bd-4c9a-8d6e-e319bf91efbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-2gj5n" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.612321 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8jgvf\" (UniqueName: \"kubernetes.io/projected/43ee5885-188a-4435-a40f-dfb077d01a84-kube-api-access-8jgvf\") pod \"packageserver-d55dfcdfc-ljrw4\" (UID: \"43ee5885-188a-4435-a40f-dfb077d01a84\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ljrw4" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.612343 4762 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s8vrg\" (UniqueName: \"kubernetes.io/projected/fe403e6f-4f08-4263-962d-377d0989c0d7-kube-api-access-s8vrg\") pod \"marketplace-operator-79b997595-9957f\" (UID: \"fe403e6f-4f08-4263-962d-377d0989c0d7\") " pod="openshift-marketplace/marketplace-operator-79b997595-9957f" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.612369 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sb258\" (UniqueName: \"kubernetes.io/projected/2f64ae43-a8e0-4e6c-919d-842572e8b107-kube-api-access-sb258\") pod \"ingress-operator-5b745b69d9-8x6nc\" (UID: \"2f64ae43-a8e0-4e6c-919d-842572e8b107\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-8x6nc" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.612402 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k8wwq\" (UniqueName: \"kubernetes.io/projected/a8ffac8c-bde6-4c86-9671-94764560a87a-kube-api-access-k8wwq\") pod \"ingress-canary-sx9r4\" (UID: \"a8ffac8c-bde6-4c86-9671-94764560a87a\") " pod="openshift-ingress-canary/ingress-canary-sx9r4" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.612444 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8gbr6\" (UniqueName: \"kubernetes.io/projected/ae26c8ab-99c5-456f-b029-e56e684bf502-kube-api-access-8gbr6\") pod \"catalog-operator-68c6474976-59vf6\" (UID: \"ae26c8ab-99c5-456f-b029-e56e684bf502\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-59vf6" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.612468 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/2f4c75ed-fc02-4492-832b-36a064fe8b26-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-ff6kr\" (UID: \"2f4c75ed-fc02-4492-832b-36a064fe8b26\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-ff6kr" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.612482 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/913065b9-29bd-4c9a-8d6e-e319bf91efbd-installation-pull-secrets\") pod \"image-registry-697d97f7c8-2gj5n\" (UID: \"913065b9-29bd-4c9a-8d6e-e319bf91efbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-2gj5n" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.612499 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/2f64ae43-a8e0-4e6c-919d-842572e8b107-metrics-tls\") pod \"ingress-operator-5b745b69d9-8x6nc\" (UID: \"2f64ae43-a8e0-4e6c-919d-842572e8b107\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-8x6nc" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.612514 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/ae26c8ab-99c5-456f-b029-e56e684bf502-srv-cert\") pod \"catalog-operator-68c6474976-59vf6\" (UID: \"ae26c8ab-99c5-456f-b029-e56e684bf502\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-59vf6" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.612555 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: 
\"kubernetes.io/configmap/d78ceb4a-7433-44e2-a874-37b69473fc54-audit-policies\") pod \"oauth-openshift-558db77b4-8x7j8\" (UID: \"d78ceb4a-7433-44e2-a874-37b69473fc54\") " pod="openshift-authentication/oauth-openshift-558db77b4-8x7j8" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.612571 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/e54d2423-97ff-4c1e-9f04-786495eace4a-node-bootstrap-token\") pod \"machine-config-server-6ncgf\" (UID: \"e54d2423-97ff-4c1e-9f04-786495eace4a\") " pod="openshift-machine-config-operator/machine-config-server-6ncgf" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.612585 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3b828691-c7c6-4899-b524-d867e7f151a4-secret-volume\") pod \"collect-profiles-29333595-d52ht\" (UID: \"3b828691-c7c6-4899-b524-d867e7f151a4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333595-d52ht" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.612602 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/d78ceb4a-7433-44e2-a874-37b69473fc54-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-8x7j8\" (UID: \"d78ceb4a-7433-44e2-a874-37b69473fc54\") " pod="openshift-authentication/oauth-openshift-558db77b4-8x7j8" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.612624 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/84933f66-f8be-458d-87b4-b49a4dbe0dc3-metrics-tls\") pod \"dns-default-6tzrl\" (UID: \"84933f66-f8be-458d-87b4-b49a4dbe0dc3\") " pod="openshift-dns/dns-default-6tzrl" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.612672 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gjqgq\" (UniqueName: \"kubernetes.io/projected/913065b9-29bd-4c9a-8d6e-e319bf91efbd-kube-api-access-gjqgq\") pod \"image-registry-697d97f7c8-2gj5n\" (UID: \"913065b9-29bd-4c9a-8d6e-e319bf91efbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-2gj5n" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.612724 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7b9w9\" (UniqueName: \"kubernetes.io/projected/6889890c-fe1b-47c6-9e3f-e5f23d41a1a3-kube-api-access-7b9w9\") pod \"migrator-59844c95c7-zn6jm\" (UID: \"6889890c-fe1b-47c6-9e3f-e5f23d41a1a3\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-zn6jm" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.612741 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/2f64ae43-a8e0-4e6c-919d-842572e8b107-bound-sa-token\") pod \"ingress-operator-5b745b69d9-8x6nc\" (UID: \"2f64ae43-a8e0-4e6c-919d-842572e8b107\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-8x6nc" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.612755 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/2f4c75ed-fc02-4492-832b-36a064fe8b26-proxy-tls\") pod \"machine-config-controller-84d6567774-ff6kr\" (UID: \"2f4c75ed-fc02-4492-832b-36a064fe8b26\") " 
pod="openshift-machine-config-operator/machine-config-controller-84d6567774-ff6kr" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.612781 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xnqdt\" (UniqueName: \"kubernetes.io/projected/e54d2423-97ff-4c1e-9f04-786495eace4a-kube-api-access-xnqdt\") pod \"machine-config-server-6ncgf\" (UID: \"e54d2423-97ff-4c1e-9f04-786495eace4a\") " pod="openshift-machine-config-operator/machine-config-server-6ncgf" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.612805 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7m47r\" (UniqueName: \"kubernetes.io/projected/55edcb39-8b18-4aea-84b8-2e0b332835e1-kube-api-access-7m47r\") pod \"service-ca-operator-777779d784-865sv\" (UID: \"55edcb39-8b18-4aea-84b8-2e0b332835e1\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-865sv" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.612822 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d78ceb4a-7433-44e2-a874-37b69473fc54-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-8x7j8\" (UID: \"d78ceb4a-7433-44e2-a874-37b69473fc54\") " pod="openshift-authentication/oauth-openshift-558db77b4-8x7j8" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.612836 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bj2fl\" (UniqueName: \"kubernetes.io/projected/2f4c75ed-fc02-4492-832b-36a064fe8b26-kube-api-access-bj2fl\") pod \"machine-config-controller-84d6567774-ff6kr\" (UID: \"2f4c75ed-fc02-4492-832b-36a064fe8b26\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-ff6kr" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.612854 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/9a65c59f-b10d-4c4c-9890-a74abb1395ee-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-n8mp7\" (UID: \"9a65c59f-b10d-4c4c-9890-a74abb1395ee\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-n8mp7" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.612870 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/10df5c6e-5eb3-4ba8-b98d-805b82bd6ca5-metrics-certs\") pod \"router-default-5444994796-8tz5k\" (UID: \"10df5c6e-5eb3-4ba8-b98d-805b82bd6ca5\") " pod="openshift-ingress/router-default-5444994796-8tz5k" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.612887 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cjrpw\" (UniqueName: \"kubernetes.io/projected/592376f8-472a-4014-bd15-b08b04ac15e8-kube-api-access-cjrpw\") pod \"machine-config-operator-74547568cd-b7276\" (UID: \"592376f8-472a-4014-bd15-b08b04ac15e8\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-b7276" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.612904 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/55edcb39-8b18-4aea-84b8-2e0b332835e1-serving-cert\") pod \"service-ca-operator-777779d784-865sv\" (UID: \"55edcb39-8b18-4aea-84b8-2e0b332835e1\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-865sv" Oct 09 
13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.612920 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/10df5c6e-5eb3-4ba8-b98d-805b82bd6ca5-default-certificate\") pod \"router-default-5444994796-8tz5k\" (UID: \"10df5c6e-5eb3-4ba8-b98d-805b82bd6ca5\") " pod="openshift-ingress/router-default-5444994796-8tz5k" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.612937 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6fp28\" (UniqueName: \"kubernetes.io/projected/84933f66-f8be-458d-87b4-b49a4dbe0dc3-kube-api-access-6fp28\") pod \"dns-default-6tzrl\" (UID: \"84933f66-f8be-458d-87b4-b49a4dbe0dc3\") " pod="openshift-dns/dns-default-6tzrl" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.612956 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-99jx2\" (UniqueName: \"kubernetes.io/projected/9a65c59f-b10d-4c4c-9890-a74abb1395ee-kube-api-access-99jx2\") pod \"multus-admission-controller-857f4d67dd-n8mp7\" (UID: \"9a65c59f-b10d-4c4c-9890-a74abb1395ee\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-n8mp7" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.612973 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/43ee5885-188a-4435-a40f-dfb077d01a84-apiservice-cert\") pod \"packageserver-d55dfcdfc-ljrw4\" (UID: \"43ee5885-188a-4435-a40f-dfb077d01a84\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ljrw4" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.612990 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rwrqk\" (UniqueName: \"kubernetes.io/projected/410067d0-d751-4783-a663-e429c72c805c-kube-api-access-rwrqk\") pod \"csi-hostpathplugin-pmkqc\" (UID: \"410067d0-d751-4783-a663-e429c72c805c\") " pod="hostpath-provisioner/csi-hostpathplugin-pmkqc" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.613008 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-26kh8\" (UniqueName: \"kubernetes.io/projected/d78ceb4a-7433-44e2-a874-37b69473fc54-kube-api-access-26kh8\") pod \"oauth-openshift-558db77b4-8x7j8\" (UID: \"d78ceb4a-7433-44e2-a874-37b69473fc54\") " pod="openshift-authentication/oauth-openshift-558db77b4-8x7j8" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.613024 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/43ee5885-188a-4435-a40f-dfb077d01a84-webhook-cert\") pod \"packageserver-d55dfcdfc-ljrw4\" (UID: \"43ee5885-188a-4435-a40f-dfb077d01a84\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ljrw4" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.613042 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/55edcb39-8b18-4aea-84b8-2e0b332835e1-config\") pod \"service-ca-operator-777779d784-865sv\" (UID: \"55edcb39-8b18-4aea-84b8-2e0b332835e1\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-865sv" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.613057 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: 
\"kubernetes.io/projected/913065b9-29bd-4c9a-8d6e-e319bf91efbd-bound-sa-token\") pod \"image-registry-697d97f7c8-2gj5n\" (UID: \"913065b9-29bd-4c9a-8d6e-e319bf91efbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-2gj5n" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.613077 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b32c6cfb-bf88-4de9-a939-44e1b67b882d-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-lxg6x\" (UID: \"b32c6cfb-bf88-4de9-a939-44e1b67b882d\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-lxg6x" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.613093 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/913065b9-29bd-4c9a-8d6e-e319bf91efbd-registry-tls\") pod \"image-registry-697d97f7c8-2gj5n\" (UID: \"913065b9-29bd-4c9a-8d6e-e319bf91efbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-2gj5n" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.613110 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/d78ceb4a-7433-44e2-a874-37b69473fc54-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-8x7j8\" (UID: \"d78ceb4a-7433-44e2-a874-37b69473fc54\") " pod="openshift-authentication/oauth-openshift-558db77b4-8x7j8" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.613129 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/10df5c6e-5eb3-4ba8-b98d-805b82bd6ca5-stats-auth\") pod \"router-default-5444994796-8tz5k\" (UID: \"10df5c6e-5eb3-4ba8-b98d-805b82bd6ca5\") " pod="openshift-ingress/router-default-5444994796-8tz5k" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.613149 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mvbns\" (UniqueName: \"kubernetes.io/projected/3b828691-c7c6-4899-b524-d867e7f151a4-kube-api-access-mvbns\") pod \"collect-profiles-29333595-d52ht\" (UID: \"3b828691-c7c6-4899-b524-d867e7f151a4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333595-d52ht" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.613168 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b32c6cfb-bf88-4de9-a939-44e1b67b882d-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-lxg6x\" (UID: \"b32c6cfb-bf88-4de9-a939-44e1b67b882d\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-lxg6x" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.613184 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/d78ceb4a-7433-44e2-a874-37b69473fc54-audit-dir\") pod \"oauth-openshift-558db77b4-8x7j8\" (UID: \"d78ceb4a-7433-44e2-a874-37b69473fc54\") " pod="openshift-authentication/oauth-openshift-558db77b4-8x7j8" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.613203 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a8ffac8c-bde6-4c86-9671-94764560a87a-cert\") pod \"ingress-canary-sx9r4\" (UID: \"a8ffac8c-bde6-4c86-9671-94764560a87a\") " 
pod="openshift-ingress-canary/ingress-canary-sx9r4" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.613229 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/ae26c8ab-99c5-456f-b029-e56e684bf502-profile-collector-cert\") pod \"catalog-operator-68c6474976-59vf6\" (UID: \"ae26c8ab-99c5-456f-b029-e56e684bf502\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-59vf6" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.613243 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/e54d2423-97ff-4c1e-9f04-786495eace4a-certs\") pod \"machine-config-server-6ncgf\" (UID: \"e54d2423-97ff-4c1e-9f04-786495eace4a\") " pod="openshift-machine-config-operator/machine-config-server-6ncgf" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.613258 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/10df5c6e-5eb3-4ba8-b98d-805b82bd6ca5-service-ca-bundle\") pod \"router-default-5444994796-8tz5k\" (UID: \"10df5c6e-5eb3-4ba8-b98d-805b82bd6ca5\") " pod="openshift-ingress/router-default-5444994796-8tz5k" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.613273 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/410067d0-d751-4783-a663-e429c72c805c-registration-dir\") pod \"csi-hostpathplugin-pmkqc\" (UID: \"410067d0-d751-4783-a663-e429c72c805c\") " pod="hostpath-provisioner/csi-hostpathplugin-pmkqc" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.613288 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/410067d0-d751-4783-a663-e429c72c805c-plugins-dir\") pod \"csi-hostpathplugin-pmkqc\" (UID: \"410067d0-d751-4783-a663-e429c72c805c\") " pod="hostpath-provisioner/csi-hostpathplugin-pmkqc" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.613304 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/592376f8-472a-4014-bd15-b08b04ac15e8-auth-proxy-config\") pod \"machine-config-operator-74547568cd-b7276\" (UID: \"592376f8-472a-4014-bd15-b08b04ac15e8\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-b7276" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.613323 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/d78ceb4a-7433-44e2-a874-37b69473fc54-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-8x7j8\" (UID: \"d78ceb4a-7433-44e2-a874-37b69473fc54\") " pod="openshift-authentication/oauth-openshift-558db77b4-8x7j8" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.613343 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/d78ceb4a-7433-44e2-a874-37b69473fc54-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-8x7j8\" (UID: \"d78ceb4a-7433-44e2-a874-37b69473fc54\") " pod="openshift-authentication/oauth-openshift-558db77b4-8x7j8" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.613359 4762 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/d78ceb4a-7433-44e2-a874-37b69473fc54-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-8x7j8\" (UID: \"d78ceb4a-7433-44e2-a874-37b69473fc54\") " pod="openshift-authentication/oauth-openshift-558db77b4-8x7j8" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.613380 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3b828691-c7c6-4899-b524-d867e7f151a4-config-volume\") pod \"collect-profiles-29333595-d52ht\" (UID: \"3b828691-c7c6-4899-b524-d867e7f151a4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333595-d52ht" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.613396 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/fe403e6f-4f08-4263-962d-377d0989c0d7-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-9957f\" (UID: \"fe403e6f-4f08-4263-962d-377d0989c0d7\") " pod="openshift-marketplace/marketplace-operator-79b997595-9957f" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.613415 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/913065b9-29bd-4c9a-8d6e-e319bf91efbd-registry-certificates\") pod \"image-registry-697d97f7c8-2gj5n\" (UID: \"913065b9-29bd-4c9a-8d6e-e319bf91efbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-2gj5n" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.614439 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/913065b9-29bd-4c9a-8d6e-e319bf91efbd-registry-certificates\") pod \"image-registry-697d97f7c8-2gj5n\" (UID: \"913065b9-29bd-4c9a-8d6e-e319bf91efbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-2gj5n" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.615588 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/fe403e6f-4f08-4263-962d-377d0989c0d7-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-9957f\" (UID: \"fe403e6f-4f08-4263-962d-377d0989c0d7\") " pod="openshift-marketplace/marketplace-operator-79b997595-9957f" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.615977 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/43ee5885-188a-4435-a40f-dfb077d01a84-tmpfs\") pod \"packageserver-d55dfcdfc-ljrw4\" (UID: \"43ee5885-188a-4435-a40f-dfb077d01a84\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ljrw4" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.616497 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/d78ceb4a-7433-44e2-a874-37b69473fc54-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-8x7j8\" (UID: \"d78ceb4a-7433-44e2-a874-37b69473fc54\") " pod="openshift-authentication/oauth-openshift-558db77b4-8x7j8" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.619041 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/592376f8-472a-4014-bd15-b08b04ac15e8-images\") pod 
\"machine-config-operator-74547568cd-b7276\" (UID: \"592376f8-472a-4014-bd15-b08b04ac15e8\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-b7276" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.619429 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/592376f8-472a-4014-bd15-b08b04ac15e8-auth-proxy-config\") pod \"machine-config-operator-74547568cd-b7276\" (UID: \"592376f8-472a-4014-bd15-b08b04ac15e8\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-b7276" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.620207 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/10df5c6e-5eb3-4ba8-b98d-805b82bd6ca5-service-ca-bundle\") pod \"router-default-5444994796-8tz5k\" (UID: \"10df5c6e-5eb3-4ba8-b98d-805b82bd6ca5\") " pod="openshift-ingress/router-default-5444994796-8tz5k" Oct 09 13:27:56 crc kubenswrapper[4762]: E1009 13:27:56.621225 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 13:27:57.121199225 +0000 UTC m=+152.894990324 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.621997 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d78ceb4a-7433-44e2-a874-37b69473fc54-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-8x7j8\" (UID: \"d78ceb4a-7433-44e2-a874-37b69473fc54\") " pod="openshift-authentication/oauth-openshift-558db77b4-8x7j8" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.623966 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-n64hz"] Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.627018 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-8pmjh"] Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.630260 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2f64ae43-a8e0-4e6c-919d-842572e8b107-trusted-ca\") pod \"ingress-operator-5b745b69d9-8x6nc\" (UID: \"2f64ae43-a8e0-4e6c-919d-842572e8b107\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-8x6nc" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.630561 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-mbgsr" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.633368 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/e54d2423-97ff-4c1e-9f04-786495eace4a-node-bootstrap-token\") pod \"machine-config-server-6ncgf\" (UID: \"e54d2423-97ff-4c1e-9f04-786495eace4a\") " pod="openshift-machine-config-operator/machine-config-server-6ncgf" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.634774 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/d78ceb4a-7433-44e2-a874-37b69473fc54-audit-dir\") pod \"oauth-openshift-558db77b4-8x7j8\" (UID: \"d78ceb4a-7433-44e2-a874-37b69473fc54\") " pod="openshift-authentication/oauth-openshift-558db77b4-8x7j8" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.634916 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/55edcb39-8b18-4aea-84b8-2e0b332835e1-config\") pod \"service-ca-operator-777779d784-865sv\" (UID: \"55edcb39-8b18-4aea-84b8-2e0b332835e1\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-865sv" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.635601 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b32c6cfb-bf88-4de9-a939-44e1b67b882d-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-lxg6x\" (UID: \"b32c6cfb-bf88-4de9-a939-44e1b67b882d\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-lxg6x" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.636293 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/913065b9-29bd-4c9a-8d6e-e319bf91efbd-trusted-ca\") pod \"image-registry-697d97f7c8-2gj5n\" (UID: \"913065b9-29bd-4c9a-8d6e-e319bf91efbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-2gj5n" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.636892 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/d78ceb4a-7433-44e2-a874-37b69473fc54-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-8x7j8\" (UID: \"d78ceb4a-7433-44e2-a874-37b69473fc54\") " pod="openshift-authentication/oauth-openshift-558db77b4-8x7j8" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.638508 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/2f4c75ed-fc02-4492-832b-36a064fe8b26-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-ff6kr\" (UID: \"2f4c75ed-fc02-4492-832b-36a064fe8b26\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-ff6kr" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.639039 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/43ee5885-188a-4435-a40f-dfb077d01a84-webhook-cert\") pod \"packageserver-d55dfcdfc-ljrw4\" (UID: \"43ee5885-188a-4435-a40f-dfb077d01a84\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ljrw4" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.639284 4762 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3b828691-c7c6-4899-b524-d867e7f151a4-config-volume\") pod \"collect-profiles-29333595-d52ht\" (UID: \"3b828691-c7c6-4899-b524-d867e7f151a4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333595-d52ht" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.640229 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/d78ceb4a-7433-44e2-a874-37b69473fc54-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-8x7j8\" (UID: \"d78ceb4a-7433-44e2-a874-37b69473fc54\") " pod="openshift-authentication/oauth-openshift-558db77b4-8x7j8" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.640925 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/d78ceb4a-7433-44e2-a874-37b69473fc54-audit-policies\") pod \"oauth-openshift-558db77b4-8x7j8\" (UID: \"d78ceb4a-7433-44e2-a874-37b69473fc54\") " pod="openshift-authentication/oauth-openshift-558db77b4-8x7j8" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.640998 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/10df5c6e-5eb3-4ba8-b98d-805b82bd6ca5-stats-auth\") pod \"router-default-5444994796-8tz5k\" (UID: \"10df5c6e-5eb3-4ba8-b98d-805b82bd6ca5\") " pod="openshift-ingress/router-default-5444994796-8tz5k" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.641456 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/d78ceb4a-7433-44e2-a874-37b69473fc54-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-8x7j8\" (UID: \"d78ceb4a-7433-44e2-a874-37b69473fc54\") " pod="openshift-authentication/oauth-openshift-558db77b4-8x7j8" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.641506 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-c599r"] Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.642702 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-xzppk"] Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.646691 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/d78ceb4a-7433-44e2-a874-37b69473fc54-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-8x7j8\" (UID: \"d78ceb4a-7433-44e2-a874-37b69473fc54\") " pod="openshift-authentication/oauth-openshift-558db77b4-8x7j8" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.648117 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/2f4c75ed-fc02-4492-832b-36a064fe8b26-proxy-tls\") pod \"machine-config-controller-84d6567774-ff6kr\" (UID: \"2f4c75ed-fc02-4492-832b-36a064fe8b26\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-ff6kr" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.648995 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/9a65c59f-b10d-4c4c-9890-a74abb1395ee-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-n8mp7\" (UID: \"9a65c59f-b10d-4c4c-9890-a74abb1395ee\") " 
pod="openshift-multus/multus-admission-controller-857f4d67dd-n8mp7" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.649919 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-pmdsg"] Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.650444 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/592376f8-472a-4014-bd15-b08b04ac15e8-proxy-tls\") pod \"machine-config-operator-74547568cd-b7276\" (UID: \"592376f8-472a-4014-bd15-b08b04ac15e8\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-b7276" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.663121 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vj7lx"] Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.669346 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/d78ceb4a-7433-44e2-a874-37b69473fc54-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-8x7j8\" (UID: \"d78ceb4a-7433-44e2-a874-37b69473fc54\") " pod="openshift-authentication/oauth-openshift-558db77b4-8x7j8" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.669632 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/10df5c6e-5eb3-4ba8-b98d-805b82bd6ca5-metrics-certs\") pod \"router-default-5444994796-8tz5k\" (UID: \"10df5c6e-5eb3-4ba8-b98d-805b82bd6ca5\") " pod="openshift-ingress/router-default-5444994796-8tz5k" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.669642 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/913065b9-29bd-4c9a-8d6e-e319bf91efbd-installation-pull-secrets\") pod \"image-registry-697d97f7c8-2gj5n\" (UID: \"913065b9-29bd-4c9a-8d6e-e319bf91efbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-2gj5n" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.672034 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/d78ceb4a-7433-44e2-a874-37b69473fc54-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-8x7j8\" (UID: \"d78ceb4a-7433-44e2-a874-37b69473fc54\") " pod="openshift-authentication/oauth-openshift-558db77b4-8x7j8" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.687082 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7m47r\" (UniqueName: \"kubernetes.io/projected/55edcb39-8b18-4aea-84b8-2e0b332835e1-kube-api-access-7m47r\") pod \"service-ca-operator-777779d784-865sv\" (UID: \"55edcb39-8b18-4aea-84b8-2e0b332835e1\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-865sv" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.690029 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zhmz2\" (UniqueName: \"kubernetes.io/projected/10df5c6e-5eb3-4ba8-b98d-805b82bd6ca5-kube-api-access-zhmz2\") pod \"router-default-5444994796-8tz5k\" (UID: \"10df5c6e-5eb3-4ba8-b98d-805b82bd6ca5\") " pod="openshift-ingress/router-default-5444994796-8tz5k" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.692026 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-btvjf" event={"ID":"3818a2d1-8618-4b53-b326-afb6cbdfaf38","Type":"ContainerStarted","Data":"8edee48d135ed7a049dc36efe3b74a4bb5ad0d705b8f25e4470a2fa62b55010f"} Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.694487 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/d78ceb4a-7433-44e2-a874-37b69473fc54-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-8x7j8\" (UID: \"d78ceb4a-7433-44e2-a874-37b69473fc54\") " pod="openshift-authentication/oauth-openshift-558db77b4-8x7j8" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.694917 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/2f64ae43-a8e0-4e6c-919d-842572e8b107-metrics-tls\") pod \"ingress-operator-5b745b69d9-8x6nc\" (UID: \"2f64ae43-a8e0-4e6c-919d-842572e8b107\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-8x6nc" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.694990 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/55edcb39-8b18-4aea-84b8-2e0b332835e1-serving-cert\") pod \"service-ca-operator-777779d784-865sv\" (UID: \"55edcb39-8b18-4aea-84b8-2e0b332835e1\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-865sv" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.695919 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/43ee5885-188a-4435-a40f-dfb077d01a84-apiservice-cert\") pod \"packageserver-d55dfcdfc-ljrw4\" (UID: \"43ee5885-188a-4435-a40f-dfb077d01a84\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ljrw4" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.696058 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/d78ceb4a-7433-44e2-a874-37b69473fc54-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-8x7j8\" (UID: \"d78ceb4a-7433-44e2-a874-37b69473fc54\") " pod="openshift-authentication/oauth-openshift-558db77b4-8x7j8" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.696495 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/fe403e6f-4f08-4263-962d-377d0989c0d7-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-9957f\" (UID: \"fe403e6f-4f08-4263-962d-377d0989c0d7\") " pod="openshift-marketplace/marketplace-operator-79b997595-9957f" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.697132 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/e54d2423-97ff-4c1e-9f04-786495eace4a-certs\") pod \"machine-config-server-6ncgf\" (UID: \"e54d2423-97ff-4c1e-9f04-786495eace4a\") " pod="openshift-machine-config-operator/machine-config-server-6ncgf" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.703387 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/913065b9-29bd-4c9a-8d6e-e319bf91efbd-registry-tls\") pod \"image-registry-697d97f7c8-2gj5n\" (UID: \"913065b9-29bd-4c9a-8d6e-e319bf91efbd\") " 
pod="openshift-image-registry/image-registry-697d97f7c8-2gj5n" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.703687 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/ae26c8ab-99c5-456f-b029-e56e684bf502-profile-collector-cert\") pod \"catalog-operator-68c6474976-59vf6\" (UID: \"ae26c8ab-99c5-456f-b029-e56e684bf502\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-59vf6" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.704874 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/10df5c6e-5eb3-4ba8-b98d-805b82bd6ca5-default-certificate\") pod \"router-default-5444994796-8tz5k\" (UID: \"10df5c6e-5eb3-4ba8-b98d-805b82bd6ca5\") " pod="openshift-ingress/router-default-5444994796-8tz5k" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.705377 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-r4p46" event={"ID":"776fba5d-1567-4a5d-a090-b8fbf37a583b","Type":"ContainerStarted","Data":"7e49eb5c5ab4f68cf51679c898d37ca4bc7ae5eaed5508e62ae19d7c6403f4f6"} Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.705418 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-r4p46" event={"ID":"776fba5d-1567-4a5d-a090-b8fbf37a583b","Type":"ContainerStarted","Data":"8d99c203283a11ca8f9ca79de1b74e4288dfdbae8fe4d2da75c980952d094c86"} Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.705910 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-r4p46" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.706903 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b32c6cfb-bf88-4de9-a939-44e1b67b882d-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-lxg6x\" (UID: \"b32c6cfb-bf88-4de9-a939-44e1b67b882d\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-lxg6x" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.707492 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3b828691-c7c6-4899-b524-d867e7f151a4-secret-volume\") pod \"collect-profiles-29333595-d52ht\" (UID: \"3b828691-c7c6-4899-b524-d867e7f151a4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333595-d52ht" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.709960 4762 patch_prober.go:28] interesting pod/console-operator-58897d9998-r4p46 container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.12:8443/readyz\": dial tcp 10.217.0.12:8443: connect: connection refused" start-of-body= Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.710387 4762 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-r4p46" podUID="776fba5d-1567-4a5d-a090-b8fbf37a583b" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.12:8443/readyz\": dial tcp 10.217.0.12:8443: connect: connection refused" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.713058 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: 
\"kubernetes.io/secret/ae26c8ab-99c5-456f-b029-e56e684bf502-srv-cert\") pod \"catalog-operator-68c6474976-59vf6\" (UID: \"ae26c8ab-99c5-456f-b029-e56e684bf502\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-59vf6" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.714397 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rwrqk\" (UniqueName: \"kubernetes.io/projected/410067d0-d751-4783-a663-e429c72c805c-kube-api-access-rwrqk\") pod \"csi-hostpathplugin-pmkqc\" (UID: \"410067d0-d751-4783-a663-e429c72c805c\") " pod="hostpath-provisioner/csi-hostpathplugin-pmkqc" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.714869 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a8ffac8c-bde6-4c86-9671-94764560a87a-cert\") pod \"ingress-canary-sx9r4\" (UID: \"a8ffac8c-bde6-4c86-9671-94764560a87a\") " pod="openshift-ingress-canary/ingress-canary-sx9r4" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.714592 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-c4rs8" event={"ID":"414ea71c-9878-4a8a-825f-3d18ff8460d9","Type":"ContainerStarted","Data":"5cb3bd0a707bb90f57c922a217d35071deefb8ae38e8d921ef58c043db6c262f"} Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.715069 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-c4rs8" event={"ID":"414ea71c-9878-4a8a-825f-3d18ff8460d9","Type":"ContainerStarted","Data":"fd528e46b687a21b47574fffcd2e91e21b67d3cc31a2f9dbb5d240be3c29e0fc"} Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.716048 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8jgvf\" (UniqueName: \"kubernetes.io/projected/43ee5885-188a-4435-a40f-dfb077d01a84-kube-api-access-8jgvf\") pod \"packageserver-d55dfcdfc-ljrw4\" (UID: \"43ee5885-188a-4435-a40f-dfb077d01a84\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ljrw4" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.716667 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/410067d0-d751-4783-a663-e429c72c805c-registration-dir\") pod \"csi-hostpathplugin-pmkqc\" (UID: \"410067d0-d751-4783-a663-e429c72c805c\") " pod="hostpath-provisioner/csi-hostpathplugin-pmkqc" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.716693 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/410067d0-d751-4783-a663-e429c72c805c-plugins-dir\") pod \"csi-hostpathplugin-pmkqc\" (UID: \"410067d0-d751-4783-a663-e429c72c805c\") " pod="hostpath-provisioner/csi-hostpathplugin-pmkqc" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.717005 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/410067d0-d751-4783-a663-e429c72c805c-registration-dir\") pod \"csi-hostpathplugin-pmkqc\" (UID: \"410067d0-d751-4783-a663-e429c72c805c\") " pod="hostpath-provisioner/csi-hostpathplugin-pmkqc" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.717084 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/410067d0-d751-4783-a663-e429c72c805c-plugins-dir\") 
pod \"csi-hostpathplugin-pmkqc\" (UID: \"410067d0-d751-4783-a663-e429c72c805c\") " pod="hostpath-provisioner/csi-hostpathplugin-pmkqc" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.717195 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-c4rs8" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.717954 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/410067d0-d751-4783-a663-e429c72c805c-mountpoint-dir\") pod \"csi-hostpathplugin-pmkqc\" (UID: \"410067d0-d751-4783-a663-e429c72c805c\") " pod="hostpath-provisioner/csi-hostpathplugin-pmkqc" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.718002 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/410067d0-d751-4783-a663-e429c72c805c-csi-data-dir\") pod \"csi-hostpathplugin-pmkqc\" (UID: \"410067d0-d751-4783-a663-e429c72c805c\") " pod="hostpath-provisioner/csi-hostpathplugin-pmkqc" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.718022 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/84933f66-f8be-458d-87b4-b49a4dbe0dc3-config-volume\") pod \"dns-default-6tzrl\" (UID: \"84933f66-f8be-458d-87b4-b49a4dbe0dc3\") " pod="openshift-dns/dns-default-6tzrl" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.718045 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/410067d0-d751-4783-a663-e429c72c805c-socket-dir\") pod \"csi-hostpathplugin-pmkqc\" (UID: \"410067d0-d751-4783-a663-e429c72c805c\") " pod="hostpath-provisioner/csi-hostpathplugin-pmkqc" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.718084 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k8wwq\" (UniqueName: \"kubernetes.io/projected/a8ffac8c-bde6-4c86-9671-94764560a87a-kube-api-access-k8wwq\") pod \"ingress-canary-sx9r4\" (UID: \"a8ffac8c-bde6-4c86-9671-94764560a87a\") " pod="openshift-ingress-canary/ingress-canary-sx9r4" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.718111 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/84933f66-f8be-458d-87b4-b49a4dbe0dc3-metrics-tls\") pod \"dns-default-6tzrl\" (UID: \"84933f66-f8be-458d-87b4-b49a4dbe0dc3\") " pod="openshift-dns/dns-default-6tzrl" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.718138 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/410067d0-d751-4783-a663-e429c72c805c-csi-data-dir\") pod \"csi-hostpathplugin-pmkqc\" (UID: \"410067d0-d751-4783-a663-e429c72c805c\") " pod="hostpath-provisioner/csi-hostpathplugin-pmkqc" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.718127 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/410067d0-d751-4783-a663-e429c72c805c-mountpoint-dir\") pod \"csi-hostpathplugin-pmkqc\" (UID: \"410067d0-d751-4783-a663-e429c72c805c\") " pod="hostpath-provisioner/csi-hostpathplugin-pmkqc" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.718168 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2gj5n\" (UID: \"913065b9-29bd-4c9a-8d6e-e319bf91efbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-2gj5n" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.718238 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6fp28\" (UniqueName: \"kubernetes.io/projected/84933f66-f8be-458d-87b4-b49a4dbe0dc3-kube-api-access-6fp28\") pod \"dns-default-6tzrl\" (UID: \"84933f66-f8be-458d-87b4-b49a4dbe0dc3\") " pod="openshift-dns/dns-default-6tzrl" Oct 09 13:27:56 crc kubenswrapper[4762]: E1009 13:27:56.718809 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 13:27:57.218798982 +0000 UTC m=+152.992590021 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2gj5n" (UID: "913065b9-29bd-4c9a-8d6e-e319bf91efbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.719713 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/84933f66-f8be-458d-87b4-b49a4dbe0dc3-config-volume\") pod \"dns-default-6tzrl\" (UID: \"84933f66-f8be-458d-87b4-b49a4dbe0dc3\") " pod="openshift-dns/dns-default-6tzrl" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.719784 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/410067d0-d751-4783-a663-e429c72c805c-socket-dir\") pod \"csi-hostpathplugin-pmkqc\" (UID: \"410067d0-d751-4783-a663-e429c72c805c\") " pod="hostpath-provisioner/csi-hostpathplugin-pmkqc" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.720338 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cjrpw\" (UniqueName: \"kubernetes.io/projected/592376f8-472a-4014-bd15-b08b04ac15e8-kube-api-access-cjrpw\") pod \"machine-config-operator-74547568cd-b7276\" (UID: \"592376f8-472a-4014-bd15-b08b04ac15e8\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-b7276" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.720623 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-v2xzj" event={"ID":"cb588149-29d3-47af-9359-406e69f9fb33","Type":"ContainerStarted","Data":"b57f14f0bddd83811ad874b14a88b5ba2fac7c611d250f165d1018a40cb36670"} Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.720669 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-v2xzj" event={"ID":"cb588149-29d3-47af-9359-406e69f9fb33","Type":"ContainerStarted","Data":"a1f078ea6dc23c4ebdbbb997510e05fdc7562888182562b8a76d0902bec81efc"} Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.721904 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: 
\"kubernetes.io/secret/a8ffac8c-bde6-4c86-9671-94764560a87a-cert\") pod \"ingress-canary-sx9r4\" (UID: \"a8ffac8c-bde6-4c86-9671-94764560a87a\") " pod="openshift-ingress-canary/ingress-canary-sx9r4" Oct 09 13:27:56 crc kubenswrapper[4762]: W1009 13:27:56.723577 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod462d08cc_03ff_4c48_8002_172bc184d6ea.slice/crio-75a8354accdf641b07cc669c2fc81f2624acdc2c2824cc3001798ae8114f83de WatchSource:0}: Error finding container 75a8354accdf641b07cc669c2fc81f2624acdc2c2824cc3001798ae8114f83de: Status 404 returned error can't find the container with id 75a8354accdf641b07cc669c2fc81f2624acdc2c2824cc3001798ae8114f83de Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.726749 4762 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-c4rs8 container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.6:8443/healthz\": dial tcp 10.217.0.6:8443: connect: connection refused" start-of-body= Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.726826 4762 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-c4rs8" podUID="414ea71c-9878-4a8a-825f-3d18ff8460d9" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.6:8443/healthz\": dial tcp 10.217.0.6:8443: connect: connection refused" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.729545 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-zxx57" event={"ID":"03a48f34-2c09-4130-bed4-6e33b080909e","Type":"ContainerStarted","Data":"b6b70984ef1379e2c4b7ed5c7bd6233cffa8eecb9a8a1924533d86c13c79fc13"} Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.729607 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-zxx57" event={"ID":"03a48f34-2c09-4130-bed4-6e33b080909e","Type":"ContainerStarted","Data":"f852d3964cd006f0f92d6784601e2027d34faef8cd15a170cad5a1afa37534b4"} Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.732041 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/d78ceb4a-7433-44e2-a874-37b69473fc54-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-8x7j8\" (UID: \"d78ceb4a-7433-44e2-a874-37b69473fc54\") " pod="openshift-authentication/oauth-openshift-558db77b4-8x7j8" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.732404 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/84933f66-f8be-458d-87b4-b49a4dbe0dc3-metrics-tls\") pod \"dns-default-6tzrl\" (UID: \"84933f66-f8be-458d-87b4-b49a4dbe0dc3\") " pod="openshift-dns/dns-default-6tzrl" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.736678 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-h972j" event={"ID":"fc3dda6a-2c92-4d44-b2b6-70f89b397af4","Type":"ContainerStarted","Data":"7f312b31e8619bf28056610ccfc681ef8abfdec172376c015d9b5139f7114994"} Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.736873 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-h972j" 
event={"ID":"fc3dda6a-2c92-4d44-b2b6-70f89b397af4","Type":"ContainerStarted","Data":"1c794f73fbfb9247db1c923f9db48ed587ec4975994b0a123a662cf14a04c539"} Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.746813 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-qpmbq" event={"ID":"d542877b-87f6-4d96-9256-ff58662d9547","Type":"ContainerStarted","Data":"3536ad1d2bda259064ed69d1e08884815c014563242ad29122f34b3cea1c64b4"} Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.747845 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s8vrg\" (UniqueName: \"kubernetes.io/projected/fe403e6f-4f08-4263-962d-377d0989c0d7-kube-api-access-s8vrg\") pod \"marketplace-operator-79b997595-9957f\" (UID: \"fe403e6f-4f08-4263-962d-377d0989c0d7\") " pod="openshift-marketplace/marketplace-operator-79b997595-9957f" Oct 09 13:27:56 crc kubenswrapper[4762]: W1009 13:27:56.748047 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod76be61d0_03ad_4822_8097_2ef2bde86bf1.slice/crio-9796a2a42d9e0858b40ea0edab4d795efe5df8e6ee3a53365f8148475b2d7586 WatchSource:0}: Error finding container 9796a2a42d9e0858b40ea0edab4d795efe5df8e6ee3a53365f8148475b2d7586: Status 404 returned error can't find the container with id 9796a2a42d9e0858b40ea0edab4d795efe5df8e6ee3a53365f8148475b2d7586 Oct 09 13:27:56 crc kubenswrapper[4762]: W1009 13:27:56.748429 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbf4cd740_a799_47d8_9ce6_88bce9afa952.slice/crio-3a51e0ccd0ef536654d6f4df7c9f0602e17c2484f2752604446fd1e53fe4f8bb WatchSource:0}: Error finding container 3a51e0ccd0ef536654d6f4df7c9f0602e17c2484f2752604446fd1e53fe4f8bb: Status 404 returned error can't find the container with id 3a51e0ccd0ef536654d6f4df7c9f0602e17c2484f2752604446fd1e53fe4f8bb Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.774423 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b32c6cfb-bf88-4de9-a939-44e1b67b882d-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-lxg6x\" (UID: \"b32c6cfb-bf88-4de9-a939-44e1b67b882d\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-lxg6x" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.776998 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gjqgq\" (UniqueName: \"kubernetes.io/projected/913065b9-29bd-4c9a-8d6e-e319bf91efbd-kube-api-access-gjqgq\") pod \"image-registry-697d97f7c8-2gj5n\" (UID: \"913065b9-29bd-4c9a-8d6e-e319bf91efbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-2gj5n" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.785579 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-r5hfv"] Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.819272 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 13:27:56 crc kubenswrapper[4762]: E1009 13:27:56.819799 4762 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 13:27:57.319782182 +0000 UTC m=+153.093573221 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.820113 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2gj5n\" (UID: \"913065b9-29bd-4c9a-8d6e-e319bf91efbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-2gj5n" Oct 09 13:27:56 crc kubenswrapper[4762]: E1009 13:27:56.820377 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 13:27:57.320368739 +0000 UTC m=+153.094159778 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2gj5n" (UID: "913065b9-29bd-4c9a-8d6e-e319bf91efbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.825558 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8gbr6\" (UniqueName: \"kubernetes.io/projected/ae26c8ab-99c5-456f-b029-e56e684bf502-kube-api-access-8gbr6\") pod \"catalog-operator-68c6474976-59vf6\" (UID: \"ae26c8ab-99c5-456f-b029-e56e684bf502\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-59vf6" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.833014 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bj2fl\" (UniqueName: \"kubernetes.io/projected/2f4c75ed-fc02-4492-832b-36a064fe8b26-kube-api-access-bj2fl\") pod \"machine-config-controller-84d6567774-ff6kr\" (UID: \"2f4c75ed-fc02-4492-832b-36a064fe8b26\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-ff6kr" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.847748 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-77hfw"] Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.848007 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-9dv9p"] Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.856675 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/913065b9-29bd-4c9a-8d6e-e319bf91efbd-bound-sa-token\") pod 
\"image-registry-697d97f7c8-2gj5n\" (UID: \"913065b9-29bd-4c9a-8d6e-e319bf91efbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-2gj5n" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.867559 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-924rd"] Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.881219 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sb258\" (UniqueName: \"kubernetes.io/projected/2f64ae43-a8e0-4e6c-919d-842572e8b107-kube-api-access-sb258\") pod \"ingress-operator-5b745b69d9-8x6nc\" (UID: \"2f64ae43-a8e0-4e6c-919d-842572e8b107\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-8x6nc" Oct 09 13:27:56 crc kubenswrapper[4762]: W1009 13:27:56.905773 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode5a0e5db_1ea7_4af8_9f2d_db55d53645ab.slice/crio-a86b6947c8d329d9aecef2ff1195fcd5012798c1ab6f646aa2277a9bf25f35eb WatchSource:0}: Error finding container a86b6947c8d329d9aecef2ff1195fcd5012798c1ab6f646aa2277a9bf25f35eb: Status 404 returned error can't find the container with id a86b6947c8d329d9aecef2ff1195fcd5012798c1ab6f646aa2277a9bf25f35eb Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.909207 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mvbns\" (UniqueName: \"kubernetes.io/projected/3b828691-c7c6-4899-b524-d867e7f151a4-kube-api-access-mvbns\") pod \"collect-profiles-29333595-d52ht\" (UID: \"3b828691-c7c6-4899-b524-d867e7f151a4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333595-d52ht" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.918023 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-ff6kr" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.922961 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-8tz5k" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.923097 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7b9w9\" (UniqueName: \"kubernetes.io/projected/6889890c-fe1b-47c6-9e3f-e5f23d41a1a3-kube-api-access-7b9w9\") pod \"migrator-59844c95c7-zn6jm\" (UID: \"6889890c-fe1b-47c6-9e3f-e5f23d41a1a3\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-zn6jm" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.923291 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 13:27:56 crc kubenswrapper[4762]: E1009 13:27:56.923669 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 13:27:57.423642053 +0000 UTC m=+153.197433082 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.935636 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-59vf6" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.936679 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xnqdt\" (UniqueName: \"kubernetes.io/projected/e54d2423-97ff-4c1e-9f04-786495eace4a-kube-api-access-xnqdt\") pod \"machine-config-server-6ncgf\" (UID: \"e54d2423-97ff-4c1e-9f04-786495eace4a\") " pod="openshift-machine-config-operator/machine-config-server-6ncgf" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.945922 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29333595-d52ht" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.955999 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-99jx2\" (UniqueName: \"kubernetes.io/projected/9a65c59f-b10d-4c4c-9890-a74abb1395ee-kube-api-access-99jx2\") pod \"multus-admission-controller-857f4d67dd-n8mp7\" (UID: \"9a65c59f-b10d-4c4c-9890-a74abb1395ee\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-n8mp7" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.957320 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-b7276" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.965850 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-865sv" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.974849 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-zn6jm" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.978415 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-26kh8\" (UniqueName: \"kubernetes.io/projected/d78ceb4a-7433-44e2-a874-37b69473fc54-kube-api-access-26kh8\") pod \"oauth-openshift-558db77b4-8x7j8\" (UID: \"d78ceb4a-7433-44e2-a874-37b69473fc54\") " pod="openshift-authentication/oauth-openshift-558db77b4-8x7j8" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.985134 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-9957f" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.992075 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ljrw4" Oct 09 13:27:56 crc kubenswrapper[4762]: I1009 13:27:56.996057 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/2f64ae43-a8e0-4e6c-919d-842572e8b107-bound-sa-token\") pod \"ingress-operator-5b745b69d9-8x6nc\" (UID: \"2f64ae43-a8e0-4e6c-919d-842572e8b107\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-8x6nc" Oct 09 13:27:57 crc kubenswrapper[4762]: I1009 13:27:57.012826 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-lxg6x" Oct 09 13:27:57 crc kubenswrapper[4762]: I1009 13:27:57.013564 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-xzd8z"] Oct 09 13:27:57 crc kubenswrapper[4762]: I1009 13:27:57.016084 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-l2j7b"] Oct 09 13:27:57 crc kubenswrapper[4762]: I1009 13:27:57.020065 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-n8mp7" Oct 09 13:27:57 crc kubenswrapper[4762]: I1009 13:27:57.023007 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rwrqk\" (UniqueName: \"kubernetes.io/projected/410067d0-d751-4783-a663-e429c72c805c-kube-api-access-rwrqk\") pod \"csi-hostpathplugin-pmkqc\" (UID: \"410067d0-d751-4783-a663-e429c72c805c\") " pod="hostpath-provisioner/csi-hostpathplugin-pmkqc" Oct 09 13:27:57 crc kubenswrapper[4762]: I1009 13:27:57.024219 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2gj5n\" (UID: \"913065b9-29bd-4c9a-8d6e-e319bf91efbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-2gj5n" Oct 09 13:27:57 crc kubenswrapper[4762]: E1009 13:27:57.025026 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 13:27:57.525007863 +0000 UTC m=+153.298798902 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2gj5n" (UID: "913065b9-29bd-4c9a-8d6e-e319bf91efbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 13:27:57 crc kubenswrapper[4762]: I1009 13:27:57.027111 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-6ncgf" Oct 09 13:27:57 crc kubenswrapper[4762]: I1009 13:27:57.042207 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6fp28\" (UniqueName: \"kubernetes.io/projected/84933f66-f8be-458d-87b4-b49a4dbe0dc3-kube-api-access-6fp28\") pod \"dns-default-6tzrl\" (UID: \"84933f66-f8be-458d-87b4-b49a4dbe0dc3\") " pod="openshift-dns/dns-default-6tzrl" Oct 09 13:27:57 crc kubenswrapper[4762]: I1009 13:27:57.044974 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-6tzrl" Oct 09 13:27:57 crc kubenswrapper[4762]: I1009 13:27:57.058549 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-pmkqc" Oct 09 13:27:57 crc kubenswrapper[4762]: I1009 13:27:57.063329 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k8wwq\" (UniqueName: \"kubernetes.io/projected/a8ffac8c-bde6-4c86-9671-94764560a87a-kube-api-access-k8wwq\") pod \"ingress-canary-sx9r4\" (UID: \"a8ffac8c-bde6-4c86-9671-94764560a87a\") " pod="openshift-ingress-canary/ingress-canary-sx9r4" Oct 09 13:27:57 crc kubenswrapper[4762]: I1009 13:27:57.066948 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-8zwlj"] Oct 09 13:27:57 crc kubenswrapper[4762]: I1009 13:27:57.068008 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-sx9r4" Oct 09 13:27:57 crc kubenswrapper[4762]: I1009 13:27:57.073396 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-d4bsv"] Oct 09 13:27:57 crc kubenswrapper[4762]: I1009 13:27:57.082362 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-mbgsr"] Oct 09 13:27:57 crc kubenswrapper[4762]: I1009 13:27:57.082872 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-hwt8t"] Oct 09 13:27:57 crc kubenswrapper[4762]: I1009 13:27:57.105219 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-hjtqj"] Oct 09 13:27:57 crc kubenswrapper[4762]: W1009 13:27:57.109766 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod10df5c6e_5eb3_4ba8_b98d_805b82bd6ca5.slice/crio-52fdaf97a46d4e9950d6198b92df69317e20096605cdaf447f1da272d28157ab WatchSource:0}: Error finding container 52fdaf97a46d4e9950d6198b92df69317e20096605cdaf447f1da272d28157ab: Status 404 returned error can't find the container with id 52fdaf97a46d4e9950d6198b92df69317e20096605cdaf447f1da272d28157ab Oct 09 13:27:57 crc kubenswrapper[4762]: I1009 13:27:57.127091 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 13:27:57 crc kubenswrapper[4762]: E1009 13:27:57.127442 4762 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 13:27:57.627426663 +0000 UTC m=+153.401217702 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 13:27:57 crc kubenswrapper[4762]: W1009 13:27:57.141077 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbe104342_f3c8_4e61_bd52_81e26e001325.slice/crio-9e2969cee74c777bd14af45fd0edad42515cbad5487c9855ef829ac9f858556e WatchSource:0}: Error finding container 9e2969cee74c777bd14af45fd0edad42515cbad5487c9855ef829ac9f858556e: Status 404 returned error can't find the container with id 9e2969cee74c777bd14af45fd0edad42515cbad5487c9855ef829ac9f858556e Oct 09 13:27:57 crc kubenswrapper[4762]: I1009 13:27:57.209261 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-ff6kr"] Oct 09 13:27:57 crc kubenswrapper[4762]: I1009 13:27:57.225603 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-8x6nc" Oct 09 13:27:57 crc kubenswrapper[4762]: I1009 13:27:57.239840 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2gj5n\" (UID: \"913065b9-29bd-4c9a-8d6e-e319bf91efbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-2gj5n" Oct 09 13:27:57 crc kubenswrapper[4762]: E1009 13:27:57.242202 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 13:27:57.742187373 +0000 UTC m=+153.515978412 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2gj5n" (UID: "913065b9-29bd-4c9a-8d6e-e319bf91efbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 13:27:57 crc kubenswrapper[4762]: I1009 13:27:57.252323 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-8x7j8" Oct 09 13:27:57 crc kubenswrapper[4762]: I1009 13:27:57.341088 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 13:27:57 crc kubenswrapper[4762]: E1009 13:27:57.341511 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 13:27:57.841492247 +0000 UTC m=+153.615283286 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 13:27:57 crc kubenswrapper[4762]: I1009 13:27:57.420603 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-865sv"] Oct 09 13:27:57 crc kubenswrapper[4762]: I1009 13:27:57.444163 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2gj5n\" (UID: \"913065b9-29bd-4c9a-8d6e-e319bf91efbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-2gj5n" Oct 09 13:27:57 crc kubenswrapper[4762]: E1009 13:27:57.444424 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 13:27:57.944412441 +0000 UTC m=+153.718203480 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2gj5n" (UID: "913065b9-29bd-4c9a-8d6e-e319bf91efbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 13:27:57 crc kubenswrapper[4762]: I1009 13:27:57.545495 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 13:27:57 crc kubenswrapper[4762]: E1009 13:27:57.546678 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-10-09 13:27:58.046636486 +0000 UTC m=+153.820427535 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 13:27:57 crc kubenswrapper[4762]: I1009 13:27:57.555431 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-59vf6"] Oct 09 13:27:57 crc kubenswrapper[4762]: I1009 13:27:57.652867 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2gj5n\" (UID: \"913065b9-29bd-4c9a-8d6e-e319bf91efbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-2gj5n" Oct 09 13:27:57 crc kubenswrapper[4762]: E1009 13:27:57.653151 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 13:27:58.153140368 +0000 UTC m=+153.926931407 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2gj5n" (UID: "913065b9-29bd-4c9a-8d6e-e319bf91efbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 13:27:57 crc kubenswrapper[4762]: I1009 13:27:57.669324 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-b7276"] Oct 09 13:27:57 crc kubenswrapper[4762]: I1009 13:27:57.754271 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 13:27:57 crc kubenswrapper[4762]: E1009 13:27:57.754402 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 13:27:58.254378976 +0000 UTC m=+154.028170015 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 13:27:57 crc kubenswrapper[4762]: I1009 13:27:57.755005 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2gj5n\" (UID: \"913065b9-29bd-4c9a-8d6e-e319bf91efbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-2gj5n" Oct 09 13:27:57 crc kubenswrapper[4762]: E1009 13:27:57.755313 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 13:27:58.255304931 +0000 UTC m=+154.029095970 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2gj5n" (UID: "913065b9-29bd-4c9a-8d6e-e319bf91efbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 13:27:57 crc kubenswrapper[4762]: I1009 13:27:57.785753 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vj7lx" event={"ID":"6241b81b-a379-4339-86df-eb7baf5c1ec0","Type":"ContainerStarted","Data":"c389d92d20c9ed5cc6b447d851eb10b5aaeac682ebcfc52e48585e551843c5ea"} Oct 09 13:27:57 crc kubenswrapper[4762]: I1009 13:27:57.785787 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vj7lx" event={"ID":"6241b81b-a379-4339-86df-eb7baf5c1ec0","Type":"ContainerStarted","Data":"041701fec992a28f3a6884ff8ef1c55ce7e48d41508435e8798eb3ef22456493"} Oct 09 13:27:57 crc kubenswrapper[4762]: I1009 13:27:57.788537 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-8zwlj" event={"ID":"9c33b77a-915c-4fbe-b4f0-c7cfd8cb2fd0","Type":"ContainerStarted","Data":"21d2aeeac1f001c659683552efba30999edd270c64323f7a58aa05f8c61c9bbc"} Oct 09 13:27:57 crc kubenswrapper[4762]: I1009 13:27:57.789617 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-8tz5k" event={"ID":"10df5c6e-5eb3-4ba8-b98d-805b82bd6ca5","Type":"ContainerStarted","Data":"52fdaf97a46d4e9950d6198b92df69317e20096605cdaf447f1da272d28157ab"} Oct 09 13:27:57 crc kubenswrapper[4762]: I1009 13:27:57.790674 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-77hfw" event={"ID":"e5a0e5db-1ea7-4af8-9f2d-db55d53645ab","Type":"ContainerStarted","Data":"d9e929efee8bed1a549a9fae0a43324204b23e7219d983f8497e0374630bfe66"} Oct 09 13:27:57 crc kubenswrapper[4762]: I1009 13:27:57.790693 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-dns-operator/dns-operator-744455d44c-77hfw" event={"ID":"e5a0e5db-1ea7-4af8-9f2d-db55d53645ab","Type":"ContainerStarted","Data":"a86b6947c8d329d9aecef2ff1195fcd5012798c1ab6f646aa2277a9bf25f35eb"} Oct 09 13:27:57 crc kubenswrapper[4762]: I1009 13:27:57.795685 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-pmdsg" event={"ID":"bf4cd740-a799-47d8-9ce6-88bce9afa952","Type":"ContainerStarted","Data":"c7fd76b181c5a811017f5e47ff6af7d858f66e12fb45e0503ea1f8cd72164292"} Oct 09 13:27:57 crc kubenswrapper[4762]: I1009 13:27:57.795732 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-pmdsg" event={"ID":"bf4cd740-a799-47d8-9ce6-88bce9afa952","Type":"ContainerStarted","Data":"3a51e0ccd0ef536654d6f4df7c9f0602e17c2484f2752604446fd1e53fe4f8bb"} Oct 09 13:27:57 crc kubenswrapper[4762]: I1009 13:27:57.802741 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-v2xzj" event={"ID":"cb588149-29d3-47af-9359-406e69f9fb33","Type":"ContainerStarted","Data":"d7534c3bebb5310e7a710ab44c6562a00da13479cc0e21fa4411b67f35f3198a"} Oct 09 13:27:57 crc kubenswrapper[4762]: I1009 13:27:57.805811 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-d4bsv" event={"ID":"3380c41e-02cc-4bb7-97b4-b3df719ea736","Type":"ContainerStarted","Data":"bc5253389d5694b5c68c8e03bbabe94b6d32e375e38295f9ffab567ecc6a25cd"} Oct 09 13:27:57 crc kubenswrapper[4762]: I1009 13:27:57.808111 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-n64hz" event={"ID":"462d08cc-03ff-4c48-8002-172bc184d6ea","Type":"ContainerStarted","Data":"f1d1f4a3a606386e9a447aec613d06feea921c6b58b7da43ff5148b2184e4513"} Oct 09 13:27:57 crc kubenswrapper[4762]: I1009 13:27:57.808132 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-n64hz" event={"ID":"462d08cc-03ff-4c48-8002-172bc184d6ea","Type":"ContainerStarted","Data":"75a8354accdf641b07cc669c2fc81f2624acdc2c2824cc3001798ae8114f83de"} Oct 09 13:27:57 crc kubenswrapper[4762]: I1009 13:27:57.841405 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-zxx57" event={"ID":"03a48f34-2c09-4130-bed4-6e33b080909e","Type":"ContainerStarted","Data":"351701f6d97aa82b0ec4f398d65ae02b26493d28a51bd5ca1ab91e735314bb37"} Oct 09 13:27:57 crc kubenswrapper[4762]: I1009 13:27:57.845600 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-lxg6x"] Oct 09 13:27:57 crc kubenswrapper[4762]: I1009 13:27:57.851313 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-mbgsr" event={"ID":"07eb0a4b-0831-4b9c-8883-a624f6a27488","Type":"ContainerStarted","Data":"ba31cca2ac0f98af8ea93aaf882b170a3ee728f6d0f488f51ff042a84da4dacf"} Oct 09 13:27:57 crc kubenswrapper[4762]: I1009 13:27:57.855999 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-xzppk" event={"ID":"4d528172-a0ee-4b06-a09c-08eb92b12cee","Type":"ContainerStarted","Data":"55377ca915d4f41ee2af71ac2ed36188468404babff81359e5d7c9a3ce7c102f"} Oct 09 
13:27:57 crc kubenswrapper[4762]: I1009 13:27:57.856362 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-xzppk" event={"ID":"4d528172-a0ee-4b06-a09c-08eb92b12cee","Type":"ContainerStarted","Data":"1d6762960f5bad04962c16d0c34034b33469a0a3c85391517300d59edb624961"} Oct 09 13:27:57 crc kubenswrapper[4762]: I1009 13:27:57.861171 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 13:27:57 crc kubenswrapper[4762]: E1009 13:27:57.861464 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 13:27:58.361447493 +0000 UTC m=+154.135238532 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 13:27:57 crc kubenswrapper[4762]: I1009 13:27:57.861575 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2gj5n\" (UID: \"913065b9-29bd-4c9a-8d6e-e319bf91efbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-2gj5n" Oct 09 13:27:57 crc kubenswrapper[4762]: I1009 13:27:57.862158 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-865sv" event={"ID":"55edcb39-8b18-4aea-84b8-2e0b332835e1","Type":"ContainerStarted","Data":"f13754c8339ffb985cb90b3e7208b7e0f4a4050ef9baa17c950c8d8b23642021"} Oct 09 13:27:57 crc kubenswrapper[4762]: E1009 13:27:57.863146 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 13:27:58.36313489 +0000 UTC m=+154.136926019 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2gj5n" (UID: "913065b9-29bd-4c9a-8d6e-e319bf91efbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 13:27:57 crc kubenswrapper[4762]: I1009 13:27:57.867045 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-924rd" event={"ID":"05d81e4d-1c18-4533-96c4-888a906b2c25","Type":"ContainerStarted","Data":"11b6c3e231663ec61e2366a30de71b2f9f7fa6983a5ade8b923d0e4c69e39b49"} Oct 09 13:27:57 crc kubenswrapper[4762]: I1009 13:27:57.877325 4762 generic.go:334] "Generic (PLEG): container finished" podID="e7318993-282b-4a86-907d-fc810869cc7c" containerID="769df86ffcb89f61c1b691ac1365ee7aa131b0e4b2f7d825b6c550d7f96f4940" exitCode=0 Oct 09 13:27:57 crc kubenswrapper[4762]: I1009 13:27:57.877446 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8pmjh" event={"ID":"e7318993-282b-4a86-907d-fc810869cc7c","Type":"ContainerDied","Data":"769df86ffcb89f61c1b691ac1365ee7aa131b0e4b2f7d825b6c550d7f96f4940"} Oct 09 13:27:57 crc kubenswrapper[4762]: I1009 13:27:57.877484 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8pmjh" event={"ID":"e7318993-282b-4a86-907d-fc810869cc7c","Type":"ContainerStarted","Data":"d56ef3b8a410a60f3890b28dd7bde646b13c6a54caac56bd6c11a5386d5ce7d5"} Oct 09 13:27:57 crc kubenswrapper[4762]: I1009 13:27:57.884346 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-59vf6" event={"ID":"ae26c8ab-99c5-456f-b029-e56e684bf502","Type":"ContainerStarted","Data":"092caeb25b7d422881bd46f0f3b35d1f9879fe9cbdb4625d4f9e1e29ce87595f"} Oct 09 13:27:57 crc kubenswrapper[4762]: I1009 13:27:57.888815 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-l2j7b" event={"ID":"50b92b8d-158e-4ec4-aaed-a5c83aafeb8b","Type":"ContainerStarted","Data":"0e9e3ce613a179cc2a24159a5f54ab2b8e24fb28762610982786b7ffd363416a"} Oct 09 13:27:57 crc kubenswrapper[4762]: I1009 13:27:57.933412 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29333595-d52ht"] Oct 09 13:27:57 crc kubenswrapper[4762]: I1009 13:27:57.941793 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-r4p46" podStartSLOduration=133.941773435 podStartE2EDuration="2m13.941773435s" podCreationTimestamp="2025-10-09 13:25:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:27:57.940908751 +0000 UTC m=+153.714699790" watchObservedRunningTime="2025-10-09 13:27:57.941773435 +0000 UTC m=+153.715564474" Oct 09 13:27:57 crc kubenswrapper[4762]: I1009 13:27:57.964217 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 
13:27:57 crc kubenswrapper[4762]: E1009 13:27:57.964676 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 13:27:58.464620844 +0000 UTC m=+154.238411893 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 13:27:58 crc kubenswrapper[4762]: I1009 13:27:58.000803 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-hwt8t" event={"ID":"be104342-f3c8-4e61-bd52-81e26e001325","Type":"ContainerStarted","Data":"9e2969cee74c777bd14af45fd0edad42515cbad5487c9855ef829ac9f858556e"} Oct 09 13:27:58 crc kubenswrapper[4762]: I1009 13:27:58.003693 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-ff6kr" event={"ID":"2f4c75ed-fc02-4492-832b-36a064fe8b26","Type":"ContainerStarted","Data":"802e52e2986ab6c7f4aa207d9f7a5eb3c6133095e4ca9584b7136559f531cad9"} Oct 09 13:27:58 crc kubenswrapper[4762]: I1009 13:27:58.024896 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-9dv9p" event={"ID":"e245c27d-066f-4422-9cdb-e5b7525b8717","Type":"ContainerStarted","Data":"917ae7133edd450197873c50155a9b6b848e86d2574c7c14b593675b38dfc7bb"} Oct 09 13:27:58 crc kubenswrapper[4762]: I1009 13:27:58.024941 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-9dv9p" event={"ID":"e245c27d-066f-4422-9cdb-e5b7525b8717","Type":"ContainerStarted","Data":"fd37312f885b5f27f09ab6ffd17c8ecd25df18a2c4fb5eca59cda7bfde4ad8ed"} Oct 09 13:27:58 crc kubenswrapper[4762]: I1009 13:27:58.050789 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-c599r" event={"ID":"76be61d0-03ad-4822-8097-2ef2bde86bf1","Type":"ContainerStarted","Data":"1e546610c995534e73a21ee72121a0886063a901a1aed7ba69d625bece7afdd3"} Oct 09 13:27:58 crc kubenswrapper[4762]: I1009 13:27:58.050902 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-c599r" event={"ID":"76be61d0-03ad-4822-8097-2ef2bde86bf1","Type":"ContainerStarted","Data":"9796a2a42d9e0858b40ea0edab4d795efe5df8e6ee3a53365f8148475b2d7586"} Oct 09 13:27:58 crc kubenswrapper[4762]: I1009 13:27:58.069329 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2gj5n\" (UID: \"913065b9-29bd-4c9a-8d6e-e319bf91efbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-2gj5n" Oct 09 13:27:58 crc kubenswrapper[4762]: I1009 13:27:58.097827 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-qpmbq" event={"ID":"d542877b-87f6-4d96-9256-ff58662d9547","Type":"ContainerStarted","Data":"51c0181f1ce3592169c5dc10181233b8ed2b3c6069f1c066714d5053f28394f4"} Oct 09 13:27:58 crc kubenswrapper[4762]: E1009 13:27:58.098848 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 13:27:58.598692996 +0000 UTC m=+154.372484035 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2gj5n" (UID: "913065b9-29bd-4c9a-8d6e-e319bf91efbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 13:27:58 crc kubenswrapper[4762]: I1009 13:27:58.104299 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-qpmbq" Oct 09 13:27:58 crc kubenswrapper[4762]: I1009 13:27:58.109875 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-btvjf" event={"ID":"3818a2d1-8618-4b53-b326-afb6cbdfaf38","Type":"ContainerStarted","Data":"def5dc670d856687d5251c5cf6c88806c7e6748b4160d90fae29a3209cc0ed2a"} Oct 09 13:27:58 crc kubenswrapper[4762]: I1009 13:27:58.118872 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-8x7j8"] Oct 09 13:27:58 crc kubenswrapper[4762]: I1009 13:27:58.128197 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-n8mp7"] Oct 09 13:27:58 crc kubenswrapper[4762]: I1009 13:27:58.128249 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-r5hfv" event={"ID":"4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8","Type":"ContainerStarted","Data":"fa6528bd8c544742983d1d0126961651d3405d8bb19062002dd0ecdb02b083ad"} Oct 09 13:27:58 crc kubenswrapper[4762]: I1009 13:27:58.128354 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-r5hfv" event={"ID":"4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8","Type":"ContainerStarted","Data":"39f11010c23822796c7f5733c5dc2d2fc8c3220b7ceae4e24e5da2ccd4316b1a"} Oct 09 13:27:58 crc kubenswrapper[4762]: I1009 13:27:58.132493 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-qpmbq" Oct 09 13:27:58 crc kubenswrapper[4762]: I1009 13:27:58.158067 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-6tzrl"] Oct 09 13:27:58 crc kubenswrapper[4762]: I1009 13:27:58.169480 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-xzd8z" event={"ID":"5be8bc93-cb89-4cc7-822d-739708bab8a9","Type":"ContainerStarted","Data":"2a35dda04c99c041fd7d16fd1fbf447fb488e68d292462e81cb60aa72e30f0e2"} Oct 09 13:27:58 crc kubenswrapper[4762]: I1009 13:27:58.169963 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 13:27:58 crc kubenswrapper[4762]: E1009 13:27:58.171578 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 13:27:58.671562582 +0000 UTC m=+154.445353621 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 13:27:58 crc kubenswrapper[4762]: I1009 13:27:58.175205 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-hjtqj" event={"ID":"fe7e464e-6acb-478d-8de2-ea89f6e19734","Type":"ContainerStarted","Data":"390bb8ba085fbcd1134e98bd1d3885576824edd47b45ced03db2442cd652d440"} Oct 09 13:27:58 crc kubenswrapper[4762]: I1009 13:27:58.189948 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-c4rs8" Oct 09 13:27:58 crc kubenswrapper[4762]: I1009 13:27:58.260034 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-zn6jm"] Oct 09 13:27:58 crc kubenswrapper[4762]: I1009 13:27:58.271423 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2gj5n\" (UID: \"913065b9-29bd-4c9a-8d6e-e319bf91efbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-2gj5n" Oct 09 13:27:58 crc kubenswrapper[4762]: E1009 13:27:58.271773 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 13:27:58.7717596 +0000 UTC m=+154.545550639 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2gj5n" (UID: "913065b9-29bd-4c9a-8d6e-e319bf91efbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 13:27:58 crc kubenswrapper[4762]: I1009 13:27:58.287069 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-r4p46" Oct 09 13:27:58 crc kubenswrapper[4762]: I1009 13:27:58.337328 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-c4rs8" podStartSLOduration=133.337313346 podStartE2EDuration="2m13.337313346s" podCreationTimestamp="2025-10-09 13:25:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:27:58.334000915 +0000 UTC m=+154.107791964" watchObservedRunningTime="2025-10-09 13:27:58.337313346 +0000 UTC m=+154.111104385" Oct 09 13:27:58 crc kubenswrapper[4762]: I1009 13:27:58.375315 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 13:27:58 crc kubenswrapper[4762]: E1009 13:27:58.376502 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 13:27:58.876482594 +0000 UTC m=+154.650273633 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 13:27:58 crc kubenswrapper[4762]: I1009 13:27:58.431445 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-9957f"] Oct 09 13:27:58 crc kubenswrapper[4762]: I1009 13:27:58.446563 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-pmkqc"] Oct 09 13:27:58 crc kubenswrapper[4762]: I1009 13:27:58.471335 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ljrw4"] Oct 09 13:27:58 crc kubenswrapper[4762]: I1009 13:27:58.480328 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2gj5n\" (UID: \"913065b9-29bd-4c9a-8d6e-e319bf91efbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-2gj5n" Oct 09 13:27:58 crc kubenswrapper[4762]: E1009 13:27:58.480669 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 13:27:58.980657923 +0000 UTC m=+154.754448962 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2gj5n" (UID: "913065b9-29bd-4c9a-8d6e-e319bf91efbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 13:27:58 crc kubenswrapper[4762]: I1009 13:27:58.516068 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-8x6nc"] Oct 09 13:27:58 crc kubenswrapper[4762]: I1009 13:27:58.518941 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-h972j" podStartSLOduration=133.518901686 podStartE2EDuration="2m13.518901686s" podCreationTimestamp="2025-10-09 13:25:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:27:58.515565244 +0000 UTC m=+154.289356283" watchObservedRunningTime="2025-10-09 13:27:58.518901686 +0000 UTC m=+154.292692725" Oct 09 13:27:58 crc kubenswrapper[4762]: E1009 13:27:58.581349 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 13:27:59.081325815 +0000 UTC m=+154.855116854 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 13:27:58 crc kubenswrapper[4762]: I1009 13:27:58.581554 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 13:27:58 crc kubenswrapper[4762]: I1009 13:27:58.581919 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2gj5n\" (UID: \"913065b9-29bd-4c9a-8d6e-e319bf91efbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-2gj5n" Oct 09 13:27:58 crc kubenswrapper[4762]: E1009 13:27:58.582199 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 13:27:59.082188398 +0000 UTC m=+154.855979437 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2gj5n" (UID: "913065b9-29bd-4c9a-8d6e-e319bf91efbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 13:27:58 crc kubenswrapper[4762]: I1009 13:27:58.587922 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-sx9r4"] Oct 09 13:27:58 crc kubenswrapper[4762]: W1009 13:27:58.589070 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod410067d0_d751_4783_a663_e429c72c805c.slice/crio-eac91d401da0bf9f78d3f17b618abb0e107ea7ace7d4cd81381a3c5b1d8c7d2b WatchSource:0}: Error finding container eac91d401da0bf9f78d3f17b618abb0e107ea7ace7d4cd81381a3c5b1d8c7d2b: Status 404 returned error can't find the container with id eac91d401da0bf9f78d3f17b618abb0e107ea7ace7d4cd81381a3c5b1d8c7d2b Oct 09 13:27:58 crc kubenswrapper[4762]: I1009 13:27:58.617215 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-v2xzj" podStartSLOduration=134.617193552 podStartE2EDuration="2m14.617193552s" podCreationTimestamp="2025-10-09 13:25:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:27:58.613017617 +0000 UTC m=+154.386808676" watchObservedRunningTime="2025-10-09 13:27:58.617193552 +0000 UTC m=+154.390984591" Oct 09 13:27:58 crc kubenswrapper[4762]: I1009 13:27:58.669737 4762 pod_startup_latency_tracker.go:104] "Observed pod 
startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-n64hz" podStartSLOduration=133.669717528 podStartE2EDuration="2m13.669717528s" podCreationTimestamp="2025-10-09 13:25:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:27:58.664502435 +0000 UTC m=+154.438293474" watchObservedRunningTime="2025-10-09 13:27:58.669717528 +0000 UTC m=+154.443508567" Oct 09 13:27:58 crc kubenswrapper[4762]: I1009 13:27:58.683144 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 13:27:58 crc kubenswrapper[4762]: E1009 13:27:58.684205 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 13:27:59.184183306 +0000 UTC m=+154.957974345 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 13:27:58 crc kubenswrapper[4762]: I1009 13:27:58.692154 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-c599r" podStartSLOduration=133.692126985 podStartE2EDuration="2m13.692126985s" podCreationTimestamp="2025-10-09 13:25:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:27:58.69122777 +0000 UTC m=+154.465018819" watchObservedRunningTime="2025-10-09 13:27:58.692126985 +0000 UTC m=+154.465918024" Oct 09 13:27:58 crc kubenswrapper[4762]: I1009 13:27:58.744603 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-r5hfv" podStartSLOduration=134.744585509 podStartE2EDuration="2m14.744585509s" podCreationTimestamp="2025-10-09 13:25:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:27:58.738769439 +0000 UTC m=+154.512560478" watchObservedRunningTime="2025-10-09 13:27:58.744585509 +0000 UTC m=+154.518376548" Oct 09 13:27:58 crc kubenswrapper[4762]: I1009 13:27:58.785463 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2gj5n\" (UID: \"913065b9-29bd-4c9a-8d6e-e319bf91efbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-2gj5n" Oct 09 13:27:58 crc kubenswrapper[4762]: E1009 13:27:58.785802 4762 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 13:27:59.285790864 +0000 UTC m=+155.059581903 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2gj5n" (UID: "913065b9-29bd-4c9a-8d6e-e319bf91efbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 13:27:58 crc kubenswrapper[4762]: I1009 13:27:58.886270 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 13:27:58 crc kubenswrapper[4762]: E1009 13:27:58.886663 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 13:27:59.386628781 +0000 UTC m=+155.160419820 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 13:27:58 crc kubenswrapper[4762]: I1009 13:27:58.886730 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2gj5n\" (UID: \"913065b9-29bd-4c9a-8d6e-e319bf91efbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-2gj5n" Oct 09 13:27:58 crc kubenswrapper[4762]: E1009 13:27:58.887014 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 13:27:59.387007291 +0000 UTC m=+155.160798330 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2gj5n" (UID: "913065b9-29bd-4c9a-8d6e-e319bf91efbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 13:27:58 crc kubenswrapper[4762]: I1009 13:27:58.890453 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-xzppk" podStartSLOduration=133.890442105 podStartE2EDuration="2m13.890442105s" podCreationTimestamp="2025-10-09 13:25:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:27:58.855067801 +0000 UTC m=+154.628858850" watchObservedRunningTime="2025-10-09 13:27:58.890442105 +0000 UTC m=+154.664233144" Oct 09 13:27:58 crc kubenswrapper[4762]: I1009 13:27:58.894298 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-qpmbq" podStartSLOduration=133.894284961 podStartE2EDuration="2m13.894284961s" podCreationTimestamp="2025-10-09 13:25:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:27:58.888530173 +0000 UTC m=+154.662321202" watchObservedRunningTime="2025-10-09 13:27:58.894284961 +0000 UTC m=+154.668075990" Oct 09 13:27:58 crc kubenswrapper[4762]: I1009 13:27:58.936854 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-btvjf" podStartSLOduration=133.936838193 podStartE2EDuration="2m13.936838193s" podCreationTimestamp="2025-10-09 13:25:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:27:58.935861366 +0000 UTC m=+154.709652425" watchObservedRunningTime="2025-10-09 13:27:58.936838193 +0000 UTC m=+154.710629232" Oct 09 13:27:58 crc kubenswrapper[4762]: I1009 13:27:58.988652 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 13:27:58 crc kubenswrapper[4762]: E1009 13:27:58.989025 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 13:27:59.48900993 +0000 UTC m=+155.262800969 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 13:27:59 crc kubenswrapper[4762]: I1009 13:27:59.022521 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-9dv9p" podStartSLOduration=134.022502022 podStartE2EDuration="2m14.022502022s" podCreationTimestamp="2025-10-09 13:25:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:27:58.97849375 +0000 UTC m=+154.752284789" watchObservedRunningTime="2025-10-09 13:27:59.022502022 +0000 UTC m=+154.796293061" Oct 09 13:27:59 crc kubenswrapper[4762]: I1009 13:27:59.083357 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-zxx57" podStartSLOduration=135.083338587 podStartE2EDuration="2m15.083338587s" podCreationTimestamp="2025-10-09 13:25:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:27:59.024820735 +0000 UTC m=+154.798611774" watchObservedRunningTime="2025-10-09 13:27:59.083338587 +0000 UTC m=+154.857129626" Oct 09 13:27:59 crc kubenswrapper[4762]: I1009 13:27:59.090380 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2gj5n\" (UID: \"913065b9-29bd-4c9a-8d6e-e319bf91efbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-2gj5n" Oct 09 13:27:59 crc kubenswrapper[4762]: E1009 13:27:59.090695 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 13:27:59.590680838 +0000 UTC m=+155.364471877 (durationBeforeRetry 500ms). 
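
[Annotation] The pod_startup_latency_tracker entries interleaved with the retries are internally consistent and can be checked directly: podStartSLOduration is watchObservedRunningTime minus podCreationTimestamp, and because firstStartedPulling/lastFinishedPulling are the zero time here (no image pull was observed), it equals podStartE2EDuration. A quick check against the openshift-controller-manager-operator entry above, truncating nanoseconds to microseconds:

    from datetime import datetime, timezone

    created  = datetime(2025, 10, 9, 13, 25, 45, tzinfo=timezone.utc)
    # watchObservedRunningTime 13:27:59.022502022, truncated to microseconds
    observed = datetime(2025, 10, 9, 13, 27, 59, 22502, tzinfo=timezone.utc)

    print((observed - created).total_seconds())   # 134.022502 -> logged "2m14.022502022s"
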
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2gj5n" (UID: "913065b9-29bd-4c9a-8d6e-e319bf91efbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 13:27:59 crc kubenswrapper[4762]: I1009 13:27:59.194229 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 13:27:59 crc kubenswrapper[4762]: E1009 13:27:59.195524 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 13:27:59.695500175 +0000 UTC m=+155.469291214 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 13:27:59 crc kubenswrapper[4762]: I1009 13:27:59.218981 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-hwt8t" event={"ID":"be104342-f3c8-4e61-bd52-81e26e001325","Type":"ContainerStarted","Data":"f40fe9396bece949c50c50cd38a00ad4577a4d56a538a203e5869a4edf0c4049"} Oct 09 13:27:59 crc kubenswrapper[4762]: I1009 13:27:59.221246 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-d4bsv" event={"ID":"3380c41e-02cc-4bb7-97b4-b3df719ea736","Type":"ContainerStarted","Data":"706cd53cfda697ed6bdb08efcde5eafba7090230702acb93e921a0d1ca426835"} Oct 09 13:27:59 crc kubenswrapper[4762]: I1009 13:27:59.228058 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-n8mp7" event={"ID":"9a65c59f-b10d-4c4c-9890-a74abb1395ee","Type":"ContainerStarted","Data":"ac939fde1fc4acb17ba386ea0dc9b3a5af29ff9df8417f03a7357d14357a67d3"} Oct 09 13:27:59 crc kubenswrapper[4762]: I1009 13:27:59.231218 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-lxg6x" event={"ID":"b32c6cfb-bf88-4de9-a939-44e1b67b882d","Type":"ContainerStarted","Data":"48bc178d1084f97442e992a0dfac4f82ae42670740699fb625ebb8077bdff39c"} Oct 09 13:27:59 crc kubenswrapper[4762]: I1009 13:27:59.231274 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-lxg6x" event={"ID":"b32c6cfb-bf88-4de9-a939-44e1b67b882d","Type":"ContainerStarted","Data":"3e4ee3cc4ce98103058adfc0a7144c7398faae7892d10c0f7d370e9765978aaa"} Oct 09 13:27:59 crc kubenswrapper[4762]: I1009 13:27:59.235132 4762 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-zn6jm" event={"ID":"6889890c-fe1b-47c6-9e3f-e5f23d41a1a3","Type":"ContainerStarted","Data":"b8fe72b14aade883ec1f7963c23403ee2fd7501507517d97967407bec3aab92a"} Oct 09 13:27:59 crc kubenswrapper[4762]: I1009 13:27:59.235176 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-zn6jm" event={"ID":"6889890c-fe1b-47c6-9e3f-e5f23d41a1a3","Type":"ContainerStarted","Data":"6db1c8f033c4ac3d4b2e55b84813eed0cd8b85fbdb0f206455d45b9f9c52f6ad"} Oct 09 13:27:59 crc kubenswrapper[4762]: I1009 13:27:59.244500 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ljrw4" event={"ID":"43ee5885-188a-4435-a40f-dfb077d01a84","Type":"ContainerStarted","Data":"31ef6fa471af51a5204dec64fcdc73ab4214c8e760ee058a8a68a8444c42a139"} Oct 09 13:27:59 crc kubenswrapper[4762]: I1009 13:27:59.255224 4762 generic.go:334] "Generic (PLEG): container finished" podID="05d81e4d-1c18-4533-96c4-888a906b2c25" containerID="9ada3f47c7a33cb9ccd897718d7cdc4424c042cddbbc3f316727ded099dcf24c" exitCode=0 Oct 09 13:27:59 crc kubenswrapper[4762]: I1009 13:27:59.255463 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-924rd" event={"ID":"05d81e4d-1c18-4533-96c4-888a906b2c25","Type":"ContainerDied","Data":"9ada3f47c7a33cb9ccd897718d7cdc4424c042cddbbc3f316727ded099dcf24c"} Oct 09 13:27:59 crc kubenswrapper[4762]: I1009 13:27:59.281967 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vj7lx" event={"ID":"6241b81b-a379-4339-86df-eb7baf5c1ec0","Type":"ContainerStarted","Data":"5f894ec0a27b42e8baa7ce20c8ef161299b0b67244fa4028e33b5117392308d5"} Oct 09 13:27:59 crc kubenswrapper[4762]: I1009 13:27:59.282958 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vj7lx" Oct 09 13:27:59 crc kubenswrapper[4762]: I1009 13:27:59.295372 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2gj5n\" (UID: \"913065b9-29bd-4c9a-8d6e-e319bf91efbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-2gj5n" Oct 09 13:27:59 crc kubenswrapper[4762]: E1009 13:27:59.296169 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 13:27:59.796154626 +0000 UTC m=+155.569945665 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2gj5n" (UID: "913065b9-29bd-4c9a-8d6e-e319bf91efbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 13:27:59 crc kubenswrapper[4762]: I1009 13:27:59.306011 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-8tz5k" event={"ID":"10df5c6e-5eb3-4ba8-b98d-805b82bd6ca5","Type":"ContainerStarted","Data":"9bb0fe77d93923b1cc2f8983296d5175b0a385d4776d4653c1136d0cf134b338"} Oct 09 13:27:59 crc kubenswrapper[4762]: I1009 13:27:59.358854 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-77hfw" event={"ID":"e5a0e5db-1ea7-4af8-9f2d-db55d53645ab","Type":"ContainerStarted","Data":"fbcedf166914cd98a487edaad51812c9de1e9d657d348458bb9800f6feee845d"} Oct 09 13:27:59 crc kubenswrapper[4762]: I1009 13:27:59.367831 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-mbgsr" event={"ID":"07eb0a4b-0831-4b9c-8883-a624f6a27488","Type":"ContainerStarted","Data":"a910cb1f594ecfb113dd821938654fdbd446acdc538b827f5f86919a9073bdba"} Oct 09 13:27:59 crc kubenswrapper[4762]: I1009 13:27:59.402802 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 13:27:59 crc kubenswrapper[4762]: E1009 13:27:59.407588 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 13:27:59.907566804 +0000 UTC m=+155.681357843 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 13:27:59 crc kubenswrapper[4762]: I1009 13:27:59.406044 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-l2j7b" event={"ID":"50b92b8d-158e-4ec4-aaed-a5c83aafeb8b","Type":"ContainerStarted","Data":"4e33a471d44f5bab0efe51351133ae34b4560ade3647bc77f0d225a63bb3ca77"} Oct 09 13:27:59 crc kubenswrapper[4762]: I1009 13:27:59.408220 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-l2j7b" Oct 09 13:27:59 crc kubenswrapper[4762]: I1009 13:27:59.416159 4762 patch_prober.go:28] interesting pod/downloads-7954f5f757-l2j7b container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" start-of-body= Oct 09 13:27:59 crc kubenswrapper[4762]: I1009 13:27:59.416314 4762 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-l2j7b" podUID="50b92b8d-158e-4ec4-aaed-a5c83aafeb8b" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" Oct 09 13:27:59 crc kubenswrapper[4762]: I1009 13:27:59.423509 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2gj5n\" (UID: \"913065b9-29bd-4c9a-8d6e-e319bf91efbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-2gj5n" Oct 09 13:27:59 crc kubenswrapper[4762]: E1009 13:27:59.427128 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 13:27:59.927109182 +0000 UTC m=+155.700900221 (durationBeforeRetry 500ms). 
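
[Annotation] The "connection refused" readiness failures logged immediately after a ContainerStarted event (downloads-7954f5f757-l2j7b here; controller-manager and marketplace-operator below) simply mean the probe fired before the server bound its port. The kubelet's HTTP probe is equivalent to the sketch below: the endpoint is taken from the log line, the 1-second timeout is the kubelet default (an assumption for this pod), and any status in 200-399 counts as success:

    import urllib.request

    def probe(url: str, timeout: float = 1.0) -> str:
        try:
            with urllib.request.urlopen(url, timeout=timeout) as resp:
                return "success" if 200 <= resp.status < 400 else f"failure: HTTP {resp.status}"
        except OSError as exc:   # HTTPError/URLError are OSError subclasses; so is ECONNREFUSED
            return f"failure: {exc}"

    print(probe("http://10.217.0.10:8080/"))
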
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2gj5n" (UID: "913065b9-29bd-4c9a-8d6e-e319bf91efbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 13:27:59 crc kubenswrapper[4762]: I1009 13:27:59.456838 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-hwt8t" podStartSLOduration=134.45681331 podStartE2EDuration="2m14.45681331s" podCreationTimestamp="2025-10-09 13:25:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:27:59.424514881 +0000 UTC m=+155.198305920" watchObservedRunningTime="2025-10-09 13:27:59.45681331 +0000 UTC m=+155.230604349" Oct 09 13:27:59 crc kubenswrapper[4762]: I1009 13:27:59.464403 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-sx9r4" event={"ID":"a8ffac8c-bde6-4c86-9671-94764560a87a","Type":"ContainerStarted","Data":"17fa85b96800d037ab396662fa4305a47fe9df26d313b8653f756ca56ccb7fe5"} Oct 09 13:27:59 crc kubenswrapper[4762]: I1009 13:27:59.471653 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-8tz5k" podStartSLOduration=134.471614977 podStartE2EDuration="2m14.471614977s" podCreationTimestamp="2025-10-09 13:25:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:27:59.458447175 +0000 UTC m=+155.232238214" watchObservedRunningTime="2025-10-09 13:27:59.471614977 +0000 UTC m=+155.245406016" Oct 09 13:27:59 crc kubenswrapper[4762]: I1009 13:27:59.479987 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-pmkqc" event={"ID":"410067d0-d751-4783-a663-e429c72c805c","Type":"ContainerStarted","Data":"eac91d401da0bf9f78d3f17b618abb0e107ea7ace7d4cd81381a3c5b1d8c7d2b"} Oct 09 13:27:59 crc kubenswrapper[4762]: I1009 13:27:59.481573 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-865sv" event={"ID":"55edcb39-8b18-4aea-84b8-2e0b332835e1","Type":"ContainerStarted","Data":"f2bf798b4dd86dcca00fecf07ca7b23c010a8defffc51ca526b0a3e72bd84c39"} Oct 09 13:27:59 crc kubenswrapper[4762]: I1009 13:27:59.490154 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-xzd8z" event={"ID":"5be8bc93-cb89-4cc7-822d-739708bab8a9","Type":"ContainerStarted","Data":"7ca26ecab799b3f8f8aeb980f91d3f8d8e99ab7a7d1b77db45bd21f7f598c05c"} Oct 09 13:27:59 crc kubenswrapper[4762]: I1009 13:27:59.490819 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-xzd8z" Oct 09 13:27:59 crc kubenswrapper[4762]: I1009 13:27:59.496434 4762 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-xzd8z container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.8:8443/healthz\": dial tcp 10.217.0.8:8443: connect: connection refused" start-of-body= Oct 09 13:27:59 crc 
kubenswrapper[4762]: I1009 13:27:59.496477 4762 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-xzd8z" podUID="5be8bc93-cb89-4cc7-822d-739708bab8a9" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.8:8443/healthz\": dial tcp 10.217.0.8:8443: connect: connection refused" Oct 09 13:27:59 crc kubenswrapper[4762]: I1009 13:27:59.506765 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-hjtqj" event={"ID":"fe7e464e-6acb-478d-8de2-ea89f6e19734","Type":"ContainerStarted","Data":"11357724e32cdaa09efe3e15b8d6c863a0087b0dbbdf2e619b3a22f58b8a20d4"} Oct 09 13:27:59 crc kubenswrapper[4762]: I1009 13:27:59.521468 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-8x6nc" event={"ID":"2f64ae43-a8e0-4e6c-919d-842572e8b107","Type":"ContainerStarted","Data":"f4107cb92fd836e3151ad315ec3158fc8fd1e9e9b0e15631e74ba8ff4371f549"} Oct 09 13:27:59 crc kubenswrapper[4762]: I1009 13:27:59.525193 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-6tzrl" event={"ID":"84933f66-f8be-458d-87b4-b49a4dbe0dc3","Type":"ContainerStarted","Data":"20c627079b3341b300ab3d56761c569e6dfdd2cb21d4477cf33845289d1df7cc"} Oct 09 13:27:59 crc kubenswrapper[4762]: I1009 13:27:59.526350 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 13:27:59 crc kubenswrapper[4762]: I1009 13:27:59.526413 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-6ncgf" event={"ID":"e54d2423-97ff-4c1e-9f04-786495eace4a","Type":"ContainerStarted","Data":"8485f2f320ffc9cae3e1c3c773ccf02819000b7a88fbbfec4c1e84683e9cc41d"} Oct 09 13:27:59 crc kubenswrapper[4762]: I1009 13:27:59.526451 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-6ncgf" event={"ID":"e54d2423-97ff-4c1e-9f04-786495eace4a","Type":"ContainerStarted","Data":"217d3af0417e14be0f0d7171ba1ea445be1e2b749d6b097f85e607424fff6544"} Oct 09 13:27:59 crc kubenswrapper[4762]: E1009 13:27:59.528092 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 13:28:00.028072392 +0000 UTC m=+155.801863451 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 13:27:59 crc kubenswrapper[4762]: I1009 13:27:59.532955 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-9957f" event={"ID":"fe403e6f-4f08-4263-962d-377d0989c0d7","Type":"ContainerStarted","Data":"4b7c2a660376a338893ca71fbf42df00ebbefb388ce6f4565d1e71bae9aa2502"} Oct 09 13:27:59 crc kubenswrapper[4762]: I1009 13:27:59.533726 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-9957f" Oct 09 13:27:59 crc kubenswrapper[4762]: I1009 13:27:59.533244 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vj7lx" podStartSLOduration=134.533226053 podStartE2EDuration="2m14.533226053s" podCreationTimestamp="2025-10-09 13:25:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:27:59.532318059 +0000 UTC m=+155.306109098" watchObservedRunningTime="2025-10-09 13:27:59.533226053 +0000 UTC m=+155.307017102" Oct 09 13:27:59 crc kubenswrapper[4762]: I1009 13:27:59.551471 4762 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-9957f container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.38:8080/healthz\": dial tcp 10.217.0.38:8080: connect: connection refused" start-of-body= Oct 09 13:27:59 crc kubenswrapper[4762]: I1009 13:27:59.551516 4762 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-9957f" podUID="fe403e6f-4f08-4263-962d-377d0989c0d7" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.38:8080/healthz\": dial tcp 10.217.0.38:8080: connect: connection refused" Oct 09 13:27:59 crc kubenswrapper[4762]: I1009 13:27:59.572932 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-8x7j8" event={"ID":"d78ceb4a-7433-44e2-a874-37b69473fc54","Type":"ContainerStarted","Data":"b68b428280f15278dab0e390e965d5b45c3a18868128af523c6ee6f63893e1e3"} Oct 09 13:27:59 crc kubenswrapper[4762]: I1009 13:27:59.577934 4762 generic.go:334] "Generic (PLEG): container finished" podID="9c33b77a-915c-4fbe-b4f0-c7cfd8cb2fd0" containerID="ab5e93b7ba5344d3fa105ffff2452cb7afb484e1f8733944ffa34ba754e9b661" exitCode=0 Oct 09 13:27:59 crc kubenswrapper[4762]: I1009 13:27:59.578124 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-8zwlj" event={"ID":"9c33b77a-915c-4fbe-b4f0-c7cfd8cb2fd0","Type":"ContainerDied","Data":"ab5e93b7ba5344d3fa105ffff2452cb7afb484e1f8733944ffa34ba754e9b661"} Oct 09 13:27:59 crc kubenswrapper[4762]: I1009 13:27:59.586998 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-59vf6" 
event={"ID":"ae26c8ab-99c5-456f-b029-e56e684bf502","Type":"ContainerStarted","Data":"eb1fed50bc283b783cac5a6434db126948ab464bb1bbacff287ba1fe7e353173"} Oct 09 13:27:59 crc kubenswrapper[4762]: I1009 13:27:59.587813 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-59vf6" Oct 09 13:27:59 crc kubenswrapper[4762]: I1009 13:27:59.588714 4762 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-59vf6 container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.19:8443/healthz\": dial tcp 10.217.0.19:8443: connect: connection refused" start-of-body= Oct 09 13:27:59 crc kubenswrapper[4762]: I1009 13:27:59.588753 4762 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-59vf6" podUID="ae26c8ab-99c5-456f-b029-e56e684bf502" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.19:8443/healthz\": dial tcp 10.217.0.19:8443: connect: connection refused" Oct 09 13:27:59 crc kubenswrapper[4762]: I1009 13:27:59.603967 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29333595-d52ht" event={"ID":"3b828691-c7c6-4899-b524-d867e7f151a4","Type":"ContainerStarted","Data":"55560df7338145e9510a0524ec0899fa8addbb1ac720d828e8d646a0e0178c4f"} Oct 09 13:27:59 crc kubenswrapper[4762]: I1009 13:27:59.604023 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29333595-d52ht" event={"ID":"3b828691-c7c6-4899-b524-d867e7f151a4","Type":"ContainerStarted","Data":"6babeb0114ff6869fc506ae7054446face7219df1f0ac544183b4555f624c3ea"} Oct 09 13:27:59 crc kubenswrapper[4762]: I1009 13:27:59.604207 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-lxg6x" podStartSLOduration=134.604185387 podStartE2EDuration="2m14.604185387s" podCreationTimestamp="2025-10-09 13:25:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:27:59.592931428 +0000 UTC m=+155.366722487" watchObservedRunningTime="2025-10-09 13:27:59.604185387 +0000 UTC m=+155.377976426" Oct 09 13:27:59 crc kubenswrapper[4762]: I1009 13:27:59.629555 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2gj5n\" (UID: \"913065b9-29bd-4c9a-8d6e-e319bf91efbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-2gj5n" Oct 09 13:27:59 crc kubenswrapper[4762]: I1009 13:27:59.630864 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-mbgsr" podStartSLOduration=134.630842281 podStartE2EDuration="2m14.630842281s" podCreationTimestamp="2025-10-09 13:25:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:27:59.628831137 +0000 UTC m=+155.402622166" watchObservedRunningTime="2025-10-09 13:27:59.630842281 +0000 UTC m=+155.404633320" Oct 09 13:27:59 crc 
kubenswrapper[4762]: E1009 13:27:59.633087 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 13:28:00.133076353 +0000 UTC m=+155.906867392 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2gj5n" (UID: "913065b9-29bd-4c9a-8d6e-e319bf91efbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 13:27:59 crc kubenswrapper[4762]: I1009 13:27:59.660027 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-pmdsg" event={"ID":"bf4cd740-a799-47d8-9ce6-88bce9afa952","Type":"ContainerStarted","Data":"cd99949a6fcb6ae0796bf7087196603dd8beb111a563ec3c44bc8118ee6c53ab"} Oct 09 13:27:59 crc kubenswrapper[4762]: I1009 13:27:59.658470 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-77hfw" podStartSLOduration=134.658446232 podStartE2EDuration="2m14.658446232s" podCreationTimestamp="2025-10-09 13:25:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:27:59.657933337 +0000 UTC m=+155.431724396" watchObservedRunningTime="2025-10-09 13:27:59.658446232 +0000 UTC m=+155.432237271" Oct 09 13:27:59 crc kubenswrapper[4762]: I1009 13:27:59.732720 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 13:27:59 crc kubenswrapper[4762]: E1009 13:27:59.733887 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 13:28:00.233867619 +0000 UTC m=+156.007658658 (durationBeforeRetry 500ms). 
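
[Annotation] Since csi-hostpathplugin-pmkqc now has a started container (its ContainerStarted event appears above), the driver should register shortly and the 500ms-backoff loop for pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 should end. To measure how long that window lasted, one can tally the failures per volume from a saved copy of this log; a rough sketch, with a hypothetical filename:

    import re
    from collections import Counter

    pat = re.compile(r'(MountVolume\.MountDevice|UnmountVolume\.TearDown)'
                     r' failed for volume "(pvc-[0-9a-f-]+)"')

    counts = Counter()
    with open("kubelet.log", encoding="utf-8", errors="replace") as fh:  # hypothetical path
        for line in fh:
            for op, vol in pat.findall(line):
                counts[(op, vol)] += 1

    for (op, vol), n in counts.most_common():
        print(f"{op:28} {vol}: {n} failures")
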
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 13:27:59 crc kubenswrapper[4762]: I1009 13:27:59.735105 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-ff6kr" event={"ID":"2f4c75ed-fc02-4492-832b-36a064fe8b26","Type":"ContainerStarted","Data":"51f67a865ec45e5a9d13b7f17c2c4c785cfdc6886042201995ef5b990250c686"} Oct 09 13:27:59 crc kubenswrapper[4762]: I1009 13:27:59.738673 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29333595-d52ht" podStartSLOduration=135.73865705 podStartE2EDuration="2m15.73865705s" podCreationTimestamp="2025-10-09 13:25:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:27:59.73646971 +0000 UTC m=+155.510260749" watchObservedRunningTime="2025-10-09 13:27:59.73865705 +0000 UTC m=+155.512448089" Oct 09 13:27:59 crc kubenswrapper[4762]: I1009 13:27:59.742711 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-d4bsv" podStartSLOduration=134.742699241 podStartE2EDuration="2m14.742699241s" podCreationTimestamp="2025-10-09 13:25:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:27:59.707477702 +0000 UTC m=+155.481268741" watchObservedRunningTime="2025-10-09 13:27:59.742699241 +0000 UTC m=+155.516490280" Oct 09 13:27:59 crc kubenswrapper[4762]: I1009 13:27:59.754022 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-b7276" event={"ID":"592376f8-472a-4014-bd15-b08b04ac15e8","Type":"ContainerStarted","Data":"ecb55714526ffff2414627c7045f3796444aa52eb458c485a20339b0405fee2d"} Oct 09 13:27:59 crc kubenswrapper[4762]: I1009 13:27:59.754067 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-b7276" event={"ID":"592376f8-472a-4014-bd15-b08b04ac15e8","Type":"ContainerStarted","Data":"fef622acaa46d6282c69cd7d2fc93f1db9155afc1a14a8d2eb6f6e669b9d102d"} Oct 09 13:27:59 crc kubenswrapper[4762]: I1009 13:27:59.780581 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-865sv" podStartSLOduration=134.780563454 podStartE2EDuration="2m14.780563454s" podCreationTimestamp="2025-10-09 13:25:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:27:59.777817538 +0000 UTC m=+155.551608577" watchObservedRunningTime="2025-10-09 13:27:59.780563454 +0000 UTC m=+155.554354493" Oct 09 13:27:59 crc kubenswrapper[4762]: I1009 13:27:59.837825 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2gj5n\" (UID: \"913065b9-29bd-4c9a-8d6e-e319bf91efbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-2gj5n" Oct 09 13:27:59 crc kubenswrapper[4762]: E1009 13:27:59.843850 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 13:28:00.343835396 +0000 UTC m=+156.117626435 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2gj5n" (UID: "913065b9-29bd-4c9a-8d6e-e319bf91efbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 13:27:59 crc kubenswrapper[4762]: I1009 13:27:59.896483 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-hjtqj" podStartSLOduration=134.896465055 podStartE2EDuration="2m14.896465055s" podCreationTimestamp="2025-10-09 13:25:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:27:59.895835687 +0000 UTC m=+155.669626726" watchObservedRunningTime="2025-10-09 13:27:59.896465055 +0000 UTC m=+155.670256094" Oct 09 13:27:59 crc kubenswrapper[4762]: I1009 13:27:59.925925 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-8tz5k" Oct 09 13:27:59 crc kubenswrapper[4762]: I1009 13:27:59.938566 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-6ncgf" podStartSLOduration=6.938548424 podStartE2EDuration="6.938548424s" podCreationTimestamp="2025-10-09 13:27:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:27:59.934831711 +0000 UTC m=+155.708622750" watchObservedRunningTime="2025-10-09 13:27:59.938548424 +0000 UTC m=+155.712339463" Oct 09 13:27:59 crc kubenswrapper[4762]: I1009 13:27:59.940599 4762 patch_prober.go:28] interesting pod/router-default-5444994796-8tz5k container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 09 13:27:59 crc kubenswrapper[4762]: [-]has-synced failed: reason withheld Oct 09 13:27:59 crc kubenswrapper[4762]: [+]process-running ok Oct 09 13:27:59 crc kubenswrapper[4762]: healthz check failed Oct 09 13:27:59 crc kubenswrapper[4762]: I1009 13:27:59.940665 4762 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-8tz5k" podUID="10df5c6e-5eb3-4ba8-b98d-805b82bd6ca5" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 09 13:27:59 crc kubenswrapper[4762]: I1009 13:27:59.941848 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 13:27:59 crc kubenswrapper[4762]: E1009 13:27:59.942149 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 13:28:00.442135042 +0000 UTC m=+156.215926081 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 13:27:59 crc kubenswrapper[4762]: I1009 13:27:59.979443 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-59vf6" podStartSLOduration=134.979427789 podStartE2EDuration="2m14.979427789s" podCreationTimestamp="2025-10-09 13:25:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:27:59.97798371 +0000 UTC m=+155.751774749" watchObservedRunningTime="2025-10-09 13:27:59.979427789 +0000 UTC m=+155.753218818" Oct 09 13:28:00 crc kubenswrapper[4762]: I1009 13:28:00.016452 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-pmdsg" podStartSLOduration=135.016434669 podStartE2EDuration="2m15.016434669s" podCreationTimestamp="2025-10-09 13:25:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:28:00.014923606 +0000 UTC m=+155.788714645" watchObservedRunningTime="2025-10-09 13:28:00.016434669 +0000 UTC m=+155.790225708" Oct 09 13:28:00 crc kubenswrapper[4762]: I1009 13:28:00.049921 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2gj5n\" (UID: \"913065b9-29bd-4c9a-8d6e-e319bf91efbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-2gj5n" Oct 09 13:28:00 crc kubenswrapper[4762]: E1009 13:28:00.050340 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 13:28:00.550326462 +0000 UTC m=+156.324117501 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2gj5n" (UID: "913065b9-29bd-4c9a-8d6e-e319bf91efbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 13:28:00 crc kubenswrapper[4762]: I1009 13:28:00.074114 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-9957f" podStartSLOduration=135.074095636 podStartE2EDuration="2m15.074095636s" podCreationTimestamp="2025-10-09 13:25:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:28:00.071420543 +0000 UTC m=+155.845211582" watchObservedRunningTime="2025-10-09 13:28:00.074095636 +0000 UTC m=+155.847886675" Oct 09 13:28:00 crc kubenswrapper[4762]: I1009 13:28:00.125112 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-xzd8z" podStartSLOduration=135.12508853 podStartE2EDuration="2m15.12508853s" podCreationTimestamp="2025-10-09 13:25:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:28:00.09785224 +0000 UTC m=+155.871643289" watchObservedRunningTime="2025-10-09 13:28:00.12508853 +0000 UTC m=+155.898879589" Oct 09 13:28:00 crc kubenswrapper[4762]: I1009 13:28:00.136050 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-l2j7b" podStartSLOduration=136.136028671 podStartE2EDuration="2m16.136028671s" podCreationTimestamp="2025-10-09 13:25:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:28:00.134028967 +0000 UTC m=+155.907820006" watchObservedRunningTime="2025-10-09 13:28:00.136028671 +0000 UTC m=+155.909819880" Oct 09 13:28:00 crc kubenswrapper[4762]: I1009 13:28:00.152058 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 13:28:00 crc kubenswrapper[4762]: E1009 13:28:00.152326 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 13:28:00.65231074 +0000 UTC m=+156.426101779 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 13:28:00 crc kubenswrapper[4762]: I1009 13:28:00.184471 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-b7276" podStartSLOduration=135.184456645 podStartE2EDuration="2m15.184456645s" podCreationTimestamp="2025-10-09 13:25:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:28:00.156986188 +0000 UTC m=+155.930777227" watchObservedRunningTime="2025-10-09 13:28:00.184456645 +0000 UTC m=+155.958247684" Oct 09 13:28:00 crc kubenswrapper[4762]: I1009 13:28:00.254784 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2gj5n\" (UID: \"913065b9-29bd-4c9a-8d6e-e319bf91efbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-2gj5n" Oct 09 13:28:00 crc kubenswrapper[4762]: E1009 13:28:00.255157 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 13:28:00.755141201 +0000 UTC m=+156.528932240 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2gj5n" (UID: "913065b9-29bd-4c9a-8d6e-e319bf91efbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 13:28:00 crc kubenswrapper[4762]: I1009 13:28:00.355695 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 13:28:00 crc kubenswrapper[4762]: E1009 13:28:00.355851 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 13:28:00.855820203 +0000 UTC m=+156.629611252 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 13:28:00 crc kubenswrapper[4762]: I1009 13:28:00.355966 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2gj5n\" (UID: \"913065b9-29bd-4c9a-8d6e-e319bf91efbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-2gj5n" Oct 09 13:28:00 crc kubenswrapper[4762]: E1009 13:28:00.356263 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 13:28:00.856254815 +0000 UTC m=+156.630045854 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2gj5n" (UID: "913065b9-29bd-4c9a-8d6e-e319bf91efbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 13:28:00 crc kubenswrapper[4762]: I1009 13:28:00.457188 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 13:28:00 crc kubenswrapper[4762]: E1009 13:28:00.457568 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 13:28:00.957540384 +0000 UTC m=+156.731331423 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 13:28:00 crc kubenswrapper[4762]: I1009 13:28:00.558745 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2gj5n\" (UID: \"913065b9-29bd-4c9a-8d6e-e319bf91efbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-2gj5n" Oct 09 13:28:00 crc kubenswrapper[4762]: E1009 13:28:00.559148 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 13:28:01.059129771 +0000 UTC m=+156.832920810 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2gj5n" (UID: "913065b9-29bd-4c9a-8d6e-e319bf91efbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 13:28:00 crc kubenswrapper[4762]: I1009 13:28:00.660609 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 13:28:00 crc kubenswrapper[4762]: E1009 13:28:00.660841 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 13:28:01.160810531 +0000 UTC m=+156.934601580 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 13:28:00 crc kubenswrapper[4762]: I1009 13:28:00.660928 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2gj5n\" (UID: \"913065b9-29bd-4c9a-8d6e-e319bf91efbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-2gj5n" Oct 09 13:28:00 crc kubenswrapper[4762]: E1009 13:28:00.661337 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 13:28:01.161327194 +0000 UTC m=+156.935118233 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2gj5n" (UID: "913065b9-29bd-4c9a-8d6e-e319bf91efbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 13:28:00 crc kubenswrapper[4762]: I1009 13:28:00.761593 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 13:28:00 crc kubenswrapper[4762]: E1009 13:28:00.761752 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 13:28:01.261730359 +0000 UTC m=+157.035521398 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 13:28:00 crc kubenswrapper[4762]: I1009 13:28:00.761946 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2gj5n\" (UID: \"913065b9-29bd-4c9a-8d6e-e319bf91efbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-2gj5n" Oct 09 13:28:00 crc kubenswrapper[4762]: E1009 13:28:00.762296 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 13:28:01.262288574 +0000 UTC m=+157.036079613 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2gj5n" (UID: "913065b9-29bd-4c9a-8d6e-e319bf91efbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 13:28:00 crc kubenswrapper[4762]: I1009 13:28:00.762894 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-6tzrl" event={"ID":"84933f66-f8be-458d-87b4-b49a4dbe0dc3","Type":"ContainerStarted","Data":"c0cabaf086eb2455428efc82c0128eb80ea4b61bc464026d7ddeb2bc5c9b8460"} Oct 09 13:28:00 crc kubenswrapper[4762]: I1009 13:28:00.762939 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-6tzrl" event={"ID":"84933f66-f8be-458d-87b4-b49a4dbe0dc3","Type":"ContainerStarted","Data":"6fb709d53cf0a4d6919f9c0b4f9b7ec0cc98fec6178086f9932155d6cd7ab76e"} Oct 09 13:28:00 crc kubenswrapper[4762]: I1009 13:28:00.763070 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-6tzrl" Oct 09 13:28:00 crc kubenswrapper[4762]: I1009 13:28:00.765956 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-924rd" event={"ID":"05d81e4d-1c18-4533-96c4-888a906b2c25","Type":"ContainerStarted","Data":"c9620795aaceb34516e4fdb169da646f2e7ce3e3a7fb8de032e7f58d6faf637b"} Oct 09 13:28:00 crc kubenswrapper[4762]: I1009 13:28:00.766002 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-924rd" event={"ID":"05d81e4d-1c18-4533-96c4-888a906b2c25","Type":"ContainerStarted","Data":"844b645a7b23b22b156d857f697d0a9e1be099fc4e9366d76be566cc23fc8839"} Oct 09 13:28:00 crc kubenswrapper[4762]: I1009 13:28:00.768852 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-8x6nc" event={"ID":"2f64ae43-a8e0-4e6c-919d-842572e8b107","Type":"ContainerStarted","Data":"36715cdf9487ab5e2bf963d0a4919e9efd5e6d39bf7771cc91b07bf1a96b1416"} Oct 09 13:28:00 crc kubenswrapper[4762]: I1009 13:28:00.768904 4762 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-8x6nc" event={"ID":"2f64ae43-a8e0-4e6c-919d-842572e8b107","Type":"ContainerStarted","Data":"ee14c873edebce0d78461007d6632e8baf01f595b7a061491a710b8e3d34eb8e"} Oct 09 13:28:00 crc kubenswrapper[4762]: I1009 13:28:00.771994 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-9957f" event={"ID":"fe403e6f-4f08-4263-962d-377d0989c0d7","Type":"ContainerStarted","Data":"c2ed51d56b35cc07b84a24f0a43b49a30184c3d9711710a5d036333a944683f4"} Oct 09 13:28:00 crc kubenswrapper[4762]: I1009 13:28:00.772461 4762 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-9957f container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.38:8080/healthz\": dial tcp 10.217.0.38:8080: connect: connection refused" start-of-body= Oct 09 13:28:00 crc kubenswrapper[4762]: I1009 13:28:00.772504 4762 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-9957f" podUID="fe403e6f-4f08-4263-962d-377d0989c0d7" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.38:8080/healthz\": dial tcp 10.217.0.38:8080: connect: connection refused" Oct 09 13:28:00 crc kubenswrapper[4762]: I1009 13:28:00.777668 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-zn6jm" event={"ID":"6889890c-fe1b-47c6-9e3f-e5f23d41a1a3","Type":"ContainerStarted","Data":"fe3db22ee17ece8f80f8d7d61c8b2f7fcd3f1963326a25bd90d14e8cc3513862"} Oct 09 13:28:00 crc kubenswrapper[4762]: I1009 13:28:00.779531 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ljrw4" event={"ID":"43ee5885-188a-4435-a40f-dfb077d01a84","Type":"ContainerStarted","Data":"03de7dd4f73faa61f98993243065fa2f9af78248092b84dcf9c86bbad77a47d7"} Oct 09 13:28:00 crc kubenswrapper[4762]: I1009 13:28:00.779781 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ljrw4" Oct 09 13:28:00 crc kubenswrapper[4762]: I1009 13:28:00.782223 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8pmjh" event={"ID":"e7318993-282b-4a86-907d-fc810869cc7c","Type":"ContainerStarted","Data":"36c582d90569f53daf1fe702e5eaf5c4168f5ffa4421430cb6e65dcaa7393a04"} Oct 09 13:28:00 crc kubenswrapper[4762]: I1009 13:28:00.784773 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-n8mp7" event={"ID":"9a65c59f-b10d-4c4c-9890-a74abb1395ee","Type":"ContainerStarted","Data":"a383a7645969991571a2392b19adb6ae6f1d2e799d04e5def32b9810f4e9843f"} Oct 09 13:28:00 crc kubenswrapper[4762]: I1009 13:28:00.784843 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-n8mp7" event={"ID":"9a65c59f-b10d-4c4c-9890-a74abb1395ee","Type":"ContainerStarted","Data":"c5000c1e5ddf6e2b36cbb63134d2637c3f5930be481cafff3720305f26211a93"} Oct 09 13:28:00 crc kubenswrapper[4762]: I1009 13:28:00.787442 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-b7276" 
event={"ID":"592376f8-472a-4014-bd15-b08b04ac15e8","Type":"ContainerStarted","Data":"23d50eaa510b4af89737b200cc3d69e3b8a7df9948c9cb3cf201d9e3a3a76d59"} Oct 09 13:28:00 crc kubenswrapper[4762]: I1009 13:28:00.789841 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-8zwlj" event={"ID":"9c33b77a-915c-4fbe-b4f0-c7cfd8cb2fd0","Type":"ContainerStarted","Data":"5c3ad76511ee6222943bf545b9bc52fccfdb5183a8ee85e660a6d07c7fb3c9db"} Oct 09 13:28:00 crc kubenswrapper[4762]: I1009 13:28:00.789977 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-8zwlj" Oct 09 13:28:00 crc kubenswrapper[4762]: I1009 13:28:00.791398 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-8x7j8" event={"ID":"d78ceb4a-7433-44e2-a874-37b69473fc54","Type":"ContainerStarted","Data":"eeca344f421b2e2818938ee4c5a16c98d66347ef49a4540e6d3c884d88e49cf5"} Oct 09 13:28:00 crc kubenswrapper[4762]: I1009 13:28:00.791550 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-8x7j8" Oct 09 13:28:00 crc kubenswrapper[4762]: I1009 13:28:00.792505 4762 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-8x7j8 container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.36:6443/healthz\": dial tcp 10.217.0.36:6443: connect: connection refused" start-of-body= Oct 09 13:28:00 crc kubenswrapper[4762]: I1009 13:28:00.792560 4762 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-8x7j8" podUID="d78ceb4a-7433-44e2-a874-37b69473fc54" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.36:6443/healthz\": dial tcp 10.217.0.36:6443: connect: connection refused" Oct 09 13:28:00 crc kubenswrapper[4762]: I1009 13:28:00.794192 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-ff6kr" event={"ID":"2f4c75ed-fc02-4492-832b-36a064fe8b26","Type":"ContainerStarted","Data":"7f63ac5f7dfa5ddcbe8727cd6f5f9119f3b0170079de2d97ae12fd30a58cba2e"} Oct 09 13:28:00 crc kubenswrapper[4762]: I1009 13:28:00.796697 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-pmkqc" event={"ID":"410067d0-d751-4783-a663-e429c72c805c","Type":"ContainerStarted","Data":"1db5f3dbbd7b9cd0cf806baeb270676dec2f3b00e5cfaf0156858bf25f667030"} Oct 09 13:28:00 crc kubenswrapper[4762]: I1009 13:28:00.798970 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-sx9r4" event={"ID":"a8ffac8c-bde6-4c86-9671-94764560a87a","Type":"ContainerStarted","Data":"5508e8f4f303d812e7fa736068e7e32b0be8ab592ac91a53d7723c9b8e41c43a"} Oct 09 13:28:00 crc kubenswrapper[4762]: I1009 13:28:00.800674 4762 patch_prober.go:28] interesting pod/downloads-7954f5f757-l2j7b container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" start-of-body= Oct 09 13:28:00 crc kubenswrapper[4762]: I1009 13:28:00.800724 4762 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-l2j7b" podUID="50b92b8d-158e-4ec4-aaed-a5c83aafeb8b" containerName="download-server" 
probeResult="failure" output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" Oct 09 13:28:00 crc kubenswrapper[4762]: I1009 13:28:00.807264 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-ff6kr" podStartSLOduration=135.807246472 podStartE2EDuration="2m15.807246472s" podCreationTimestamp="2025-10-09 13:25:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:28:00.185423661 +0000 UTC m=+155.959214700" watchObservedRunningTime="2025-10-09 13:28:00.807246472 +0000 UTC m=+156.581037511" Oct 09 13:28:00 crc kubenswrapper[4762]: I1009 13:28:00.807509 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-6tzrl" podStartSLOduration=7.8075016900000005 podStartE2EDuration="7.80750169s" podCreationTimestamp="2025-10-09 13:27:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:28:00.806615555 +0000 UTC m=+156.580406624" watchObservedRunningTime="2025-10-09 13:28:00.80750169 +0000 UTC m=+156.581292729" Oct 09 13:28:00 crc kubenswrapper[4762]: I1009 13:28:00.810511 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-xzd8z" Oct 09 13:28:00 crc kubenswrapper[4762]: I1009 13:28:00.851371 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-8zwlj" podStartSLOduration=136.851353847 podStartE2EDuration="2m16.851353847s" podCreationTimestamp="2025-10-09 13:25:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:28:00.850701198 +0000 UTC m=+156.624492237" watchObservedRunningTime="2025-10-09 13:28:00.851353847 +0000 UTC m=+156.625144886" Oct 09 13:28:00 crc kubenswrapper[4762]: I1009 13:28:00.851568 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-zn6jm" podStartSLOduration=135.851564043 podStartE2EDuration="2m15.851564043s" podCreationTimestamp="2025-10-09 13:25:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:28:00.83078775 +0000 UTC m=+156.604578789" watchObservedRunningTime="2025-10-09 13:28:00.851564043 +0000 UTC m=+156.625355082" Oct 09 13:28:00 crc kubenswrapper[4762]: I1009 13:28:00.862662 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 13:28:00 crc kubenswrapper[4762]: E1009 13:28:00.862817 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 13:28:01.362796802 +0000 UTC m=+157.136587841 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 13:28:00 crc kubenswrapper[4762]: I1009 13:28:00.863281 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2gj5n\" (UID: \"913065b9-29bd-4c9a-8d6e-e319bf91efbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-2gj5n" Oct 09 13:28:00 crc kubenswrapper[4762]: E1009 13:28:00.866190 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 13:28:01.366174724 +0000 UTC m=+157.139965833 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2gj5n" (UID: "913065b9-29bd-4c9a-8d6e-e319bf91efbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 13:28:00 crc kubenswrapper[4762]: I1009 13:28:00.917224 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-59vf6" Oct 09 13:28:00 crc kubenswrapper[4762]: I1009 13:28:00.927280 4762 patch_prober.go:28] interesting pod/router-default-5444994796-8tz5k container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 09 13:28:00 crc kubenswrapper[4762]: [-]has-synced failed: reason withheld Oct 09 13:28:00 crc kubenswrapper[4762]: [+]process-running ok Oct 09 13:28:00 crc kubenswrapper[4762]: healthz check failed Oct 09 13:28:00 crc kubenswrapper[4762]: I1009 13:28:00.927345 4762 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-8tz5k" podUID="10df5c6e-5eb3-4ba8-b98d-805b82bd6ca5" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 09 13:28:00 crc kubenswrapper[4762]: I1009 13:28:00.932257 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-sx9r4" podStartSLOduration=6.932239894 podStartE2EDuration="6.932239894s" podCreationTimestamp="2025-10-09 13:27:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:28:00.930222538 +0000 UTC m=+156.704013577" watchObservedRunningTime="2025-10-09 13:28:00.932239894 +0000 UTC m=+156.706030933" Oct 09 13:28:00 crc kubenswrapper[4762]: I1009 13:28:00.932638 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8pmjh" 
podStartSLOduration=135.932632915 podStartE2EDuration="2m15.932632915s" podCreationTimestamp="2025-10-09 13:25:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:28:00.90302266 +0000 UTC m=+156.676813709" watchObservedRunningTime="2025-10-09 13:28:00.932632915 +0000 UTC m=+156.706423954" Oct 09 13:28:00 crc kubenswrapper[4762]: I1009 13:28:00.949335 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-n8mp7" podStartSLOduration=135.949319494 podStartE2EDuration="2m15.949319494s" podCreationTimestamp="2025-10-09 13:25:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:28:00.947460732 +0000 UTC m=+156.721251761" watchObservedRunningTime="2025-10-09 13:28:00.949319494 +0000 UTC m=+156.723110533" Oct 09 13:28:00 crc kubenswrapper[4762]: I1009 13:28:00.980134 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 13:28:00 crc kubenswrapper[4762]: E1009 13:28:00.980555 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 13:28:01.480540154 +0000 UTC m=+157.254331193 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 13:28:00 crc kubenswrapper[4762]: I1009 13:28:00.998553 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-8x7j8" podStartSLOduration=135.998531119 podStartE2EDuration="2m15.998531119s" podCreationTimestamp="2025-10-09 13:25:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:28:00.998014595 +0000 UTC m=+156.771805634" watchObservedRunningTime="2025-10-09 13:28:00.998531119 +0000 UTC m=+156.772322158" Oct 09 13:28:01 crc kubenswrapper[4762]: I1009 13:28:01.034910 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ljrw4" podStartSLOduration=136.03489437 podStartE2EDuration="2m16.03489437s" podCreationTimestamp="2025-10-09 13:25:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:28:01.031879888 +0000 UTC m=+156.805670937" watchObservedRunningTime="2025-10-09 13:28:01.03489437 +0000 UTC m=+156.808685409" Oct 09 13:28:01 crc kubenswrapper[4762]: I1009 13:28:01.059546 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-924rd" Oct 09 13:28:01 crc kubenswrapper[4762]: I1009 13:28:01.059867 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-924rd" Oct 09 13:28:01 crc kubenswrapper[4762]: I1009 13:28:01.063862 4762 patch_prober.go:28] interesting pod/apiserver-76f77b778f-924rd container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="Get \"https://10.217.0.7:8443/livez\": dial tcp 10.217.0.7:8443: connect: connection refused" start-of-body= Oct 09 13:28:01 crc kubenswrapper[4762]: I1009 13:28:01.064068 4762 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-924rd" podUID="05d81e4d-1c18-4533-96c4-888a906b2c25" containerName="openshift-apiserver" probeResult="failure" output="Get \"https://10.217.0.7:8443/livez\": dial tcp 10.217.0.7:8443: connect: connection refused" Oct 09 13:28:01 crc kubenswrapper[4762]: I1009 13:28:01.083813 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8pmjh" Oct 09 13:28:01 crc kubenswrapper[4762]: I1009 13:28:01.084063 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8pmjh" Oct 09 13:28:01 crc kubenswrapper[4762]: I1009 13:28:01.085201 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2gj5n\" (UID: \"913065b9-29bd-4c9a-8d6e-e319bf91efbd\") " 
pod="openshift-image-registry/image-registry-697d97f7c8-2gj5n" Oct 09 13:28:01 crc kubenswrapper[4762]: E1009 13:28:01.086094 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 13:28:01.586078199 +0000 UTC m=+157.359869238 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2gj5n" (UID: "913065b9-29bd-4c9a-8d6e-e319bf91efbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 13:28:01 crc kubenswrapper[4762]: I1009 13:28:01.134266 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-8x6nc" podStartSLOduration=136.134249146 podStartE2EDuration="2m16.134249146s" podCreationTimestamp="2025-10-09 13:25:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:28:01.064642019 +0000 UTC m=+156.838433058" watchObservedRunningTime="2025-10-09 13:28:01.134249146 +0000 UTC m=+156.908040185" Oct 09 13:28:01 crc kubenswrapper[4762]: I1009 13:28:01.182963 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-924rd" podStartSLOduration=136.182942817 podStartE2EDuration="2m16.182942817s" podCreationTimestamp="2025-10-09 13:25:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:28:01.136488257 +0000 UTC m=+156.910279296" watchObservedRunningTime="2025-10-09 13:28:01.182942817 +0000 UTC m=+156.956733856" Oct 09 13:28:01 crc kubenswrapper[4762]: I1009 13:28:01.187188 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 13:28:01 crc kubenswrapper[4762]: E1009 13:28:01.187581 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 13:28:01.687565094 +0000 UTC m=+157.461356133 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 13:28:01 crc kubenswrapper[4762]: I1009 13:28:01.288773 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2gj5n\" (UID: \"913065b9-29bd-4c9a-8d6e-e319bf91efbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-2gj5n" Oct 09 13:28:01 crc kubenswrapper[4762]: E1009 13:28:01.289350 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 13:28:01.789337326 +0000 UTC m=+157.563128365 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2gj5n" (UID: "913065b9-29bd-4c9a-8d6e-e319bf91efbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 13:28:01 crc kubenswrapper[4762]: I1009 13:28:01.390284 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 13:28:01 crc kubenswrapper[4762]: E1009 13:28:01.390606 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 13:28:01.890592374 +0000 UTC m=+157.664383413 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 13:28:01 crc kubenswrapper[4762]: I1009 13:28:01.492159 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2gj5n\" (UID: \"913065b9-29bd-4c9a-8d6e-e319bf91efbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-2gj5n" Oct 09 13:28:01 crc kubenswrapper[4762]: E1009 13:28:01.492499 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 13:28:01.99248738 +0000 UTC m=+157.766278419 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2gj5n" (UID: "913065b9-29bd-4c9a-8d6e-e319bf91efbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 13:28:01 crc kubenswrapper[4762]: I1009 13:28:01.593693 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 13:28:01 crc kubenswrapper[4762]: E1009 13:28:01.593908 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 13:28:02.093877761 +0000 UTC m=+157.867668810 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 13:28:01 crc kubenswrapper[4762]: I1009 13:28:01.593995 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2gj5n\" (UID: \"913065b9-29bd-4c9a-8d6e-e319bf91efbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-2gj5n" Oct 09 13:28:01 crc kubenswrapper[4762]: E1009 13:28:01.594352 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 13:28:02.094339613 +0000 UTC m=+157.868130652 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2gj5n" (UID: "913065b9-29bd-4c9a-8d6e-e319bf91efbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 13:28:01 crc kubenswrapper[4762]: I1009 13:28:01.695526 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 13:28:01 crc kubenswrapper[4762]: E1009 13:28:01.695818 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 13:28:02.195779937 +0000 UTC m=+157.969570986 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 13:28:01 crc kubenswrapper[4762]: I1009 13:28:01.696177 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2gj5n\" (UID: \"913065b9-29bd-4c9a-8d6e-e319bf91efbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-2gj5n" Oct 09 13:28:01 crc kubenswrapper[4762]: E1009 13:28:01.696490 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 13:28:02.196478106 +0000 UTC m=+157.970269145 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2gj5n" (UID: "913065b9-29bd-4c9a-8d6e-e319bf91efbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 13:28:01 crc kubenswrapper[4762]: I1009 13:28:01.780060 4762 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-ljrw4 container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.37:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Oct 09 13:28:01 crc kubenswrapper[4762]: I1009 13:28:01.780124 4762 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ljrw4" podUID="43ee5885-188a-4435-a40f-dfb077d01a84" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.37:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Oct 09 13:28:01 crc kubenswrapper[4762]: I1009 13:28:01.797500 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 13:28:01 crc kubenswrapper[4762]: E1009 13:28:01.797791 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 13:28:02.297775166 +0000 UTC m=+158.071566205 (durationBeforeRetry 500ms). 
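Each failure also arms a retry delay, logged as "No retries permitted until <deadline> (durationBeforeRetry 500ms)": operations are keyed per volume, and a new attempt is rejected until the deadline passes. A sketch of that gating under assumed semantics (a real implementation would also grow the delay on repeated failure and track in-flight operations):

```go
// Assumed semantics, not kubelet source: a per-volume pending-operations
// table that refuses to start a new attempt before a backoff deadline,
// mirroring the "No retries permitted until ..." lines in this log.
package main

import (
	"errors"
	"fmt"
	"time"
)

type pendingOps struct {
	notBefore map[string]time.Time // volume key -> earliest next attempt
	delay     time.Duration
}

func (p *pendingOps) run(key string, op func() error) error {
	if t, ok := p.notBefore[key]; ok && time.Now().Before(t) {
		return fmt.Errorf("no retries permitted until %s", t.Format(time.RFC3339Nano))
	}
	if err := op(); err != nil {
		p.notBefore[key] = time.Now().Add(p.delay) // schedule the next window
		return err
	}
	delete(p.notBefore, key)
	return nil
}

func main() {
	p := &pendingOps{notBefore: map[string]time.Time{}, delay: 500 * time.Millisecond}
	mount := func() error { return errors.New("driver not registered yet") }
	fmt.Println(p.run("pvc-657094db", mount)) // fails, arms the 500ms backoff
	fmt.Println(p.run("pvc-657094db", mount)) // rejected: retry window not reached
	time.Sleep(600 * time.Millisecond)
	fmt.Println(p.run("pvc-657094db", mount)) // attempted again after the delay
}
```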
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 13:28:01 crc kubenswrapper[4762]: I1009 13:28:01.813760 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-pmkqc" event={"ID":"410067d0-d751-4783-a663-e429c72c805c","Type":"ContainerStarted","Data":"8334ff41ce34afa79dd68123a356f03305be0d5f0d041f8cad959a42d2b51400"} Oct 09 13:28:01 crc kubenswrapper[4762]: I1009 13:28:01.814278 4762 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-9957f container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.38:8080/healthz\": dial tcp 10.217.0.38:8080: connect: connection refused" start-of-body= Oct 09 13:28:01 crc kubenswrapper[4762]: I1009 13:28:01.814319 4762 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-9957f" podUID="fe403e6f-4f08-4263-962d-377d0989c0d7" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.38:8080/healthz\": dial tcp 10.217.0.38:8080: connect: connection refused" Oct 09 13:28:01 crc kubenswrapper[4762]: I1009 13:28:01.899293 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2gj5n\" (UID: \"913065b9-29bd-4c9a-8d6e-e319bf91efbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-2gj5n" Oct 09 13:28:01 crc kubenswrapper[4762]: E1009 13:28:01.902347 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 13:28:02.402332504 +0000 UTC m=+158.176123543 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2gj5n" (UID: "913065b9-29bd-4c9a-8d6e-e319bf91efbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 13:28:01 crc kubenswrapper[4762]: I1009 13:28:01.932618 4762 patch_prober.go:28] interesting pod/router-default-5444994796-8tz5k container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 09 13:28:01 crc kubenswrapper[4762]: [-]has-synced failed: reason withheld Oct 09 13:28:01 crc kubenswrapper[4762]: [+]process-running ok Oct 09 13:28:01 crc kubenswrapper[4762]: healthz check failed Oct 09 13:28:01 crc kubenswrapper[4762]: I1009 13:28:01.932716 4762 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-8tz5k" podUID="10df5c6e-5eb3-4ba8-b98d-805b82bd6ca5" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 09 13:28:02 crc kubenswrapper[4762]: I1009 13:28:02.001375 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 13:28:02 crc kubenswrapper[4762]: E1009 13:28:02.001744 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 13:28:02.50172262 +0000 UTC m=+158.275513659 (durationBeforeRetry 500ms). 
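The router startup-probe output above uses the aggregated healthz convention: one "[+]name ok" or "[-]name failed: reason withheld" line per sub-check, a trailing "healthz check failed", and HTTP 500 whenever any check fails. A self-contained handler reproducing that format (check names copied from the log; the failure reasons are placeholders):

```go
// Reproduces the healthz output format captured in the router probe above.
// The check names come from the log; the check bodies are stand-ins.
package main

import (
	"fmt"
	"net/http"
)

type check struct {
	name string
	fn   func() error
}

func healthz(checks []check) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		failed := false
		body := ""
		for _, c := range checks {
			if err := c.fn(); err != nil {
				failed = true
				body += fmt.Sprintf("[-]%s failed: reason withheld\n", c.name)
			} else {
				body += fmt.Sprintf("[+]%s ok\n", c.name)
			}
		}
		if failed {
			w.WriteHeader(http.StatusInternalServerError) // probe sees statuscode 500
			body += "healthz check failed\n"
		}
		fmt.Fprint(w, body)
	}
}

func main() {
	http.HandleFunc("/healthz", healthz([]check{
		{"backend-http", func() error { return fmt.Errorf("not synced") }},
		{"has-synced", func() error { return fmt.Errorf("not synced") }},
		{"process-running", func() error { return nil }},
	}))
	http.ListenAndServe(":8080", nil)
}
```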
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 13:28:02 crc kubenswrapper[4762]: I1009 13:28:02.062661 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-8x7j8" Oct 09 13:28:02 crc kubenswrapper[4762]: I1009 13:28:02.066916 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8pmjh" Oct 09 13:28:02 crc kubenswrapper[4762]: I1009 13:28:02.102347 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2gj5n\" (UID: \"913065b9-29bd-4c9a-8d6e-e319bf91efbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-2gj5n" Oct 09 13:28:02 crc kubenswrapper[4762]: E1009 13:28:02.102919 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 13:28:02.602903077 +0000 UTC m=+158.376694116 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2gj5n" (UID: "913065b9-29bd-4c9a-8d6e-e319bf91efbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 13:28:02 crc kubenswrapper[4762]: I1009 13:28:02.113070 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-wpc52"] Oct 09 13:28:02 crc kubenswrapper[4762]: I1009 13:28:02.114476 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-wpc52" Oct 09 13:28:02 crc kubenswrapper[4762]: I1009 13:28:02.117593 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Oct 09 13:28:02 crc kubenswrapper[4762]: I1009 13:28:02.136720 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-wpc52"] Oct 09 13:28:02 crc kubenswrapper[4762]: I1009 13:28:02.208052 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 13:28:02 crc kubenswrapper[4762]: I1009 13:28:02.208448 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4de8e24c-273f-4ff9-83c2-9dd8952c3d74-utilities\") pod \"certified-operators-wpc52\" (UID: \"4de8e24c-273f-4ff9-83c2-9dd8952c3d74\") " pod="openshift-marketplace/certified-operators-wpc52" Oct 09 13:28:02 crc kubenswrapper[4762]: I1009 13:28:02.208602 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cxgtn\" (UniqueName: \"kubernetes.io/projected/4de8e24c-273f-4ff9-83c2-9dd8952c3d74-kube-api-access-cxgtn\") pod \"certified-operators-wpc52\" (UID: \"4de8e24c-273f-4ff9-83c2-9dd8952c3d74\") " pod="openshift-marketplace/certified-operators-wpc52" Oct 09 13:28:02 crc kubenswrapper[4762]: I1009 13:28:02.208793 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4de8e24c-273f-4ff9-83c2-9dd8952c3d74-catalog-content\") pod \"certified-operators-wpc52\" (UID: \"4de8e24c-273f-4ff9-83c2-9dd8952c3d74\") " pod="openshift-marketplace/certified-operators-wpc52" Oct 09 13:28:02 crc kubenswrapper[4762]: E1009 13:28:02.208989 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 13:28:02.708970687 +0000 UTC m=+158.482761726 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 13:28:02 crc kubenswrapper[4762]: I1009 13:28:02.306193 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-jhmrt"] Oct 09 13:28:02 crc kubenswrapper[4762]: I1009 13:28:02.308032 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-jhmrt" Oct 09 13:28:02 crc kubenswrapper[4762]: I1009 13:28:02.310305 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4de8e24c-273f-4ff9-83c2-9dd8952c3d74-utilities\") pod \"certified-operators-wpc52\" (UID: \"4de8e24c-273f-4ff9-83c2-9dd8952c3d74\") " pod="openshift-marketplace/certified-operators-wpc52" Oct 09 13:28:02 crc kubenswrapper[4762]: I1009 13:28:02.310405 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cxgtn\" (UniqueName: \"kubernetes.io/projected/4de8e24c-273f-4ff9-83c2-9dd8952c3d74-kube-api-access-cxgtn\") pod \"certified-operators-wpc52\" (UID: \"4de8e24c-273f-4ff9-83c2-9dd8952c3d74\") " pod="openshift-marketplace/certified-operators-wpc52" Oct 09 13:28:02 crc kubenswrapper[4762]: I1009 13:28:02.310450 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2gj5n\" (UID: \"913065b9-29bd-4c9a-8d6e-e319bf91efbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-2gj5n" Oct 09 13:28:02 crc kubenswrapper[4762]: I1009 13:28:02.310489 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4de8e24c-273f-4ff9-83c2-9dd8952c3d74-catalog-content\") pod \"certified-operators-wpc52\" (UID: \"4de8e24c-273f-4ff9-83c2-9dd8952c3d74\") " pod="openshift-marketplace/certified-operators-wpc52" Oct 09 13:28:02 crc kubenswrapper[4762]: I1009 13:28:02.311001 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4de8e24c-273f-4ff9-83c2-9dd8952c3d74-catalog-content\") pod \"certified-operators-wpc52\" (UID: \"4de8e24c-273f-4ff9-83c2-9dd8952c3d74\") " pod="openshift-marketplace/certified-operators-wpc52" Oct 09 13:28:02 crc kubenswrapper[4762]: I1009 13:28:02.311235 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4de8e24c-273f-4ff9-83c2-9dd8952c3d74-utilities\") pod \"certified-operators-wpc52\" (UID: \"4de8e24c-273f-4ff9-83c2-9dd8952c3d74\") " pod="openshift-marketplace/certified-operators-wpc52" Oct 09 13:28:02 crc kubenswrapper[4762]: E1009 13:28:02.311727 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 13:28:02.811715616 +0000 UTC m=+158.585506655 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2gj5n" (UID: "913065b9-29bd-4c9a-8d6e-e319bf91efbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 13:28:02 crc kubenswrapper[4762]: I1009 13:28:02.312918 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Oct 09 13:28:02 crc kubenswrapper[4762]: I1009 13:28:02.320598 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-jhmrt"] Oct 09 13:28:02 crc kubenswrapper[4762]: I1009 13:28:02.342075 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cxgtn\" (UniqueName: \"kubernetes.io/projected/4de8e24c-273f-4ff9-83c2-9dd8952c3d74-kube-api-access-cxgtn\") pod \"certified-operators-wpc52\" (UID: \"4de8e24c-273f-4ff9-83c2-9dd8952c3d74\") " pod="openshift-marketplace/certified-operators-wpc52" Oct 09 13:28:02 crc kubenswrapper[4762]: I1009 13:28:02.413999 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 13:28:02 crc kubenswrapper[4762]: I1009 13:28:02.414205 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gpjt7\" (UniqueName: \"kubernetes.io/projected/2c17d894-7ee5-44c4-b64a-c05be6870a3d-kube-api-access-gpjt7\") pod \"community-operators-jhmrt\" (UID: \"2c17d894-7ee5-44c4-b64a-c05be6870a3d\") " pod="openshift-marketplace/community-operators-jhmrt" Oct 09 13:28:02 crc kubenswrapper[4762]: I1009 13:28:02.414268 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2c17d894-7ee5-44c4-b64a-c05be6870a3d-catalog-content\") pod \"community-operators-jhmrt\" (UID: \"2c17d894-7ee5-44c4-b64a-c05be6870a3d\") " pod="openshift-marketplace/community-operators-jhmrt" Oct 09 13:28:02 crc kubenswrapper[4762]: I1009 13:28:02.414311 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2c17d894-7ee5-44c4-b64a-c05be6870a3d-utilities\") pod \"community-operators-jhmrt\" (UID: \"2c17d894-7ee5-44c4-b64a-c05be6870a3d\") " pod="openshift-marketplace/community-operators-jhmrt" Oct 09 13:28:02 crc kubenswrapper[4762]: E1009 13:28:02.414410 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 13:28:02.914395343 +0000 UTC m=+158.688186382 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 13:28:02 crc kubenswrapper[4762]: I1009 13:28:02.433842 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-wpc52" Oct 09 13:28:02 crc kubenswrapper[4762]: I1009 13:28:02.488347 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-z5j4d"] Oct 09 13:28:02 crc kubenswrapper[4762]: I1009 13:28:02.489290 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-z5j4d" Oct 09 13:28:02 crc kubenswrapper[4762]: I1009 13:28:02.502465 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-z5j4d"] Oct 09 13:28:02 crc kubenswrapper[4762]: I1009 13:28:02.515073 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2gj5n\" (UID: \"913065b9-29bd-4c9a-8d6e-e319bf91efbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-2gj5n" Oct 09 13:28:02 crc kubenswrapper[4762]: I1009 13:28:02.515112 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2c17d894-7ee5-44c4-b64a-c05be6870a3d-catalog-content\") pod \"community-operators-jhmrt\" (UID: \"2c17d894-7ee5-44c4-b64a-c05be6870a3d\") " pod="openshift-marketplace/community-operators-jhmrt" Oct 09 13:28:02 crc kubenswrapper[4762]: I1009 13:28:02.515162 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2c17d894-7ee5-44c4-b64a-c05be6870a3d-utilities\") pod \"community-operators-jhmrt\" (UID: \"2c17d894-7ee5-44c4-b64a-c05be6870a3d\") " pod="openshift-marketplace/community-operators-jhmrt" Oct 09 13:28:02 crc kubenswrapper[4762]: I1009 13:28:02.515188 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gpjt7\" (UniqueName: \"kubernetes.io/projected/2c17d894-7ee5-44c4-b64a-c05be6870a3d-kube-api-access-gpjt7\") pod \"community-operators-jhmrt\" (UID: \"2c17d894-7ee5-44c4-b64a-c05be6870a3d\") " pod="openshift-marketplace/community-operators-jhmrt" Oct 09 13:28:02 crc kubenswrapper[4762]: E1009 13:28:02.515984 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 13:28:03.01596596 +0000 UTC m=+158.789756999 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2gj5n" (UID: "913065b9-29bd-4c9a-8d6e-e319bf91efbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 13:28:02 crc kubenswrapper[4762]: I1009 13:28:02.516105 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2c17d894-7ee5-44c4-b64a-c05be6870a3d-catalog-content\") pod \"community-operators-jhmrt\" (UID: \"2c17d894-7ee5-44c4-b64a-c05be6870a3d\") " pod="openshift-marketplace/community-operators-jhmrt" Oct 09 13:28:02 crc kubenswrapper[4762]: I1009 13:28:02.516390 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2c17d894-7ee5-44c4-b64a-c05be6870a3d-utilities\") pod \"community-operators-jhmrt\" (UID: \"2c17d894-7ee5-44c4-b64a-c05be6870a3d\") " pod="openshift-marketplace/community-operators-jhmrt" Oct 09 13:28:02 crc kubenswrapper[4762]: I1009 13:28:02.548354 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gpjt7\" (UniqueName: \"kubernetes.io/projected/2c17d894-7ee5-44c4-b64a-c05be6870a3d-kube-api-access-gpjt7\") pod \"community-operators-jhmrt\" (UID: \"2c17d894-7ee5-44c4-b64a-c05be6870a3d\") " pod="openshift-marketplace/community-operators-jhmrt" Oct 09 13:28:02 crc kubenswrapper[4762]: I1009 13:28:02.617147 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 13:28:02 crc kubenswrapper[4762]: I1009 13:28:02.617528 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/08bf2dc9-588c-4ea0-b57b-25737b61c178-utilities\") pod \"certified-operators-z5j4d\" (UID: \"08bf2dc9-588c-4ea0-b57b-25737b61c178\") " pod="openshift-marketplace/certified-operators-z5j4d" Oct 09 13:28:02 crc kubenswrapper[4762]: I1009 13:28:02.617682 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/08bf2dc9-588c-4ea0-b57b-25737b61c178-catalog-content\") pod \"certified-operators-z5j4d\" (UID: \"08bf2dc9-588c-4ea0-b57b-25737b61c178\") " pod="openshift-marketplace/certified-operators-z5j4d" Oct 09 13:28:02 crc kubenswrapper[4762]: I1009 13:28:02.617834 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sv8s4\" (UniqueName: \"kubernetes.io/projected/08bf2dc9-588c-4ea0-b57b-25737b61c178-kube-api-access-sv8s4\") pod \"certified-operators-z5j4d\" (UID: \"08bf2dc9-588c-4ea0-b57b-25737b61c178\") " pod="openshift-marketplace/certified-operators-z5j4d" Oct 09 13:28:02 crc kubenswrapper[4762]: E1009 13:28:02.618086 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-10-09 13:28:03.118067621 +0000 UTC m=+158.891858660 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 13:28:02 crc kubenswrapper[4762]: I1009 13:28:02.633966 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-jhmrt" Oct 09 13:28:02 crc kubenswrapper[4762]: I1009 13:28:02.689327 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-r6qld"] Oct 09 13:28:02 crc kubenswrapper[4762]: I1009 13:28:02.690539 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-r6qld" Oct 09 13:28:02 crc kubenswrapper[4762]: I1009 13:28:02.700124 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-r6qld"] Oct 09 13:28:02 crc kubenswrapper[4762]: I1009 13:28:02.719372 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sv8s4\" (UniqueName: \"kubernetes.io/projected/08bf2dc9-588c-4ea0-b57b-25737b61c178-kube-api-access-sv8s4\") pod \"certified-operators-z5j4d\" (UID: \"08bf2dc9-588c-4ea0-b57b-25737b61c178\") " pod="openshift-marketplace/certified-operators-z5j4d" Oct 09 13:28:02 crc kubenswrapper[4762]: I1009 13:28:02.719458 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2gj5n\" (UID: \"913065b9-29bd-4c9a-8d6e-e319bf91efbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-2gj5n" Oct 09 13:28:02 crc kubenswrapper[4762]: I1009 13:28:02.719511 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/08bf2dc9-588c-4ea0-b57b-25737b61c178-utilities\") pod \"certified-operators-z5j4d\" (UID: \"08bf2dc9-588c-4ea0-b57b-25737b61c178\") " pod="openshift-marketplace/certified-operators-z5j4d" Oct 09 13:28:02 crc kubenswrapper[4762]: I1009 13:28:02.719546 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/08bf2dc9-588c-4ea0-b57b-25737b61c178-catalog-content\") pod \"certified-operators-z5j4d\" (UID: \"08bf2dc9-588c-4ea0-b57b-25737b61c178\") " pod="openshift-marketplace/certified-operators-z5j4d" Oct 09 13:28:02 crc kubenswrapper[4762]: I1009 13:28:02.720443 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/08bf2dc9-588c-4ea0-b57b-25737b61c178-catalog-content\") pod \"certified-operators-z5j4d\" (UID: \"08bf2dc9-588c-4ea0-b57b-25737b61c178\") " pod="openshift-marketplace/certified-operators-z5j4d" Oct 09 13:28:02 crc kubenswrapper[4762]: E1009 13:28:02.720760 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: 
nodeName:}" failed. No retries permitted until 2025-10-09 13:28:03.220745138 +0000 UTC m=+158.994536177 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2gj5n" (UID: "913065b9-29bd-4c9a-8d6e-e319bf91efbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 13:28:02 crc kubenswrapper[4762]: I1009 13:28:02.721322 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/08bf2dc9-588c-4ea0-b57b-25737b61c178-utilities\") pod \"certified-operators-z5j4d\" (UID: \"08bf2dc9-588c-4ea0-b57b-25737b61c178\") " pod="openshift-marketplace/certified-operators-z5j4d" Oct 09 13:28:02 crc kubenswrapper[4762]: I1009 13:28:02.766759 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sv8s4\" (UniqueName: \"kubernetes.io/projected/08bf2dc9-588c-4ea0-b57b-25737b61c178-kube-api-access-sv8s4\") pod \"certified-operators-z5j4d\" (UID: \"08bf2dc9-588c-4ea0-b57b-25737b61c178\") " pod="openshift-marketplace/certified-operators-z5j4d" Oct 09 13:28:02 crc kubenswrapper[4762]: I1009 13:28:02.818733 4762 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-ljrw4 container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.37:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Oct 09 13:28:02 crc kubenswrapper[4762]: I1009 13:28:02.818809 4762 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ljrw4" podUID="43ee5885-188a-4435-a40f-dfb077d01a84" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.37:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Oct 09 13:28:02 crc kubenswrapper[4762]: I1009 13:28:02.821993 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-z5j4d" Oct 09 13:28:02 crc kubenswrapper[4762]: I1009 13:28:02.823231 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 13:28:02 crc kubenswrapper[4762]: I1009 13:28:02.823501 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lwvl7\" (UniqueName: \"kubernetes.io/projected/fbc3d339-b1fb-4705-ab1d-6b2946e12adc-kube-api-access-lwvl7\") pod \"community-operators-r6qld\" (UID: \"fbc3d339-b1fb-4705-ab1d-6b2946e12adc\") " pod="openshift-marketplace/community-operators-r6qld" Oct 09 13:28:02 crc kubenswrapper[4762]: I1009 13:28:02.823545 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fbc3d339-b1fb-4705-ab1d-6b2946e12adc-catalog-content\") pod \"community-operators-r6qld\" (UID: \"fbc3d339-b1fb-4705-ab1d-6b2946e12adc\") " pod="openshift-marketplace/community-operators-r6qld" Oct 09 13:28:02 crc kubenswrapper[4762]: I1009 13:28:02.823578 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fbc3d339-b1fb-4705-ab1d-6b2946e12adc-utilities\") pod \"community-operators-r6qld\" (UID: \"fbc3d339-b1fb-4705-ab1d-6b2946e12adc\") " pod="openshift-marketplace/community-operators-r6qld" Oct 09 13:28:02 crc kubenswrapper[4762]: E1009 13:28:02.823715 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 13:28:03.323699703 +0000 UTC m=+159.097490742 (durationBeforeRetry 500ms). 
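Two distinct probe failure signatures appear in this window: the packageserver readiness probe times out on the client side ("Client.Timeout exceeded while awaiting headers") because the endpoint returns nothing within the deadline, while the marketplace-operator probe fails fast with "connection refused" because nothing is listening on the port yet. A sketch of an HTTP probe that surfaces both, assuming the usual success range of 2xx-3xx:

```go
// Minimal HTTP probe sketch: a client-side timeout produces the
// "Client.Timeout exceeded" error, a closed port produces "connection
// refused", and an open port with a bad status yields a statuscode failure.
package main

import (
	"fmt"
	"net/http"
	"time"
)

func probe(url string, timeout time.Duration) error {
	client := &http.Client{Timeout: timeout} // enforces the header deadline
	resp, err := client.Get(url)
	if err != nil {
		return err // timeout or connection refused surfaces here
	}
	defer resp.Body.Close()
	if resp.StatusCode < 200 || resp.StatusCode >= 400 {
		return fmt.Errorf("HTTP probe failed with statuscode: %d", resp.StatusCode)
	}
	return nil
}

func main() {
	if err := probe("http://10.217.0.38:8080/healthz", 1*time.Second); err != nil {
		fmt.Println("Probe failed:", err)
	}
}
```

The distinction matters when reading the log: a timeout means the process is up but slow to serve, while connection refused means the listener does not exist yet.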
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 13:28:02 crc kubenswrapper[4762]: I1009 13:28:02.862988 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-pmkqc" event={"ID":"410067d0-d751-4783-a663-e429c72c805c","Type":"ContainerStarted","Data":"12eecdcc9401a2c20678d36c16b53665b09d4ab77dcbcbf4804eb107ca051625"} Oct 09 13:28:02 crc kubenswrapper[4762]: I1009 13:28:02.895074 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8pmjh" Oct 09 13:28:02 crc kubenswrapper[4762]: I1009 13:28:02.925292 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fbc3d339-b1fb-4705-ab1d-6b2946e12adc-utilities\") pod \"community-operators-r6qld\" (UID: \"fbc3d339-b1fb-4705-ab1d-6b2946e12adc\") " pod="openshift-marketplace/community-operators-r6qld" Oct 09 13:28:02 crc kubenswrapper[4762]: I1009 13:28:02.925533 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lwvl7\" (UniqueName: \"kubernetes.io/projected/fbc3d339-b1fb-4705-ab1d-6b2946e12adc-kube-api-access-lwvl7\") pod \"community-operators-r6qld\" (UID: \"fbc3d339-b1fb-4705-ab1d-6b2946e12adc\") " pod="openshift-marketplace/community-operators-r6qld" Oct 09 13:28:02 crc kubenswrapper[4762]: I1009 13:28:02.925563 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2gj5n\" (UID: \"913065b9-29bd-4c9a-8d6e-e319bf91efbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-2gj5n" Oct 09 13:28:02 crc kubenswrapper[4762]: I1009 13:28:02.925632 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fbc3d339-b1fb-4705-ab1d-6b2946e12adc-catalog-content\") pod \"community-operators-r6qld\" (UID: \"fbc3d339-b1fb-4705-ab1d-6b2946e12adc\") " pod="openshift-marketplace/community-operators-r6qld" Oct 09 13:28:02 crc kubenswrapper[4762]: I1009 13:28:02.927067 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fbc3d339-b1fb-4705-ab1d-6b2946e12adc-utilities\") pod \"community-operators-r6qld\" (UID: \"fbc3d339-b1fb-4705-ab1d-6b2946e12adc\") " pod="openshift-marketplace/community-operators-r6qld" Oct 09 13:28:02 crc kubenswrapper[4762]: I1009 13:28:02.927835 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fbc3d339-b1fb-4705-ab1d-6b2946e12adc-catalog-content\") pod \"community-operators-r6qld\" (UID: \"fbc3d339-b1fb-4705-ab1d-6b2946e12adc\") " pod="openshift-marketplace/community-operators-r6qld" Oct 09 13:28:02 crc kubenswrapper[4762]: E1009 13:28:02.928658 4762 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 13:28:03.428623891 +0000 UTC m=+159.202415010 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2gj5n" (UID: "913065b9-29bd-4c9a-8d6e-e319bf91efbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 13:28:02 crc kubenswrapper[4762]: I1009 13:28:02.939935 4762 patch_prober.go:28] interesting pod/router-default-5444994796-8tz5k container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 09 13:28:02 crc kubenswrapper[4762]: [-]has-synced failed: reason withheld Oct 09 13:28:02 crc kubenswrapper[4762]: [+]process-running ok Oct 09 13:28:02 crc kubenswrapper[4762]: healthz check failed Oct 09 13:28:02 crc kubenswrapper[4762]: I1009 13:28:02.939998 4762 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-8tz5k" podUID="10df5c6e-5eb3-4ba8-b98d-805b82bd6ca5" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 09 13:28:02 crc kubenswrapper[4762]: I1009 13:28:02.964659 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lwvl7\" (UniqueName: \"kubernetes.io/projected/fbc3d339-b1fb-4705-ab1d-6b2946e12adc-kube-api-access-lwvl7\") pod \"community-operators-r6qld\" (UID: \"fbc3d339-b1fb-4705-ab1d-6b2946e12adc\") " pod="openshift-marketplace/community-operators-r6qld" Oct 09 13:28:03 crc kubenswrapper[4762]: I1009 13:28:03.008967 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-r6qld" Oct 09 13:28:03 crc kubenswrapper[4762]: I1009 13:28:03.027581 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 13:28:03 crc kubenswrapper[4762]: E1009 13:28:03.027932 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 13:28:03.527916705 +0000 UTC m=+159.301707744 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 13:28:03 crc kubenswrapper[4762]: I1009 13:28:03.129473 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2gj5n\" (UID: \"913065b9-29bd-4c9a-8d6e-e319bf91efbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-2gj5n" Oct 09 13:28:03 crc kubenswrapper[4762]: E1009 13:28:03.155834 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 13:28:03.655808417 +0000 UTC m=+159.429599456 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2gj5n" (UID: "913065b9-29bd-4c9a-8d6e-e319bf91efbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 13:28:03 crc kubenswrapper[4762]: I1009 13:28:03.214253 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-wpc52"] Oct 09 13:28:03 crc kubenswrapper[4762]: I1009 13:28:03.230806 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 13:28:03 crc kubenswrapper[4762]: E1009 13:28:03.231166 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 13:28:03.731151432 +0000 UTC m=+159.504942471 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 13:28:03 crc kubenswrapper[4762]: I1009 13:28:03.281663 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-jhmrt"] Oct 09 13:28:03 crc kubenswrapper[4762]: I1009 13:28:03.342385 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2gj5n\" (UID: \"913065b9-29bd-4c9a-8d6e-e319bf91efbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-2gj5n" Oct 09 13:28:03 crc kubenswrapper[4762]: E1009 13:28:03.342861 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 13:28:03.842846876 +0000 UTC m=+159.616637915 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2gj5n" (UID: "913065b9-29bd-4c9a-8d6e-e319bf91efbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 13:28:03 crc kubenswrapper[4762]: I1009 13:28:03.429953 4762 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Oct 09 13:28:03 crc kubenswrapper[4762]: I1009 13:28:03.444148 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 13:28:03 crc kubenswrapper[4762]: E1009 13:28:03.444453 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 13:28:03.944436774 +0000 UTC m=+159.718227813 (durationBeforeRetry 500ms). 
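The plugin_watcher line above ("Adding socket path or updating timestamp to desired state cache" for /var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock) is the turning point: the driver's registration socket has appeared, so the retried mounts can begin to succeed. A polling sketch of that discovery step; the real kubelet watches the directory with filesystem events rather than a sleep loop, so treat this as an approximation:

```go
// Approximation of registration-socket discovery: scan the kubelet
// plugins_registry directory and report each *.sock file the first time
// it is seen. The directory path comes from the log; the loop is assumed.
package main

import (
	"fmt"
	"os"
	"path/filepath"
	"strings"
	"time"
)

func scanSockets(dir string, seen map[string]bool) {
	entries, err := os.ReadDir(dir)
	if err != nil {
		return // directory may not exist yet on a fresh node
	}
	for _, e := range entries {
		name := e.Name()
		if strings.HasSuffix(name, ".sock") && !seen[name] {
			seen[name] = true
			fmt.Println("Adding socket path to desired state cache path=", filepath.Join(dir, name))
		}
	}
}

func main() {
	seen := map[string]bool{}
	for {
		scanSockets("/var/lib/kubelet/plugins_registry", seen)
		time.Sleep(time.Second)
	}
}
```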
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 13:28:03 crc kubenswrapper[4762]: I1009 13:28:03.458665 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-z5j4d"] Oct 09 13:28:03 crc kubenswrapper[4762]: W1009 13:28:03.471984 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod08bf2dc9_588c_4ea0_b57b_25737b61c178.slice/crio-21aae81cf50707a46e79ca0bb73e69c4ea2a6265dfa624c249b6386e90d2433c WatchSource:0}: Error finding container 21aae81cf50707a46e79ca0bb73e69c4ea2a6265dfa624c249b6386e90d2433c: Status 404 returned error can't find the container with id 21aae81cf50707a46e79ca0bb73e69c4ea2a6265dfa624c249b6386e90d2433c Oct 09 13:28:03 crc kubenswrapper[4762]: I1009 13:28:03.547015 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2gj5n\" (UID: \"913065b9-29bd-4c9a-8d6e-e319bf91efbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-2gj5n" Oct 09 13:28:03 crc kubenswrapper[4762]: E1009 13:28:03.547448 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 13:28:04.047432339 +0000 UTC m=+159.821223378 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2gj5n" (UID: "913065b9-29bd-4c9a-8d6e-e319bf91efbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 13:28:03 crc kubenswrapper[4762]: I1009 13:28:03.549989 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-r6qld"] Oct 09 13:28:03 crc kubenswrapper[4762]: W1009 13:28:03.566504 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfbc3d339_b1fb_4705_ab1d_6b2946e12adc.slice/crio-8822c5ccb023a4238b6810b21ff0dc173a1aa1980b2f34c1a0d977958296a063 WatchSource:0}: Error finding container 8822c5ccb023a4238b6810b21ff0dc173a1aa1980b2f34c1a0d977958296a063: Status 404 returned error can't find the container with id 8822c5ccb023a4238b6810b21ff0dc173a1aa1980b2f34c1a0d977958296a063 Oct 09 13:28:03 crc kubenswrapper[4762]: I1009 13:28:03.648509 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 13:28:03 crc kubenswrapper[4762]: E1009 13:28:03.648699 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 13:28:04.148658197 +0000 UTC m=+159.922449236 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 13:28:03 crc kubenswrapper[4762]: I1009 13:28:03.648972 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2gj5n\" (UID: \"913065b9-29bd-4c9a-8d6e-e319bf91efbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-2gj5n" Oct 09 13:28:03 crc kubenswrapper[4762]: E1009 13:28:03.649237 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 13:28:04.149229193 +0000 UTC m=+159.923020232 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2gj5n" (UID: "913065b9-29bd-4c9a-8d6e-e319bf91efbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 09 13:28:03 crc kubenswrapper[4762]: I1009 13:28:03.750266 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 09 13:28:03 crc kubenswrapper[4762]: E1009 13:28:03.750613 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 13:28:04.250597774 +0000 UTC m=+160.024388813 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 09 13:28:03 crc kubenswrapper[4762]: I1009 13:28:03.851662 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2gj5n\" (UID: \"913065b9-29bd-4c9a-8d6e-e319bf91efbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-2gj5n"
Oct 09 13:28:03 crc kubenswrapper[4762]: E1009 13:28:03.852034 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 13:28:04.352014926 +0000 UTC m=+160.125805965 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2gj5n" (UID: "913065b9-29bd-4c9a-8d6e-e319bf91efbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 09 13:28:03 crc kubenswrapper[4762]: I1009 13:28:03.870994 4762 generic.go:334] "Generic (PLEG): container finished" podID="3b828691-c7c6-4899-b524-d867e7f151a4" containerID="55560df7338145e9510a0524ec0899fa8addbb1ac720d828e8d646a0e0178c4f" exitCode=0
Oct 09 13:28:03 crc kubenswrapper[4762]: I1009 13:28:03.871107 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29333595-d52ht" event={"ID":"3b828691-c7c6-4899-b524-d867e7f151a4","Type":"ContainerDied","Data":"55560df7338145e9510a0524ec0899fa8addbb1ac720d828e8d646a0e0178c4f"}
Oct 09 13:28:03 crc kubenswrapper[4762]: I1009 13:28:03.872849 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-z5j4d" event={"ID":"08bf2dc9-588c-4ea0-b57b-25737b61c178","Type":"ContainerStarted","Data":"b1de3575418e3af2ed7d2ab63d18e6103681f4363d3961dbe45cfd36e61ee13d"}
Oct 09 13:28:03 crc kubenswrapper[4762]: I1009 13:28:03.872887 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-z5j4d" event={"ID":"08bf2dc9-588c-4ea0-b57b-25737b61c178","Type":"ContainerStarted","Data":"21aae81cf50707a46e79ca0bb73e69c4ea2a6265dfa624c249b6386e90d2433c"}
Oct 09 13:28:03 crc kubenswrapper[4762]: I1009 13:28:03.875391 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jhmrt" event={"ID":"2c17d894-7ee5-44c4-b64a-c05be6870a3d","Type":"ContainerStarted","Data":"d748135a3609670b4dee3556da0537e6568c637cb9e3fd6927899e7bf69164e3"}
Oct 09 13:28:03 crc kubenswrapper[4762]: I1009 13:28:03.875425 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jhmrt" event={"ID":"2c17d894-7ee5-44c4-b64a-c05be6870a3d","Type":"ContainerStarted","Data":"38f749b83d8d32586071f6544e3e797ab27b5755ceb9d3ad7adb1f2080cdd30b"}
Oct 09 13:28:03 crc kubenswrapper[4762]: I1009 13:28:03.879179 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-r6qld" event={"ID":"fbc3d339-b1fb-4705-ab1d-6b2946e12adc","Type":"ContainerStarted","Data":"5e4a852be16c1713c69e3c2c166a927bb86da1d0ca26d64e7e1ab7eec859e033"}
Oct 09 13:28:03 crc kubenswrapper[4762]: I1009 13:28:03.879222 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-r6qld" event={"ID":"fbc3d339-b1fb-4705-ab1d-6b2946e12adc","Type":"ContainerStarted","Data":"8822c5ccb023a4238b6810b21ff0dc173a1aa1980b2f34c1a0d977958296a063"}
Oct 09 13:28:03 crc kubenswrapper[4762]: I1009 13:28:03.881707 4762 generic.go:334] "Generic (PLEG): container finished" podID="4de8e24c-273f-4ff9-83c2-9dd8952c3d74" containerID="a0bb71c4e8fa6e7c527ba206fe6e147b1d55e9323c831c1e6f73240578d0434c" exitCode=0
Oct 09 13:28:03 crc kubenswrapper[4762]: I1009 13:28:03.881997 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wpc52" event={"ID":"4de8e24c-273f-4ff9-83c2-9dd8952c3d74","Type":"ContainerDied","Data":"a0bb71c4e8fa6e7c527ba206fe6e147b1d55e9323c831c1e6f73240578d0434c"}
Oct 09 13:28:03 crc kubenswrapper[4762]: I1009 13:28:03.882045 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wpc52" event={"ID":"4de8e24c-273f-4ff9-83c2-9dd8952c3d74","Type":"ContainerStarted","Data":"8f4a98ac5dff448c77eff320ad461c9a797f7cb9c66a70b451733b048910f5b9"}
Oct 09 13:28:03 crc kubenswrapper[4762]: I1009 13:28:03.883508 4762 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Oct 09 13:28:03 crc kubenswrapper[4762]: I1009 13:28:03.890565 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-pmkqc" event={"ID":"410067d0-d751-4783-a663-e429c72c805c","Type":"ContainerStarted","Data":"ee642a61778898f8f745b6940e8fc7259be12a8f9c36604c8b41258cef9a63de"}
Oct 09 13:28:03 crc kubenswrapper[4762]: I1009 13:28:03.920143 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-pmkqc" podStartSLOduration=10.920123952 podStartE2EDuration="10.920123952s" podCreationTimestamp="2025-10-09 13:27:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:28:03.91934141 +0000 UTC m=+159.693132459" watchObservedRunningTime="2025-10-09 13:28:03.920123952 +0000 UTC m=+159.693914991"
Oct 09 13:28:03 crc kubenswrapper[4762]: I1009 13:28:03.932964 4762 patch_prober.go:28] interesting pod/router-default-5444994796-8tz5k container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Oct 09 13:28:03 crc kubenswrapper[4762]: [-]has-synced failed: reason withheld
Oct 09 13:28:03 crc kubenswrapper[4762]: [+]process-running ok
Oct 09 13:28:03 crc kubenswrapper[4762]: healthz check failed
Oct 09 13:28:03 crc kubenswrapper[4762]: I1009 13:28:03.933044 4762 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-8tz5k" podUID="10df5c6e-5eb3-4ba8-b98d-805b82bd6ca5" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Oct 09 13:28:03 crc kubenswrapper[4762]: I1009 13:28:03.952536 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 09 13:28:03 crc kubenswrapper[4762]: E1009 13:28:03.952852 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 13:28:04.452826742 +0000 UTC m=+160.226617781 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 09 13:28:03 crc kubenswrapper[4762]: I1009 13:28:03.953119 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2gj5n\" (UID: \"913065b9-29bd-4c9a-8d6e-e319bf91efbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-2gj5n"
Oct 09 13:28:03 crc kubenswrapper[4762]: E1009 13:28:03.954257 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 13:28:04.454246531 +0000 UTC m=+160.228037570 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2gj5n" (UID: "913065b9-29bd-4c9a-8d6e-e319bf91efbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 09 13:28:04 crc kubenswrapper[4762]: I1009 13:28:04.054759 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 09 13:28:04 crc kubenswrapper[4762]: E1009 13:28:04.055138 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 13:28:04.555119328 +0000 UTC m=+160.328910367 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 09 13:28:04 crc kubenswrapper[4762]: I1009 13:28:04.083424 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-85rnj"]
Oct 09 13:28:04 crc kubenswrapper[4762]: I1009 13:28:04.084596 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-85rnj"
Oct 09 13:28:04 crc kubenswrapper[4762]: I1009 13:28:04.087259 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb"
Oct 09 13:28:04 crc kubenswrapper[4762]: I1009 13:28:04.097076 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-85rnj"]
Oct 09 13:28:04 crc kubenswrapper[4762]: I1009 13:28:04.133204 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"]
Oct 09 13:28:04 crc kubenswrapper[4762]: I1009 13:28:04.134953 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Oct 09 13:28:04 crc kubenswrapper[4762]: I1009 13:28:04.150618 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt"
Oct 09 13:28:04 crc kubenswrapper[4762]: I1009 13:28:04.151019 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n"
Oct 09 13:28:04 crc kubenswrapper[4762]: I1009 13:28:04.158301 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ff2e2ba5-4651-4774-a428-10af2c71736d-catalog-content\") pod \"redhat-marketplace-85rnj\" (UID: \"ff2e2ba5-4651-4774-a428-10af2c71736d\") " pod="openshift-marketplace/redhat-marketplace-85rnj"
Oct 09 13:28:04 crc kubenswrapper[4762]: I1009 13:28:04.158346 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wp9qn\" (UniqueName: \"kubernetes.io/projected/ff2e2ba5-4651-4774-a428-10af2c71736d-kube-api-access-wp9qn\") pod \"redhat-marketplace-85rnj\" (UID: \"ff2e2ba5-4651-4774-a428-10af2c71736d\") " pod="openshift-marketplace/redhat-marketplace-85rnj"
Oct 09 13:28:04 crc kubenswrapper[4762]: I1009 13:28:04.158380 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ff2e2ba5-4651-4774-a428-10af2c71736d-utilities\") pod \"redhat-marketplace-85rnj\" (UID: \"ff2e2ba5-4651-4774-a428-10af2c71736d\") " pod="openshift-marketplace/redhat-marketplace-85rnj"
Oct 09 13:28:04 crc kubenswrapper[4762]: I1009 13:28:04.158454 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2gj5n\" (UID: \"913065b9-29bd-4c9a-8d6e-e319bf91efbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-2gj5n"
Oct 09 13:28:04 crc kubenswrapper[4762]: E1009 13:28:04.158724 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 13:28:04.658713211 +0000 UTC m=+160.432504250 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2gj5n" (UID: "913065b9-29bd-4c9a-8d6e-e319bf91efbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 09 13:28:04 crc kubenswrapper[4762]: I1009 13:28:04.159004 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"]
Oct 09 13:28:04 crc kubenswrapper[4762]: I1009 13:28:04.164930 4762 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-10-09T13:28:03.430199032Z","Handler":null,"Name":""}
Oct 09 13:28:04 crc kubenswrapper[4762]: I1009 13:28:04.180595 4762 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0
Oct 09 13:28:04 crc kubenswrapper[4762]: I1009 13:28:04.180647 4762 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock
Oct 09 13:28:04 crc kubenswrapper[4762]: I1009 13:28:04.259838 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 09 13:28:04 crc kubenswrapper[4762]: I1009 13:28:04.260037 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2e690132-1cdf-463d-9db6-0ee737075217-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"2e690132-1cdf-463d-9db6-0ee737075217\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Oct 09 13:28:04 crc kubenswrapper[4762]: I1009 13:28:04.260065 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ff2e2ba5-4651-4774-a428-10af2c71736d-catalog-content\") pod \"redhat-marketplace-85rnj\" (UID: \"ff2e2ba5-4651-4774-a428-10af2c71736d\") " pod="openshift-marketplace/redhat-marketplace-85rnj"
Oct 09 13:28:04 crc kubenswrapper[4762]: I1009 13:28:04.260130 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wp9qn\" (UniqueName: \"kubernetes.io/projected/ff2e2ba5-4651-4774-a428-10af2c71736d-kube-api-access-wp9qn\") pod \"redhat-marketplace-85rnj\" (UID: \"ff2e2ba5-4651-4774-a428-10af2c71736d\") " pod="openshift-marketplace/redhat-marketplace-85rnj"
Oct 09 13:28:04 crc kubenswrapper[4762]: I1009 13:28:04.260163 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ff2e2ba5-4651-4774-a428-10af2c71736d-utilities\") pod \"redhat-marketplace-85rnj\" (UID: \"ff2e2ba5-4651-4774-a428-10af2c71736d\") " pod="openshift-marketplace/redhat-marketplace-85rnj"
Oct 09 13:28:04 crc kubenswrapper[4762]: I1009 13:28:04.260201 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2e690132-1cdf-463d-9db6-0ee737075217-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"2e690132-1cdf-463d-9db6-0ee737075217\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Oct 09 13:28:04 crc kubenswrapper[4762]: I1009 13:28:04.260858 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ff2e2ba5-4651-4774-a428-10af2c71736d-catalog-content\") pod \"redhat-marketplace-85rnj\" (UID: \"ff2e2ba5-4651-4774-a428-10af2c71736d\") " pod="openshift-marketplace/redhat-marketplace-85rnj"
Oct 09 13:28:04 crc kubenswrapper[4762]: I1009 13:28:04.260974 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ff2e2ba5-4651-4774-a428-10af2c71736d-utilities\") pod \"redhat-marketplace-85rnj\" (UID: \"ff2e2ba5-4651-4774-a428-10af2c71736d\") " pod="openshift-marketplace/redhat-marketplace-85rnj"
Oct 09 13:28:04 crc kubenswrapper[4762]: I1009 13:28:04.277811 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue ""
Oct 09 13:28:04 crc kubenswrapper[4762]: I1009 13:28:04.281788 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wp9qn\" (UniqueName: \"kubernetes.io/projected/ff2e2ba5-4651-4774-a428-10af2c71736d-kube-api-access-wp9qn\") pod \"redhat-marketplace-85rnj\" (UID: \"ff2e2ba5-4651-4774-a428-10af2c71736d\") " pod="openshift-marketplace/redhat-marketplace-85rnj"
Oct 09 13:28:04 crc kubenswrapper[4762]: I1009 13:28:04.361731 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2e690132-1cdf-463d-9db6-0ee737075217-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"2e690132-1cdf-463d-9db6-0ee737075217\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Oct 09 13:28:04 crc kubenswrapper[4762]: I1009 13:28:04.361825 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2e690132-1cdf-463d-9db6-0ee737075217-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"2e690132-1cdf-463d-9db6-0ee737075217\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Oct 09 13:28:04 crc kubenswrapper[4762]: I1009 13:28:04.361882 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2gj5n\" (UID: \"913065b9-29bd-4c9a-8d6e-e319bf91efbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-2gj5n"
Oct 09 13:28:04 crc kubenswrapper[4762]: I1009 13:28:04.362227 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2e690132-1cdf-463d-9db6-0ee737075217-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"2e690132-1cdf-463d-9db6-0ee737075217\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Oct 09 13:28:04 crc kubenswrapper[4762]: I1009 13:28:04.372242 4762 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Oct 09 13:28:04 crc kubenswrapper[4762]: I1009 13:28:04.372288 4762 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2gj5n\" (UID: \"913065b9-29bd-4c9a-8d6e-e319bf91efbd\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-2gj5n"
Oct 09 13:28:04 crc kubenswrapper[4762]: I1009 13:28:04.379190 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2e690132-1cdf-463d-9db6-0ee737075217-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"2e690132-1cdf-463d-9db6-0ee737075217\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Oct 09 13:28:04 crc kubenswrapper[4762]: I1009 13:28:04.392904 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2gj5n\" (UID: \"913065b9-29bd-4c9a-8d6e-e319bf91efbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-2gj5n"
Oct 09 13:28:04 crc kubenswrapper[4762]: I1009 13:28:04.400278 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-85rnj"
Oct 09 13:28:04 crc kubenswrapper[4762]: I1009 13:28:04.460183 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Oct 09 13:28:04 crc kubenswrapper[4762]: I1009 13:28:04.492971 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-7vxtk"]
Oct 09 13:28:04 crc kubenswrapper[4762]: I1009 13:28:04.499488 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-7vxtk"]
Oct 09 13:28:04 crc kubenswrapper[4762]: I1009 13:28:04.499611 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7vxtk"
Oct 09 13:28:04 crc kubenswrapper[4762]: I1009 13:28:04.502240 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-2gj5n"
Oct 09 13:28:04 crc kubenswrapper[4762]: I1009 13:28:04.568787 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7pqx9\" (UniqueName: \"kubernetes.io/projected/09afe313-21ce-4c9b-8a66-299d430a7903-kube-api-access-7pqx9\") pod \"redhat-marketplace-7vxtk\" (UID: \"09afe313-21ce-4c9b-8a66-299d430a7903\") " pod="openshift-marketplace/redhat-marketplace-7vxtk"
Oct 09 13:28:04 crc kubenswrapper[4762]: I1009 13:28:04.568836 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/09afe313-21ce-4c9b-8a66-299d430a7903-utilities\") pod \"redhat-marketplace-7vxtk\" (UID: \"09afe313-21ce-4c9b-8a66-299d430a7903\") " pod="openshift-marketplace/redhat-marketplace-7vxtk"
Oct 09 13:28:04 crc kubenswrapper[4762]: I1009 13:28:04.569005 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/09afe313-21ce-4c9b-8a66-299d430a7903-catalog-content\") pod \"redhat-marketplace-7vxtk\" (UID: \"09afe313-21ce-4c9b-8a66-299d430a7903\") " pod="openshift-marketplace/redhat-marketplace-7vxtk"
Oct 09 13:28:04 crc kubenswrapper[4762]: I1009 13:28:04.670669 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/09afe313-21ce-4c9b-8a66-299d430a7903-utilities\") pod \"redhat-marketplace-7vxtk\" (UID: \"09afe313-21ce-4c9b-8a66-299d430a7903\") " pod="openshift-marketplace/redhat-marketplace-7vxtk"
Oct 09 13:28:04 crc kubenswrapper[4762]: I1009 13:28:04.670760 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/09afe313-21ce-4c9b-8a66-299d430a7903-catalog-content\") pod \"redhat-marketplace-7vxtk\" (UID: \"09afe313-21ce-4c9b-8a66-299d430a7903\") " pod="openshift-marketplace/redhat-marketplace-7vxtk"
Oct 09 13:28:04 crc kubenswrapper[4762]: I1009 13:28:04.670814 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7pqx9\" (UniqueName: \"kubernetes.io/projected/09afe313-21ce-4c9b-8a66-299d430a7903-kube-api-access-7pqx9\") pod \"redhat-marketplace-7vxtk\" (UID: \"09afe313-21ce-4c9b-8a66-299d430a7903\") " pod="openshift-marketplace/redhat-marketplace-7vxtk"
Oct 09 13:28:04 crc kubenswrapper[4762]: I1009 13:28:04.671467 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/09afe313-21ce-4c9b-8a66-299d430a7903-utilities\") pod \"redhat-marketplace-7vxtk\" (UID: \"09afe313-21ce-4c9b-8a66-299d430a7903\") " pod="openshift-marketplace/redhat-marketplace-7vxtk"
Oct 09 13:28:04 crc kubenswrapper[4762]: I1009 13:28:04.671725 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/09afe313-21ce-4c9b-8a66-299d430a7903-catalog-content\") pod \"redhat-marketplace-7vxtk\" (UID: \"09afe313-21ce-4c9b-8a66-299d430a7903\") " pod="openshift-marketplace/redhat-marketplace-7vxtk"
Oct 09 13:28:04 crc kubenswrapper[4762]: I1009 13:28:04.690998 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"]
Oct 09 13:28:04 crc kubenswrapper[4762]: I1009 13:28:04.693621 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7pqx9\" (UniqueName: \"kubernetes.io/projected/09afe313-21ce-4c9b-8a66-299d430a7903-kube-api-access-7pqx9\") pod \"redhat-marketplace-7vxtk\" (UID: \"09afe313-21ce-4c9b-8a66-299d430a7903\") " pod="openshift-marketplace/redhat-marketplace-7vxtk"
Oct 09 13:28:04 crc kubenswrapper[4762]: I1009 13:28:04.736658 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-2gj5n"]
Oct 09 13:28:04 crc kubenswrapper[4762]: W1009 13:28:04.742163 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod913065b9_29bd_4c9a_8d6e_e319bf91efbd.slice/crio-0fb8d0a740eafaa490823a38c4c92a4bd7d2e3aecf8358ea7c4c8ff66421bda8 WatchSource:0}: Error finding container 0fb8d0a740eafaa490823a38c4c92a4bd7d2e3aecf8358ea7c4c8ff66421bda8: Status 404 returned error can't find the container with id 0fb8d0a740eafaa490823a38c4c92a4bd7d2e3aecf8358ea7c4c8ff66421bda8
Oct 09 13:28:04 crc kubenswrapper[4762]: I1009 13:28:04.809359 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-85rnj"]
Oct 09 13:28:04 crc kubenswrapper[4762]: I1009 13:28:04.813761 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7vxtk"
Oct 09 13:28:04 crc kubenswrapper[4762]: W1009 13:28:04.851541 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podff2e2ba5_4651_4774_a428_10af2c71736d.slice/crio-7a582a0760c79a125829e8db3e0ff872a5e3d3e9fb586a7a347432faa323ed93 WatchSource:0}: Error finding container 7a582a0760c79a125829e8db3e0ff872a5e3d3e9fb586a7a347432faa323ed93: Status 404 returned error can't find the container with id 7a582a0760c79a125829e8db3e0ff872a5e3d3e9fb586a7a347432faa323ed93
Oct 09 13:28:04 crc kubenswrapper[4762]: I1009 13:28:04.911033 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"2e690132-1cdf-463d-9db6-0ee737075217","Type":"ContainerStarted","Data":"94a28313b894387417bc80595ec98839793f8fa48fd8076a97674cd929c14c72"}
Oct 09 13:28:04 crc kubenswrapper[4762]: I1009 13:28:04.912763 4762 generic.go:334] "Generic (PLEG): container finished" podID="2c17d894-7ee5-44c4-b64a-c05be6870a3d" containerID="d748135a3609670b4dee3556da0537e6568c637cb9e3fd6927899e7bf69164e3" exitCode=0
Oct 09 13:28:04 crc kubenswrapper[4762]: I1009 13:28:04.912803 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jhmrt" event={"ID":"2c17d894-7ee5-44c4-b64a-c05be6870a3d","Type":"ContainerDied","Data":"d748135a3609670b4dee3556da0537e6568c637cb9e3fd6927899e7bf69164e3"}
Oct 09 13:28:04 crc kubenswrapper[4762]: I1009 13:28:04.920600 4762 generic.go:334] "Generic (PLEG): container finished" podID="fbc3d339-b1fb-4705-ab1d-6b2946e12adc" containerID="5e4a852be16c1713c69e3c2c166a927bb86da1d0ca26d64e7e1ab7eec859e033" exitCode=0
Oct 09 13:28:04 crc kubenswrapper[4762]: I1009 13:28:04.920698 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-r6qld" event={"ID":"fbc3d339-b1fb-4705-ab1d-6b2946e12adc","Type":"ContainerDied","Data":"5e4a852be16c1713c69e3c2c166a927bb86da1d0ca26d64e7e1ab7eec859e033"}
Oct 09 13:28:04 crc kubenswrapper[4762]: I1009 13:28:04.923561 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-2gj5n" event={"ID":"913065b9-29bd-4c9a-8d6e-e319bf91efbd","Type":"ContainerStarted","Data":"0fb8d0a740eafaa490823a38c4c92a4bd7d2e3aecf8358ea7c4c8ff66421bda8"}
Oct 09 13:28:04 crc kubenswrapper[4762]: I1009 13:28:04.926853 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-85rnj" event={"ID":"ff2e2ba5-4651-4774-a428-10af2c71736d","Type":"ContainerStarted","Data":"7a582a0760c79a125829e8db3e0ff872a5e3d3e9fb586a7a347432faa323ed93"}
Oct 09 13:28:04 crc kubenswrapper[4762]: I1009 13:28:04.927829 4762 patch_prober.go:28] interesting pod/router-default-5444994796-8tz5k container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Oct 09 13:28:04 crc kubenswrapper[4762]: [-]has-synced failed: reason withheld
Oct 09 13:28:04 crc kubenswrapper[4762]: [+]process-running ok
Oct 09 13:28:04 crc kubenswrapper[4762]: healthz check failed
Oct 09 13:28:04 crc kubenswrapper[4762]: I1009 13:28:04.928021 4762 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-8tz5k" podUID="10df5c6e-5eb3-4ba8-b98d-805b82bd6ca5" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Oct 09 13:28:04 crc kubenswrapper[4762]: I1009 13:28:04.929181 4762 generic.go:334] "Generic (PLEG): container finished" podID="08bf2dc9-588c-4ea0-b57b-25737b61c178" containerID="b1de3575418e3af2ed7d2ab63d18e6103681f4363d3961dbe45cfd36e61ee13d" exitCode=0
Oct 09 13:28:04 crc kubenswrapper[4762]: I1009 13:28:04.929888 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-z5j4d" event={"ID":"08bf2dc9-588c-4ea0-b57b-25737b61c178","Type":"ContainerDied","Data":"b1de3575418e3af2ed7d2ab63d18e6103681f4363d3961dbe45cfd36e61ee13d"}
Oct 09 13:28:04 crc kubenswrapper[4762]: I1009 13:28:04.983849 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes"
Oct 09 13:28:05 crc kubenswrapper[4762]: I1009 13:28:05.053884 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-7vxtk"]
Oct 09 13:28:05 crc kubenswrapper[4762]: I1009 13:28:05.219772 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29333595-d52ht"
Oct 09 13:28:05 crc kubenswrapper[4762]: I1009 13:28:05.259388 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-8zwlj"
Oct 09 13:28:05 crc kubenswrapper[4762]: I1009 13:28:05.259868 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-jl67g"
Oct 09 13:28:05 crc kubenswrapper[4762]: I1009 13:28:05.281940 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mvbns\" (UniqueName: \"kubernetes.io/projected/3b828691-c7c6-4899-b524-d867e7f151a4-kube-api-access-mvbns\") pod \"3b828691-c7c6-4899-b524-d867e7f151a4\" (UID: \"3b828691-c7c6-4899-b524-d867e7f151a4\") "
Oct 09 13:28:05 crc kubenswrapper[4762]: I1009 13:28:05.282018 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3b828691-c7c6-4899-b524-d867e7f151a4-secret-volume\") pod \"3b828691-c7c6-4899-b524-d867e7f151a4\" (UID: \"3b828691-c7c6-4899-b524-d867e7f151a4\") "
Oct 09 13:28:05 crc kubenswrapper[4762]: I1009 13:28:05.282044 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3b828691-c7c6-4899-b524-d867e7f151a4-config-volume\") pod \"3b828691-c7c6-4899-b524-d867e7f151a4\" (UID: \"3b828691-c7c6-4899-b524-d867e7f151a4\") "
Oct 09 13:28:05 crc kubenswrapper[4762]: I1009 13:28:05.283548 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3b828691-c7c6-4899-b524-d867e7f151a4-config-volume" (OuterVolumeSpecName: "config-volume") pod "3b828691-c7c6-4899-b524-d867e7f151a4" (UID: "3b828691-c7c6-4899-b524-d867e7f151a4"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 09 13:28:05 crc kubenswrapper[4762]: I1009 13:28:05.288930 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3b828691-c7c6-4899-b524-d867e7f151a4-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "3b828691-c7c6-4899-b524-d867e7f151a4" (UID: "3b828691-c7c6-4899-b524-d867e7f151a4"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 09 13:28:05 crc kubenswrapper[4762]: I1009 13:28:05.289957 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3b828691-c7c6-4899-b524-d867e7f151a4-kube-api-access-mvbns" (OuterVolumeSpecName: "kube-api-access-mvbns") pod "3b828691-c7c6-4899-b524-d867e7f151a4" (UID: "3b828691-c7c6-4899-b524-d867e7f151a4"). InnerVolumeSpecName "kube-api-access-mvbns". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 09 13:28:05 crc kubenswrapper[4762]: I1009 13:28:05.383660 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mvbns\" (UniqueName: \"kubernetes.io/projected/3b828691-c7c6-4899-b524-d867e7f151a4-kube-api-access-mvbns\") on node \"crc\" DevicePath \"\""
Oct 09 13:28:05 crc kubenswrapper[4762]: I1009 13:28:05.383699 4762 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3b828691-c7c6-4899-b524-d867e7f151a4-secret-volume\") on node \"crc\" DevicePath \"\""
Oct 09 13:28:05 crc kubenswrapper[4762]: I1009 13:28:05.383711 4762 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3b828691-c7c6-4899-b524-d867e7f151a4-config-volume\") on node \"crc\" DevicePath \"\""
Oct 09 13:28:05 crc kubenswrapper[4762]: I1009 13:28:05.483134 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-2hbjn"]
Oct 09 13:28:05 crc kubenswrapper[4762]: E1009 13:28:05.483722 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3b828691-c7c6-4899-b524-d867e7f151a4" containerName="collect-profiles"
Oct 09 13:28:05 crc kubenswrapper[4762]: I1009 13:28:05.483739 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="3b828691-c7c6-4899-b524-d867e7f151a4" containerName="collect-profiles"
Oct 09 13:28:05 crc kubenswrapper[4762]: I1009 13:28:05.483856 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="3b828691-c7c6-4899-b524-d867e7f151a4" containerName="collect-profiles"
Oct 09 13:28:05 crc kubenswrapper[4762]: I1009 13:28:05.484593 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-2hbjn"
Oct 09 13:28:05 crc kubenswrapper[4762]: I1009 13:28:05.486536 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh"
Oct 09 13:28:05 crc kubenswrapper[4762]: I1009 13:28:05.498156 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-2hbjn"]
Oct 09 13:28:05 crc kubenswrapper[4762]: I1009 13:28:05.586944 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1f2bb561-6df4-46da-b21a-1d5621f45ab9-catalog-content\") pod \"redhat-operators-2hbjn\" (UID: \"1f2bb561-6df4-46da-b21a-1d5621f45ab9\") " pod="openshift-marketplace/redhat-operators-2hbjn"
Oct 09 13:28:05 crc kubenswrapper[4762]: I1009 13:28:05.587017 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1f2bb561-6df4-46da-b21a-1d5621f45ab9-utilities\") pod \"redhat-operators-2hbjn\" (UID: \"1f2bb561-6df4-46da-b21a-1d5621f45ab9\") " pod="openshift-marketplace/redhat-operators-2hbjn"
Oct 09 13:28:05 crc kubenswrapper[4762]: I1009 13:28:05.587111 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rkzlr\" (UniqueName: \"kubernetes.io/projected/1f2bb561-6df4-46da-b21a-1d5621f45ab9-kube-api-access-rkzlr\") pod \"redhat-operators-2hbjn\" (UID: \"1f2bb561-6df4-46da-b21a-1d5621f45ab9\") " pod="openshift-marketplace/redhat-operators-2hbjn"
Oct 09 13:28:05 crc kubenswrapper[4762]: I1009 13:28:05.688169 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1f2bb561-6df4-46da-b21a-1d5621f45ab9-catalog-content\") pod \"redhat-operators-2hbjn\" (UID: \"1f2bb561-6df4-46da-b21a-1d5621f45ab9\") " pod="openshift-marketplace/redhat-operators-2hbjn"
Oct 09 13:28:05 crc kubenswrapper[4762]: I1009 13:28:05.688235 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1f2bb561-6df4-46da-b21a-1d5621f45ab9-utilities\") pod \"redhat-operators-2hbjn\" (UID: \"1f2bb561-6df4-46da-b21a-1d5621f45ab9\") " pod="openshift-marketplace/redhat-operators-2hbjn"
Oct 09 13:28:05 crc kubenswrapper[4762]: I1009 13:28:05.688263 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rkzlr\" (UniqueName: \"kubernetes.io/projected/1f2bb561-6df4-46da-b21a-1d5621f45ab9-kube-api-access-rkzlr\") pod \"redhat-operators-2hbjn\" (UID: \"1f2bb561-6df4-46da-b21a-1d5621f45ab9\") " pod="openshift-marketplace/redhat-operators-2hbjn"
Oct 09 13:28:05 crc kubenswrapper[4762]: I1009 13:28:05.688792 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1f2bb561-6df4-46da-b21a-1d5621f45ab9-utilities\") pod \"redhat-operators-2hbjn\" (UID: \"1f2bb561-6df4-46da-b21a-1d5621f45ab9\") " pod="openshift-marketplace/redhat-operators-2hbjn"
Oct 09 13:28:05 crc kubenswrapper[4762]: I1009 13:28:05.688790 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1f2bb561-6df4-46da-b21a-1d5621f45ab9-catalog-content\") pod \"redhat-operators-2hbjn\" (UID: \"1f2bb561-6df4-46da-b21a-1d5621f45ab9\") " pod="openshift-marketplace/redhat-operators-2hbjn"
Oct 09 13:28:05 crc kubenswrapper[4762]: I1009 13:28:05.705731 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rkzlr\" (UniqueName: \"kubernetes.io/projected/1f2bb561-6df4-46da-b21a-1d5621f45ab9-kube-api-access-rkzlr\") pod \"redhat-operators-2hbjn\" (UID: \"1f2bb561-6df4-46da-b21a-1d5621f45ab9\") " pod="openshift-marketplace/redhat-operators-2hbjn"
Oct 09 13:28:05 crc kubenswrapper[4762]: I1009 13:28:05.879915 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-jcvrf"]
Oct 09 13:28:05 crc kubenswrapper[4762]: I1009 13:28:05.881144 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-jcvrf"
Oct 09 13:28:05 crc kubenswrapper[4762]: I1009 13:28:05.894993 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-jcvrf"]
Oct 09 13:28:05 crc kubenswrapper[4762]: I1009 13:28:05.926863 4762 patch_prober.go:28] interesting pod/router-default-5444994796-8tz5k container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Oct 09 13:28:05 crc kubenswrapper[4762]: [-]has-synced failed: reason withheld
Oct 09 13:28:05 crc kubenswrapper[4762]: [+]process-running ok
Oct 09 13:28:05 crc kubenswrapper[4762]: healthz check failed
Oct 09 13:28:05 crc kubenswrapper[4762]: I1009 13:28:05.926921 4762 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-8tz5k" podUID="10df5c6e-5eb3-4ba8-b98d-805b82bd6ca5" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Oct 09 13:28:05 crc kubenswrapper[4762]: I1009 13:28:05.930586 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-2hbjn"
Oct 09 13:28:05 crc kubenswrapper[4762]: E1009 13:28:05.984435 4762 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-pod2e690132_1cdf_463d_9db6_0ee737075217.slice/crio-917e38e67aba63b2d5aa226ce711665fe174347c5afdd5b29e17f173aaf78880.scope\": RecentStats: unable to find data in memory cache]"
Oct 09 13:28:05 crc kubenswrapper[4762]: I1009 13:28:05.987157 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"2e690132-1cdf-463d-9db6-0ee737075217","Type":"ContainerStarted","Data":"917e38e67aba63b2d5aa226ce711665fe174347c5afdd5b29e17f173aaf78880"}
Oct 09 13:28:06 crc kubenswrapper[4762]: I1009 13:28:05.992269 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-2gj5n" event={"ID":"913065b9-29bd-4c9a-8d6e-e319bf91efbd","Type":"ContainerStarted","Data":"c01b219cf3ac714381c9c2eb7c79ff922b5308decb4e40bc5d2023634227f34c"}
Oct 09 13:28:06 crc kubenswrapper[4762]: I1009 13:28:05.992829 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-2gj5n"
Oct 09 13:28:06 crc kubenswrapper[4762]: I1009 13:28:05.996369 4762 generic.go:334] "Generic (PLEG): container finished" podID="09afe313-21ce-4c9b-8a66-299d430a7903" containerID="c5f1a2889a8b730162a631ff5a04892a98778a819711330bbd7da866ee5fc195" exitCode=0
Oct 09 13:28:06 crc kubenswrapper[4762]: I1009 13:28:05.996417 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7vxtk" event={"ID":"09afe313-21ce-4c9b-8a66-299d430a7903","Type":"ContainerDied","Data":"c5f1a2889a8b730162a631ff5a04892a98778a819711330bbd7da866ee5fc195"}
Oct 09 13:28:06 crc kubenswrapper[4762]: I1009 13:28:05.996449 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7vxtk" event={"ID":"09afe313-21ce-4c9b-8a66-299d430a7903","Type":"ContainerStarted","Data":"a9cf734357b47ece4ec2424830bbab8d83a302575fafeaedea1101a6b09e87f1"}
Oct 09 13:28:06 crc kubenswrapper[4762]: I1009 13:28:05.998061 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/821e572e-77d5-4661-9dec-7da0cad19a4d-catalog-content\") pod \"redhat-operators-jcvrf\" (UID: \"821e572e-77d5-4661-9dec-7da0cad19a4d\") " pod="openshift-marketplace/redhat-operators-jcvrf"
Oct 09 13:28:06 crc kubenswrapper[4762]: I1009 13:28:05.998096 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wgnrf\" (UniqueName: \"kubernetes.io/projected/821e572e-77d5-4661-9dec-7da0cad19a4d-kube-api-access-wgnrf\") pod \"redhat-operators-jcvrf\" (UID: \"821e572e-77d5-4661-9dec-7da0cad19a4d\") " pod="openshift-marketplace/redhat-operators-jcvrf"
Oct 09 13:28:06 crc kubenswrapper[4762]: I1009 13:28:05.998155 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/821e572e-77d5-4661-9dec-7da0cad19a4d-utilities\") pod \"redhat-operators-jcvrf\" (UID: \"821e572e-77d5-4661-9dec-7da0cad19a4d\") " pod="openshift-marketplace/redhat-operators-jcvrf"
Oct 09 13:28:06 crc kubenswrapper[4762]: I1009 13:28:05.999000 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29333595-d52ht" event={"ID":"3b828691-c7c6-4899-b524-d867e7f151a4","Type":"ContainerDied","Data":"6babeb0114ff6869fc506ae7054446face7219df1f0ac544183b4555f624c3ea"}
Oct 09 13:28:06 crc kubenswrapper[4762]: I1009 13:28:05.999021 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6babeb0114ff6869fc506ae7054446face7219df1f0ac544183b4555f624c3ea"
Oct 09 13:28:06 crc kubenswrapper[4762]: I1009 13:28:05.999065 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29333595-d52ht"
Oct 09 13:28:06 crc kubenswrapper[4762]: I1009 13:28:06.016902 4762 generic.go:334] "Generic (PLEG): container finished" podID="ff2e2ba5-4651-4774-a428-10af2c71736d" containerID="a8f50281a01652ded24d989a78c23da5f6bb6993018869142bf1aa47eeb9400b" exitCode=0
Oct 09 13:28:06 crc kubenswrapper[4762]: I1009 13:28:06.016953 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-85rnj" event={"ID":"ff2e2ba5-4651-4774-a428-10af2c71736d","Type":"ContainerDied","Data":"a8f50281a01652ded24d989a78c23da5f6bb6993018869142bf1aa47eeb9400b"}
Oct 09 13:28:06 crc kubenswrapper[4762]: I1009 13:28:06.067837 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/revision-pruner-9-crc" podStartSLOduration=2.067815336 podStartE2EDuration="2.067815336s" podCreationTimestamp="2025-10-09 13:28:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:28:06.01492744 +0000 UTC m=+161.788718499" watchObservedRunningTime="2025-10-09 13:28:06.067815336 +0000 UTC m=+161.841606375"
Oct 09 13:28:06 crc kubenswrapper[4762]: I1009 13:28:06.071857 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-2gj5n" podStartSLOduration=141.071840506 podStartE2EDuration="2m21.071840506s" podCreationTimestamp="2025-10-09 13:25:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:28:06.05202612 +0000 UTC m=+161.825817159" watchObservedRunningTime="2025-10-09 13:28:06.071840506 +0000 UTC m=+161.845631545"
Oct 09 13:28:06 crc kubenswrapper[4762]: I1009 13:28:06.073165 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-924rd"
Oct 09 13:28:06 crc kubenswrapper[4762]: I1009 13:28:06.085655 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-924rd"
Oct 09 13:28:06 crc kubenswrapper[4762]: I1009 13:28:06.099604 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/821e572e-77d5-4661-9dec-7da0cad19a4d-utilities\") pod \"redhat-operators-jcvrf\" (UID: \"821e572e-77d5-4661-9dec-7da0cad19a4d\") " pod="openshift-marketplace/redhat-operators-jcvrf"
Oct 09 13:28:06 crc kubenswrapper[4762]: I1009 13:28:06.099732 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/821e572e-77d5-4661-9dec-7da0cad19a4d-catalog-content\") pod \"redhat-operators-jcvrf\" (UID: \"821e572e-77d5-4661-9dec-7da0cad19a4d\") " pod="openshift-marketplace/redhat-operators-jcvrf"
Oct 09 13:28:06 crc kubenswrapper[4762]: I1009 13:28:06.099789 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wgnrf\" (UniqueName: \"kubernetes.io/projected/821e572e-77d5-4661-9dec-7da0cad19a4d-kube-api-access-wgnrf\") pod \"redhat-operators-jcvrf\" (UID: \"821e572e-77d5-4661-9dec-7da0cad19a4d\") " pod="openshift-marketplace/redhat-operators-jcvrf"
Oct 09 13:28:06 crc kubenswrapper[4762]: I1009 13:28:06.112269 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/821e572e-77d5-4661-9dec-7da0cad19a4d-catalog-content\") pod \"redhat-operators-jcvrf\" (UID: \"821e572e-77d5-4661-9dec-7da0cad19a4d\") " pod="openshift-marketplace/redhat-operators-jcvrf"
Oct 09 13:28:06 crc kubenswrapper[4762]: I1009 13:28:06.112584 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/821e572e-77d5-4661-9dec-7da0cad19a4d-utilities\") pod \"redhat-operators-jcvrf\" (UID: \"821e572e-77d5-4661-9dec-7da0cad19a4d\") " pod="openshift-marketplace/redhat-operators-jcvrf"
Oct 09 13:28:06 crc kubenswrapper[4762]: I1009 13:28:06.137573 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wgnrf\" (UniqueName: \"kubernetes.io/projected/821e572e-77d5-4661-9dec-7da0cad19a4d-kube-api-access-wgnrf\") pod \"redhat-operators-jcvrf\" (UID: \"821e572e-77d5-4661-9dec-7da0cad19a4d\") " pod="openshift-marketplace/redhat-operators-jcvrf"
Oct 09 13:28:06 crc kubenswrapper[4762]: I1009 13:28:06.156255 4762 patch_prober.go:28] interesting pod/console-f9d7485db-r5hfv container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.11:8443/health\": dial tcp 10.217.0.11:8443: connect: connection refused" start-of-body=
Oct 09 13:28:06 crc kubenswrapper[4762]: I1009 13:28:06.156312 4762 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-r5hfv" podUID="4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8" containerName="console" probeResult="failure" output="Get \"https://10.217.0.11:8443/health\": dial tcp 10.217.0.11:8443: connect: connection refused"
Oct 09 13:28:06 crc kubenswrapper[4762]: I1009 13:28:06.157210 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-r5hfv"
Oct 09 13:28:06 crc kubenswrapper[4762]: I1009 13:28:06.157353 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-r5hfv"
Oct 09 13:28:06 crc kubenswrapper[4762]: I1009 13:28:06.219971 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-jcvrf"
Oct 09 13:28:06 crc kubenswrapper[4762]: I1009 13:28:06.326417 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-2hbjn"]
Oct 09 13:28:06 crc kubenswrapper[4762]: I1009 13:28:06.409311 4762 patch_prober.go:28] interesting pod/downloads-7954f5f757-l2j7b container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" start-of-body=
Oct 09 13:28:06 crc kubenswrapper[4762]: I1009 13:28:06.409686 4762 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-l2j7b" podUID="50b92b8d-158e-4ec4-aaed-a5c83aafeb8b" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused"
Oct 09 13:28:06 crc kubenswrapper[4762]: I1009 13:28:06.409427 4762 patch_prober.go:28] interesting pod/downloads-7954f5f757-l2j7b container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" start-of-body=
Oct 09 13:28:06 crc kubenswrapper[4762]: I1009 13:28:06.409808 4762 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-l2j7b" podUID="50b92b8d-158e-4ec4-aaed-a5c83aafeb8b" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused"
Oct 09 13:28:06 crc kubenswrapper[4762]: I1009 13:28:06.514199 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-jcvrf"]
Oct 09 13:28:06 crc kubenswrapper[4762]: W1009 13:28:06.523291 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod821e572e_77d5_4661_9dec_7da0cad19a4d.slice/crio-ffd0239aacd99ad969d81584fa143b5188f7e93045375b9a8a78be8ff3e6b5fc WatchSource:0}: Error finding container ffd0239aacd99ad969d81584fa143b5188f7e93045375b9a8a78be8ff3e6b5fc: Status 404 returned error can't find the container with id ffd0239aacd99ad969d81584fa143b5188f7e93045375b9a8a78be8ff3e6b5fc
Oct 09 13:28:06 crc kubenswrapper[4762]: I1009 13:28:06.727393 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f9a76399-c2ae-487b-a52c-f0e271fb1d20-metrics-certs\") pod \"network-metrics-daemon-k4bwn\" (UID: \"f9a76399-c2ae-487b-a52c-f0e271fb1d20\") " pod="openshift-multus/network-metrics-daemon-k4bwn"
Oct 09 13:28:06 crc kubenswrapper[4762]: I1009 13:28:06.734292 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f9a76399-c2ae-487b-a52c-f0e271fb1d20-metrics-certs\") pod \"network-metrics-daemon-k4bwn\" (UID: \"f9a76399-c2ae-487b-a52c-f0e271fb1d20\") " pod="openshift-multus/network-metrics-daemon-k4bwn"
Oct 09 13:28:06 crc kubenswrapper[4762]: I1009 13:28:06.924068 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-8tz5k"
Oct 09 13:28:06 crc kubenswrapper[4762]: I1009 13:28:06.927766 4762 patch_prober.go:28] interesting pod/router-default-5444994796-8tz5k container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Oct 09 13:28:06 crc kubenswrapper[4762]: [-]has-synced failed: reason withheld
Oct 09 13:28:06 crc kubenswrapper[4762]: [+]process-running ok
Oct 09 13:28:06 crc kubenswrapper[4762]: healthz check failed
Oct 09 13:28:06 crc kubenswrapper[4762]: I1009 13:28:06.927845 4762 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-8tz5k" podUID="10df5c6e-5eb3-4ba8-b98d-805b82bd6ca5" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Oct 09 13:28:06 crc kubenswrapper[4762]: I1009 13:28:06.974964 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k4bwn"
Oct 09 13:28:06 crc kubenswrapper[4762]: I1009 13:28:06.987253 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-9957f"
Oct 09 13:28:06 crc kubenswrapper[4762]: I1009 13:28:06.997817 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ljrw4"
Oct 09 13:28:07 crc kubenswrapper[4762]: I1009 13:28:07.050777 4762 generic.go:334] "Generic (PLEG): container finished" podID="821e572e-77d5-4661-9dec-7da0cad19a4d" containerID="315071a6e84ff8085750efcc6f506f8336dac2f3641ac7968b45c3d2555e832d" exitCode=0
Oct 09 13:28:07 crc kubenswrapper[4762]: I1009 13:28:07.050883 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jcvrf" event={"ID":"821e572e-77d5-4661-9dec-7da0cad19a4d","Type":"ContainerDied","Data":"315071a6e84ff8085750efcc6f506f8336dac2f3641ac7968b45c3d2555e832d"}
Oct 09 13:28:07 crc kubenswrapper[4762]: I1009 13:28:07.050911 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jcvrf" event={"ID":"821e572e-77d5-4661-9dec-7da0cad19a4d","Type":"ContainerStarted","Data":"ffd0239aacd99ad969d81584fa143b5188f7e93045375b9a8a78be8ff3e6b5fc"}
Oct 09 13:28:07 crc kubenswrapper[4762]: I1009 13:28:07.054962 4762 generic.go:334] "Generic (PLEG): container finished" podID="1f2bb561-6df4-46da-b21a-1d5621f45ab9" containerID="1c4847e2e2771fd049d0cc794c22ea90f734fbf9449725b301b66c1eb62cd17b" exitCode=0
Oct 09 13:28:07 crc kubenswrapper[4762]: I1009 13:28:07.055053 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2hbjn" event={"ID":"1f2bb561-6df4-46da-b21a-1d5621f45ab9","Type":"ContainerDied","Data":"1c4847e2e2771fd049d0cc794c22ea90f734fbf9449725b301b66c1eb62cd17b"}
Oct 09 13:28:07 crc kubenswrapper[4762]: I1009 13:28:07.055100 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2hbjn" event={"ID":"1f2bb561-6df4-46da-b21a-1d5621f45ab9","Type":"ContainerStarted","Data":"b4b7bdc0e5fc90a3df035708ec76024e8c42f944de5a954620b62eb3fd3251e3"}
Oct 09 13:28:07 crc kubenswrapper[4762]: I1009 13:28:07.057623 4762 generic.go:334] "Generic (PLEG): container finished" podID="2e690132-1cdf-463d-9db6-0ee737075217" containerID="917e38e67aba63b2d5aa226ce711665fe174347c5afdd5b29e17f173aaf78880" exitCode=0
Oct 09 13:28:07 crc kubenswrapper[4762]: I1009 13:28:07.058037 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"2e690132-1cdf-463d-9db6-0ee737075217","Type":"ContainerDied","Data":"917e38e67aba63b2d5aa226ce711665fe174347c5afdd5b29e17f173aaf78880"}
Oct 09 13:28:07 crc kubenswrapper[4762]: I1009 13:28:07.293874 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-k4bwn"]
Oct 09 13:28:07 crc kubenswrapper[4762]: W1009 13:28:07.318530 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf9a76399_c2ae_487b_a52c_f0e271fb1d20.slice/crio-77227048cd7784167004bf0bc790bb362d4daff862567e46bfb2960593762b5d WatchSource:0}: Error finding container 77227048cd7784167004bf0bc790bb362d4daff862567e46bfb2960593762b5d: Status 404 returned error can't find the container with id 77227048cd7784167004bf0bc790bb362d4daff862567e46bfb2960593762b5d
Oct 09 13:28:07 crc kubenswrapper[4762]: I1009 13:28:07.443151 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"]
Oct 09 13:28:07 crc kubenswrapper[4762]: I1009 13:28:07.446070 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc"
Oct 09 13:28:07 crc kubenswrapper[4762]: I1009 13:28:07.451384 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n"
Oct 09 13:28:07 crc kubenswrapper[4762]: I1009 13:28:07.451753 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt"
Oct 09 13:28:07 crc kubenswrapper[4762]: I1009 13:28:07.455691 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"]
Oct 09 13:28:07 crc kubenswrapper[4762]: I1009 13:28:07.546713 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/63d62b58-053c-45ff-a512-b39899de2ae9-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"63d62b58-053c-45ff-a512-b39899de2ae9\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Oct 09 13:28:07 crc kubenswrapper[4762]: I1009 13:28:07.547538 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/63d62b58-053c-45ff-a512-b39899de2ae9-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"63d62b58-053c-45ff-a512-b39899de2ae9\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Oct 09 13:28:07 crc kubenswrapper[4762]: I1009 13:28:07.649247 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/63d62b58-053c-45ff-a512-b39899de2ae9-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"63d62b58-053c-45ff-a512-b39899de2ae9\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Oct 09 13:28:07 crc kubenswrapper[4762]: I1009 13:28:07.649324 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/63d62b58-053c-45ff-a512-b39899de2ae9-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"63d62b58-053c-45ff-a512-b39899de2ae9\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Oct 09 13:28:07 crc kubenswrapper[4762]: I1009 13:28:07.649412 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/63d62b58-053c-45ff-a512-b39899de2ae9-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"63d62b58-053c-45ff-a512-b39899de2ae9\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Oct 09 13:28:07 crc kubenswrapper[4762]: I1009 13:28:07.669127 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/63d62b58-053c-45ff-a512-b39899de2ae9-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"63d62b58-053c-45ff-a512-b39899de2ae9\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Oct 09 13:28:07 crc kubenswrapper[4762]: I1009 13:28:07.777859 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc"
Oct 09 13:28:07 crc kubenswrapper[4762]: I1009 13:28:07.927492 4762 patch_prober.go:28] interesting pod/router-default-5444994796-8tz5k container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Oct 09 13:28:07 crc kubenswrapper[4762]: [-]has-synced failed: reason withheld
Oct 09 13:28:07 crc kubenswrapper[4762]: [+]process-running ok
Oct 09 13:28:07 crc kubenswrapper[4762]: healthz check failed
Oct 09 13:28:07 crc kubenswrapper[4762]: I1009 13:28:07.927577 4762 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-8tz5k" podUID="10df5c6e-5eb3-4ba8-b98d-805b82bd6ca5" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Oct 09 13:28:08 crc kubenswrapper[4762]: I1009 13:28:08.071441 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-k4bwn" event={"ID":"f9a76399-c2ae-487b-a52c-f0e271fb1d20","Type":"ContainerStarted","Data":"a107808fd8bf0b5ba83a5915c9793cd9a79a20ee55cc05fa2d3f2c59a1929443"}
Oct 09 13:28:08 crc kubenswrapper[4762]: I1009 13:28:08.071479 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-k4bwn" event={"ID":"f9a76399-c2ae-487b-a52c-f0e271fb1d20","Type":"ContainerStarted","Data":"77227048cd7784167004bf0bc790bb362d4daff862567e46bfb2960593762b5d"}
Oct 09 13:28:08 crc kubenswrapper[4762]: I1009 13:28:08.935846 4762 patch_prober.go:28] interesting pod/router-default-5444994796-8tz5k container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Oct 09 13:28:08 crc kubenswrapper[4762]: [-]has-synced failed: reason withheld
Oct 09 13:28:08 crc kubenswrapper[4762]: [+]process-running ok
Oct 09 13:28:08 crc kubenswrapper[4762]: healthz check failed
Oct 09 13:28:08 crc kubenswrapper[4762]: I1009 13:28:08.936241 4762 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-8tz5k" podUID="10df5c6e-5eb3-4ba8-b98d-805b82bd6ca5" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Oct 09 13:28:09 crc kubenswrapper[4762]: I1009 13:28:09.047317 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-6tzrl"
Oct 09 13:28:09 crc kubenswrapper[4762]: I1009 13:28:09.926882 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-8tz5k"
Oct 09 13:28:09 crc kubenswrapper[4762]: I1009 13:28:09.929103 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-8tz5k"
Oct 09 13:28:11 crc kubenswrapper[4762]: I1009 13:28:11.970409 4762 patch_prober.go:28] interesting pod/machine-config-daemon-5v6hv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 09 13:28:11 crc kubenswrapper[4762]: I1009 13:28:11.970476 4762 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 09 13:28:16 crc kubenswrapper[4762]: I1009 13:28:16.162991 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-r5hfv"
Oct 09 13:28:16 crc kubenswrapper[4762]: I1009 13:28:16.167278 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-r5hfv"
Oct 09 13:28:16 crc kubenswrapper[4762]: I1009 13:28:16.408613 4762 patch_prober.go:28] interesting pod/downloads-7954f5f757-l2j7b container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" start-of-body=
Oct 09 13:28:16 crc kubenswrapper[4762]: I1009 13:28:16.408687 4762 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-l2j7b" podUID="50b92b8d-158e-4ec4-aaed-a5c83aafeb8b" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused"
Oct 09 13:28:16 crc kubenswrapper[4762]: I1009 13:28:16.408697 4762 patch_prober.go:28] interesting pod/downloads-7954f5f757-l2j7b container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" start-of-body=
Oct 09 13:28:16 crc kubenswrapper[4762]: I1009 13:28:16.408761 4762 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-l2j7b" podUID="50b92b8d-158e-4ec4-aaed-a5c83aafeb8b" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused"
Oct 09 13:28:17 crc kubenswrapper[4762]: I1009 13:28:17.563964 4762 util.go:48] "No ready sandbox for pod can be found.
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 09 13:28:17 crc kubenswrapper[4762]: I1009 13:28:17.585384 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2e690132-1cdf-463d-9db6-0ee737075217-kubelet-dir\") pod \"2e690132-1cdf-463d-9db6-0ee737075217\" (UID: \"2e690132-1cdf-463d-9db6-0ee737075217\") " Oct 09 13:28:17 crc kubenswrapper[4762]: I1009 13:28:17.585504 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2e690132-1cdf-463d-9db6-0ee737075217-kube-api-access\") pod \"2e690132-1cdf-463d-9db6-0ee737075217\" (UID: \"2e690132-1cdf-463d-9db6-0ee737075217\") " Oct 09 13:28:17 crc kubenswrapper[4762]: I1009 13:28:17.586503 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2e690132-1cdf-463d-9db6-0ee737075217-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "2e690132-1cdf-463d-9db6-0ee737075217" (UID: "2e690132-1cdf-463d-9db6-0ee737075217"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 13:28:17 crc kubenswrapper[4762]: I1009 13:28:17.592757 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2e690132-1cdf-463d-9db6-0ee737075217-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "2e690132-1cdf-463d-9db6-0ee737075217" (UID: "2e690132-1cdf-463d-9db6-0ee737075217"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:28:17 crc kubenswrapper[4762]: I1009 13:28:17.686583 4762 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2e690132-1cdf-463d-9db6-0ee737075217-kubelet-dir\") on node \"crc\" DevicePath \"\"" Oct 09 13:28:17 crc kubenswrapper[4762]: I1009 13:28:17.686649 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2e690132-1cdf-463d-9db6-0ee737075217-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 09 13:28:18 crc kubenswrapper[4762]: I1009 13:28:18.143831 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"2e690132-1cdf-463d-9db6-0ee737075217","Type":"ContainerDied","Data":"94a28313b894387417bc80595ec98839793f8fa48fd8076a97674cd929c14c72"} Oct 09 13:28:18 crc kubenswrapper[4762]: I1009 13:28:18.143906 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="94a28313b894387417bc80595ec98839793f8fa48fd8076a97674cd929c14c72" Oct 09 13:28:18 crc kubenswrapper[4762]: I1009 13:28:18.144002 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 09 13:28:23 crc kubenswrapper[4762]: I1009 13:28:23.134412 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 13:28:24 crc kubenswrapper[4762]: I1009 13:28:24.510082 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-2gj5n" Oct 09 13:28:26 crc kubenswrapper[4762]: I1009 13:28:26.408024 4762 patch_prober.go:28] interesting pod/downloads-7954f5f757-l2j7b container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" start-of-body= Oct 09 13:28:26 crc kubenswrapper[4762]: I1009 13:28:26.408225 4762 patch_prober.go:28] interesting pod/downloads-7954f5f757-l2j7b container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" start-of-body= Oct 09 13:28:26 crc kubenswrapper[4762]: I1009 13:28:26.408422 4762 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-l2j7b" podUID="50b92b8d-158e-4ec4-aaed-a5c83aafeb8b" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" Oct 09 13:28:26 crc kubenswrapper[4762]: I1009 13:28:26.408362 4762 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-l2j7b" podUID="50b92b8d-158e-4ec4-aaed-a5c83aafeb8b" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" Oct 09 13:28:26 crc kubenswrapper[4762]: I1009 13:28:26.408501 4762 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-console/downloads-7954f5f757-l2j7b" Oct 09 13:28:26 crc kubenswrapper[4762]: I1009 13:28:26.409098 4762 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="download-server" containerStatusID={"Type":"cri-o","ID":"4e33a471d44f5bab0efe51351133ae34b4560ade3647bc77f0d225a63bb3ca77"} pod="openshift-console/downloads-7954f5f757-l2j7b" containerMessage="Container download-server failed liveness probe, will be restarted" Oct 09 13:28:26 crc kubenswrapper[4762]: I1009 13:28:26.409189 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/downloads-7954f5f757-l2j7b" podUID="50b92b8d-158e-4ec4-aaed-a5c83aafeb8b" containerName="download-server" containerID="cri-o://4e33a471d44f5bab0efe51351133ae34b4560ade3647bc77f0d225a63bb3ca77" gracePeriod=2 Oct 09 13:28:26 crc kubenswrapper[4762]: I1009 13:28:26.409683 4762 patch_prober.go:28] interesting pod/downloads-7954f5f757-l2j7b container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" start-of-body= Oct 09 13:28:26 crc kubenswrapper[4762]: I1009 13:28:26.409770 4762 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-l2j7b" podUID="50b92b8d-158e-4ec4-aaed-a5c83aafeb8b" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" Oct 09 
13:28:33 crc kubenswrapper[4762]: I1009 13:28:33.224683 4762 generic.go:334] "Generic (PLEG): container finished" podID="50b92b8d-158e-4ec4-aaed-a5c83aafeb8b" containerID="4e33a471d44f5bab0efe51351133ae34b4560ade3647bc77f0d225a63bb3ca77" exitCode=0 Oct 09 13:28:33 crc kubenswrapper[4762]: I1009 13:28:33.224745 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-l2j7b" event={"ID":"50b92b8d-158e-4ec4-aaed-a5c83aafeb8b","Type":"ContainerDied","Data":"4e33a471d44f5bab0efe51351133ae34b4560ade3647bc77f0d225a63bb3ca77"} Oct 09 13:28:36 crc kubenswrapper[4762]: I1009 13:28:36.002946 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vj7lx" Oct 09 13:28:36 crc kubenswrapper[4762]: I1009 13:28:36.409349 4762 patch_prober.go:28] interesting pod/downloads-7954f5f757-l2j7b container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" start-of-body= Oct 09 13:28:36 crc kubenswrapper[4762]: I1009 13:28:36.409412 4762 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-l2j7b" podUID="50b92b8d-158e-4ec4-aaed-a5c83aafeb8b" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" Oct 09 13:28:41 crc kubenswrapper[4762]: I1009 13:28:41.969427 4762 patch_prober.go:28] interesting pod/machine-config-daemon-5v6hv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 13:28:41 crc kubenswrapper[4762]: I1009 13:28:41.969923 4762 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 13:28:42 crc kubenswrapper[4762]: E1009 13:28:42.839075 4762 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Oct 09 13:28:42 crc kubenswrapper[4762]: E1009 13:28:42.839353 4762 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-gpjt7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-jhmrt_openshift-marketplace(2c17d894-7ee5-44c4-b64a-c05be6870a3d): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 09 13:28:42 crc kubenswrapper[4762]: E1009 13:28:42.840797 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-jhmrt" podUID="2c17d894-7ee5-44c4-b64a-c05be6870a3d" Oct 09 13:28:43 crc kubenswrapper[4762]: I1009 13:28:43.224316 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Oct 09 13:28:43 crc kubenswrapper[4762]: W1009 13:28:43.247365 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod63d62b58_053c_45ff_a512_b39899de2ae9.slice/crio-d5ad8f73eeb06f00dbcc2c2897268f4be05d76feb7066f1183a2ff898eb5f380 WatchSource:0}: Error finding container d5ad8f73eeb06f00dbcc2c2897268f4be05d76feb7066f1183a2ff898eb5f380: Status 404 returned error can't find the container with id d5ad8f73eeb06f00dbcc2c2897268f4be05d76feb7066f1183a2ff898eb5f380 Oct 09 13:28:43 crc kubenswrapper[4762]: I1009 13:28:43.304830 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"63d62b58-053c-45ff-a512-b39899de2ae9","Type":"ContainerStarted","Data":"d5ad8f73eeb06f00dbcc2c2897268f4be05d76feb7066f1183a2ff898eb5f380"} Oct 09 13:28:43 crc kubenswrapper[4762]: E1009 13:28:43.307231 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-jhmrt" podUID="2c17d894-7ee5-44c4-b64a-c05be6870a3d" Oct 09 13:28:44 crc kubenswrapper[4762]: E1009 13:28:44.161421 4762 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from 
manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Oct 09 13:28:44 crc kubenswrapper[4762]: E1009 13:28:44.161702 4762 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-cxgtn,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-wpc52_openshift-marketplace(4de8e24c-273f-4ff9-83c2-9dd8952c3d74): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 09 13:28:44 crc kubenswrapper[4762]: E1009 13:28:44.162880 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-wpc52" podUID="4de8e24c-273f-4ff9-83c2-9dd8952c3d74" Oct 09 13:28:44 crc kubenswrapper[4762]: I1009 13:28:44.311577 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-k4bwn" event={"ID":"f9a76399-c2ae-487b-a52c-f0e271fb1d20","Type":"ContainerStarted","Data":"63de9fe87d3ab1ceba3a26055cdd115c34e1785640d1d0ae2cb6b67952e1c1b9"} Oct 09 13:28:44 crc kubenswrapper[4762]: I1009 13:28:44.313681 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-l2j7b" event={"ID":"50b92b8d-158e-4ec4-aaed-a5c83aafeb8b","Type":"ContainerStarted","Data":"6463ebd30c20705c84b863fa3ee832bd40a88a15a9ecf4aac44f499b210982de"} Oct 09 13:28:44 crc kubenswrapper[4762]: I1009 13:28:44.314087 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-l2j7b" Oct 09 13:28:44 crc kubenswrapper[4762]: I1009 13:28:44.317806 4762 patch_prober.go:28] interesting pod/downloads-7954f5f757-l2j7b container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.10:8080/\": dial tcp 
10.217.0.10:8080: connect: connection refused" start-of-body= Oct 09 13:28:44 crc kubenswrapper[4762]: I1009 13:28:44.317858 4762 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-l2j7b" podUID="50b92b8d-158e-4ec4-aaed-a5c83aafeb8b" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" Oct 09 13:28:44 crc kubenswrapper[4762]: I1009 13:28:44.318876 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"63d62b58-053c-45ff-a512-b39899de2ae9","Type":"ContainerStarted","Data":"129ba95fdd765460342015a281df08cd2c5b98e4c69fcaebe2330a1a91f16f52"} Oct 09 13:28:44 crc kubenswrapper[4762]: E1009 13:28:44.320295 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-wpc52" podUID="4de8e24c-273f-4ff9-83c2-9dd8952c3d74" Oct 09 13:28:44 crc kubenswrapper[4762]: E1009 13:28:44.856947 4762 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Oct 09 13:28:44 crc kubenswrapper[4762]: E1009 13:28:44.857326 4762 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-lwvl7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-r6qld_openshift-marketplace(fbc3d339-b1fb-4705-ab1d-6b2946e12adc): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 09 13:28:44 crc kubenswrapper[4762]: E1009 13:28:44.858522 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with 
ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-r6qld" podUID="fbc3d339-b1fb-4705-ab1d-6b2946e12adc" Oct 09 13:28:45 crc kubenswrapper[4762]: I1009 13:28:45.323820 4762 generic.go:334] "Generic (PLEG): container finished" podID="63d62b58-053c-45ff-a512-b39899de2ae9" containerID="129ba95fdd765460342015a281df08cd2c5b98e4c69fcaebe2330a1a91f16f52" exitCode=0 Oct 09 13:28:45 crc kubenswrapper[4762]: I1009 13:28:45.323895 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"63d62b58-053c-45ff-a512-b39899de2ae9","Type":"ContainerDied","Data":"129ba95fdd765460342015a281df08cd2c5b98e4c69fcaebe2330a1a91f16f52"} Oct 09 13:28:45 crc kubenswrapper[4762]: I1009 13:28:45.324643 4762 patch_prober.go:28] interesting pod/downloads-7954f5f757-l2j7b container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" start-of-body= Oct 09 13:28:45 crc kubenswrapper[4762]: I1009 13:28:45.324695 4762 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-l2j7b" podUID="50b92b8d-158e-4ec4-aaed-a5c83aafeb8b" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" Oct 09 13:28:45 crc kubenswrapper[4762]: E1009 13:28:45.325083 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-r6qld" podUID="fbc3d339-b1fb-4705-ab1d-6b2946e12adc" Oct 09 13:28:45 crc kubenswrapper[4762]: I1009 13:28:45.384417 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-k4bwn" podStartSLOduration=181.384401378 podStartE2EDuration="3m1.384401378s" podCreationTimestamp="2025-10-09 13:25:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:28:45.38376703 +0000 UTC m=+201.157558069" watchObservedRunningTime="2025-10-09 13:28:45.384401378 +0000 UTC m=+201.158192417" Oct 09 13:28:46 crc kubenswrapper[4762]: I1009 13:28:46.407753 4762 patch_prober.go:28] interesting pod/downloads-7954f5f757-l2j7b container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" start-of-body= Oct 09 13:28:46 crc kubenswrapper[4762]: I1009 13:28:46.408057 4762 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-l2j7b" podUID="50b92b8d-158e-4ec4-aaed-a5c83aafeb8b" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" Oct 09 13:28:46 crc kubenswrapper[4762]: I1009 13:28:46.420843 4762 patch_prober.go:28] interesting pod/downloads-7954f5f757-l2j7b container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" start-of-body= Oct 09 13:28:46 crc kubenswrapper[4762]: I1009 
13:28:46.420951 4762 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-l2j7b" podUID="50b92b8d-158e-4ec4-aaed-a5c83aafeb8b" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" Oct 09 13:28:50 crc kubenswrapper[4762]: E1009 13:28:50.761227 4762 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Oct 09 13:28:50 crc kubenswrapper[4762]: E1009 13:28:50.761919 4762 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-sv8s4,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-z5j4d_openshift-marketplace(08bf2dc9-588c-4ea0-b57b-25737b61c178): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 09 13:28:50 crc kubenswrapper[4762]: E1009 13:28:50.763333 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-z5j4d" podUID="08bf2dc9-588c-4ea0-b57b-25737b61c178" Oct 09 13:28:51 crc kubenswrapper[4762]: E1009 13:28:51.935671 4762 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Oct 09 13:28:51 crc kubenswrapper[4762]: E1009 13:28:51.936058 4762 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs 
--catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-wgnrf,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-jcvrf_openshift-marketplace(821e572e-77d5-4661-9dec-7da0cad19a4d): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 09 13:28:51 crc kubenswrapper[4762]: E1009 13:28:51.937249 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-jcvrf" podUID="821e572e-77d5-4661-9dec-7da0cad19a4d" Oct 09 13:28:52 crc kubenswrapper[4762]: E1009 13:28:52.728623 4762 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Oct 09 13:28:52 crc kubenswrapper[4762]: E1009 13:28:52.728816 4762 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-rkzlr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-2hbjn_openshift-marketplace(1f2bb561-6df4-46da-b21a-1d5621f45ab9): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 09 13:28:52 crc kubenswrapper[4762]: E1009 13:28:52.730362 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-2hbjn" podUID="1f2bb561-6df4-46da-b21a-1d5621f45ab9" Oct 09 13:28:56 crc kubenswrapper[4762]: I1009 13:28:56.433911 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-l2j7b" Oct 09 13:29:02 crc kubenswrapper[4762]: E1009 13:29:02.189999 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-2hbjn" podUID="1f2bb561-6df4-46da-b21a-1d5621f45ab9" Oct 09 13:29:02 crc kubenswrapper[4762]: I1009 13:29:02.251398 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 09 13:29:02 crc kubenswrapper[4762]: I1009 13:29:02.434695 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/63d62b58-053c-45ff-a512-b39899de2ae9-kube-api-access\") pod \"63d62b58-053c-45ff-a512-b39899de2ae9\" (UID: \"63d62b58-053c-45ff-a512-b39899de2ae9\") " Oct 09 13:29:02 crc kubenswrapper[4762]: I1009 13:29:02.434918 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/63d62b58-053c-45ff-a512-b39899de2ae9-kubelet-dir\") pod \"63d62b58-053c-45ff-a512-b39899de2ae9\" (UID: \"63d62b58-053c-45ff-a512-b39899de2ae9\") " Oct 09 13:29:02 crc kubenswrapper[4762]: I1009 13:29:02.436574 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/63d62b58-053c-45ff-a512-b39899de2ae9-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "63d62b58-053c-45ff-a512-b39899de2ae9" (UID: "63d62b58-053c-45ff-a512-b39899de2ae9"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 13:29:02 crc kubenswrapper[4762]: I1009 13:29:02.440425 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"63d62b58-053c-45ff-a512-b39899de2ae9","Type":"ContainerDied","Data":"d5ad8f73eeb06f00dbcc2c2897268f4be05d76feb7066f1183a2ff898eb5f380"} Oct 09 13:29:02 crc kubenswrapper[4762]: I1009 13:29:02.440506 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d5ad8f73eeb06f00dbcc2c2897268f4be05d76feb7066f1183a2ff898eb5f380" Oct 09 13:29:02 crc kubenswrapper[4762]: I1009 13:29:02.440604 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 09 13:29:02 crc kubenswrapper[4762]: I1009 13:29:02.442104 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/63d62b58-053c-45ff-a512-b39899de2ae9-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "63d62b58-053c-45ff-a512-b39899de2ae9" (UID: "63d62b58-053c-45ff-a512-b39899de2ae9"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:29:02 crc kubenswrapper[4762]: I1009 13:29:02.537383 4762 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/63d62b58-053c-45ff-a512-b39899de2ae9-kubelet-dir\") on node \"crc\" DevicePath \"\"" Oct 09 13:29:02 crc kubenswrapper[4762]: I1009 13:29:02.537432 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/63d62b58-053c-45ff-a512-b39899de2ae9-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 09 13:29:10 crc kubenswrapper[4762]: E1009 13:29:10.803726 4762 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Oct 09 13:29:10 crc kubenswrapper[4762]: E1009 13:29:10.804560 4762 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-7pqx9,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-7vxtk_openshift-marketplace(09afe313-21ce-4c9b-8a66-299d430a7903): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 09 13:29:10 crc kubenswrapper[4762]: E1009 13:29:10.805875 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-7vxtk" podUID="09afe313-21ce-4c9b-8a66-299d430a7903" Oct 09 13:29:11 crc kubenswrapper[4762]: E1009 13:29:11.501931 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" 
pod="openshift-marketplace/redhat-marketplace-7vxtk" podUID="09afe313-21ce-4c9b-8a66-299d430a7903" Oct 09 13:29:11 crc kubenswrapper[4762]: I1009 13:29:11.970123 4762 patch_prober.go:28] interesting pod/machine-config-daemon-5v6hv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 13:29:11 crc kubenswrapper[4762]: I1009 13:29:11.970200 4762 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 13:29:11 crc kubenswrapper[4762]: I1009 13:29:11.970403 4762 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" Oct 09 13:29:11 crc kubenswrapper[4762]: I1009 13:29:11.971510 4762 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"be56bae2e58091d7381288b22608ea1d9ff05c002d923b3dc62b87fe4d4dfdc6"} pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 09 13:29:11 crc kubenswrapper[4762]: I1009 13:29:11.971589 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" containerID="cri-o://be56bae2e58091d7381288b22608ea1d9ff05c002d923b3dc62b87fe4d4dfdc6" gracePeriod=600 Oct 09 13:29:14 crc kubenswrapper[4762]: I1009 13:29:14.513126 4762 generic.go:334] "Generic (PLEG): container finished" podID="366049a3-acf6-488c-9f93-4557528d6d14" containerID="be56bae2e58091d7381288b22608ea1d9ff05c002d923b3dc62b87fe4d4dfdc6" exitCode=0 Oct 09 13:29:14 crc kubenswrapper[4762]: I1009 13:29:14.513432 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" event={"ID":"366049a3-acf6-488c-9f93-4557528d6d14","Type":"ContainerDied","Data":"be56bae2e58091d7381288b22608ea1d9ff05c002d923b3dc62b87fe4d4dfdc6"} Oct 09 13:29:16 crc kubenswrapper[4762]: E1009 13:29:16.464960 4762 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Oct 09 13:29:16 crc kubenswrapper[4762]: E1009 13:29:16.465191 4762 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-wp9qn,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-85rnj_openshift-marketplace(ff2e2ba5-4651-4774-a428-10af2c71736d): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 09 13:29:16 crc kubenswrapper[4762]: E1009 13:29:16.466425 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-85rnj" podUID="ff2e2ba5-4651-4774-a428-10af2c71736d" Oct 09 13:29:24 crc kubenswrapper[4762]: E1009 13:29:24.629842 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-85rnj" podUID="ff2e2ba5-4651-4774-a428-10af2c71736d" Oct 09 13:29:35 crc kubenswrapper[4762]: I1009 13:29:35.636325 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" event={"ID":"366049a3-acf6-488c-9f93-4557528d6d14","Type":"ContainerStarted","Data":"e0998965989e7f1018b34ffe0db01df609728cd821e59eb5e3e7fea8c3cafc4d"} Oct 09 13:29:36 crc kubenswrapper[4762]: I1009 13:29:36.645422 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jcvrf" event={"ID":"821e572e-77d5-4661-9dec-7da0cad19a4d","Type":"ContainerStarted","Data":"668cf738aa263f9b172b9edab5f75a8a2fe2414026b7265b6387ec6c2ba68160"} Oct 09 13:29:36 crc kubenswrapper[4762]: I1009 13:29:36.647952 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wpc52" event={"ID":"4de8e24c-273f-4ff9-83c2-9dd8952c3d74","Type":"ContainerStarted","Data":"6779ccb274469de2b9e2c1f300cb3adad77cdb8d87dd1198d25ccad4a7b7e65f"} Oct 09 13:29:36 crc kubenswrapper[4762]: I1009 13:29:36.650686 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7vxtk" 
event={"ID":"09afe313-21ce-4c9b-8a66-299d430a7903","Type":"ContainerStarted","Data":"7632b498d41ffee1afe922af3fb5c0a5014ef3b2b9d608aa4c1933060bebf0d1"} Oct 09 13:29:36 crc kubenswrapper[4762]: I1009 13:29:36.654107 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2hbjn" event={"ID":"1f2bb561-6df4-46da-b21a-1d5621f45ab9","Type":"ContainerStarted","Data":"11694a4cc1f0f822fe492f7db6c53cd3d819e37a9be3970be0ca2f0ab7878601"} Oct 09 13:29:36 crc kubenswrapper[4762]: I1009 13:29:36.656487 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-z5j4d" event={"ID":"08bf2dc9-588c-4ea0-b57b-25737b61c178","Type":"ContainerStarted","Data":"e0e74b0a8b5a0998cf42adaf33d79f4d0c1386fce5a42ffcd542ed7e0142c991"} Oct 09 13:29:36 crc kubenswrapper[4762]: I1009 13:29:36.658749 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jhmrt" event={"ID":"2c17d894-7ee5-44c4-b64a-c05be6870a3d","Type":"ContainerStarted","Data":"5fb908db3bccec818fd51345cdca347dd4afd33fe37059fa68fa568f08e546b1"} Oct 09 13:29:36 crc kubenswrapper[4762]: I1009 13:29:36.660660 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-r6qld" event={"ID":"fbc3d339-b1fb-4705-ab1d-6b2946e12adc","Type":"ContainerStarted","Data":"8ee51fbd24b4d24ed048a119497c0f777fd00316e6a52399ed6691499c0aec56"} Oct 09 13:29:37 crc kubenswrapper[4762]: E1009 13:29:37.068991 4762 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfbc3d339_b1fb_4705_ab1d_6b2946e12adc.slice/crio-conmon-8ee51fbd24b4d24ed048a119497c0f777fd00316e6a52399ed6691499c0aec56.scope\": RecentStats: unable to find data in memory cache]" Oct 09 13:29:37 crc kubenswrapper[4762]: I1009 13:29:37.667237 4762 generic.go:334] "Generic (PLEG): container finished" podID="fbc3d339-b1fb-4705-ab1d-6b2946e12adc" containerID="8ee51fbd24b4d24ed048a119497c0f777fd00316e6a52399ed6691499c0aec56" exitCode=0 Oct 09 13:29:37 crc kubenswrapper[4762]: I1009 13:29:37.667343 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-r6qld" event={"ID":"fbc3d339-b1fb-4705-ab1d-6b2946e12adc","Type":"ContainerDied","Data":"8ee51fbd24b4d24ed048a119497c0f777fd00316e6a52399ed6691499c0aec56"} Oct 09 13:29:37 crc kubenswrapper[4762]: I1009 13:29:37.670803 4762 generic.go:334] "Generic (PLEG): container finished" podID="821e572e-77d5-4661-9dec-7da0cad19a4d" containerID="668cf738aa263f9b172b9edab5f75a8a2fe2414026b7265b6387ec6c2ba68160" exitCode=0 Oct 09 13:29:37 crc kubenswrapper[4762]: I1009 13:29:37.670869 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jcvrf" event={"ID":"821e572e-77d5-4661-9dec-7da0cad19a4d","Type":"ContainerDied","Data":"668cf738aa263f9b172b9edab5f75a8a2fe2414026b7265b6387ec6c2ba68160"} Oct 09 13:29:37 crc kubenswrapper[4762]: I1009 13:29:37.679655 4762 generic.go:334] "Generic (PLEG): container finished" podID="4de8e24c-273f-4ff9-83c2-9dd8952c3d74" containerID="6779ccb274469de2b9e2c1f300cb3adad77cdb8d87dd1198d25ccad4a7b7e65f" exitCode=0 Oct 09 13:29:37 crc kubenswrapper[4762]: I1009 13:29:37.679739 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wpc52" 
event={"ID":"4de8e24c-273f-4ff9-83c2-9dd8952c3d74","Type":"ContainerDied","Data":"6779ccb274469de2b9e2c1f300cb3adad77cdb8d87dd1198d25ccad4a7b7e65f"} Oct 09 13:29:37 crc kubenswrapper[4762]: I1009 13:29:37.684025 4762 generic.go:334] "Generic (PLEG): container finished" podID="09afe313-21ce-4c9b-8a66-299d430a7903" containerID="7632b498d41ffee1afe922af3fb5c0a5014ef3b2b9d608aa4c1933060bebf0d1" exitCode=0 Oct 09 13:29:37 crc kubenswrapper[4762]: I1009 13:29:37.684071 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7vxtk" event={"ID":"09afe313-21ce-4c9b-8a66-299d430a7903","Type":"ContainerDied","Data":"7632b498d41ffee1afe922af3fb5c0a5014ef3b2b9d608aa4c1933060bebf0d1"} Oct 09 13:29:37 crc kubenswrapper[4762]: I1009 13:29:37.695134 4762 generic.go:334] "Generic (PLEG): container finished" podID="1f2bb561-6df4-46da-b21a-1d5621f45ab9" containerID="11694a4cc1f0f822fe492f7db6c53cd3d819e37a9be3970be0ca2f0ab7878601" exitCode=0 Oct 09 13:29:37 crc kubenswrapper[4762]: I1009 13:29:37.695249 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2hbjn" event={"ID":"1f2bb561-6df4-46da-b21a-1d5621f45ab9","Type":"ContainerDied","Data":"11694a4cc1f0f822fe492f7db6c53cd3d819e37a9be3970be0ca2f0ab7878601"} Oct 09 13:29:37 crc kubenswrapper[4762]: I1009 13:29:37.699092 4762 generic.go:334] "Generic (PLEG): container finished" podID="08bf2dc9-588c-4ea0-b57b-25737b61c178" containerID="e0e74b0a8b5a0998cf42adaf33d79f4d0c1386fce5a42ffcd542ed7e0142c991" exitCode=0 Oct 09 13:29:37 crc kubenswrapper[4762]: I1009 13:29:37.699149 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-z5j4d" event={"ID":"08bf2dc9-588c-4ea0-b57b-25737b61c178","Type":"ContainerDied","Data":"e0e74b0a8b5a0998cf42adaf33d79f4d0c1386fce5a42ffcd542ed7e0142c991"} Oct 09 13:29:37 crc kubenswrapper[4762]: I1009 13:29:37.702627 4762 generic.go:334] "Generic (PLEG): container finished" podID="2c17d894-7ee5-44c4-b64a-c05be6870a3d" containerID="5fb908db3bccec818fd51345cdca347dd4afd33fe37059fa68fa568f08e546b1" exitCode=0 Oct 09 13:29:37 crc kubenswrapper[4762]: I1009 13:29:37.702725 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jhmrt" event={"ID":"2c17d894-7ee5-44c4-b64a-c05be6870a3d","Type":"ContainerDied","Data":"5fb908db3bccec818fd51345cdca347dd4afd33fe37059fa68fa568f08e546b1"} Oct 09 13:30:00 crc kubenswrapper[4762]: I1009 13:30:00.133558 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29333610-fjr7w"] Oct 09 13:30:00 crc kubenswrapper[4762]: E1009 13:30:00.134378 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="63d62b58-053c-45ff-a512-b39899de2ae9" containerName="pruner" Oct 09 13:30:00 crc kubenswrapper[4762]: I1009 13:30:00.134394 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="63d62b58-053c-45ff-a512-b39899de2ae9" containerName="pruner" Oct 09 13:30:00 crc kubenswrapper[4762]: E1009 13:30:00.134417 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2e690132-1cdf-463d-9db6-0ee737075217" containerName="pruner" Oct 09 13:30:00 crc kubenswrapper[4762]: I1009 13:30:00.134427 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="2e690132-1cdf-463d-9db6-0ee737075217" containerName="pruner" Oct 09 13:30:00 crc kubenswrapper[4762]: I1009 13:30:00.134552 4762 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="63d62b58-053c-45ff-a512-b39899de2ae9" containerName="pruner" Oct 09 13:30:00 crc kubenswrapper[4762]: I1009 13:30:00.134569 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="2e690132-1cdf-463d-9db6-0ee737075217" containerName="pruner" Oct 09 13:30:00 crc kubenswrapper[4762]: I1009 13:30:00.135148 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29333610-fjr7w" Oct 09 13:30:00 crc kubenswrapper[4762]: I1009 13:30:00.136936 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 09 13:30:00 crc kubenswrapper[4762]: I1009 13:30:00.137217 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 09 13:30:00 crc kubenswrapper[4762]: I1009 13:30:00.141118 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29333610-fjr7w"] Oct 09 13:30:00 crc kubenswrapper[4762]: I1009 13:30:00.262068 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5b86678f-707c-438a-afc6-a6475c9a46c3-config-volume\") pod \"collect-profiles-29333610-fjr7w\" (UID: \"5b86678f-707c-438a-afc6-a6475c9a46c3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333610-fjr7w" Oct 09 13:30:00 crc kubenswrapper[4762]: I1009 13:30:00.262407 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5b86678f-707c-438a-afc6-a6475c9a46c3-secret-volume\") pod \"collect-profiles-29333610-fjr7w\" (UID: \"5b86678f-707c-438a-afc6-a6475c9a46c3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333610-fjr7w" Oct 09 13:30:00 crc kubenswrapper[4762]: I1009 13:30:00.262452 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zcgb8\" (UniqueName: \"kubernetes.io/projected/5b86678f-707c-438a-afc6-a6475c9a46c3-kube-api-access-zcgb8\") pod \"collect-profiles-29333610-fjr7w\" (UID: \"5b86678f-707c-438a-afc6-a6475c9a46c3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333610-fjr7w" Oct 09 13:30:00 crc kubenswrapper[4762]: I1009 13:30:00.364044 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zcgb8\" (UniqueName: \"kubernetes.io/projected/5b86678f-707c-438a-afc6-a6475c9a46c3-kube-api-access-zcgb8\") pod \"collect-profiles-29333610-fjr7w\" (UID: \"5b86678f-707c-438a-afc6-a6475c9a46c3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333610-fjr7w" Oct 09 13:30:00 crc kubenswrapper[4762]: I1009 13:30:00.364172 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5b86678f-707c-438a-afc6-a6475c9a46c3-config-volume\") pod \"collect-profiles-29333610-fjr7w\" (UID: \"5b86678f-707c-438a-afc6-a6475c9a46c3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333610-fjr7w" Oct 09 13:30:00 crc kubenswrapper[4762]: I1009 13:30:00.364200 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5b86678f-707c-438a-afc6-a6475c9a46c3-secret-volume\") pod \"collect-profiles-29333610-fjr7w\" 
(UID: \"5b86678f-707c-438a-afc6-a6475c9a46c3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333610-fjr7w" Oct 09 13:30:00 crc kubenswrapper[4762]: I1009 13:30:00.365190 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5b86678f-707c-438a-afc6-a6475c9a46c3-config-volume\") pod \"collect-profiles-29333610-fjr7w\" (UID: \"5b86678f-707c-438a-afc6-a6475c9a46c3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333610-fjr7w" Oct 09 13:30:00 crc kubenswrapper[4762]: I1009 13:30:00.370118 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5b86678f-707c-438a-afc6-a6475c9a46c3-secret-volume\") pod \"collect-profiles-29333610-fjr7w\" (UID: \"5b86678f-707c-438a-afc6-a6475c9a46c3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333610-fjr7w" Oct 09 13:30:00 crc kubenswrapper[4762]: I1009 13:30:00.381364 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zcgb8\" (UniqueName: \"kubernetes.io/projected/5b86678f-707c-438a-afc6-a6475c9a46c3-kube-api-access-zcgb8\") pod \"collect-profiles-29333610-fjr7w\" (UID: \"5b86678f-707c-438a-afc6-a6475c9a46c3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333610-fjr7w" Oct 09 13:30:00 crc kubenswrapper[4762]: I1009 13:30:00.453999 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29333610-fjr7w" Oct 09 13:30:03 crc kubenswrapper[4762]: I1009 13:30:03.888193 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29333610-fjr7w"] Oct 09 13:30:03 crc kubenswrapper[4762]: W1009 13:30:03.896934 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5b86678f_707c_438a_afc6_a6475c9a46c3.slice/crio-5a998dae3742e4bbbaba94e24ed8b469f70452132932d2cd8b27e35451d2f402 WatchSource:0}: Error finding container 5a998dae3742e4bbbaba94e24ed8b469f70452132932d2cd8b27e35451d2f402: Status 404 returned error can't find the container with id 5a998dae3742e4bbbaba94e24ed8b469f70452132932d2cd8b27e35451d2f402 Oct 09 13:30:04 crc kubenswrapper[4762]: I1009 13:30:04.852622 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29333610-fjr7w" event={"ID":"5b86678f-707c-438a-afc6-a6475c9a46c3","Type":"ContainerStarted","Data":"5a998dae3742e4bbbaba94e24ed8b469f70452132932d2cd8b27e35451d2f402"} Oct 09 13:30:15 crc kubenswrapper[4762]: I1009 13:30:05.864104 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2hbjn" event={"ID":"1f2bb561-6df4-46da-b21a-1d5621f45ab9","Type":"ContainerStarted","Data":"37b676a90e1a450abb55e7a1d4fe8b37081c21e83408d32fd7fc0abd9d617067"} Oct 09 13:30:15 crc kubenswrapper[4762]: I1009 13:30:06.874998 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jcvrf" event={"ID":"821e572e-77d5-4661-9dec-7da0cad19a4d","Type":"ContainerStarted","Data":"92b4a6be1c317d6790eb59c8dd769941f5fa0e4cdde697200e51f52e5dd78c64"} Oct 09 13:30:15 crc kubenswrapper[4762]: I1009 13:30:06.877496 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wpc52" 
event={"ID":"4de8e24c-273f-4ff9-83c2-9dd8952c3d74","Type":"ContainerStarted","Data":"a54c44462acb20bfa5c92c653424f4cc44d835ec5986a7435128ccd9eeca4720"} Oct 09 13:30:15 crc kubenswrapper[4762]: I1009 13:30:06.880251 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7vxtk" event={"ID":"09afe313-21ce-4c9b-8a66-299d430a7903","Type":"ContainerStarted","Data":"4d4343b5e5b136dbbf66db7f40712a956fe00d3f8bfcaf8bb4007c45aff0fecf"} Oct 09 13:30:15 crc kubenswrapper[4762]: I1009 13:30:06.883899 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-85rnj" event={"ID":"ff2e2ba5-4651-4774-a428-10af2c71736d","Type":"ContainerStarted","Data":"f0accd3794d8cf0fa68edc4606868dcbe601e619a853babd9e3a1fd9cdbdbff8"} Oct 09 13:30:15 crc kubenswrapper[4762]: I1009 13:30:06.888835 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-z5j4d" event={"ID":"08bf2dc9-588c-4ea0-b57b-25737b61c178","Type":"ContainerStarted","Data":"9e96ac38abb47aa753634809d2416011ac3a2a87e5a7e854ecc63f45e6306c46"} Oct 09 13:30:15 crc kubenswrapper[4762]: I1009 13:30:06.892181 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jhmrt" event={"ID":"2c17d894-7ee5-44c4-b64a-c05be6870a3d","Type":"ContainerStarted","Data":"55f12de18f2d336c577d2ff4f9e63713636ad0c1f479922614aa6c6d35b378e7"} Oct 09 13:30:15 crc kubenswrapper[4762]: I1009 13:30:06.895755 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-r6qld" event={"ID":"fbc3d339-b1fb-4705-ab1d-6b2946e12adc","Type":"ContainerStarted","Data":"c0fe2eb500a417df3255f20bbdea6b006c5335b441633940e5aedc441f50cc79"} Oct 09 13:30:15 crc kubenswrapper[4762]: I1009 13:30:06.897790 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29333610-fjr7w" event={"ID":"5b86678f-707c-438a-afc6-a6475c9a46c3","Type":"ContainerStarted","Data":"e882a2019eef9efc9666557c161ad28de6d8e3ca8387165480230311abb796fb"} Oct 09 13:30:15 crc kubenswrapper[4762]: I1009 13:30:07.913994 4762 generic.go:334] "Generic (PLEG): container finished" podID="ff2e2ba5-4651-4774-a428-10af2c71736d" containerID="f0accd3794d8cf0fa68edc4606868dcbe601e619a853babd9e3a1fd9cdbdbff8" exitCode=0 Oct 09 13:30:15 crc kubenswrapper[4762]: I1009 13:30:07.914082 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-85rnj" event={"ID":"ff2e2ba5-4651-4774-a428-10af2c71736d","Type":"ContainerDied","Data":"f0accd3794d8cf0fa68edc4606868dcbe601e619a853babd9e3a1fd9cdbdbff8"} Oct 09 13:30:15 crc kubenswrapper[4762]: I1009 13:30:07.967072 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-2hbjn" podStartSLOduration=21.588440497 podStartE2EDuration="2m2.967040845s" podCreationTimestamp="2025-10-09 13:28:05 +0000 UTC" firstStartedPulling="2025-10-09 13:28:07.056341584 +0000 UTC m=+162.830132623" lastFinishedPulling="2025-10-09 13:29:48.434941932 +0000 UTC m=+264.208732971" observedRunningTime="2025-10-09 13:30:07.966164202 +0000 UTC m=+283.739955241" watchObservedRunningTime="2025-10-09 13:30:07.967040845 +0000 UTC m=+283.740831904" Oct 09 13:30:15 crc kubenswrapper[4762]: I1009 13:30:07.969975 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-r6qld" podStartSLOduration=10.174296903 
podStartE2EDuration="2m5.969965424s" podCreationTimestamp="2025-10-09 13:28:02 +0000 UTC" firstStartedPulling="2025-10-09 13:28:04.921904514 +0000 UTC m=+160.695695553" lastFinishedPulling="2025-10-09 13:30:00.717573035 +0000 UTC m=+276.491364074" observedRunningTime="2025-10-09 13:30:07.950144978 +0000 UTC m=+283.723936087" watchObservedRunningTime="2025-10-09 13:30:07.969965424 +0000 UTC m=+283.743756473" Oct 09 13:30:15 crc kubenswrapper[4762]: I1009 13:30:08.924387 4762 generic.go:334] "Generic (PLEG): container finished" podID="5b86678f-707c-438a-afc6-a6475c9a46c3" containerID="e882a2019eef9efc9666557c161ad28de6d8e3ca8387165480230311abb796fb" exitCode=0 Oct 09 13:30:15 crc kubenswrapper[4762]: I1009 13:30:08.924473 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29333610-fjr7w" event={"ID":"5b86678f-707c-438a-afc6-a6475c9a46c3","Type":"ContainerDied","Data":"e882a2019eef9efc9666557c161ad28de6d8e3ca8387165480230311abb796fb"} Oct 09 13:30:15 crc kubenswrapper[4762]: I1009 13:30:08.963338 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-z5j4d" podStartSLOduration=8.708776837 podStartE2EDuration="2m6.963318757s" podCreationTimestamp="2025-10-09 13:28:02 +0000 UTC" firstStartedPulling="2025-10-09 13:28:04.931735555 +0000 UTC m=+160.705526594" lastFinishedPulling="2025-10-09 13:30:03.186277435 +0000 UTC m=+278.960068514" observedRunningTime="2025-10-09 13:30:08.958779725 +0000 UTC m=+284.732570764" watchObservedRunningTime="2025-10-09 13:30:08.963318757 +0000 UTC m=+284.737109796" Oct 09 13:30:15 crc kubenswrapper[4762]: I1009 13:30:08.981946 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-7vxtk" podStartSLOduration=22.030021476 podStartE2EDuration="2m4.981927211s" podCreationTimestamp="2025-10-09 13:28:04 +0000 UTC" firstStartedPulling="2025-10-09 13:28:05.997981023 +0000 UTC m=+161.771772062" lastFinishedPulling="2025-10-09 13:29:48.949886728 +0000 UTC m=+264.723677797" observedRunningTime="2025-10-09 13:30:08.979419862 +0000 UTC m=+284.753210901" watchObservedRunningTime="2025-10-09 13:30:08.981927211 +0000 UTC m=+284.755718250" Oct 09 13:30:15 crc kubenswrapper[4762]: I1009 13:30:09.003157 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-jcvrf" podStartSLOduration=9.838126349 podStartE2EDuration="2m4.003137484s" podCreationTimestamp="2025-10-09 13:28:05 +0000 UTC" firstStartedPulling="2025-10-09 13:28:07.056865838 +0000 UTC m=+162.830656877" lastFinishedPulling="2025-10-09 13:30:01.221876973 +0000 UTC m=+276.995668012" observedRunningTime="2025-10-09 13:30:08.998886239 +0000 UTC m=+284.772677288" watchObservedRunningTime="2025-10-09 13:30:09.003137484 +0000 UTC m=+284.776928523" Oct 09 13:30:15 crc kubenswrapper[4762]: I1009 13:30:09.037159 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-jhmrt" podStartSLOduration=11.237491096 podStartE2EDuration="2m7.037133095s" podCreationTimestamp="2025-10-09 13:28:02 +0000 UTC" firstStartedPulling="2025-10-09 13:28:04.918349147 +0000 UTC m=+160.692140186" lastFinishedPulling="2025-10-09 13:30:00.717991156 +0000 UTC m=+276.491782185" observedRunningTime="2025-10-09 13:30:09.034445332 +0000 UTC m=+284.808236391" watchObservedRunningTime="2025-10-09 13:30:09.037133095 +0000 UTC m=+284.810924124" Oct 09 13:30:15 
crc kubenswrapper[4762]: I1009 13:30:09.055627 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-wpc52" podStartSLOduration=17.384287167 podStartE2EDuration="2m7.055608075s" podCreationTimestamp="2025-10-09 13:28:02 +0000 UTC" firstStartedPulling="2025-10-09 13:28:03.883198595 +0000 UTC m=+159.656989644" lastFinishedPulling="2025-10-09 13:29:53.554519483 +0000 UTC m=+269.328310552" observedRunningTime="2025-10-09 13:30:09.052525572 +0000 UTC m=+284.826316631" watchObservedRunningTime="2025-10-09 13:30:09.055608075 +0000 UTC m=+284.829399114" Oct 09 13:30:15 crc kubenswrapper[4762]: I1009 13:30:10.199055 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29333610-fjr7w" Oct 09 13:30:15 crc kubenswrapper[4762]: I1009 13:30:10.297860 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zcgb8\" (UniqueName: \"kubernetes.io/projected/5b86678f-707c-438a-afc6-a6475c9a46c3-kube-api-access-zcgb8\") pod \"5b86678f-707c-438a-afc6-a6475c9a46c3\" (UID: \"5b86678f-707c-438a-afc6-a6475c9a46c3\") " Oct 09 13:30:15 crc kubenswrapper[4762]: I1009 13:30:10.298230 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5b86678f-707c-438a-afc6-a6475c9a46c3-secret-volume\") pod \"5b86678f-707c-438a-afc6-a6475c9a46c3\" (UID: \"5b86678f-707c-438a-afc6-a6475c9a46c3\") " Oct 09 13:30:15 crc kubenswrapper[4762]: I1009 13:30:10.298322 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5b86678f-707c-438a-afc6-a6475c9a46c3-config-volume\") pod \"5b86678f-707c-438a-afc6-a6475c9a46c3\" (UID: \"5b86678f-707c-438a-afc6-a6475c9a46c3\") " Oct 09 13:30:15 crc kubenswrapper[4762]: I1009 13:30:10.299079 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5b86678f-707c-438a-afc6-a6475c9a46c3-config-volume" (OuterVolumeSpecName: "config-volume") pod "5b86678f-707c-438a-afc6-a6475c9a46c3" (UID: "5b86678f-707c-438a-afc6-a6475c9a46c3"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:30:15 crc kubenswrapper[4762]: I1009 13:30:10.304577 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b86678f-707c-438a-afc6-a6475c9a46c3-kube-api-access-zcgb8" (OuterVolumeSpecName: "kube-api-access-zcgb8") pod "5b86678f-707c-438a-afc6-a6475c9a46c3" (UID: "5b86678f-707c-438a-afc6-a6475c9a46c3"). InnerVolumeSpecName "kube-api-access-zcgb8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:30:15 crc kubenswrapper[4762]: I1009 13:30:10.305722 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b86678f-707c-438a-afc6-a6475c9a46c3-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "5b86678f-707c-438a-afc6-a6475c9a46c3" (UID: "5b86678f-707c-438a-afc6-a6475c9a46c3"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:30:15 crc kubenswrapper[4762]: I1009 13:30:10.399780 4762 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5b86678f-707c-438a-afc6-a6475c9a46c3-config-volume\") on node \"crc\" DevicePath \"\"" Oct 09 13:30:15 crc kubenswrapper[4762]: I1009 13:30:10.399817 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zcgb8\" (UniqueName: \"kubernetes.io/projected/5b86678f-707c-438a-afc6-a6475c9a46c3-kube-api-access-zcgb8\") on node \"crc\" DevicePath \"\"" Oct 09 13:30:15 crc kubenswrapper[4762]: I1009 13:30:10.399834 4762 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5b86678f-707c-438a-afc6-a6475c9a46c3-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 09 13:30:15 crc kubenswrapper[4762]: I1009 13:30:10.936727 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29333610-fjr7w" event={"ID":"5b86678f-707c-438a-afc6-a6475c9a46c3","Type":"ContainerDied","Data":"5a998dae3742e4bbbaba94e24ed8b469f70452132932d2cd8b27e35451d2f402"} Oct 09 13:30:15 crc kubenswrapper[4762]: I1009 13:30:10.936765 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29333610-fjr7w" Oct 09 13:30:15 crc kubenswrapper[4762]: I1009 13:30:10.936766 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5a998dae3742e4bbbaba94e24ed8b469f70452132932d2cd8b27e35451d2f402" Oct 09 13:30:15 crc kubenswrapper[4762]: I1009 13:30:12.434790 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-wpc52" Oct 09 13:30:15 crc kubenswrapper[4762]: I1009 13:30:12.434850 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-wpc52" Oct 09 13:30:15 crc kubenswrapper[4762]: I1009 13:30:12.635797 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-jhmrt" Oct 09 13:30:15 crc kubenswrapper[4762]: I1009 13:30:12.635841 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-jhmrt" Oct 09 13:30:15 crc kubenswrapper[4762]: I1009 13:30:12.823768 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-z5j4d" Oct 09 13:30:15 crc kubenswrapper[4762]: I1009 13:30:12.823832 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-z5j4d" Oct 09 13:30:15 crc kubenswrapper[4762]: I1009 13:30:12.954251 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-z5j4d" Oct 09 13:30:15 crc kubenswrapper[4762]: I1009 13:30:12.954983 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-wpc52" Oct 09 13:30:15 crc kubenswrapper[4762]: I1009 13:30:12.955535 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-jhmrt" Oct 09 13:30:15 crc kubenswrapper[4762]: I1009 13:30:13.000008 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-wpc52" Oct 09 
13:30:15 crc kubenswrapper[4762]: I1009 13:30:13.009667 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-r6qld" Oct 09 13:30:15 crc kubenswrapper[4762]: I1009 13:30:13.009705 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-r6qld" Oct 09 13:30:15 crc kubenswrapper[4762]: I1009 13:30:13.013626 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-z5j4d" Oct 09 13:30:15 crc kubenswrapper[4762]: I1009 13:30:13.018918 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-jhmrt" Oct 09 13:30:15 crc kubenswrapper[4762]: I1009 13:30:13.061119 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-r6qld" Oct 09 13:30:15 crc kubenswrapper[4762]: I1009 13:30:13.993879 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-r6qld" Oct 09 13:30:15 crc kubenswrapper[4762]: I1009 13:30:14.815340 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-7vxtk" Oct 09 13:30:15 crc kubenswrapper[4762]: I1009 13:30:14.815831 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-7vxtk" Oct 09 13:30:15 crc kubenswrapper[4762]: I1009 13:30:14.864418 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-7vxtk" Oct 09 13:30:15 crc kubenswrapper[4762]: I1009 13:30:14.991852 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-7vxtk" Oct 09 13:30:15 crc kubenswrapper[4762]: I1009 13:30:15.350997 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-z5j4d"] Oct 09 13:30:15 crc kubenswrapper[4762]: I1009 13:30:15.351258 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-z5j4d" podUID="08bf2dc9-588c-4ea0-b57b-25737b61c178" containerName="registry-server" containerID="cri-o://9e96ac38abb47aa753634809d2416011ac3a2a87e5a7e854ecc63f45e6306c46" gracePeriod=2 Oct 09 13:30:15 crc kubenswrapper[4762]: I1009 13:30:15.932021 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-2hbjn" Oct 09 13:30:15 crc kubenswrapper[4762]: I1009 13:30:15.932073 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-2hbjn" Oct 09 13:30:16 crc kubenswrapper[4762]: I1009 13:30:16.001956 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-2hbjn" Oct 09 13:30:16 crc kubenswrapper[4762]: I1009 13:30:16.061317 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-2hbjn" Oct 09 13:30:16 crc kubenswrapper[4762]: I1009 13:30:16.221891 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-jcvrf" Oct 09 13:30:16 crc kubenswrapper[4762]: I1009 13:30:16.221976 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-marketplace/redhat-operators-jcvrf" Oct 09 13:30:16 crc kubenswrapper[4762]: I1009 13:30:16.268582 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-jcvrf" Oct 09 13:30:16 crc kubenswrapper[4762]: I1009 13:30:16.973945 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-85rnj" event={"ID":"ff2e2ba5-4651-4774-a428-10af2c71736d","Type":"ContainerStarted","Data":"96b51c886165381225913b82858bed93a796df93a9e4a1fe9324c79f45244d3f"} Oct 09 13:30:16 crc kubenswrapper[4762]: I1009 13:30:16.975456 4762 generic.go:334] "Generic (PLEG): container finished" podID="08bf2dc9-588c-4ea0-b57b-25737b61c178" containerID="9e96ac38abb47aa753634809d2416011ac3a2a87e5a7e854ecc63f45e6306c46" exitCode=0 Oct 09 13:30:16 crc kubenswrapper[4762]: I1009 13:30:16.975786 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-z5j4d" event={"ID":"08bf2dc9-588c-4ea0-b57b-25737b61c178","Type":"ContainerDied","Data":"9e96ac38abb47aa753634809d2416011ac3a2a87e5a7e854ecc63f45e6306c46"} Oct 09 13:30:16 crc kubenswrapper[4762]: I1009 13:30:16.998225 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-85rnj" podStartSLOduration=2.772818433 podStartE2EDuration="2m12.998203274s" podCreationTimestamp="2025-10-09 13:28:04 +0000 UTC" firstStartedPulling="2025-10-09 13:28:06.022340633 +0000 UTC m=+161.796131672" lastFinishedPulling="2025-10-09 13:30:16.247725474 +0000 UTC m=+292.021516513" observedRunningTime="2025-10-09 13:30:16.996357993 +0000 UTC m=+292.770149042" watchObservedRunningTime="2025-10-09 13:30:16.998203274 +0000 UTC m=+292.771994313" Oct 09 13:30:17 crc kubenswrapper[4762]: I1009 13:30:17.028523 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-jcvrf" Oct 09 13:30:17 crc kubenswrapper[4762]: I1009 13:30:17.106532 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-z5j4d" Oct 09 13:30:17 crc kubenswrapper[4762]: I1009 13:30:17.152088 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-r6qld"] Oct 09 13:30:17 crc kubenswrapper[4762]: I1009 13:30:17.152292 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-r6qld" podUID="fbc3d339-b1fb-4705-ab1d-6b2946e12adc" containerName="registry-server" containerID="cri-o://c0fe2eb500a417df3255f20bbdea6b006c5335b441633940e5aedc441f50cc79" gracePeriod=2 Oct 09 13:30:17 crc kubenswrapper[4762]: I1009 13:30:17.295389 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sv8s4\" (UniqueName: \"kubernetes.io/projected/08bf2dc9-588c-4ea0-b57b-25737b61c178-kube-api-access-sv8s4\") pod \"08bf2dc9-588c-4ea0-b57b-25737b61c178\" (UID: \"08bf2dc9-588c-4ea0-b57b-25737b61c178\") " Oct 09 13:30:17 crc kubenswrapper[4762]: I1009 13:30:17.295813 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/08bf2dc9-588c-4ea0-b57b-25737b61c178-utilities\") pod \"08bf2dc9-588c-4ea0-b57b-25737b61c178\" (UID: \"08bf2dc9-588c-4ea0-b57b-25737b61c178\") " Oct 09 13:30:17 crc kubenswrapper[4762]: I1009 13:30:17.295866 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/08bf2dc9-588c-4ea0-b57b-25737b61c178-catalog-content\") pod \"08bf2dc9-588c-4ea0-b57b-25737b61c178\" (UID: \"08bf2dc9-588c-4ea0-b57b-25737b61c178\") " Oct 09 13:30:17 crc kubenswrapper[4762]: I1009 13:30:17.297578 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/08bf2dc9-588c-4ea0-b57b-25737b61c178-utilities" (OuterVolumeSpecName: "utilities") pod "08bf2dc9-588c-4ea0-b57b-25737b61c178" (UID: "08bf2dc9-588c-4ea0-b57b-25737b61c178"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 13:30:17 crc kubenswrapper[4762]: I1009 13:30:17.300929 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/08bf2dc9-588c-4ea0-b57b-25737b61c178-kube-api-access-sv8s4" (OuterVolumeSpecName: "kube-api-access-sv8s4") pod "08bf2dc9-588c-4ea0-b57b-25737b61c178" (UID: "08bf2dc9-588c-4ea0-b57b-25737b61c178"). InnerVolumeSpecName "kube-api-access-sv8s4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:30:17 crc kubenswrapper[4762]: I1009 13:30:17.361590 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/08bf2dc9-588c-4ea0-b57b-25737b61c178-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "08bf2dc9-588c-4ea0-b57b-25737b61c178" (UID: "08bf2dc9-588c-4ea0-b57b-25737b61c178"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 13:30:17 crc kubenswrapper[4762]: I1009 13:30:17.397229 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sv8s4\" (UniqueName: \"kubernetes.io/projected/08bf2dc9-588c-4ea0-b57b-25737b61c178-kube-api-access-sv8s4\") on node \"crc\" DevicePath \"\"" Oct 09 13:30:17 crc kubenswrapper[4762]: I1009 13:30:17.397267 4762 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/08bf2dc9-588c-4ea0-b57b-25737b61c178-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 13:30:17 crc kubenswrapper[4762]: I1009 13:30:17.397279 4762 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/08bf2dc9-588c-4ea0-b57b-25737b61c178-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 13:30:17 crc kubenswrapper[4762]: I1009 13:30:17.558613 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-r6qld" Oct 09 13:30:17 crc kubenswrapper[4762]: I1009 13:30:17.700882 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fbc3d339-b1fb-4705-ab1d-6b2946e12adc-catalog-content\") pod \"fbc3d339-b1fb-4705-ab1d-6b2946e12adc\" (UID: \"fbc3d339-b1fb-4705-ab1d-6b2946e12adc\") " Oct 09 13:30:17 crc kubenswrapper[4762]: I1009 13:30:17.701024 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fbc3d339-b1fb-4705-ab1d-6b2946e12adc-utilities\") pod \"fbc3d339-b1fb-4705-ab1d-6b2946e12adc\" (UID: \"fbc3d339-b1fb-4705-ab1d-6b2946e12adc\") " Oct 09 13:30:17 crc kubenswrapper[4762]: I1009 13:30:17.701073 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lwvl7\" (UniqueName: \"kubernetes.io/projected/fbc3d339-b1fb-4705-ab1d-6b2946e12adc-kube-api-access-lwvl7\") pod \"fbc3d339-b1fb-4705-ab1d-6b2946e12adc\" (UID: \"fbc3d339-b1fb-4705-ab1d-6b2946e12adc\") " Oct 09 13:30:17 crc kubenswrapper[4762]: I1009 13:30:17.701990 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fbc3d339-b1fb-4705-ab1d-6b2946e12adc-utilities" (OuterVolumeSpecName: "utilities") pod "fbc3d339-b1fb-4705-ab1d-6b2946e12adc" (UID: "fbc3d339-b1fb-4705-ab1d-6b2946e12adc"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 13:30:17 crc kubenswrapper[4762]: I1009 13:30:17.705421 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fbc3d339-b1fb-4705-ab1d-6b2946e12adc-kube-api-access-lwvl7" (OuterVolumeSpecName: "kube-api-access-lwvl7") pod "fbc3d339-b1fb-4705-ab1d-6b2946e12adc" (UID: "fbc3d339-b1fb-4705-ab1d-6b2946e12adc"). InnerVolumeSpecName "kube-api-access-lwvl7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:30:17 crc kubenswrapper[4762]: I1009 13:30:17.751897 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-7vxtk"] Oct 09 13:30:17 crc kubenswrapper[4762]: I1009 13:30:17.752165 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-7vxtk" podUID="09afe313-21ce-4c9b-8a66-299d430a7903" containerName="registry-server" containerID="cri-o://4d4343b5e5b136dbbf66db7f40712a956fe00d3f8bfcaf8bb4007c45aff0fecf" gracePeriod=2 Oct 09 13:30:17 crc kubenswrapper[4762]: I1009 13:30:17.757914 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fbc3d339-b1fb-4705-ab1d-6b2946e12adc-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "fbc3d339-b1fb-4705-ab1d-6b2946e12adc" (UID: "fbc3d339-b1fb-4705-ab1d-6b2946e12adc"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 13:30:17 crc kubenswrapper[4762]: I1009 13:30:17.802038 4762 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fbc3d339-b1fb-4705-ab1d-6b2946e12adc-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 13:30:17 crc kubenswrapper[4762]: I1009 13:30:17.802084 4762 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fbc3d339-b1fb-4705-ab1d-6b2946e12adc-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 13:30:17 crc kubenswrapper[4762]: I1009 13:30:17.802098 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lwvl7\" (UniqueName: \"kubernetes.io/projected/fbc3d339-b1fb-4705-ab1d-6b2946e12adc-kube-api-access-lwvl7\") on node \"crc\" DevicePath \"\"" Oct 09 13:30:17 crc kubenswrapper[4762]: E1009 13:30:17.843369 4762 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod09afe313_21ce_4c9b_8a66_299d430a7903.slice/crio-4d4343b5e5b136dbbf66db7f40712a956fe00d3f8bfcaf8bb4007c45aff0fecf.scope\": RecentStats: unable to find data in memory cache]" Oct 09 13:30:17 crc kubenswrapper[4762]: I1009 13:30:17.982847 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-z5j4d" event={"ID":"08bf2dc9-588c-4ea0-b57b-25737b61c178","Type":"ContainerDied","Data":"21aae81cf50707a46e79ca0bb73e69c4ea2a6265dfa624c249b6386e90d2433c"} Oct 09 13:30:17 crc kubenswrapper[4762]: I1009 13:30:17.983973 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-z5j4d" Oct 09 13:30:17 crc kubenswrapper[4762]: I1009 13:30:17.984160 4762 scope.go:117] "RemoveContainer" containerID="9e96ac38abb47aa753634809d2416011ac3a2a87e5a7e854ecc63f45e6306c46" Oct 09 13:30:17 crc kubenswrapper[4762]: I1009 13:30:17.986217 4762 generic.go:334] "Generic (PLEG): container finished" podID="fbc3d339-b1fb-4705-ab1d-6b2946e12adc" containerID="c0fe2eb500a417df3255f20bbdea6b006c5335b441633940e5aedc441f50cc79" exitCode=0 Oct 09 13:30:17 crc kubenswrapper[4762]: I1009 13:30:17.986782 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-r6qld" Oct 09 13:30:17 crc kubenswrapper[4762]: I1009 13:30:17.990838 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-r6qld" event={"ID":"fbc3d339-b1fb-4705-ab1d-6b2946e12adc","Type":"ContainerDied","Data":"c0fe2eb500a417df3255f20bbdea6b006c5335b441633940e5aedc441f50cc79"} Oct 09 13:30:17 crc kubenswrapper[4762]: I1009 13:30:17.991081 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-r6qld" event={"ID":"fbc3d339-b1fb-4705-ab1d-6b2946e12adc","Type":"ContainerDied","Data":"8822c5ccb023a4238b6810b21ff0dc173a1aa1980b2f34c1a0d977958296a063"} Oct 09 13:30:18 crc kubenswrapper[4762]: I1009 13:30:18.074980 4762 scope.go:117] "RemoveContainer" containerID="e0e74b0a8b5a0998cf42adaf33d79f4d0c1386fce5a42ffcd542ed7e0142c991" Oct 09 13:30:18 crc kubenswrapper[4762]: I1009 13:30:18.081250 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-z5j4d"] Oct 09 13:30:18 crc kubenswrapper[4762]: I1009 13:30:18.082973 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-z5j4d"] Oct 09 13:30:18 crc kubenswrapper[4762]: I1009 13:30:18.094873 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-r6qld"] Oct 09 13:30:18 crc kubenswrapper[4762]: I1009 13:30:18.098535 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-r6qld"] Oct 09 13:30:18 crc kubenswrapper[4762]: I1009 13:30:18.109696 4762 scope.go:117] "RemoveContainer" containerID="b1de3575418e3af2ed7d2ab63d18e6103681f4363d3961dbe45cfd36e61ee13d" Oct 09 13:30:18 crc kubenswrapper[4762]: I1009 13:30:18.126051 4762 scope.go:117] "RemoveContainer" containerID="c0fe2eb500a417df3255f20bbdea6b006c5335b441633940e5aedc441f50cc79" Oct 09 13:30:18 crc kubenswrapper[4762]: I1009 13:30:18.148915 4762 scope.go:117] "RemoveContainer" containerID="8ee51fbd24b4d24ed048a119497c0f777fd00316e6a52399ed6691499c0aec56" Oct 09 13:30:18 crc kubenswrapper[4762]: I1009 13:30:18.169427 4762 scope.go:117] "RemoveContainer" containerID="5e4a852be16c1713c69e3c2c166a927bb86da1d0ca26d64e7e1ab7eec859e033" Oct 09 13:30:18 crc kubenswrapper[4762]: I1009 13:30:18.183508 4762 scope.go:117] "RemoveContainer" containerID="c0fe2eb500a417df3255f20bbdea6b006c5335b441633940e5aedc441f50cc79" Oct 09 13:30:18 crc kubenswrapper[4762]: E1009 13:30:18.184112 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c0fe2eb500a417df3255f20bbdea6b006c5335b441633940e5aedc441f50cc79\": container with ID starting with c0fe2eb500a417df3255f20bbdea6b006c5335b441633940e5aedc441f50cc79 not found: ID does not exist" containerID="c0fe2eb500a417df3255f20bbdea6b006c5335b441633940e5aedc441f50cc79" Oct 09 13:30:18 crc kubenswrapper[4762]: I1009 13:30:18.184144 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c0fe2eb500a417df3255f20bbdea6b006c5335b441633940e5aedc441f50cc79"} err="failed to get container status \"c0fe2eb500a417df3255f20bbdea6b006c5335b441633940e5aedc441f50cc79\": rpc error: code = NotFound desc = could not find container \"c0fe2eb500a417df3255f20bbdea6b006c5335b441633940e5aedc441f50cc79\": container with ID starting with c0fe2eb500a417df3255f20bbdea6b006c5335b441633940e5aedc441f50cc79 not found: ID does not exist" Oct 09 
13:30:18 crc kubenswrapper[4762]: I1009 13:30:18.184166 4762 scope.go:117] "RemoveContainer" containerID="8ee51fbd24b4d24ed048a119497c0f777fd00316e6a52399ed6691499c0aec56" Oct 09 13:30:18 crc kubenswrapper[4762]: E1009 13:30:18.184386 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8ee51fbd24b4d24ed048a119497c0f777fd00316e6a52399ed6691499c0aec56\": container with ID starting with 8ee51fbd24b4d24ed048a119497c0f777fd00316e6a52399ed6691499c0aec56 not found: ID does not exist" containerID="8ee51fbd24b4d24ed048a119497c0f777fd00316e6a52399ed6691499c0aec56" Oct 09 13:30:18 crc kubenswrapper[4762]: I1009 13:30:18.184406 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8ee51fbd24b4d24ed048a119497c0f777fd00316e6a52399ed6691499c0aec56"} err="failed to get container status \"8ee51fbd24b4d24ed048a119497c0f777fd00316e6a52399ed6691499c0aec56\": rpc error: code = NotFound desc = could not find container \"8ee51fbd24b4d24ed048a119497c0f777fd00316e6a52399ed6691499c0aec56\": container with ID starting with 8ee51fbd24b4d24ed048a119497c0f777fd00316e6a52399ed6691499c0aec56 not found: ID does not exist" Oct 09 13:30:18 crc kubenswrapper[4762]: I1009 13:30:18.184421 4762 scope.go:117] "RemoveContainer" containerID="5e4a852be16c1713c69e3c2c166a927bb86da1d0ca26d64e7e1ab7eec859e033" Oct 09 13:30:18 crc kubenswrapper[4762]: E1009 13:30:18.184680 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5e4a852be16c1713c69e3c2c166a927bb86da1d0ca26d64e7e1ab7eec859e033\": container with ID starting with 5e4a852be16c1713c69e3c2c166a927bb86da1d0ca26d64e7e1ab7eec859e033 not found: ID does not exist" containerID="5e4a852be16c1713c69e3c2c166a927bb86da1d0ca26d64e7e1ab7eec859e033" Oct 09 13:30:18 crc kubenswrapper[4762]: I1009 13:30:18.184707 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5e4a852be16c1713c69e3c2c166a927bb86da1d0ca26d64e7e1ab7eec859e033"} err="failed to get container status \"5e4a852be16c1713c69e3c2c166a927bb86da1d0ca26d64e7e1ab7eec859e033\": rpc error: code = NotFound desc = could not find container \"5e4a852be16c1713c69e3c2c166a927bb86da1d0ca26d64e7e1ab7eec859e033\": container with ID starting with 5e4a852be16c1713c69e3c2c166a927bb86da1d0ca26d64e7e1ab7eec859e033 not found: ID does not exist" Oct 09 13:30:18 crc kubenswrapper[4762]: I1009 13:30:18.561218 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7vxtk" Oct 09 13:30:18 crc kubenswrapper[4762]: I1009 13:30:18.714005 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/09afe313-21ce-4c9b-8a66-299d430a7903-catalog-content\") pod \"09afe313-21ce-4c9b-8a66-299d430a7903\" (UID: \"09afe313-21ce-4c9b-8a66-299d430a7903\") " Oct 09 13:30:18 crc kubenswrapper[4762]: I1009 13:30:18.714299 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/09afe313-21ce-4c9b-8a66-299d430a7903-utilities\") pod \"09afe313-21ce-4c9b-8a66-299d430a7903\" (UID: \"09afe313-21ce-4c9b-8a66-299d430a7903\") " Oct 09 13:30:18 crc kubenswrapper[4762]: I1009 13:30:18.714382 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7pqx9\" (UniqueName: \"kubernetes.io/projected/09afe313-21ce-4c9b-8a66-299d430a7903-kube-api-access-7pqx9\") pod \"09afe313-21ce-4c9b-8a66-299d430a7903\" (UID: \"09afe313-21ce-4c9b-8a66-299d430a7903\") " Oct 09 13:30:18 crc kubenswrapper[4762]: I1009 13:30:18.715218 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/09afe313-21ce-4c9b-8a66-299d430a7903-utilities" (OuterVolumeSpecName: "utilities") pod "09afe313-21ce-4c9b-8a66-299d430a7903" (UID: "09afe313-21ce-4c9b-8a66-299d430a7903"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 13:30:18 crc kubenswrapper[4762]: I1009 13:30:18.720481 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09afe313-21ce-4c9b-8a66-299d430a7903-kube-api-access-7pqx9" (OuterVolumeSpecName: "kube-api-access-7pqx9") pod "09afe313-21ce-4c9b-8a66-299d430a7903" (UID: "09afe313-21ce-4c9b-8a66-299d430a7903"). InnerVolumeSpecName "kube-api-access-7pqx9". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:30:18 crc kubenswrapper[4762]: I1009 13:30:18.726333 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/09afe313-21ce-4c9b-8a66-299d430a7903-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "09afe313-21ce-4c9b-8a66-299d430a7903" (UID: "09afe313-21ce-4c9b-8a66-299d430a7903"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 13:30:18 crc kubenswrapper[4762]: I1009 13:30:18.815454 4762 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/09afe313-21ce-4c9b-8a66-299d430a7903-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 13:30:18 crc kubenswrapper[4762]: I1009 13:30:18.815781 4762 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/09afe313-21ce-4c9b-8a66-299d430a7903-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 13:30:18 crc kubenswrapper[4762]: I1009 13:30:18.815877 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7pqx9\" (UniqueName: \"kubernetes.io/projected/09afe313-21ce-4c9b-8a66-299d430a7903-kube-api-access-7pqx9\") on node \"crc\" DevicePath \"\"" Oct 09 13:30:18 crc kubenswrapper[4762]: I1009 13:30:18.973615 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="08bf2dc9-588c-4ea0-b57b-25737b61c178" path="/var/lib/kubelet/pods/08bf2dc9-588c-4ea0-b57b-25737b61c178/volumes" Oct 09 13:30:18 crc kubenswrapper[4762]: I1009 13:30:18.974282 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fbc3d339-b1fb-4705-ab1d-6b2946e12adc" path="/var/lib/kubelet/pods/fbc3d339-b1fb-4705-ab1d-6b2946e12adc/volumes" Oct 09 13:30:18 crc kubenswrapper[4762]: I1009 13:30:18.995468 4762 generic.go:334] "Generic (PLEG): container finished" podID="09afe313-21ce-4c9b-8a66-299d430a7903" containerID="4d4343b5e5b136dbbf66db7f40712a956fe00d3f8bfcaf8bb4007c45aff0fecf" exitCode=0 Oct 09 13:30:18 crc kubenswrapper[4762]: I1009 13:30:18.995589 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7vxtk" Oct 09 13:30:18 crc kubenswrapper[4762]: I1009 13:30:18.995588 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7vxtk" event={"ID":"09afe313-21ce-4c9b-8a66-299d430a7903","Type":"ContainerDied","Data":"4d4343b5e5b136dbbf66db7f40712a956fe00d3f8bfcaf8bb4007c45aff0fecf"} Oct 09 13:30:18 crc kubenswrapper[4762]: I1009 13:30:18.996523 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7vxtk" event={"ID":"09afe313-21ce-4c9b-8a66-299d430a7903","Type":"ContainerDied","Data":"a9cf734357b47ece4ec2424830bbab8d83a302575fafeaedea1101a6b09e87f1"} Oct 09 13:30:18 crc kubenswrapper[4762]: I1009 13:30:18.996559 4762 scope.go:117] "RemoveContainer" containerID="4d4343b5e5b136dbbf66db7f40712a956fe00d3f8bfcaf8bb4007c45aff0fecf" Oct 09 13:30:19 crc kubenswrapper[4762]: I1009 13:30:19.014681 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-7vxtk"] Oct 09 13:30:19 crc kubenswrapper[4762]: I1009 13:30:19.016271 4762 scope.go:117] "RemoveContainer" containerID="7632b498d41ffee1afe922af3fb5c0a5014ef3b2b9d608aa4c1933060bebf0d1" Oct 09 13:30:19 crc kubenswrapper[4762]: I1009 13:30:19.019780 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-7vxtk"] Oct 09 13:30:19 crc kubenswrapper[4762]: I1009 13:30:19.029948 4762 scope.go:117] "RemoveContainer" containerID="c5f1a2889a8b730162a631ff5a04892a98778a819711330bbd7da866ee5fc195" Oct 09 13:30:19 crc kubenswrapper[4762]: I1009 13:30:19.044117 4762 scope.go:117] "RemoveContainer" containerID="4d4343b5e5b136dbbf66db7f40712a956fe00d3f8bfcaf8bb4007c45aff0fecf" Oct 09 13:30:19 crc 
kubenswrapper[4762]: E1009 13:30:19.044654 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4d4343b5e5b136dbbf66db7f40712a956fe00d3f8bfcaf8bb4007c45aff0fecf\": container with ID starting with 4d4343b5e5b136dbbf66db7f40712a956fe00d3f8bfcaf8bb4007c45aff0fecf not found: ID does not exist" containerID="4d4343b5e5b136dbbf66db7f40712a956fe00d3f8bfcaf8bb4007c45aff0fecf" Oct 09 13:30:19 crc kubenswrapper[4762]: I1009 13:30:19.044694 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4d4343b5e5b136dbbf66db7f40712a956fe00d3f8bfcaf8bb4007c45aff0fecf"} err="failed to get container status \"4d4343b5e5b136dbbf66db7f40712a956fe00d3f8bfcaf8bb4007c45aff0fecf\": rpc error: code = NotFound desc = could not find container \"4d4343b5e5b136dbbf66db7f40712a956fe00d3f8bfcaf8bb4007c45aff0fecf\": container with ID starting with 4d4343b5e5b136dbbf66db7f40712a956fe00d3f8bfcaf8bb4007c45aff0fecf not found: ID does not exist" Oct 09 13:30:19 crc kubenswrapper[4762]: I1009 13:30:19.044723 4762 scope.go:117] "RemoveContainer" containerID="7632b498d41ffee1afe922af3fb5c0a5014ef3b2b9d608aa4c1933060bebf0d1" Oct 09 13:30:19 crc kubenswrapper[4762]: E1009 13:30:19.045131 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7632b498d41ffee1afe922af3fb5c0a5014ef3b2b9d608aa4c1933060bebf0d1\": container with ID starting with 7632b498d41ffee1afe922af3fb5c0a5014ef3b2b9d608aa4c1933060bebf0d1 not found: ID does not exist" containerID="7632b498d41ffee1afe922af3fb5c0a5014ef3b2b9d608aa4c1933060bebf0d1" Oct 09 13:30:19 crc kubenswrapper[4762]: I1009 13:30:19.045258 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7632b498d41ffee1afe922af3fb5c0a5014ef3b2b9d608aa4c1933060bebf0d1"} err="failed to get container status \"7632b498d41ffee1afe922af3fb5c0a5014ef3b2b9d608aa4c1933060bebf0d1\": rpc error: code = NotFound desc = could not find container \"7632b498d41ffee1afe922af3fb5c0a5014ef3b2b9d608aa4c1933060bebf0d1\": container with ID starting with 7632b498d41ffee1afe922af3fb5c0a5014ef3b2b9d608aa4c1933060bebf0d1 not found: ID does not exist" Oct 09 13:30:19 crc kubenswrapper[4762]: I1009 13:30:19.045473 4762 scope.go:117] "RemoveContainer" containerID="c5f1a2889a8b730162a631ff5a04892a98778a819711330bbd7da866ee5fc195" Oct 09 13:30:19 crc kubenswrapper[4762]: E1009 13:30:19.045940 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c5f1a2889a8b730162a631ff5a04892a98778a819711330bbd7da866ee5fc195\": container with ID starting with c5f1a2889a8b730162a631ff5a04892a98778a819711330bbd7da866ee5fc195 not found: ID does not exist" containerID="c5f1a2889a8b730162a631ff5a04892a98778a819711330bbd7da866ee5fc195" Oct 09 13:30:19 crc kubenswrapper[4762]: I1009 13:30:19.045971 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c5f1a2889a8b730162a631ff5a04892a98778a819711330bbd7da866ee5fc195"} err="failed to get container status \"c5f1a2889a8b730162a631ff5a04892a98778a819711330bbd7da866ee5fc195\": rpc error: code = NotFound desc = could not find container \"c5f1a2889a8b730162a631ff5a04892a98778a819711330bbd7da866ee5fc195\": container with ID starting with c5f1a2889a8b730162a631ff5a04892a98778a819711330bbd7da866ee5fc195 not found: ID does not exist" Oct 09 13:30:20 crc kubenswrapper[4762]: 
I1009 13:30:20.151253 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-jcvrf"] Oct 09 13:30:20 crc kubenswrapper[4762]: I1009 13:30:20.151738 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-jcvrf" podUID="821e572e-77d5-4661-9dec-7da0cad19a4d" containerName="registry-server" containerID="cri-o://92b4a6be1c317d6790eb59c8dd769941f5fa0e4cdde697200e51f52e5dd78c64" gracePeriod=2 Oct 09 13:30:20 crc kubenswrapper[4762]: I1009 13:30:20.979131 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09afe313-21ce-4c9b-8a66-299d430a7903" path="/var/lib/kubelet/pods/09afe313-21ce-4c9b-8a66-299d430a7903/volumes" Oct 09 13:30:21 crc kubenswrapper[4762]: I1009 13:30:21.027239 4762 generic.go:334] "Generic (PLEG): container finished" podID="821e572e-77d5-4661-9dec-7da0cad19a4d" containerID="92b4a6be1c317d6790eb59c8dd769941f5fa0e4cdde697200e51f52e5dd78c64" exitCode=0 Oct 09 13:30:21 crc kubenswrapper[4762]: I1009 13:30:21.027289 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jcvrf" event={"ID":"821e572e-77d5-4661-9dec-7da0cad19a4d","Type":"ContainerDied","Data":"92b4a6be1c317d6790eb59c8dd769941f5fa0e4cdde697200e51f52e5dd78c64"} Oct 09 13:30:21 crc kubenswrapper[4762]: I1009 13:30:21.174917 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-jcvrf" Oct 09 13:30:21 crc kubenswrapper[4762]: I1009 13:30:21.354415 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/821e572e-77d5-4661-9dec-7da0cad19a4d-utilities\") pod \"821e572e-77d5-4661-9dec-7da0cad19a4d\" (UID: \"821e572e-77d5-4661-9dec-7da0cad19a4d\") " Oct 09 13:30:21 crc kubenswrapper[4762]: I1009 13:30:21.354523 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/821e572e-77d5-4661-9dec-7da0cad19a4d-catalog-content\") pod \"821e572e-77d5-4661-9dec-7da0cad19a4d\" (UID: \"821e572e-77d5-4661-9dec-7da0cad19a4d\") " Oct 09 13:30:21 crc kubenswrapper[4762]: I1009 13:30:21.354590 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wgnrf\" (UniqueName: \"kubernetes.io/projected/821e572e-77d5-4661-9dec-7da0cad19a4d-kube-api-access-wgnrf\") pod \"821e572e-77d5-4661-9dec-7da0cad19a4d\" (UID: \"821e572e-77d5-4661-9dec-7da0cad19a4d\") " Oct 09 13:30:21 crc kubenswrapper[4762]: I1009 13:30:21.356260 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/821e572e-77d5-4661-9dec-7da0cad19a4d-utilities" (OuterVolumeSpecName: "utilities") pod "821e572e-77d5-4661-9dec-7da0cad19a4d" (UID: "821e572e-77d5-4661-9dec-7da0cad19a4d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 13:30:21 crc kubenswrapper[4762]: I1009 13:30:21.365327 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/821e572e-77d5-4661-9dec-7da0cad19a4d-kube-api-access-wgnrf" (OuterVolumeSpecName: "kube-api-access-wgnrf") pod "821e572e-77d5-4661-9dec-7da0cad19a4d" (UID: "821e572e-77d5-4661-9dec-7da0cad19a4d"). InnerVolumeSpecName "kube-api-access-wgnrf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:30:21 crc kubenswrapper[4762]: I1009 13:30:21.440349 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/821e572e-77d5-4661-9dec-7da0cad19a4d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "821e572e-77d5-4661-9dec-7da0cad19a4d" (UID: "821e572e-77d5-4661-9dec-7da0cad19a4d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 13:30:21 crc kubenswrapper[4762]: I1009 13:30:21.456363 4762 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/821e572e-77d5-4661-9dec-7da0cad19a4d-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 13:30:21 crc kubenswrapper[4762]: I1009 13:30:21.456406 4762 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/821e572e-77d5-4661-9dec-7da0cad19a4d-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 13:30:21 crc kubenswrapper[4762]: I1009 13:30:21.456419 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wgnrf\" (UniqueName: \"kubernetes.io/projected/821e572e-77d5-4661-9dec-7da0cad19a4d-kube-api-access-wgnrf\") on node \"crc\" DevicePath \"\"" Oct 09 13:30:22 crc kubenswrapper[4762]: I1009 13:30:22.042835 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jcvrf" event={"ID":"821e572e-77d5-4661-9dec-7da0cad19a4d","Type":"ContainerDied","Data":"ffd0239aacd99ad969d81584fa143b5188f7e93045375b9a8a78be8ff3e6b5fc"} Oct 09 13:30:22 crc kubenswrapper[4762]: I1009 13:30:22.042905 4762 scope.go:117] "RemoveContainer" containerID="92b4a6be1c317d6790eb59c8dd769941f5fa0e4cdde697200e51f52e5dd78c64" Oct 09 13:30:22 crc kubenswrapper[4762]: I1009 13:30:22.042922 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-jcvrf" Oct 09 13:30:22 crc kubenswrapper[4762]: I1009 13:30:22.063408 4762 scope.go:117] "RemoveContainer" containerID="668cf738aa263f9b172b9edab5f75a8a2fe2414026b7265b6387ec6c2ba68160" Oct 09 13:30:22 crc kubenswrapper[4762]: I1009 13:30:22.083176 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-jcvrf"] Oct 09 13:30:22 crc kubenswrapper[4762]: I1009 13:30:22.084068 4762 scope.go:117] "RemoveContainer" containerID="315071a6e84ff8085750efcc6f506f8336dac2f3641ac7968b45c3d2555e832d" Oct 09 13:30:22 crc kubenswrapper[4762]: I1009 13:30:22.087494 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-jcvrf"] Oct 09 13:30:22 crc kubenswrapper[4762]: I1009 13:30:22.974423 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="821e572e-77d5-4661-9dec-7da0cad19a4d" path="/var/lib/kubelet/pods/821e572e-77d5-4661-9dec-7da0cad19a4d/volumes" Oct 09 13:30:24 crc kubenswrapper[4762]: I1009 13:30:24.400939 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-85rnj" Oct 09 13:30:24 crc kubenswrapper[4762]: I1009 13:30:24.401199 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-85rnj" Oct 09 13:30:24 crc kubenswrapper[4762]: I1009 13:30:24.438826 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-85rnj" Oct 09 13:30:25 crc kubenswrapper[4762]: I1009 13:30:25.125383 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-85rnj" Oct 09 13:30:38 crc kubenswrapper[4762]: I1009 13:30:38.545602 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-8x7j8"] Oct 09 13:31:03 crc kubenswrapper[4762]: I1009 13:31:03.576405 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-8x7j8" podUID="d78ceb4a-7433-44e2-a874-37b69473fc54" containerName="oauth-openshift" containerID="cri-o://eeca344f421b2e2818938ee4c5a16c98d66347ef49a4540e6d3c884d88e49cf5" gracePeriod=15 Oct 09 13:31:03 crc kubenswrapper[4762]: I1009 13:31:03.885995 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-8x7j8" Oct 09 13:31:03 crc kubenswrapper[4762]: I1009 13:31:03.911593 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-6db756fc5f-hcllh"] Oct 09 13:31:03 crc kubenswrapper[4762]: E1009 13:31:03.911836 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08bf2dc9-588c-4ea0-b57b-25737b61c178" containerName="extract-content" Oct 09 13:31:03 crc kubenswrapper[4762]: I1009 13:31:03.911852 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="08bf2dc9-588c-4ea0-b57b-25737b61c178" containerName="extract-content" Oct 09 13:31:03 crc kubenswrapper[4762]: E1009 13:31:03.911865 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fbc3d339-b1fb-4705-ab1d-6b2946e12adc" containerName="registry-server" Oct 09 13:31:03 crc kubenswrapper[4762]: I1009 13:31:03.911874 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="fbc3d339-b1fb-4705-ab1d-6b2946e12adc" containerName="registry-server" Oct 09 13:31:03 crc kubenswrapper[4762]: E1009 13:31:03.911886 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fbc3d339-b1fb-4705-ab1d-6b2946e12adc" containerName="extract-utilities" Oct 09 13:31:03 crc kubenswrapper[4762]: I1009 13:31:03.911895 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="fbc3d339-b1fb-4705-ab1d-6b2946e12adc" containerName="extract-utilities" Oct 09 13:31:03 crc kubenswrapper[4762]: E1009 13:31:03.911907 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d78ceb4a-7433-44e2-a874-37b69473fc54" containerName="oauth-openshift" Oct 09 13:31:03 crc kubenswrapper[4762]: I1009 13:31:03.911914 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="d78ceb4a-7433-44e2-a874-37b69473fc54" containerName="oauth-openshift" Oct 09 13:31:03 crc kubenswrapper[4762]: E1009 13:31:03.911922 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08bf2dc9-588c-4ea0-b57b-25737b61c178" containerName="registry-server" Oct 09 13:31:03 crc kubenswrapper[4762]: I1009 13:31:03.911929 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="08bf2dc9-588c-4ea0-b57b-25737b61c178" containerName="registry-server" Oct 09 13:31:03 crc kubenswrapper[4762]: E1009 13:31:03.911938 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="821e572e-77d5-4661-9dec-7da0cad19a4d" containerName="extract-utilities" Oct 09 13:31:03 crc kubenswrapper[4762]: I1009 13:31:03.911945 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="821e572e-77d5-4661-9dec-7da0cad19a4d" containerName="extract-utilities" Oct 09 13:31:03 crc kubenswrapper[4762]: E1009 13:31:03.911953 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08bf2dc9-588c-4ea0-b57b-25737b61c178" containerName="extract-utilities" Oct 09 13:31:03 crc kubenswrapper[4762]: I1009 13:31:03.911960 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="08bf2dc9-588c-4ea0-b57b-25737b61c178" containerName="extract-utilities" Oct 09 13:31:03 crc kubenswrapper[4762]: E1009 13:31:03.911970 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5b86678f-707c-438a-afc6-a6475c9a46c3" containerName="collect-profiles" Oct 09 13:31:03 crc kubenswrapper[4762]: I1009 13:31:03.911978 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="5b86678f-707c-438a-afc6-a6475c9a46c3" containerName="collect-profiles" Oct 09 13:31:03 crc kubenswrapper[4762]: E1009 13:31:03.911989 4762 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="09afe313-21ce-4c9b-8a66-299d430a7903" containerName="extract-content" Oct 09 13:31:03 crc kubenswrapper[4762]: I1009 13:31:03.911996 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="09afe313-21ce-4c9b-8a66-299d430a7903" containerName="extract-content" Oct 09 13:31:03 crc kubenswrapper[4762]: E1009 13:31:03.912010 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09afe313-21ce-4c9b-8a66-299d430a7903" containerName="extract-utilities" Oct 09 13:31:03 crc kubenswrapper[4762]: I1009 13:31:03.912019 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="09afe313-21ce-4c9b-8a66-299d430a7903" containerName="extract-utilities" Oct 09 13:31:03 crc kubenswrapper[4762]: E1009 13:31:03.912030 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="821e572e-77d5-4661-9dec-7da0cad19a4d" containerName="extract-content" Oct 09 13:31:03 crc kubenswrapper[4762]: I1009 13:31:03.912037 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="821e572e-77d5-4661-9dec-7da0cad19a4d" containerName="extract-content" Oct 09 13:31:03 crc kubenswrapper[4762]: E1009 13:31:03.912050 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fbc3d339-b1fb-4705-ab1d-6b2946e12adc" containerName="extract-content" Oct 09 13:31:03 crc kubenswrapper[4762]: I1009 13:31:03.912056 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="fbc3d339-b1fb-4705-ab1d-6b2946e12adc" containerName="extract-content" Oct 09 13:31:03 crc kubenswrapper[4762]: E1009 13:31:03.912064 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="821e572e-77d5-4661-9dec-7da0cad19a4d" containerName="registry-server" Oct 09 13:31:03 crc kubenswrapper[4762]: I1009 13:31:03.912071 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="821e572e-77d5-4661-9dec-7da0cad19a4d" containerName="registry-server" Oct 09 13:31:03 crc kubenswrapper[4762]: E1009 13:31:03.912079 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09afe313-21ce-4c9b-8a66-299d430a7903" containerName="registry-server" Oct 09 13:31:03 crc kubenswrapper[4762]: I1009 13:31:03.912086 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="09afe313-21ce-4c9b-8a66-299d430a7903" containerName="registry-server" Oct 09 13:31:03 crc kubenswrapper[4762]: I1009 13:31:03.912187 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="09afe313-21ce-4c9b-8a66-299d430a7903" containerName="registry-server" Oct 09 13:31:03 crc kubenswrapper[4762]: I1009 13:31:03.912205 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="08bf2dc9-588c-4ea0-b57b-25737b61c178" containerName="registry-server" Oct 09 13:31:03 crc kubenswrapper[4762]: I1009 13:31:03.912215 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="821e572e-77d5-4661-9dec-7da0cad19a4d" containerName="registry-server" Oct 09 13:31:03 crc kubenswrapper[4762]: I1009 13:31:03.912227 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="fbc3d339-b1fb-4705-ab1d-6b2946e12adc" containerName="registry-server" Oct 09 13:31:03 crc kubenswrapper[4762]: I1009 13:31:03.912239 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="d78ceb4a-7433-44e2-a874-37b69473fc54" containerName="oauth-openshift" Oct 09 13:31:03 crc kubenswrapper[4762]: I1009 13:31:03.912249 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="5b86678f-707c-438a-afc6-a6475c9a46c3" containerName="collect-profiles" Oct 09 13:31:03 crc 
Oct 09 13:31:03 crc kubenswrapper[4762]: I1009 13:31:03.925313 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/d78ceb4a-7433-44e2-a874-37b69473fc54-v4-0-config-system-serving-cert\") pod \"d78ceb4a-7433-44e2-a874-37b69473fc54\" (UID: \"d78ceb4a-7433-44e2-a874-37b69473fc54\") "
Oct 09 13:31:03 crc kubenswrapper[4762]: I1009 13:31:03.925417 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/d78ceb4a-7433-44e2-a874-37b69473fc54-v4-0-config-user-idp-0-file-data\") pod \"d78ceb4a-7433-44e2-a874-37b69473fc54\" (UID: \"d78ceb4a-7433-44e2-a874-37b69473fc54\") "
Oct 09 13:31:03 crc kubenswrapper[4762]: I1009 13:31:03.925480 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/d78ceb4a-7433-44e2-a874-37b69473fc54-v4-0-config-user-template-provider-selection\") pod \"d78ceb4a-7433-44e2-a874-37b69473fc54\" (UID: \"d78ceb4a-7433-44e2-a874-37b69473fc54\") "
Oct 09 13:31:03 crc kubenswrapper[4762]: I1009 13:31:03.925510 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/d78ceb4a-7433-44e2-a874-37b69473fc54-v4-0-config-user-template-error\") pod \"d78ceb4a-7433-44e2-a874-37b69473fc54\" (UID: \"d78ceb4a-7433-44e2-a874-37b69473fc54\") "
Oct 09 13:31:03 crc kubenswrapper[4762]: I1009 13:31:03.925572 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-26kh8\" (UniqueName: \"kubernetes.io/projected/d78ceb4a-7433-44e2-a874-37b69473fc54-kube-api-access-26kh8\") pod \"d78ceb4a-7433-44e2-a874-37b69473fc54\" (UID: \"d78ceb4a-7433-44e2-a874-37b69473fc54\") "
Oct 09 13:31:03 crc kubenswrapper[4762]: I1009 13:31:03.925603 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/d78ceb4a-7433-44e2-a874-37b69473fc54-v4-0-config-system-router-certs\") pod \"d78ceb4a-7433-44e2-a874-37b69473fc54\" (UID: \"d78ceb4a-7433-44e2-a874-37b69473fc54\") "
Oct 09 13:31:03 crc kubenswrapper[4762]: I1009 13:31:03.925682 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/d78ceb4a-7433-44e2-a874-37b69473fc54-audit-policies\") pod \"d78ceb4a-7433-44e2-a874-37b69473fc54\" (UID: \"d78ceb4a-7433-44e2-a874-37b69473fc54\") "
Oct 09 13:31:03 crc kubenswrapper[4762]: I1009 13:31:03.925719 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/d78ceb4a-7433-44e2-a874-37b69473fc54-v4-0-config-system-cliconfig\") pod \"d78ceb4a-7433-44e2-a874-37b69473fc54\" (UID: \"d78ceb4a-7433-44e2-a874-37b69473fc54\") "
Oct 09 13:31:03 crc kubenswrapper[4762]: I1009 13:31:03.925793 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/d78ceb4a-7433-44e2-a874-37b69473fc54-v4-0-config-system-session\") pod \"d78ceb4a-7433-44e2-a874-37b69473fc54\" (UID: \"d78ceb4a-7433-44e2-a874-37b69473fc54\") "
Oct 09 13:31:03 crc kubenswrapper[4762]: I1009 13:31:03.925845 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/d78ceb4a-7433-44e2-a874-37b69473fc54-v4-0-config-system-ocp-branding-template\") pod \"d78ceb4a-7433-44e2-a874-37b69473fc54\" (UID: \"d78ceb4a-7433-44e2-a874-37b69473fc54\") "
Oct 09 13:31:03 crc kubenswrapper[4762]: I1009 13:31:03.925869 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/d78ceb4a-7433-44e2-a874-37b69473fc54-v4-0-config-user-template-login\") pod \"d78ceb4a-7433-44e2-a874-37b69473fc54\" (UID: \"d78ceb4a-7433-44e2-a874-37b69473fc54\") "
Oct 09 13:31:03 crc kubenswrapper[4762]: I1009 13:31:03.925892 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/d78ceb4a-7433-44e2-a874-37b69473fc54-audit-dir\") pod \"d78ceb4a-7433-44e2-a874-37b69473fc54\" (UID: \"d78ceb4a-7433-44e2-a874-37b69473fc54\") "
Oct 09 13:31:03 crc kubenswrapper[4762]: I1009 13:31:03.925942 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d78ceb4a-7433-44e2-a874-37b69473fc54-v4-0-config-system-trusted-ca-bundle\") pod \"d78ceb4a-7433-44e2-a874-37b69473fc54\" (UID: \"d78ceb4a-7433-44e2-a874-37b69473fc54\") "
Oct 09 13:31:03 crc kubenswrapper[4762]: I1009 13:31:03.926025 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/d78ceb4a-7433-44e2-a874-37b69473fc54-v4-0-config-system-service-ca\") pod \"d78ceb4a-7433-44e2-a874-37b69473fc54\" (UID: \"d78ceb4a-7433-44e2-a874-37b69473fc54\") "
Oct 09 13:31:03 crc kubenswrapper[4762]: I1009 13:31:03.926395 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/b6cf6558-0ea5-404c-b973-e155c6218a8b-audit-dir\") pod \"oauth-openshift-6db756fc5f-hcllh\" (UID: \"b6cf6558-0ea5-404c-b973-e155c6218a8b\") " pod="openshift-authentication/oauth-openshift-6db756fc5f-hcllh"
Oct 09 13:31:03 crc kubenswrapper[4762]: I1009 13:31:03.926442 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/b6cf6558-0ea5-404c-b973-e155c6218a8b-v4-0-config-system-serving-cert\") pod \"oauth-openshift-6db756fc5f-hcllh\" (UID: \"b6cf6558-0ea5-404c-b973-e155c6218a8b\") " pod="openshift-authentication/oauth-openshift-6db756fc5f-hcllh"
Oct 09 13:31:03 crc kubenswrapper[4762]: I1009 13:31:03.926515 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/b6cf6558-0ea5-404c-b973-e155c6218a8b-v4-0-config-user-template-error\") pod \"oauth-openshift-6db756fc5f-hcllh\" (UID: \"b6cf6558-0ea5-404c-b973-e155c6218a8b\") " pod="openshift-authentication/oauth-openshift-6db756fc5f-hcllh"
Oct 09 13:31:03 crc kubenswrapper[4762]: I1009 13:31:03.926567 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/b6cf6558-0ea5-404c-b973-e155c6218a8b-audit-policies\") pod \"oauth-openshift-6db756fc5f-hcllh\" (UID: \"b6cf6558-0ea5-404c-b973-e155c6218a8b\") " pod="openshift-authentication/oauth-openshift-6db756fc5f-hcllh"
Oct 09 13:31:03 crc kubenswrapper[4762]: I1009 13:31:03.926600 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/b6cf6558-0ea5-404c-b973-e155c6218a8b-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-6db756fc5f-hcllh\" (UID: \"b6cf6558-0ea5-404c-b973-e155c6218a8b\") " pod="openshift-authentication/oauth-openshift-6db756fc5f-hcllh"
Oct 09 13:31:03 crc kubenswrapper[4762]: I1009 13:31:03.926626 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/b6cf6558-0ea5-404c-b973-e155c6218a8b-v4-0-config-system-router-certs\") pod \"oauth-openshift-6db756fc5f-hcllh\" (UID: \"b6cf6558-0ea5-404c-b973-e155c6218a8b\") " pod="openshift-authentication/oauth-openshift-6db756fc5f-hcllh"
Oct 09 13:31:03 crc kubenswrapper[4762]: I1009 13:31:03.926679 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/b6cf6558-0ea5-404c-b973-e155c6218a8b-v4-0-config-user-template-login\") pod \"oauth-openshift-6db756fc5f-hcllh\" (UID: \"b6cf6558-0ea5-404c-b973-e155c6218a8b\") " pod="openshift-authentication/oauth-openshift-6db756fc5f-hcllh"
Oct 09 13:31:03 crc kubenswrapper[4762]: I1009 13:31:03.926712 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-svvpn\" (UniqueName: \"kubernetes.io/projected/b6cf6558-0ea5-404c-b973-e155c6218a8b-kube-api-access-svvpn\") pod \"oauth-openshift-6db756fc5f-hcllh\" (UID: \"b6cf6558-0ea5-404c-b973-e155c6218a8b\") " pod="openshift-authentication/oauth-openshift-6db756fc5f-hcllh"
Oct 09 13:31:03 crc kubenswrapper[4762]: I1009 13:31:03.926765 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/b6cf6558-0ea5-404c-b973-e155c6218a8b-v4-0-config-system-cliconfig\") pod \"oauth-openshift-6db756fc5f-hcllh\" (UID: \"b6cf6558-0ea5-404c-b973-e155c6218a8b\") " pod="openshift-authentication/oauth-openshift-6db756fc5f-hcllh"
Oct 09 13:31:03 crc kubenswrapper[4762]: I1009 13:31:03.926793 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/b6cf6558-0ea5-404c-b973-e155c6218a8b-v4-0-config-system-session\") pod \"oauth-openshift-6db756fc5f-hcllh\" (UID: \"b6cf6558-0ea5-404c-b973-e155c6218a8b\") " pod="openshift-authentication/oauth-openshift-6db756fc5f-hcllh"
Oct 09 13:31:03 crc kubenswrapper[4762]: I1009 13:31:03.926859 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/b6cf6558-0ea5-404c-b973-e155c6218a8b-v4-0-config-system-service-ca\") pod \"oauth-openshift-6db756fc5f-hcllh\" (UID: \"b6cf6558-0ea5-404c-b973-e155c6218a8b\") " pod="openshift-authentication/oauth-openshift-6db756fc5f-hcllh"
Oct 09 13:31:03 crc kubenswrapper[4762]: I1009 13:31:03.926886 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b6cf6558-0ea5-404c-b973-e155c6218a8b-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-6db756fc5f-hcllh\" (UID: \"b6cf6558-0ea5-404c-b973-e155c6218a8b\") " pod="openshift-authentication/oauth-openshift-6db756fc5f-hcllh"
Oct 09 13:31:03 crc kubenswrapper[4762]: I1009 13:31:03.926945 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/b6cf6558-0ea5-404c-b973-e155c6218a8b-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-6db756fc5f-hcllh\" (UID: \"b6cf6558-0ea5-404c-b973-e155c6218a8b\") " pod="openshift-authentication/oauth-openshift-6db756fc5f-hcllh"
Oct 09 13:31:03 crc kubenswrapper[4762]: I1009 13:31:03.926971 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/b6cf6558-0ea5-404c-b973-e155c6218a8b-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-6db756fc5f-hcllh\" (UID: \"b6cf6558-0ea5-404c-b973-e155c6218a8b\") " pod="openshift-authentication/oauth-openshift-6db756fc5f-hcllh"
Oct 09 13:31:03 crc kubenswrapper[4762]: I1009 13:31:03.928918 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-6db756fc5f-hcllh"]
Oct 09 13:31:03 crc kubenswrapper[4762]: I1009 13:31:03.930971 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d78ceb4a-7433-44e2-a874-37b69473fc54-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "d78ceb4a-7433-44e2-a874-37b69473fc54" (UID: "d78ceb4a-7433-44e2-a874-37b69473fc54"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 09 13:31:03 crc kubenswrapper[4762]: I1009 13:31:03.932858 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d78ceb4a-7433-44e2-a874-37b69473fc54-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "d78ceb4a-7433-44e2-a874-37b69473fc54" (UID: "d78ceb4a-7433-44e2-a874-37b69473fc54"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 09 13:31:03 crc kubenswrapper[4762]: I1009 13:31:03.933457 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d78ceb4a-7433-44e2-a874-37b69473fc54-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "d78ceb4a-7433-44e2-a874-37b69473fc54" (UID: "d78ceb4a-7433-44e2-a874-37b69473fc54"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 09 13:31:03 crc kubenswrapper[4762]: I1009 13:31:03.934071 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d78ceb4a-7433-44e2-a874-37b69473fc54-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "d78ceb4a-7433-44e2-a874-37b69473fc54" (UID: "d78ceb4a-7433-44e2-a874-37b69473fc54"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 09 13:31:03 crc kubenswrapper[4762]: I1009 13:31:03.934380 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d78ceb4a-7433-44e2-a874-37b69473fc54-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "d78ceb4a-7433-44e2-a874-37b69473fc54" (UID: "d78ceb4a-7433-44e2-a874-37b69473fc54"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 09 13:31:03 crc kubenswrapper[4762]: I1009 13:31:03.936948 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d78ceb4a-7433-44e2-a874-37b69473fc54-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "d78ceb4a-7433-44e2-a874-37b69473fc54" (UID: "d78ceb4a-7433-44e2-a874-37b69473fc54"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 09 13:31:03 crc kubenswrapper[4762]: I1009 13:31:03.943118 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d78ceb4a-7433-44e2-a874-37b69473fc54-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "d78ceb4a-7433-44e2-a874-37b69473fc54" (UID: "d78ceb4a-7433-44e2-a874-37b69473fc54"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 09 13:31:03 crc kubenswrapper[4762]: I1009 13:31:03.943626 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d78ceb4a-7433-44e2-a874-37b69473fc54-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "d78ceb4a-7433-44e2-a874-37b69473fc54" (UID: "d78ceb4a-7433-44e2-a874-37b69473fc54"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 09 13:31:03 crc kubenswrapper[4762]: I1009 13:31:03.943959 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d78ceb4a-7433-44e2-a874-37b69473fc54-kube-api-access-26kh8" (OuterVolumeSpecName: "kube-api-access-26kh8") pod "d78ceb4a-7433-44e2-a874-37b69473fc54" (UID: "d78ceb4a-7433-44e2-a874-37b69473fc54"). InnerVolumeSpecName "kube-api-access-26kh8". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 09 13:31:03 crc kubenswrapper[4762]: I1009 13:31:03.943955 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d78ceb4a-7433-44e2-a874-37b69473fc54-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "d78ceb4a-7433-44e2-a874-37b69473fc54" (UID: "d78ceb4a-7433-44e2-a874-37b69473fc54"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 09 13:31:03 crc kubenswrapper[4762]: I1009 13:31:03.945245 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d78ceb4a-7433-44e2-a874-37b69473fc54-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "d78ceb4a-7433-44e2-a874-37b69473fc54" (UID: "d78ceb4a-7433-44e2-a874-37b69473fc54"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:31:03 crc kubenswrapper[4762]: I1009 13:31:03.945680 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d78ceb4a-7433-44e2-a874-37b69473fc54-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "d78ceb4a-7433-44e2-a874-37b69473fc54" (UID: "d78ceb4a-7433-44e2-a874-37b69473fc54"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:31:03 crc kubenswrapper[4762]: I1009 13:31:03.947172 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d78ceb4a-7433-44e2-a874-37b69473fc54-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "d78ceb4a-7433-44e2-a874-37b69473fc54" (UID: "d78ceb4a-7433-44e2-a874-37b69473fc54"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:31:03 crc kubenswrapper[4762]: I1009 13:31:03.949136 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d78ceb4a-7433-44e2-a874-37b69473fc54-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "d78ceb4a-7433-44e2-a874-37b69473fc54" (UID: "d78ceb4a-7433-44e2-a874-37b69473fc54"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:31:04 crc kubenswrapper[4762]: I1009 13:31:04.028357 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/b6cf6558-0ea5-404c-b973-e155c6218a8b-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-6db756fc5f-hcllh\" (UID: \"b6cf6558-0ea5-404c-b973-e155c6218a8b\") " pod="openshift-authentication/oauth-openshift-6db756fc5f-hcllh" Oct 09 13:31:04 crc kubenswrapper[4762]: I1009 13:31:04.028441 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/b6cf6558-0ea5-404c-b973-e155c6218a8b-audit-dir\") pod \"oauth-openshift-6db756fc5f-hcllh\" (UID: \"b6cf6558-0ea5-404c-b973-e155c6218a8b\") " pod="openshift-authentication/oauth-openshift-6db756fc5f-hcllh" Oct 09 13:31:04 crc kubenswrapper[4762]: I1009 13:31:04.028485 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/b6cf6558-0ea5-404c-b973-e155c6218a8b-v4-0-config-system-serving-cert\") pod \"oauth-openshift-6db756fc5f-hcllh\" (UID: \"b6cf6558-0ea5-404c-b973-e155c6218a8b\") " pod="openshift-authentication/oauth-openshift-6db756fc5f-hcllh" Oct 09 13:31:04 crc kubenswrapper[4762]: I1009 13:31:04.028527 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/b6cf6558-0ea5-404c-b973-e155c6218a8b-v4-0-config-user-template-error\") pod \"oauth-openshift-6db756fc5f-hcllh\" (UID: \"b6cf6558-0ea5-404c-b973-e155c6218a8b\") " pod="openshift-authentication/oauth-openshift-6db756fc5f-hcllh" Oct 09 13:31:04 crc kubenswrapper[4762]: I1009 13:31:04.028552 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: 
\"kubernetes.io/configmap/b6cf6558-0ea5-404c-b973-e155c6218a8b-audit-policies\") pod \"oauth-openshift-6db756fc5f-hcllh\" (UID: \"b6cf6558-0ea5-404c-b973-e155c6218a8b\") " pod="openshift-authentication/oauth-openshift-6db756fc5f-hcllh" Oct 09 13:31:04 crc kubenswrapper[4762]: I1009 13:31:04.028579 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/b6cf6558-0ea5-404c-b973-e155c6218a8b-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-6db756fc5f-hcllh\" (UID: \"b6cf6558-0ea5-404c-b973-e155c6218a8b\") " pod="openshift-authentication/oauth-openshift-6db756fc5f-hcllh" Oct 09 13:31:04 crc kubenswrapper[4762]: I1009 13:31:04.028607 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/b6cf6558-0ea5-404c-b973-e155c6218a8b-v4-0-config-system-router-certs\") pod \"oauth-openshift-6db756fc5f-hcllh\" (UID: \"b6cf6558-0ea5-404c-b973-e155c6218a8b\") " pod="openshift-authentication/oauth-openshift-6db756fc5f-hcllh" Oct 09 13:31:04 crc kubenswrapper[4762]: I1009 13:31:04.028634 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/b6cf6558-0ea5-404c-b973-e155c6218a8b-v4-0-config-user-template-login\") pod \"oauth-openshift-6db756fc5f-hcllh\" (UID: \"b6cf6558-0ea5-404c-b973-e155c6218a8b\") " pod="openshift-authentication/oauth-openshift-6db756fc5f-hcllh" Oct 09 13:31:04 crc kubenswrapper[4762]: I1009 13:31:04.028651 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/b6cf6558-0ea5-404c-b973-e155c6218a8b-audit-dir\") pod \"oauth-openshift-6db756fc5f-hcllh\" (UID: \"b6cf6558-0ea5-404c-b973-e155c6218a8b\") " pod="openshift-authentication/oauth-openshift-6db756fc5f-hcllh" Oct 09 13:31:04 crc kubenswrapper[4762]: I1009 13:31:04.028685 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-svvpn\" (UniqueName: \"kubernetes.io/projected/b6cf6558-0ea5-404c-b973-e155c6218a8b-kube-api-access-svvpn\") pod \"oauth-openshift-6db756fc5f-hcllh\" (UID: \"b6cf6558-0ea5-404c-b973-e155c6218a8b\") " pod="openshift-authentication/oauth-openshift-6db756fc5f-hcllh" Oct 09 13:31:04 crc kubenswrapper[4762]: I1009 13:31:04.028717 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/b6cf6558-0ea5-404c-b973-e155c6218a8b-v4-0-config-system-cliconfig\") pod \"oauth-openshift-6db756fc5f-hcllh\" (UID: \"b6cf6558-0ea5-404c-b973-e155c6218a8b\") " pod="openshift-authentication/oauth-openshift-6db756fc5f-hcllh" Oct 09 13:31:04 crc kubenswrapper[4762]: I1009 13:31:04.028747 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/b6cf6558-0ea5-404c-b973-e155c6218a8b-v4-0-config-system-session\") pod \"oauth-openshift-6db756fc5f-hcllh\" (UID: \"b6cf6558-0ea5-404c-b973-e155c6218a8b\") " pod="openshift-authentication/oauth-openshift-6db756fc5f-hcllh" Oct 09 13:31:04 crc kubenswrapper[4762]: I1009 13:31:04.029394 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/b6cf6558-0ea5-404c-b973-e155c6218a8b-audit-policies\") pod 
\"oauth-openshift-6db756fc5f-hcllh\" (UID: \"b6cf6558-0ea5-404c-b973-e155c6218a8b\") " pod="openshift-authentication/oauth-openshift-6db756fc5f-hcllh" Oct 09 13:31:04 crc kubenswrapper[4762]: I1009 13:31:04.029525 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/b6cf6558-0ea5-404c-b973-e155c6218a8b-v4-0-config-system-service-ca\") pod \"oauth-openshift-6db756fc5f-hcllh\" (UID: \"b6cf6558-0ea5-404c-b973-e155c6218a8b\") " pod="openshift-authentication/oauth-openshift-6db756fc5f-hcllh" Oct 09 13:31:04 crc kubenswrapper[4762]: I1009 13:31:04.029649 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b6cf6558-0ea5-404c-b973-e155c6218a8b-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-6db756fc5f-hcllh\" (UID: \"b6cf6558-0ea5-404c-b973-e155c6218a8b\") " pod="openshift-authentication/oauth-openshift-6db756fc5f-hcllh" Oct 09 13:31:04 crc kubenswrapper[4762]: I1009 13:31:04.029792 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/b6cf6558-0ea5-404c-b973-e155c6218a8b-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-6db756fc5f-hcllh\" (UID: \"b6cf6558-0ea5-404c-b973-e155c6218a8b\") " pod="openshift-authentication/oauth-openshift-6db756fc5f-hcllh" Oct 09 13:31:04 crc kubenswrapper[4762]: I1009 13:31:04.030016 4762 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/d78ceb4a-7433-44e2-a874-37b69473fc54-audit-policies\") on node \"crc\" DevicePath \"\"" Oct 09 13:31:04 crc kubenswrapper[4762]: I1009 13:31:04.030393 4762 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/d78ceb4a-7433-44e2-a874-37b69473fc54-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Oct 09 13:31:04 crc kubenswrapper[4762]: I1009 13:31:04.030426 4762 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/d78ceb4a-7433-44e2-a874-37b69473fc54-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Oct 09 13:31:04 crc kubenswrapper[4762]: I1009 13:31:04.030442 4762 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/d78ceb4a-7433-44e2-a874-37b69473fc54-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Oct 09 13:31:04 crc kubenswrapper[4762]: I1009 13:31:04.030458 4762 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/d78ceb4a-7433-44e2-a874-37b69473fc54-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Oct 09 13:31:04 crc kubenswrapper[4762]: I1009 13:31:04.030473 4762 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/d78ceb4a-7433-44e2-a874-37b69473fc54-audit-dir\") on node \"crc\" DevicePath \"\"" Oct 09 13:31:04 crc kubenswrapper[4762]: I1009 13:31:04.030488 4762 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d78ceb4a-7433-44e2-a874-37b69473fc54-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath 
\"\"" Oct 09 13:31:04 crc kubenswrapper[4762]: I1009 13:31:04.030502 4762 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/d78ceb4a-7433-44e2-a874-37b69473fc54-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Oct 09 13:31:04 crc kubenswrapper[4762]: I1009 13:31:04.030516 4762 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/d78ceb4a-7433-44e2-a874-37b69473fc54-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 09 13:31:04 crc kubenswrapper[4762]: I1009 13:31:04.030529 4762 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/d78ceb4a-7433-44e2-a874-37b69473fc54-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Oct 09 13:31:04 crc kubenswrapper[4762]: I1009 13:31:04.030544 4762 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/d78ceb4a-7433-44e2-a874-37b69473fc54-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Oct 09 13:31:04 crc kubenswrapper[4762]: I1009 13:31:04.030560 4762 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/d78ceb4a-7433-44e2-a874-37b69473fc54-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Oct 09 13:31:04 crc kubenswrapper[4762]: I1009 13:31:04.030574 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-26kh8\" (UniqueName: \"kubernetes.io/projected/d78ceb4a-7433-44e2-a874-37b69473fc54-kube-api-access-26kh8\") on node \"crc\" DevicePath \"\"" Oct 09 13:31:04 crc kubenswrapper[4762]: I1009 13:31:04.030587 4762 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/d78ceb4a-7433-44e2-a874-37b69473fc54-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Oct 09 13:31:04 crc kubenswrapper[4762]: I1009 13:31:04.030793 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/b6cf6558-0ea5-404c-b973-e155c6218a8b-v4-0-config-system-service-ca\") pod \"oauth-openshift-6db756fc5f-hcllh\" (UID: \"b6cf6558-0ea5-404c-b973-e155c6218a8b\") " pod="openshift-authentication/oauth-openshift-6db756fc5f-hcllh" Oct 09 13:31:04 crc kubenswrapper[4762]: I1009 13:31:04.030840 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/b6cf6558-0ea5-404c-b973-e155c6218a8b-v4-0-config-system-cliconfig\") pod \"oauth-openshift-6db756fc5f-hcllh\" (UID: \"b6cf6558-0ea5-404c-b973-e155c6218a8b\") " pod="openshift-authentication/oauth-openshift-6db756fc5f-hcllh" Oct 09 13:31:04 crc kubenswrapper[4762]: I1009 13:31:04.031079 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b6cf6558-0ea5-404c-b973-e155c6218a8b-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-6db756fc5f-hcllh\" (UID: \"b6cf6558-0ea5-404c-b973-e155c6218a8b\") " pod="openshift-authentication/oauth-openshift-6db756fc5f-hcllh" Oct 09 13:31:04 crc kubenswrapper[4762]: I1009 13:31:04.032005 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/b6cf6558-0ea5-404c-b973-e155c6218a8b-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-6db756fc5f-hcllh\" (UID: \"b6cf6558-0ea5-404c-b973-e155c6218a8b\") " pod="openshift-authentication/oauth-openshift-6db756fc5f-hcllh" Oct 09 13:31:04 crc kubenswrapper[4762]: I1009 13:31:04.032457 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/b6cf6558-0ea5-404c-b973-e155c6218a8b-v4-0-config-user-template-error\") pod \"oauth-openshift-6db756fc5f-hcllh\" (UID: \"b6cf6558-0ea5-404c-b973-e155c6218a8b\") " pod="openshift-authentication/oauth-openshift-6db756fc5f-hcllh" Oct 09 13:31:04 crc kubenswrapper[4762]: I1009 13:31:04.032679 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/b6cf6558-0ea5-404c-b973-e155c6218a8b-v4-0-config-system-session\") pod \"oauth-openshift-6db756fc5f-hcllh\" (UID: \"b6cf6558-0ea5-404c-b973-e155c6218a8b\") " pod="openshift-authentication/oauth-openshift-6db756fc5f-hcllh" Oct 09 13:31:04 crc kubenswrapper[4762]: I1009 13:31:04.032726 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/b6cf6558-0ea5-404c-b973-e155c6218a8b-v4-0-config-system-router-certs\") pod \"oauth-openshift-6db756fc5f-hcllh\" (UID: \"b6cf6558-0ea5-404c-b973-e155c6218a8b\") " pod="openshift-authentication/oauth-openshift-6db756fc5f-hcllh" Oct 09 13:31:04 crc kubenswrapper[4762]: I1009 13:31:04.033982 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/b6cf6558-0ea5-404c-b973-e155c6218a8b-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-6db756fc5f-hcllh\" (UID: \"b6cf6558-0ea5-404c-b973-e155c6218a8b\") " pod="openshift-authentication/oauth-openshift-6db756fc5f-hcllh" Oct 09 13:31:04 crc kubenswrapper[4762]: I1009 13:31:04.034115 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/b6cf6558-0ea5-404c-b973-e155c6218a8b-v4-0-config-user-template-login\") pod \"oauth-openshift-6db756fc5f-hcllh\" (UID: \"b6cf6558-0ea5-404c-b973-e155c6218a8b\") " pod="openshift-authentication/oauth-openshift-6db756fc5f-hcllh" Oct 09 13:31:04 crc kubenswrapper[4762]: I1009 13:31:04.034602 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/b6cf6558-0ea5-404c-b973-e155c6218a8b-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-6db756fc5f-hcllh\" (UID: \"b6cf6558-0ea5-404c-b973-e155c6218a8b\") " pod="openshift-authentication/oauth-openshift-6db756fc5f-hcllh" Oct 09 13:31:04 crc kubenswrapper[4762]: I1009 13:31:04.034925 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/b6cf6558-0ea5-404c-b973-e155c6218a8b-v4-0-config-system-serving-cert\") pod \"oauth-openshift-6db756fc5f-hcllh\" (UID: \"b6cf6558-0ea5-404c-b973-e155c6218a8b\") " pod="openshift-authentication/oauth-openshift-6db756fc5f-hcllh" Oct 09 13:31:04 crc kubenswrapper[4762]: I1009 13:31:04.045509 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-svvpn\" 
(UniqueName: \"kubernetes.io/projected/b6cf6558-0ea5-404c-b973-e155c6218a8b-kube-api-access-svvpn\") pod \"oauth-openshift-6db756fc5f-hcllh\" (UID: \"b6cf6558-0ea5-404c-b973-e155c6218a8b\") " pod="openshift-authentication/oauth-openshift-6db756fc5f-hcllh" Oct 09 13:31:04 crc kubenswrapper[4762]: I1009 13:31:04.278305 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-6db756fc5f-hcllh" Oct 09 13:31:04 crc kubenswrapper[4762]: I1009 13:31:04.288127 4762 generic.go:334] "Generic (PLEG): container finished" podID="d78ceb4a-7433-44e2-a874-37b69473fc54" containerID="eeca344f421b2e2818938ee4c5a16c98d66347ef49a4540e6d3c884d88e49cf5" exitCode=0 Oct 09 13:31:04 crc kubenswrapper[4762]: I1009 13:31:04.288191 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-8x7j8" event={"ID":"d78ceb4a-7433-44e2-a874-37b69473fc54","Type":"ContainerDied","Data":"eeca344f421b2e2818938ee4c5a16c98d66347ef49a4540e6d3c884d88e49cf5"} Oct 09 13:31:04 crc kubenswrapper[4762]: I1009 13:31:04.288217 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-8x7j8" event={"ID":"d78ceb4a-7433-44e2-a874-37b69473fc54","Type":"ContainerDied","Data":"b68b428280f15278dab0e390e965d5b45c3a18868128af523c6ee6f63893e1e3"} Oct 09 13:31:04 crc kubenswrapper[4762]: I1009 13:31:04.288233 4762 scope.go:117] "RemoveContainer" containerID="eeca344f421b2e2818938ee4c5a16c98d66347ef49a4540e6d3c884d88e49cf5" Oct 09 13:31:04 crc kubenswrapper[4762]: I1009 13:31:04.288343 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-8x7j8" Oct 09 13:31:04 crc kubenswrapper[4762]: I1009 13:31:04.317956 4762 scope.go:117] "RemoveContainer" containerID="eeca344f421b2e2818938ee4c5a16c98d66347ef49a4540e6d3c884d88e49cf5" Oct 09 13:31:04 crc kubenswrapper[4762]: E1009 13:31:04.318444 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eeca344f421b2e2818938ee4c5a16c98d66347ef49a4540e6d3c884d88e49cf5\": container with ID starting with eeca344f421b2e2818938ee4c5a16c98d66347ef49a4540e6d3c884d88e49cf5 not found: ID does not exist" containerID="eeca344f421b2e2818938ee4c5a16c98d66347ef49a4540e6d3c884d88e49cf5" Oct 09 13:31:04 crc kubenswrapper[4762]: I1009 13:31:04.318486 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eeca344f421b2e2818938ee4c5a16c98d66347ef49a4540e6d3c884d88e49cf5"} err="failed to get container status \"eeca344f421b2e2818938ee4c5a16c98d66347ef49a4540e6d3c884d88e49cf5\": rpc error: code = NotFound desc = could not find container \"eeca344f421b2e2818938ee4c5a16c98d66347ef49a4540e6d3c884d88e49cf5\": container with ID starting with eeca344f421b2e2818938ee4c5a16c98d66347ef49a4540e6d3c884d88e49cf5 not found: ID does not exist" Oct 09 13:31:04 crc kubenswrapper[4762]: I1009 13:31:04.331690 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-8x7j8"] Oct 09 13:31:04 crc kubenswrapper[4762]: I1009 13:31:04.334651 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-8x7j8"] Oct 09 13:31:04 crc kubenswrapper[4762]: I1009 13:31:04.682562 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-6db756fc5f-hcllh"] Oct 09 
13:31:04 crc kubenswrapper[4762]: W1009 13:31:04.691885 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb6cf6558_0ea5_404c_b973_e155c6218a8b.slice/crio-884ed22b90edbbb132fecff9c111dded41525cdc76668e13f8a1d80bb2a009ba WatchSource:0}: Error finding container 884ed22b90edbbb132fecff9c111dded41525cdc76668e13f8a1d80bb2a009ba: Status 404 returned error can't find the container with id 884ed22b90edbbb132fecff9c111dded41525cdc76668e13f8a1d80bb2a009ba Oct 09 13:31:04 crc kubenswrapper[4762]: I1009 13:31:04.981745 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d78ceb4a-7433-44e2-a874-37b69473fc54" path="/var/lib/kubelet/pods/d78ceb4a-7433-44e2-a874-37b69473fc54/volumes" Oct 09 13:31:05 crc kubenswrapper[4762]: I1009 13:31:05.297608 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-6db756fc5f-hcllh" event={"ID":"b6cf6558-0ea5-404c-b973-e155c6218a8b","Type":"ContainerStarted","Data":"aba2f3c56b9ea62980fe8df7ab55af8996d15f2d93d683b2d9b563298f4657c8"} Oct 09 13:31:05 crc kubenswrapper[4762]: I1009 13:31:05.298008 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-6db756fc5f-hcllh" event={"ID":"b6cf6558-0ea5-404c-b973-e155c6218a8b","Type":"ContainerStarted","Data":"884ed22b90edbbb132fecff9c111dded41525cdc76668e13f8a1d80bb2a009ba"} Oct 09 13:31:05 crc kubenswrapper[4762]: I1009 13:31:05.298032 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-6db756fc5f-hcllh" Oct 09 13:31:05 crc kubenswrapper[4762]: I1009 13:31:05.303789 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-6db756fc5f-hcllh" Oct 09 13:31:05 crc kubenswrapper[4762]: I1009 13:31:05.325616 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-6db756fc5f-hcllh" podStartSLOduration=27.325597219 podStartE2EDuration="27.325597219s" podCreationTimestamp="2025-10-09 13:30:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:31:05.315753984 +0000 UTC m=+341.089545023" watchObservedRunningTime="2025-10-09 13:31:05.325597219 +0000 UTC m=+341.099388258" Oct 09 13:31:17 crc kubenswrapper[4762]: I1009 13:31:17.138442 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-wpc52"] Oct 09 13:31:17 crc kubenswrapper[4762]: I1009 13:31:17.139399 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-wpc52" podUID="4de8e24c-273f-4ff9-83c2-9dd8952c3d74" containerName="registry-server" containerID="cri-o://a54c44462acb20bfa5c92c653424f4cc44d835ec5986a7435128ccd9eeca4720" gracePeriod=30 Oct 09 13:31:17 crc kubenswrapper[4762]: I1009 13:31:17.145608 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-jhmrt"] Oct 09 13:31:17 crc kubenswrapper[4762]: I1009 13:31:17.145926 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-jhmrt" podUID="2c17d894-7ee5-44c4-b64a-c05be6870a3d" containerName="registry-server" containerID="cri-o://55f12de18f2d336c577d2ff4f9e63713636ad0c1f479922614aa6c6d35b378e7" gracePeriod=30 Oct 09 13:31:17 crc 
kubenswrapper[4762]: I1009 13:31:17.160053 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-9957f"] Oct 09 13:31:17 crc kubenswrapper[4762]: I1009 13:31:17.160289 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-9957f" podUID="fe403e6f-4f08-4263-962d-377d0989c0d7" containerName="marketplace-operator" containerID="cri-o://c2ed51d56b35cc07b84a24f0a43b49a30184c3d9711710a5d036333a944683f4" gracePeriod=30 Oct 09 13:31:17 crc kubenswrapper[4762]: I1009 13:31:17.171985 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-85rnj"] Oct 09 13:31:17 crc kubenswrapper[4762]: I1009 13:31:17.172238 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-85rnj" podUID="ff2e2ba5-4651-4774-a428-10af2c71736d" containerName="registry-server" containerID="cri-o://96b51c886165381225913b82858bed93a796df93a9e4a1fe9324c79f45244d3f" gracePeriod=30 Oct 09 13:31:17 crc kubenswrapper[4762]: I1009 13:31:17.184413 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-2hbjn"] Oct 09 13:31:17 crc kubenswrapper[4762]: I1009 13:31:17.184704 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-2hbjn" podUID="1f2bb561-6df4-46da-b21a-1d5621f45ab9" containerName="registry-server" containerID="cri-o://37b676a90e1a450abb55e7a1d4fe8b37081c21e83408d32fd7fc0abd9d617067" gracePeriod=30 Oct 09 13:31:17 crc kubenswrapper[4762]: I1009 13:31:17.196878 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-bkd7x"] Oct 09 13:31:17 crc kubenswrapper[4762]: I1009 13:31:17.197679 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-bkd7x" Oct 09 13:31:17 crc kubenswrapper[4762]: I1009 13:31:17.198887 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-bkd7x"] Oct 09 13:31:17 crc kubenswrapper[4762]: I1009 13:31:17.296833 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7f6d1478-3db5-4052-ba0f-6ede9c7e06d7-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-bkd7x\" (UID: \"7f6d1478-3db5-4052-ba0f-6ede9c7e06d7\") " pod="openshift-marketplace/marketplace-operator-79b997595-bkd7x" Oct 09 13:31:17 crc kubenswrapper[4762]: I1009 13:31:17.296931 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/7f6d1478-3db5-4052-ba0f-6ede9c7e06d7-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-bkd7x\" (UID: \"7f6d1478-3db5-4052-ba0f-6ede9c7e06d7\") " pod="openshift-marketplace/marketplace-operator-79b997595-bkd7x" Oct 09 13:31:17 crc kubenswrapper[4762]: I1009 13:31:17.297222 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-std8s\" (UniqueName: \"kubernetes.io/projected/7f6d1478-3db5-4052-ba0f-6ede9c7e06d7-kube-api-access-std8s\") pod \"marketplace-operator-79b997595-bkd7x\" (UID: \"7f6d1478-3db5-4052-ba0f-6ede9c7e06d7\") " pod="openshift-marketplace/marketplace-operator-79b997595-bkd7x" Oct 09 13:31:17 crc kubenswrapper[4762]: I1009 13:31:17.370442 4762 generic.go:334] "Generic (PLEG): container finished" podID="ff2e2ba5-4651-4774-a428-10af2c71736d" containerID="96b51c886165381225913b82858bed93a796df93a9e4a1fe9324c79f45244d3f" exitCode=0 Oct 09 13:31:17 crc kubenswrapper[4762]: I1009 13:31:17.370535 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-85rnj" event={"ID":"ff2e2ba5-4651-4774-a428-10af2c71736d","Type":"ContainerDied","Data":"96b51c886165381225913b82858bed93a796df93a9e4a1fe9324c79f45244d3f"} Oct 09 13:31:17 crc kubenswrapper[4762]: I1009 13:31:17.379092 4762 generic.go:334] "Generic (PLEG): container finished" podID="2c17d894-7ee5-44c4-b64a-c05be6870a3d" containerID="55f12de18f2d336c577d2ff4f9e63713636ad0c1f479922614aa6c6d35b378e7" exitCode=0 Oct 09 13:31:17 crc kubenswrapper[4762]: I1009 13:31:17.379156 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jhmrt" event={"ID":"2c17d894-7ee5-44c4-b64a-c05be6870a3d","Type":"ContainerDied","Data":"55f12de18f2d336c577d2ff4f9e63713636ad0c1f479922614aa6c6d35b378e7"} Oct 09 13:31:17 crc kubenswrapper[4762]: I1009 13:31:17.380203 4762 generic.go:334] "Generic (PLEG): container finished" podID="fe403e6f-4f08-4263-962d-377d0989c0d7" containerID="c2ed51d56b35cc07b84a24f0a43b49a30184c3d9711710a5d036333a944683f4" exitCode=0 Oct 09 13:31:17 crc kubenswrapper[4762]: I1009 13:31:17.380241 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-9957f" event={"ID":"fe403e6f-4f08-4263-962d-377d0989c0d7","Type":"ContainerDied","Data":"c2ed51d56b35cc07b84a24f0a43b49a30184c3d9711710a5d036333a944683f4"} Oct 09 13:31:17 crc kubenswrapper[4762]: I1009 13:31:17.382547 4762 generic.go:334] "Generic (PLEG): container finished" podID="4de8e24c-273f-4ff9-83c2-9dd8952c3d74" 
containerID="a54c44462acb20bfa5c92c653424f4cc44d835ec5986a7435128ccd9eeca4720" exitCode=0 Oct 09 13:31:17 crc kubenswrapper[4762]: I1009 13:31:17.382594 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wpc52" event={"ID":"4de8e24c-273f-4ff9-83c2-9dd8952c3d74","Type":"ContainerDied","Data":"a54c44462acb20bfa5c92c653424f4cc44d835ec5986a7435128ccd9eeca4720"} Oct 09 13:31:17 crc kubenswrapper[4762]: I1009 13:31:17.385391 4762 generic.go:334] "Generic (PLEG): container finished" podID="1f2bb561-6df4-46da-b21a-1d5621f45ab9" containerID="37b676a90e1a450abb55e7a1d4fe8b37081c21e83408d32fd7fc0abd9d617067" exitCode=0 Oct 09 13:31:17 crc kubenswrapper[4762]: I1009 13:31:17.385430 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2hbjn" event={"ID":"1f2bb561-6df4-46da-b21a-1d5621f45ab9","Type":"ContainerDied","Data":"37b676a90e1a450abb55e7a1d4fe8b37081c21e83408d32fd7fc0abd9d617067"} Oct 09 13:31:17 crc kubenswrapper[4762]: I1009 13:31:17.398349 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-std8s\" (UniqueName: \"kubernetes.io/projected/7f6d1478-3db5-4052-ba0f-6ede9c7e06d7-kube-api-access-std8s\") pod \"marketplace-operator-79b997595-bkd7x\" (UID: \"7f6d1478-3db5-4052-ba0f-6ede9c7e06d7\") " pod="openshift-marketplace/marketplace-operator-79b997595-bkd7x" Oct 09 13:31:17 crc kubenswrapper[4762]: I1009 13:31:17.398456 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7f6d1478-3db5-4052-ba0f-6ede9c7e06d7-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-bkd7x\" (UID: \"7f6d1478-3db5-4052-ba0f-6ede9c7e06d7\") " pod="openshift-marketplace/marketplace-operator-79b997595-bkd7x" Oct 09 13:31:17 crc kubenswrapper[4762]: I1009 13:31:17.398508 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/7f6d1478-3db5-4052-ba0f-6ede9c7e06d7-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-bkd7x\" (UID: \"7f6d1478-3db5-4052-ba0f-6ede9c7e06d7\") " pod="openshift-marketplace/marketplace-operator-79b997595-bkd7x" Oct 09 13:31:17 crc kubenswrapper[4762]: I1009 13:31:17.399865 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7f6d1478-3db5-4052-ba0f-6ede9c7e06d7-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-bkd7x\" (UID: \"7f6d1478-3db5-4052-ba0f-6ede9c7e06d7\") " pod="openshift-marketplace/marketplace-operator-79b997595-bkd7x" Oct 09 13:31:17 crc kubenswrapper[4762]: I1009 13:31:17.405048 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/7f6d1478-3db5-4052-ba0f-6ede9c7e06d7-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-bkd7x\" (UID: \"7f6d1478-3db5-4052-ba0f-6ede9c7e06d7\") " pod="openshift-marketplace/marketplace-operator-79b997595-bkd7x" Oct 09 13:31:17 crc kubenswrapper[4762]: I1009 13:31:17.416458 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-std8s\" (UniqueName: \"kubernetes.io/projected/7f6d1478-3db5-4052-ba0f-6ede9c7e06d7-kube-api-access-std8s\") pod \"marketplace-operator-79b997595-bkd7x\" (UID: \"7f6d1478-3db5-4052-ba0f-6ede9c7e06d7\") " 
pod="openshift-marketplace/marketplace-operator-79b997595-bkd7x" Oct 09 13:31:17 crc kubenswrapper[4762]: I1009 13:31:17.632080 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-bkd7x" Oct 09 13:31:17 crc kubenswrapper[4762]: I1009 13:31:17.636502 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-jhmrt" Oct 09 13:31:17 crc kubenswrapper[4762]: I1009 13:31:17.641049 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-wpc52" Oct 09 13:31:17 crc kubenswrapper[4762]: I1009 13:31:17.648572 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-2hbjn" Oct 09 13:31:17 crc kubenswrapper[4762]: I1009 13:31:17.668422 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-9957f" Oct 09 13:31:17 crc kubenswrapper[4762]: I1009 13:31:17.671930 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-85rnj" Oct 09 13:31:17 crc kubenswrapper[4762]: I1009 13:31:17.702459 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ff2e2ba5-4651-4774-a428-10af2c71736d-utilities\") pod \"ff2e2ba5-4651-4774-a428-10af2c71736d\" (UID: \"ff2e2ba5-4651-4774-a428-10af2c71736d\") " Oct 09 13:31:17 crc kubenswrapper[4762]: I1009 13:31:17.702553 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/fe403e6f-4f08-4263-962d-377d0989c0d7-marketplace-operator-metrics\") pod \"fe403e6f-4f08-4263-962d-377d0989c0d7\" (UID: \"fe403e6f-4f08-4263-962d-377d0989c0d7\") " Oct 09 13:31:17 crc kubenswrapper[4762]: I1009 13:31:17.702778 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wp9qn\" (UniqueName: \"kubernetes.io/projected/ff2e2ba5-4651-4774-a428-10af2c71736d-kube-api-access-wp9qn\") pod \"ff2e2ba5-4651-4774-a428-10af2c71736d\" (UID: \"ff2e2ba5-4651-4774-a428-10af2c71736d\") " Oct 09 13:31:17 crc kubenswrapper[4762]: I1009 13:31:17.702823 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gpjt7\" (UniqueName: \"kubernetes.io/projected/2c17d894-7ee5-44c4-b64a-c05be6870a3d-kube-api-access-gpjt7\") pod \"2c17d894-7ee5-44c4-b64a-c05be6870a3d\" (UID: \"2c17d894-7ee5-44c4-b64a-c05be6870a3d\") " Oct 09 13:31:17 crc kubenswrapper[4762]: I1009 13:31:17.702859 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s8vrg\" (UniqueName: \"kubernetes.io/projected/fe403e6f-4f08-4263-962d-377d0989c0d7-kube-api-access-s8vrg\") pod \"fe403e6f-4f08-4263-962d-377d0989c0d7\" (UID: \"fe403e6f-4f08-4263-962d-377d0989c0d7\") " Oct 09 13:31:17 crc kubenswrapper[4762]: I1009 13:31:17.702877 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ff2e2ba5-4651-4774-a428-10af2c71736d-catalog-content\") pod \"ff2e2ba5-4651-4774-a428-10af2c71736d\" (UID: \"ff2e2ba5-4651-4774-a428-10af2c71736d\") " Oct 09 13:31:17 crc kubenswrapper[4762]: I1009 13:31:17.702903 4762 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2c17d894-7ee5-44c4-b64a-c05be6870a3d-utilities\") pod \"2c17d894-7ee5-44c4-b64a-c05be6870a3d\" (UID: \"2c17d894-7ee5-44c4-b64a-c05be6870a3d\") " Oct 09 13:31:17 crc kubenswrapper[4762]: I1009 13:31:17.702929 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1f2bb561-6df4-46da-b21a-1d5621f45ab9-catalog-content\") pod \"1f2bb561-6df4-46da-b21a-1d5621f45ab9\" (UID: \"1f2bb561-6df4-46da-b21a-1d5621f45ab9\") " Oct 09 13:31:17 crc kubenswrapper[4762]: I1009 13:31:17.702948 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1f2bb561-6df4-46da-b21a-1d5621f45ab9-utilities\") pod \"1f2bb561-6df4-46da-b21a-1d5621f45ab9\" (UID: \"1f2bb561-6df4-46da-b21a-1d5621f45ab9\") " Oct 09 13:31:17 crc kubenswrapper[4762]: I1009 13:31:17.702967 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4de8e24c-273f-4ff9-83c2-9dd8952c3d74-utilities\") pod \"4de8e24c-273f-4ff9-83c2-9dd8952c3d74\" (UID: \"4de8e24c-273f-4ff9-83c2-9dd8952c3d74\") " Oct 09 13:31:17 crc kubenswrapper[4762]: I1009 13:31:17.703005 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rkzlr\" (UniqueName: \"kubernetes.io/projected/1f2bb561-6df4-46da-b21a-1d5621f45ab9-kube-api-access-rkzlr\") pod \"1f2bb561-6df4-46da-b21a-1d5621f45ab9\" (UID: \"1f2bb561-6df4-46da-b21a-1d5621f45ab9\") " Oct 09 13:31:17 crc kubenswrapper[4762]: I1009 13:31:17.703040 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4de8e24c-273f-4ff9-83c2-9dd8952c3d74-catalog-content\") pod \"4de8e24c-273f-4ff9-83c2-9dd8952c3d74\" (UID: \"4de8e24c-273f-4ff9-83c2-9dd8952c3d74\") " Oct 09 13:31:17 crc kubenswrapper[4762]: I1009 13:31:17.703078 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cxgtn\" (UniqueName: \"kubernetes.io/projected/4de8e24c-273f-4ff9-83c2-9dd8952c3d74-kube-api-access-cxgtn\") pod \"4de8e24c-273f-4ff9-83c2-9dd8952c3d74\" (UID: \"4de8e24c-273f-4ff9-83c2-9dd8952c3d74\") " Oct 09 13:31:17 crc kubenswrapper[4762]: I1009 13:31:17.703099 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/fe403e6f-4f08-4263-962d-377d0989c0d7-marketplace-trusted-ca\") pod \"fe403e6f-4f08-4263-962d-377d0989c0d7\" (UID: \"fe403e6f-4f08-4263-962d-377d0989c0d7\") " Oct 09 13:31:17 crc kubenswrapper[4762]: I1009 13:31:17.703116 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2c17d894-7ee5-44c4-b64a-c05be6870a3d-catalog-content\") pod \"2c17d894-7ee5-44c4-b64a-c05be6870a3d\" (UID: \"2c17d894-7ee5-44c4-b64a-c05be6870a3d\") " Oct 09 13:31:17 crc kubenswrapper[4762]: I1009 13:31:17.704239 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ff2e2ba5-4651-4774-a428-10af2c71736d-utilities" (OuterVolumeSpecName: "utilities") pod "ff2e2ba5-4651-4774-a428-10af2c71736d" (UID: "ff2e2ba5-4651-4774-a428-10af2c71736d"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 13:31:17 crc kubenswrapper[4762]: I1009 13:31:17.706686 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2c17d894-7ee5-44c4-b64a-c05be6870a3d-utilities" (OuterVolumeSpecName: "utilities") pod "2c17d894-7ee5-44c4-b64a-c05be6870a3d" (UID: "2c17d894-7ee5-44c4-b64a-c05be6870a3d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 13:31:17 crc kubenswrapper[4762]: I1009 13:31:17.707294 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fe403e6f-4f08-4263-962d-377d0989c0d7-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "fe403e6f-4f08-4263-962d-377d0989c0d7" (UID: "fe403e6f-4f08-4263-962d-377d0989c0d7"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:31:17 crc kubenswrapper[4762]: I1009 13:31:17.710305 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1f2bb561-6df4-46da-b21a-1d5621f45ab9-kube-api-access-rkzlr" (OuterVolumeSpecName: "kube-api-access-rkzlr") pod "1f2bb561-6df4-46da-b21a-1d5621f45ab9" (UID: "1f2bb561-6df4-46da-b21a-1d5621f45ab9"). InnerVolumeSpecName "kube-api-access-rkzlr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:31:17 crc kubenswrapper[4762]: I1009 13:31:17.712513 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4de8e24c-273f-4ff9-83c2-9dd8952c3d74-kube-api-access-cxgtn" (OuterVolumeSpecName: "kube-api-access-cxgtn") pod "4de8e24c-273f-4ff9-83c2-9dd8952c3d74" (UID: "4de8e24c-273f-4ff9-83c2-9dd8952c3d74"). InnerVolumeSpecName "kube-api-access-cxgtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:31:17 crc kubenswrapper[4762]: I1009 13:31:17.713065 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1f2bb561-6df4-46da-b21a-1d5621f45ab9-utilities" (OuterVolumeSpecName: "utilities") pod "1f2bb561-6df4-46da-b21a-1d5621f45ab9" (UID: "1f2bb561-6df4-46da-b21a-1d5621f45ab9"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 13:31:17 crc kubenswrapper[4762]: I1009 13:31:17.714708 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fe403e6f-4f08-4263-962d-377d0989c0d7-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "fe403e6f-4f08-4263-962d-377d0989c0d7" (UID: "fe403e6f-4f08-4263-962d-377d0989c0d7"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:31:17 crc kubenswrapper[4762]: I1009 13:31:17.718148 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fe403e6f-4f08-4263-962d-377d0989c0d7-kube-api-access-s8vrg" (OuterVolumeSpecName: "kube-api-access-s8vrg") pod "fe403e6f-4f08-4263-962d-377d0989c0d7" (UID: "fe403e6f-4f08-4263-962d-377d0989c0d7"). InnerVolumeSpecName "kube-api-access-s8vrg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:31:17 crc kubenswrapper[4762]: I1009 13:31:17.729904 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2c17d894-7ee5-44c4-b64a-c05be6870a3d-kube-api-access-gpjt7" (OuterVolumeSpecName: "kube-api-access-gpjt7") pod "2c17d894-7ee5-44c4-b64a-c05be6870a3d" (UID: "2c17d894-7ee5-44c4-b64a-c05be6870a3d"). InnerVolumeSpecName "kube-api-access-gpjt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:31:17 crc kubenswrapper[4762]: I1009 13:31:17.730727 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4de8e24c-273f-4ff9-83c2-9dd8952c3d74-utilities" (OuterVolumeSpecName: "utilities") pod "4de8e24c-273f-4ff9-83c2-9dd8952c3d74" (UID: "4de8e24c-273f-4ff9-83c2-9dd8952c3d74"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 13:31:17 crc kubenswrapper[4762]: I1009 13:31:17.733795 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ff2e2ba5-4651-4774-a428-10af2c71736d-kube-api-access-wp9qn" (OuterVolumeSpecName: "kube-api-access-wp9qn") pod "ff2e2ba5-4651-4774-a428-10af2c71736d" (UID: "ff2e2ba5-4651-4774-a428-10af2c71736d"). InnerVolumeSpecName "kube-api-access-wp9qn". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:31:17 crc kubenswrapper[4762]: I1009 13:31:17.753623 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ff2e2ba5-4651-4774-a428-10af2c71736d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ff2e2ba5-4651-4774-a428-10af2c71736d" (UID: "ff2e2ba5-4651-4774-a428-10af2c71736d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 13:31:17 crc kubenswrapper[4762]: I1009 13:31:17.803415 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4de8e24c-273f-4ff9-83c2-9dd8952c3d74-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4de8e24c-273f-4ff9-83c2-9dd8952c3d74" (UID: "4de8e24c-273f-4ff9-83c2-9dd8952c3d74"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 13:31:17 crc kubenswrapper[4762]: I1009 13:31:17.804050 4762 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ff2e2ba5-4651-4774-a428-10af2c71736d-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 13:31:17 crc kubenswrapper[4762]: I1009 13:31:17.804068 4762 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/fe403e6f-4f08-4263-962d-377d0989c0d7-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Oct 09 13:31:17 crc kubenswrapper[4762]: I1009 13:31:17.804078 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wp9qn\" (UniqueName: \"kubernetes.io/projected/ff2e2ba5-4651-4774-a428-10af2c71736d-kube-api-access-wp9qn\") on node \"crc\" DevicePath \"\"" Oct 09 13:31:17 crc kubenswrapper[4762]: I1009 13:31:17.804087 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gpjt7\" (UniqueName: \"kubernetes.io/projected/2c17d894-7ee5-44c4-b64a-c05be6870a3d-kube-api-access-gpjt7\") on node \"crc\" DevicePath \"\"" Oct 09 13:31:17 crc kubenswrapper[4762]: I1009 13:31:17.804095 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s8vrg\" (UniqueName: \"kubernetes.io/projected/fe403e6f-4f08-4263-962d-377d0989c0d7-kube-api-access-s8vrg\") on node \"crc\" DevicePath \"\"" Oct 09 13:31:17 crc kubenswrapper[4762]: I1009 13:31:17.804103 4762 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ff2e2ba5-4651-4774-a428-10af2c71736d-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 13:31:17 crc kubenswrapper[4762]: I1009 13:31:17.804110 4762 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2c17d894-7ee5-44c4-b64a-c05be6870a3d-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 13:31:17 crc kubenswrapper[4762]: I1009 13:31:17.804118 4762 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1f2bb561-6df4-46da-b21a-1d5621f45ab9-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 13:31:17 crc kubenswrapper[4762]: I1009 13:31:17.804127 4762 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4de8e24c-273f-4ff9-83c2-9dd8952c3d74-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 13:31:17 crc kubenswrapper[4762]: I1009 13:31:17.804135 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rkzlr\" (UniqueName: \"kubernetes.io/projected/1f2bb561-6df4-46da-b21a-1d5621f45ab9-kube-api-access-rkzlr\") on node \"crc\" DevicePath \"\"" Oct 09 13:31:17 crc kubenswrapper[4762]: I1009 13:31:17.804144 4762 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4de8e24c-273f-4ff9-83c2-9dd8952c3d74-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 13:31:17 crc kubenswrapper[4762]: I1009 13:31:17.804152 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cxgtn\" (UniqueName: \"kubernetes.io/projected/4de8e24c-273f-4ff9-83c2-9dd8952c3d74-kube-api-access-cxgtn\") on node \"crc\" DevicePath \"\"" Oct 09 13:31:17 crc kubenswrapper[4762]: I1009 13:31:17.804160 4762 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: 
\"kubernetes.io/configmap/fe403e6f-4f08-4263-962d-377d0989c0d7-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 09 13:31:17 crc kubenswrapper[4762]: I1009 13:31:17.819126 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2c17d894-7ee5-44c4-b64a-c05be6870a3d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2c17d894-7ee5-44c4-b64a-c05be6870a3d" (UID: "2c17d894-7ee5-44c4-b64a-c05be6870a3d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 13:31:17 crc kubenswrapper[4762]: I1009 13:31:17.837942 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1f2bb561-6df4-46da-b21a-1d5621f45ab9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1f2bb561-6df4-46da-b21a-1d5621f45ab9" (UID: "1f2bb561-6df4-46da-b21a-1d5621f45ab9"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 13:31:17 crc kubenswrapper[4762]: I1009 13:31:17.875561 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-bkd7x"] Oct 09 13:31:17 crc kubenswrapper[4762]: I1009 13:31:17.905469 4762 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1f2bb561-6df4-46da-b21a-1d5621f45ab9-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 13:31:17 crc kubenswrapper[4762]: I1009 13:31:17.905514 4762 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2c17d894-7ee5-44c4-b64a-c05be6870a3d-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 13:31:18 crc kubenswrapper[4762]: I1009 13:31:18.394172 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-85rnj" event={"ID":"ff2e2ba5-4651-4774-a428-10af2c71736d","Type":"ContainerDied","Data":"7a582a0760c79a125829e8db3e0ff872a5e3d3e9fb586a7a347432faa323ed93"} Oct 09 13:31:18 crc kubenswrapper[4762]: I1009 13:31:18.394243 4762 scope.go:117] "RemoveContainer" containerID="96b51c886165381225913b82858bed93a796df93a9e4a1fe9324c79f45244d3f" Oct 09 13:31:18 crc kubenswrapper[4762]: I1009 13:31:18.394384 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-85rnj" Oct 09 13:31:18 crc kubenswrapper[4762]: I1009 13:31:18.398035 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jhmrt" event={"ID":"2c17d894-7ee5-44c4-b64a-c05be6870a3d","Type":"ContainerDied","Data":"38f749b83d8d32586071f6544e3e797ab27b5755ceb9d3ad7adb1f2080cdd30b"} Oct 09 13:31:18 crc kubenswrapper[4762]: I1009 13:31:18.398060 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-jhmrt" Oct 09 13:31:18 crc kubenswrapper[4762]: I1009 13:31:18.399409 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-9957f" Oct 09 13:31:18 crc kubenswrapper[4762]: I1009 13:31:18.399416 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-9957f" event={"ID":"fe403e6f-4f08-4263-962d-377d0989c0d7","Type":"ContainerDied","Data":"4b7c2a660376a338893ca71fbf42df00ebbefb388ce6f4565d1e71bae9aa2502"} Oct 09 13:31:18 crc kubenswrapper[4762]: I1009 13:31:18.402397 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wpc52" event={"ID":"4de8e24c-273f-4ff9-83c2-9dd8952c3d74","Type":"ContainerDied","Data":"8f4a98ac5dff448c77eff320ad461c9a797f7cb9c66a70b451733b048910f5b9"} Oct 09 13:31:18 crc kubenswrapper[4762]: I1009 13:31:18.402559 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-wpc52" Oct 09 13:31:18 crc kubenswrapper[4762]: I1009 13:31:18.408009 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-bkd7x" event={"ID":"7f6d1478-3db5-4052-ba0f-6ede9c7e06d7","Type":"ContainerStarted","Data":"941ea656ed73071d6d66171e183e9bcdae82ec9346a74735c70e6f569201e358"} Oct 09 13:31:18 crc kubenswrapper[4762]: I1009 13:31:18.408060 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-bkd7x" event={"ID":"7f6d1478-3db5-4052-ba0f-6ede9c7e06d7","Type":"ContainerStarted","Data":"daf2a5b8257fbef87526c8e798f2b73f20385bc522ca303dc4c1e92ea27050da"} Oct 09 13:31:18 crc kubenswrapper[4762]: I1009 13:31:18.411846 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-bkd7x" Oct 09 13:31:18 crc kubenswrapper[4762]: I1009 13:31:18.411857 4762 scope.go:117] "RemoveContainer" containerID="f0accd3794d8cf0fa68edc4606868dcbe601e619a853babd9e3a1fd9cdbdbff8" Oct 09 13:31:18 crc kubenswrapper[4762]: I1009 13:31:18.418306 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-bkd7x" Oct 09 13:31:18 crc kubenswrapper[4762]: I1009 13:31:18.423805 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2hbjn" event={"ID":"1f2bb561-6df4-46da-b21a-1d5621f45ab9","Type":"ContainerDied","Data":"b4b7bdc0e5fc90a3df035708ec76024e8c42f944de5a954620b62eb3fd3251e3"} Oct 09 13:31:18 crc kubenswrapper[4762]: I1009 13:31:18.425264 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-2hbjn" Oct 09 13:31:18 crc kubenswrapper[4762]: I1009 13:31:18.435137 4762 scope.go:117] "RemoveContainer" containerID="a8f50281a01652ded24d989a78c23da5f6bb6993018869142bf1aa47eeb9400b" Oct 09 13:31:18 crc kubenswrapper[4762]: I1009 13:31:18.450706 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-85rnj"] Oct 09 13:31:18 crc kubenswrapper[4762]: I1009 13:31:18.455426 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-85rnj"] Oct 09 13:31:18 crc kubenswrapper[4762]: I1009 13:31:18.466856 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-bkd7x" podStartSLOduration=1.466830715 podStartE2EDuration="1.466830715s" podCreationTimestamp="2025-10-09 13:31:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:31:18.459821638 +0000 UTC m=+354.233612687" watchObservedRunningTime="2025-10-09 13:31:18.466830715 +0000 UTC m=+354.240621754" Oct 09 13:31:18 crc kubenswrapper[4762]: I1009 13:31:18.532178 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-9957f"] Oct 09 13:31:18 crc kubenswrapper[4762]: I1009 13:31:18.535785 4762 scope.go:117] "RemoveContainer" containerID="55f12de18f2d336c577d2ff4f9e63713636ad0c1f479922614aa6c6d35b378e7" Oct 09 13:31:18 crc kubenswrapper[4762]: I1009 13:31:18.551747 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-9957f"] Oct 09 13:31:18 crc kubenswrapper[4762]: I1009 13:31:18.559956 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-jhmrt"] Oct 09 13:31:18 crc kubenswrapper[4762]: I1009 13:31:18.563159 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-jhmrt"] Oct 09 13:31:18 crc kubenswrapper[4762]: I1009 13:31:18.571745 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-wpc52"] Oct 09 13:31:18 crc kubenswrapper[4762]: I1009 13:31:18.573053 4762 scope.go:117] "RemoveContainer" containerID="5fb908db3bccec818fd51345cdca347dd4afd33fe37059fa68fa568f08e546b1" Oct 09 13:31:18 crc kubenswrapper[4762]: I1009 13:31:18.575482 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-wpc52"] Oct 09 13:31:18 crc kubenswrapper[4762]: I1009 13:31:18.594386 4762 scope.go:117] "RemoveContainer" containerID="d748135a3609670b4dee3556da0537e6568c637cb9e3fd6927899e7bf69164e3" Oct 09 13:31:18 crc kubenswrapper[4762]: I1009 13:31:18.595680 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-2hbjn"] Oct 09 13:31:18 crc kubenswrapper[4762]: I1009 13:31:18.600552 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-2hbjn"] Oct 09 13:31:18 crc kubenswrapper[4762]: I1009 13:31:18.609050 4762 scope.go:117] "RemoveContainer" containerID="c2ed51d56b35cc07b84a24f0a43b49a30184c3d9711710a5d036333a944683f4" Oct 09 13:31:18 crc kubenswrapper[4762]: I1009 13:31:18.628110 4762 scope.go:117] "RemoveContainer" containerID="a54c44462acb20bfa5c92c653424f4cc44d835ec5986a7435128ccd9eeca4720" Oct 09 13:31:18 crc kubenswrapper[4762]: I1009 13:31:18.641817 4762 
scope.go:117] "RemoveContainer" containerID="6779ccb274469de2b9e2c1f300cb3adad77cdb8d87dd1198d25ccad4a7b7e65f" Oct 09 13:31:18 crc kubenswrapper[4762]: I1009 13:31:18.655330 4762 scope.go:117] "RemoveContainer" containerID="a0bb71c4e8fa6e7c527ba206fe6e147b1d55e9323c831c1e6f73240578d0434c" Oct 09 13:31:18 crc kubenswrapper[4762]: I1009 13:31:18.669598 4762 scope.go:117] "RemoveContainer" containerID="37b676a90e1a450abb55e7a1d4fe8b37081c21e83408d32fd7fc0abd9d617067" Oct 09 13:31:18 crc kubenswrapper[4762]: I1009 13:31:18.681176 4762 scope.go:117] "RemoveContainer" containerID="11694a4cc1f0f822fe492f7db6c53cd3d819e37a9be3970be0ca2f0ab7878601" Oct 09 13:31:18 crc kubenswrapper[4762]: I1009 13:31:18.696613 4762 scope.go:117] "RemoveContainer" containerID="1c4847e2e2771fd049d0cc794c22ea90f734fbf9449725b301b66c1eb62cd17b" Oct 09 13:31:18 crc kubenswrapper[4762]: I1009 13:31:18.972436 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1f2bb561-6df4-46da-b21a-1d5621f45ab9" path="/var/lib/kubelet/pods/1f2bb561-6df4-46da-b21a-1d5621f45ab9/volumes" Oct 09 13:31:18 crc kubenswrapper[4762]: I1009 13:31:18.973620 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2c17d894-7ee5-44c4-b64a-c05be6870a3d" path="/var/lib/kubelet/pods/2c17d894-7ee5-44c4-b64a-c05be6870a3d/volumes" Oct 09 13:31:18 crc kubenswrapper[4762]: I1009 13:31:18.974352 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4de8e24c-273f-4ff9-83c2-9dd8952c3d74" path="/var/lib/kubelet/pods/4de8e24c-273f-4ff9-83c2-9dd8952c3d74/volumes" Oct 09 13:31:18 crc kubenswrapper[4762]: I1009 13:31:18.975830 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fe403e6f-4f08-4263-962d-377d0989c0d7" path="/var/lib/kubelet/pods/fe403e6f-4f08-4263-962d-377d0989c0d7/volumes" Oct 09 13:31:18 crc kubenswrapper[4762]: I1009 13:31:18.976388 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ff2e2ba5-4651-4774-a428-10af2c71736d" path="/var/lib/kubelet/pods/ff2e2ba5-4651-4774-a428-10af2c71736d/volumes" Oct 09 13:31:19 crc kubenswrapper[4762]: I1009 13:31:19.356098 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-72l8h"] Oct 09 13:31:19 crc kubenswrapper[4762]: E1009 13:31:19.356296 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ff2e2ba5-4651-4774-a428-10af2c71736d" containerName="extract-content" Oct 09 13:31:19 crc kubenswrapper[4762]: I1009 13:31:19.356308 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="ff2e2ba5-4651-4774-a428-10af2c71736d" containerName="extract-content" Oct 09 13:31:19 crc kubenswrapper[4762]: E1009 13:31:19.356319 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1f2bb561-6df4-46da-b21a-1d5621f45ab9" containerName="extract-utilities" Oct 09 13:31:19 crc kubenswrapper[4762]: I1009 13:31:19.356324 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="1f2bb561-6df4-46da-b21a-1d5621f45ab9" containerName="extract-utilities" Oct 09 13:31:19 crc kubenswrapper[4762]: E1009 13:31:19.356336 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c17d894-7ee5-44c4-b64a-c05be6870a3d" containerName="extract-utilities" Oct 09 13:31:19 crc kubenswrapper[4762]: I1009 13:31:19.356342 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c17d894-7ee5-44c4-b64a-c05be6870a3d" containerName="extract-utilities" Oct 09 13:31:19 crc kubenswrapper[4762]: E1009 13:31:19.356350 4762 cpu_manager.go:410] "RemoveStaleState: 
removing container" podUID="4de8e24c-273f-4ff9-83c2-9dd8952c3d74" containerName="registry-server" Oct 09 13:31:19 crc kubenswrapper[4762]: I1009 13:31:19.356356 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="4de8e24c-273f-4ff9-83c2-9dd8952c3d74" containerName="registry-server" Oct 09 13:31:19 crc kubenswrapper[4762]: E1009 13:31:19.356363 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4de8e24c-273f-4ff9-83c2-9dd8952c3d74" containerName="extract-content" Oct 09 13:31:19 crc kubenswrapper[4762]: I1009 13:31:19.356369 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="4de8e24c-273f-4ff9-83c2-9dd8952c3d74" containerName="extract-content" Oct 09 13:31:19 crc kubenswrapper[4762]: E1009 13:31:19.356383 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4de8e24c-273f-4ff9-83c2-9dd8952c3d74" containerName="extract-utilities" Oct 09 13:31:19 crc kubenswrapper[4762]: I1009 13:31:19.356391 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="4de8e24c-273f-4ff9-83c2-9dd8952c3d74" containerName="extract-utilities" Oct 09 13:31:19 crc kubenswrapper[4762]: E1009 13:31:19.356399 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c17d894-7ee5-44c4-b64a-c05be6870a3d" containerName="registry-server" Oct 09 13:31:19 crc kubenswrapper[4762]: I1009 13:31:19.356407 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c17d894-7ee5-44c4-b64a-c05be6870a3d" containerName="registry-server" Oct 09 13:31:19 crc kubenswrapper[4762]: E1009 13:31:19.356415 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ff2e2ba5-4651-4774-a428-10af2c71736d" containerName="extract-utilities" Oct 09 13:31:19 crc kubenswrapper[4762]: I1009 13:31:19.356422 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="ff2e2ba5-4651-4774-a428-10af2c71736d" containerName="extract-utilities" Oct 09 13:31:19 crc kubenswrapper[4762]: E1009 13:31:19.356430 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c17d894-7ee5-44c4-b64a-c05be6870a3d" containerName="extract-content" Oct 09 13:31:19 crc kubenswrapper[4762]: I1009 13:31:19.356436 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c17d894-7ee5-44c4-b64a-c05be6870a3d" containerName="extract-content" Oct 09 13:31:19 crc kubenswrapper[4762]: E1009 13:31:19.356443 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1f2bb561-6df4-46da-b21a-1d5621f45ab9" containerName="registry-server" Oct 09 13:31:19 crc kubenswrapper[4762]: I1009 13:31:19.356450 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="1f2bb561-6df4-46da-b21a-1d5621f45ab9" containerName="registry-server" Oct 09 13:31:19 crc kubenswrapper[4762]: E1009 13:31:19.356457 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1f2bb561-6df4-46da-b21a-1d5621f45ab9" containerName="extract-content" Oct 09 13:31:19 crc kubenswrapper[4762]: I1009 13:31:19.356463 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="1f2bb561-6df4-46da-b21a-1d5621f45ab9" containerName="extract-content" Oct 09 13:31:19 crc kubenswrapper[4762]: E1009 13:31:19.356472 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ff2e2ba5-4651-4774-a428-10af2c71736d" containerName="registry-server" Oct 09 13:31:19 crc kubenswrapper[4762]: I1009 13:31:19.356477 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="ff2e2ba5-4651-4774-a428-10af2c71736d" containerName="registry-server" Oct 09 13:31:19 crc kubenswrapper[4762]: E1009 13:31:19.356488 4762 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fe403e6f-4f08-4263-962d-377d0989c0d7" containerName="marketplace-operator" Oct 09 13:31:19 crc kubenswrapper[4762]: I1009 13:31:19.356494 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="fe403e6f-4f08-4263-962d-377d0989c0d7" containerName="marketplace-operator" Oct 09 13:31:19 crc kubenswrapper[4762]: I1009 13:31:19.356575 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="2c17d894-7ee5-44c4-b64a-c05be6870a3d" containerName="registry-server" Oct 09 13:31:19 crc kubenswrapper[4762]: I1009 13:31:19.356586 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="4de8e24c-273f-4ff9-83c2-9dd8952c3d74" containerName="registry-server" Oct 09 13:31:19 crc kubenswrapper[4762]: I1009 13:31:19.356594 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="1f2bb561-6df4-46da-b21a-1d5621f45ab9" containerName="registry-server" Oct 09 13:31:19 crc kubenswrapper[4762]: I1009 13:31:19.356602 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="fe403e6f-4f08-4263-962d-377d0989c0d7" containerName="marketplace-operator" Oct 09 13:31:19 crc kubenswrapper[4762]: I1009 13:31:19.356612 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="ff2e2ba5-4651-4774-a428-10af2c71736d" containerName="registry-server" Oct 09 13:31:19 crc kubenswrapper[4762]: I1009 13:31:19.357292 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-72l8h" Oct 09 13:31:19 crc kubenswrapper[4762]: I1009 13:31:19.359910 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Oct 09 13:31:19 crc kubenswrapper[4762]: I1009 13:31:19.367346 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-72l8h"] Oct 09 13:31:19 crc kubenswrapper[4762]: I1009 13:31:19.428588 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5621da56-0dc3-4cb7-867f-192959d65ddf-utilities\") pod \"redhat-marketplace-72l8h\" (UID: \"5621da56-0dc3-4cb7-867f-192959d65ddf\") " pod="openshift-marketplace/redhat-marketplace-72l8h" Oct 09 13:31:19 crc kubenswrapper[4762]: I1009 13:31:19.428689 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w8x5p\" (UniqueName: \"kubernetes.io/projected/5621da56-0dc3-4cb7-867f-192959d65ddf-kube-api-access-w8x5p\") pod \"redhat-marketplace-72l8h\" (UID: \"5621da56-0dc3-4cb7-867f-192959d65ddf\") " pod="openshift-marketplace/redhat-marketplace-72l8h" Oct 09 13:31:19 crc kubenswrapper[4762]: I1009 13:31:19.428728 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5621da56-0dc3-4cb7-867f-192959d65ddf-catalog-content\") pod \"redhat-marketplace-72l8h\" (UID: \"5621da56-0dc3-4cb7-867f-192959d65ddf\") " pod="openshift-marketplace/redhat-marketplace-72l8h" Oct 09 13:31:19 crc kubenswrapper[4762]: I1009 13:31:19.529730 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5621da56-0dc3-4cb7-867f-192959d65ddf-catalog-content\") pod \"redhat-marketplace-72l8h\" (UID: \"5621da56-0dc3-4cb7-867f-192959d65ddf\") " pod="openshift-marketplace/redhat-marketplace-72l8h" Oct 
09 13:31:19 crc kubenswrapper[4762]: I1009 13:31:19.529825 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5621da56-0dc3-4cb7-867f-192959d65ddf-utilities\") pod \"redhat-marketplace-72l8h\" (UID: \"5621da56-0dc3-4cb7-867f-192959d65ddf\") " pod="openshift-marketplace/redhat-marketplace-72l8h" Oct 09 13:31:19 crc kubenswrapper[4762]: I1009 13:31:19.529865 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w8x5p\" (UniqueName: \"kubernetes.io/projected/5621da56-0dc3-4cb7-867f-192959d65ddf-kube-api-access-w8x5p\") pod \"redhat-marketplace-72l8h\" (UID: \"5621da56-0dc3-4cb7-867f-192959d65ddf\") " pod="openshift-marketplace/redhat-marketplace-72l8h" Oct 09 13:31:19 crc kubenswrapper[4762]: I1009 13:31:19.530330 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5621da56-0dc3-4cb7-867f-192959d65ddf-utilities\") pod \"redhat-marketplace-72l8h\" (UID: \"5621da56-0dc3-4cb7-867f-192959d65ddf\") " pod="openshift-marketplace/redhat-marketplace-72l8h" Oct 09 13:31:19 crc kubenswrapper[4762]: I1009 13:31:19.530545 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5621da56-0dc3-4cb7-867f-192959d65ddf-catalog-content\") pod \"redhat-marketplace-72l8h\" (UID: \"5621da56-0dc3-4cb7-867f-192959d65ddf\") " pod="openshift-marketplace/redhat-marketplace-72l8h" Oct 09 13:31:19 crc kubenswrapper[4762]: I1009 13:31:19.556022 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-2v5hs"] Oct 09 13:31:19 crc kubenswrapper[4762]: I1009 13:31:19.557727 4762 util.go:30] "No sandbox for pod can be found. 
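
The paired cpu_manager/memory_manager records above (an E-level "RemoveStaleState: removing container" followed by an I-level "Deleted CPUSet assignment" or "RemoveStaleState removing state") fire once per container of the five pods deleted at 13:31:18; the error severity appears to be cosmetic here, since each removal completes normally on the very next line. A sketch that tallies them per pod and container, with the field shapes taken from the records above:

package main

import (
	"bufio"
	"fmt"
	"os"
	"regexp"
	"strings"
)

// podUID="..."/containerName="..." as printed in the records above.
var ref = regexp.MustCompile(`podUID="([0-9a-f-]+)" containerName="([^"]+)"`)

func main() {
	counts := map[string]int{}
	sc := bufio.NewScanner(os.Stdin)
	sc.Buffer(make([]byte, 0, 1024*1024), 4*1024*1024)
	for sc.Scan() {
		line := sc.Text()
		if !strings.Contains(line, "RemoveStaleState") {
			continue
		}
		for _, m := range ref.FindAllStringSubmatch(line, -1) {
			counts[m[1]+"/"+m[2]]++
		}
	}
	for key, n := range counts {
		fmt.Printf("%3d %s\n", n, key)
	}
}
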
Need to start a new one" pod="openshift-marketplace/redhat-operators-2v5hs" Oct 09 13:31:19 crc kubenswrapper[4762]: I1009 13:31:19.560232 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Oct 09 13:31:19 crc kubenswrapper[4762]: I1009 13:31:19.563113 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-2v5hs"] Oct 09 13:31:19 crc kubenswrapper[4762]: I1009 13:31:19.566262 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w8x5p\" (UniqueName: \"kubernetes.io/projected/5621da56-0dc3-4cb7-867f-192959d65ddf-kube-api-access-w8x5p\") pod \"redhat-marketplace-72l8h\" (UID: \"5621da56-0dc3-4cb7-867f-192959d65ddf\") " pod="openshift-marketplace/redhat-marketplace-72l8h" Oct 09 13:31:19 crc kubenswrapper[4762]: I1009 13:31:19.630625 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c2063e06-c068-4526-b785-8ff55d071770-catalog-content\") pod \"redhat-operators-2v5hs\" (UID: \"c2063e06-c068-4526-b785-8ff55d071770\") " pod="openshift-marketplace/redhat-operators-2v5hs" Oct 09 13:31:19 crc kubenswrapper[4762]: I1009 13:31:19.630727 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kwbx9\" (UniqueName: \"kubernetes.io/projected/c2063e06-c068-4526-b785-8ff55d071770-kube-api-access-kwbx9\") pod \"redhat-operators-2v5hs\" (UID: \"c2063e06-c068-4526-b785-8ff55d071770\") " pod="openshift-marketplace/redhat-operators-2v5hs" Oct 09 13:31:19 crc kubenswrapper[4762]: I1009 13:31:19.630772 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c2063e06-c068-4526-b785-8ff55d071770-utilities\") pod \"redhat-operators-2v5hs\" (UID: \"c2063e06-c068-4526-b785-8ff55d071770\") " pod="openshift-marketplace/redhat-operators-2v5hs" Oct 09 13:31:19 crc kubenswrapper[4762]: I1009 13:31:19.675973 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-72l8h" Oct 09 13:31:19 crc kubenswrapper[4762]: I1009 13:31:19.731891 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c2063e06-c068-4526-b785-8ff55d071770-catalog-content\") pod \"redhat-operators-2v5hs\" (UID: \"c2063e06-c068-4526-b785-8ff55d071770\") " pod="openshift-marketplace/redhat-operators-2v5hs" Oct 09 13:31:19 crc kubenswrapper[4762]: I1009 13:31:19.732264 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kwbx9\" (UniqueName: \"kubernetes.io/projected/c2063e06-c068-4526-b785-8ff55d071770-kube-api-access-kwbx9\") pod \"redhat-operators-2v5hs\" (UID: \"c2063e06-c068-4526-b785-8ff55d071770\") " pod="openshift-marketplace/redhat-operators-2v5hs" Oct 09 13:31:19 crc kubenswrapper[4762]: I1009 13:31:19.732313 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c2063e06-c068-4526-b785-8ff55d071770-utilities\") pod \"redhat-operators-2v5hs\" (UID: \"c2063e06-c068-4526-b785-8ff55d071770\") " pod="openshift-marketplace/redhat-operators-2v5hs" Oct 09 13:31:19 crc kubenswrapper[4762]: I1009 13:31:19.732412 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c2063e06-c068-4526-b785-8ff55d071770-catalog-content\") pod \"redhat-operators-2v5hs\" (UID: \"c2063e06-c068-4526-b785-8ff55d071770\") " pod="openshift-marketplace/redhat-operators-2v5hs" Oct 09 13:31:19 crc kubenswrapper[4762]: I1009 13:31:19.732749 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c2063e06-c068-4526-b785-8ff55d071770-utilities\") pod \"redhat-operators-2v5hs\" (UID: \"c2063e06-c068-4526-b785-8ff55d071770\") " pod="openshift-marketplace/redhat-operators-2v5hs" Oct 09 13:31:19 crc kubenswrapper[4762]: I1009 13:31:19.748627 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kwbx9\" (UniqueName: \"kubernetes.io/projected/c2063e06-c068-4526-b785-8ff55d071770-kube-api-access-kwbx9\") pod \"redhat-operators-2v5hs\" (UID: \"c2063e06-c068-4526-b785-8ff55d071770\") " pod="openshift-marketplace/redhat-operators-2v5hs" Oct 09 13:31:19 crc kubenswrapper[4762]: I1009 13:31:19.890268 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-72l8h"] Oct 09 13:31:19 crc kubenswrapper[4762]: I1009 13:31:19.939647 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-2v5hs" Oct 09 13:31:20 crc kubenswrapper[4762]: I1009 13:31:20.325763 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-2v5hs"] Oct 09 13:31:20 crc kubenswrapper[4762]: W1009 13:31:20.329442 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc2063e06_c068_4526_b785_8ff55d071770.slice/crio-346fdf95e0a1f7f9990df27e50f8daaea48b2c56a1dd28ca9404928ad19aaa48 WatchSource:0}: Error finding container 346fdf95e0a1f7f9990df27e50f8daaea48b2c56a1dd28ca9404928ad19aaa48: Status 404 returned error can't find the container with id 346fdf95e0a1f7f9990df27e50f8daaea48b2c56a1dd28ca9404928ad19aaa48 Oct 09 13:31:20 crc kubenswrapper[4762]: I1009 13:31:20.439309 4762 generic.go:334] "Generic (PLEG): container finished" podID="5621da56-0dc3-4cb7-867f-192959d65ddf" containerID="61c399dda0ac2961f7c07501b4e3af88c6c7930753348561380c9f284136208f" exitCode=0 Oct 09 13:31:20 crc kubenswrapper[4762]: I1009 13:31:20.439574 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-72l8h" event={"ID":"5621da56-0dc3-4cb7-867f-192959d65ddf","Type":"ContainerDied","Data":"61c399dda0ac2961f7c07501b4e3af88c6c7930753348561380c9f284136208f"} Oct 09 13:31:20 crc kubenswrapper[4762]: I1009 13:31:20.443568 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-72l8h" event={"ID":"5621da56-0dc3-4cb7-867f-192959d65ddf","Type":"ContainerStarted","Data":"5a95184aba187de12ba73d1f51bbf1ca380806ffa9001d50dec3c3d3cf4e6a61"} Oct 09 13:31:20 crc kubenswrapper[4762]: I1009 13:31:20.444673 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2v5hs" event={"ID":"c2063e06-c068-4526-b785-8ff55d071770","Type":"ContainerStarted","Data":"346fdf95e0a1f7f9990df27e50f8daaea48b2c56a1dd28ca9404928ad19aaa48"} Oct 09 13:31:21 crc kubenswrapper[4762]: I1009 13:31:21.450462 4762 generic.go:334] "Generic (PLEG): container finished" podID="c2063e06-c068-4526-b785-8ff55d071770" containerID="1f69ebb45b6e47192296d74b645cf62227cf83fcbe3649a4a0f2dca95d4bd772" exitCode=0 Oct 09 13:31:21 crc kubenswrapper[4762]: I1009 13:31:21.450715 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2v5hs" event={"ID":"c2063e06-c068-4526-b785-8ff55d071770","Type":"ContainerDied","Data":"1f69ebb45b6e47192296d74b645cf62227cf83fcbe3649a4a0f2dca95d4bd772"} Oct 09 13:31:21 crc kubenswrapper[4762]: I1009 13:31:21.751941 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-22x2q"] Oct 09 13:31:21 crc kubenswrapper[4762]: I1009 13:31:21.754278 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-22x2q" Oct 09 13:31:21 crc kubenswrapper[4762]: I1009 13:31:21.756631 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Oct 09 13:31:21 crc kubenswrapper[4762]: I1009 13:31:21.762678 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-22x2q"] Oct 09 13:31:21 crc kubenswrapper[4762]: I1009 13:31:21.853960 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/80ca7107-df0d-4c7a-8bfb-9d4b0dc4f47a-utilities\") pod \"certified-operators-22x2q\" (UID: \"80ca7107-df0d-4c7a-8bfb-9d4b0dc4f47a\") " pod="openshift-marketplace/certified-operators-22x2q" Oct 09 13:31:21 crc kubenswrapper[4762]: I1009 13:31:21.854002 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/80ca7107-df0d-4c7a-8bfb-9d4b0dc4f47a-catalog-content\") pod \"certified-operators-22x2q\" (UID: \"80ca7107-df0d-4c7a-8bfb-9d4b0dc4f47a\") " pod="openshift-marketplace/certified-operators-22x2q" Oct 09 13:31:21 crc kubenswrapper[4762]: I1009 13:31:21.854063 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c7nv7\" (UniqueName: \"kubernetes.io/projected/80ca7107-df0d-4c7a-8bfb-9d4b0dc4f47a-kube-api-access-c7nv7\") pod \"certified-operators-22x2q\" (UID: \"80ca7107-df0d-4c7a-8bfb-9d4b0dc4f47a\") " pod="openshift-marketplace/certified-operators-22x2q" Oct 09 13:31:21 crc kubenswrapper[4762]: I1009 13:31:21.955292 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c7nv7\" (UniqueName: \"kubernetes.io/projected/80ca7107-df0d-4c7a-8bfb-9d4b0dc4f47a-kube-api-access-c7nv7\") pod \"certified-operators-22x2q\" (UID: \"80ca7107-df0d-4c7a-8bfb-9d4b0dc4f47a\") " pod="openshift-marketplace/certified-operators-22x2q" Oct 09 13:31:21 crc kubenswrapper[4762]: I1009 13:31:21.955379 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/80ca7107-df0d-4c7a-8bfb-9d4b0dc4f47a-utilities\") pod \"certified-operators-22x2q\" (UID: \"80ca7107-df0d-4c7a-8bfb-9d4b0dc4f47a\") " pod="openshift-marketplace/certified-operators-22x2q" Oct 09 13:31:21 crc kubenswrapper[4762]: I1009 13:31:21.955411 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/80ca7107-df0d-4c7a-8bfb-9d4b0dc4f47a-catalog-content\") pod \"certified-operators-22x2q\" (UID: \"80ca7107-df0d-4c7a-8bfb-9d4b0dc4f47a\") " pod="openshift-marketplace/certified-operators-22x2q" Oct 09 13:31:21 crc kubenswrapper[4762]: I1009 13:31:21.956112 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/80ca7107-df0d-4c7a-8bfb-9d4b0dc4f47a-catalog-content\") pod \"certified-operators-22x2q\" (UID: \"80ca7107-df0d-4c7a-8bfb-9d4b0dc4f47a\") " pod="openshift-marketplace/certified-operators-22x2q" Oct 09 13:31:21 crc kubenswrapper[4762]: I1009 13:31:21.956152 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/80ca7107-df0d-4c7a-8bfb-9d4b0dc4f47a-utilities\") pod \"certified-operators-22x2q\" (UID: 
\"80ca7107-df0d-4c7a-8bfb-9d4b0dc4f47a\") " pod="openshift-marketplace/certified-operators-22x2q" Oct 09 13:31:21 crc kubenswrapper[4762]: I1009 13:31:21.963135 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-7ldxj"] Oct 09 13:31:21 crc kubenswrapper[4762]: I1009 13:31:21.964535 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-7ldxj" Oct 09 13:31:21 crc kubenswrapper[4762]: I1009 13:31:21.966995 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Oct 09 13:31:21 crc kubenswrapper[4762]: I1009 13:31:21.967276 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-7ldxj"] Oct 09 13:31:21 crc kubenswrapper[4762]: I1009 13:31:21.982085 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c7nv7\" (UniqueName: \"kubernetes.io/projected/80ca7107-df0d-4c7a-8bfb-9d4b0dc4f47a-kube-api-access-c7nv7\") pod \"certified-operators-22x2q\" (UID: \"80ca7107-df0d-4c7a-8bfb-9d4b0dc4f47a\") " pod="openshift-marketplace/certified-operators-22x2q" Oct 09 13:31:22 crc kubenswrapper[4762]: I1009 13:31:22.056666 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/31e5de4c-951e-4d10-b219-b7ba26c5e991-utilities\") pod \"community-operators-7ldxj\" (UID: \"31e5de4c-951e-4d10-b219-b7ba26c5e991\") " pod="openshift-marketplace/community-operators-7ldxj" Oct 09 13:31:22 crc kubenswrapper[4762]: I1009 13:31:22.056739 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kbls9\" (UniqueName: \"kubernetes.io/projected/31e5de4c-951e-4d10-b219-b7ba26c5e991-kube-api-access-kbls9\") pod \"community-operators-7ldxj\" (UID: \"31e5de4c-951e-4d10-b219-b7ba26c5e991\") " pod="openshift-marketplace/community-operators-7ldxj" Oct 09 13:31:22 crc kubenswrapper[4762]: I1009 13:31:22.056777 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/31e5de4c-951e-4d10-b219-b7ba26c5e991-catalog-content\") pod \"community-operators-7ldxj\" (UID: \"31e5de4c-951e-4d10-b219-b7ba26c5e991\") " pod="openshift-marketplace/community-operators-7ldxj" Oct 09 13:31:22 crc kubenswrapper[4762]: I1009 13:31:22.079503 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-22x2q" Oct 09 13:31:22 crc kubenswrapper[4762]: I1009 13:31:22.157939 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kbls9\" (UniqueName: \"kubernetes.io/projected/31e5de4c-951e-4d10-b219-b7ba26c5e991-kube-api-access-kbls9\") pod \"community-operators-7ldxj\" (UID: \"31e5de4c-951e-4d10-b219-b7ba26c5e991\") " pod="openshift-marketplace/community-operators-7ldxj" Oct 09 13:31:22 crc kubenswrapper[4762]: I1009 13:31:22.158421 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/31e5de4c-951e-4d10-b219-b7ba26c5e991-catalog-content\") pod \"community-operators-7ldxj\" (UID: \"31e5de4c-951e-4d10-b219-b7ba26c5e991\") " pod="openshift-marketplace/community-operators-7ldxj" Oct 09 13:31:22 crc kubenswrapper[4762]: I1009 13:31:22.158501 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/31e5de4c-951e-4d10-b219-b7ba26c5e991-utilities\") pod \"community-operators-7ldxj\" (UID: \"31e5de4c-951e-4d10-b219-b7ba26c5e991\") " pod="openshift-marketplace/community-operators-7ldxj" Oct 09 13:31:22 crc kubenswrapper[4762]: I1009 13:31:22.158983 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/31e5de4c-951e-4d10-b219-b7ba26c5e991-catalog-content\") pod \"community-operators-7ldxj\" (UID: \"31e5de4c-951e-4d10-b219-b7ba26c5e991\") " pod="openshift-marketplace/community-operators-7ldxj" Oct 09 13:31:22 crc kubenswrapper[4762]: I1009 13:31:22.158987 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/31e5de4c-951e-4d10-b219-b7ba26c5e991-utilities\") pod \"community-operators-7ldxj\" (UID: \"31e5de4c-951e-4d10-b219-b7ba26c5e991\") " pod="openshift-marketplace/community-operators-7ldxj" Oct 09 13:31:22 crc kubenswrapper[4762]: I1009 13:31:22.180885 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kbls9\" (UniqueName: \"kubernetes.io/projected/31e5de4c-951e-4d10-b219-b7ba26c5e991-kube-api-access-kbls9\") pod \"community-operators-7ldxj\" (UID: \"31e5de4c-951e-4d10-b219-b7ba26c5e991\") " pod="openshift-marketplace/community-operators-7ldxj" Oct 09 13:31:22 crc kubenswrapper[4762]: I1009 13:31:22.259566 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-22x2q"] Oct 09 13:31:22 crc kubenswrapper[4762]: W1009 13:31:22.264983 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod80ca7107_df0d_4c7a_8bfb_9d4b0dc4f47a.slice/crio-e136295a5d22fb49c481968c61f11868ac32b13d444d89245e5f01bdea5f4ca7 WatchSource:0}: Error finding container e136295a5d22fb49c481968c61f11868ac32b13d444d89245e5f01bdea5f4ca7: Status 404 returned error can't find the container with id e136295a5d22fb49c481968c61f11868ac32b13d444d89245e5f01bdea5f4ca7 Oct 09 13:31:22 crc kubenswrapper[4762]: I1009 13:31:22.278761 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-7ldxj" Oct 09 13:31:22 crc kubenswrapper[4762]: I1009 13:31:22.456985 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-7ldxj"] Oct 09 13:31:22 crc kubenswrapper[4762]: I1009 13:31:22.461902 4762 generic.go:334] "Generic (PLEG): container finished" podID="5621da56-0dc3-4cb7-867f-192959d65ddf" containerID="dc875a6b22da8ea63e6f5c5021b25e9e7e0cc323f97c8670069d71209abd4e4a" exitCode=0 Oct 09 13:31:22 crc kubenswrapper[4762]: I1009 13:31:22.461979 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-72l8h" event={"ID":"5621da56-0dc3-4cb7-867f-192959d65ddf","Type":"ContainerDied","Data":"dc875a6b22da8ea63e6f5c5021b25e9e7e0cc323f97c8670069d71209abd4e4a"} Oct 09 13:31:22 crc kubenswrapper[4762]: I1009 13:31:22.463718 4762 generic.go:334] "Generic (PLEG): container finished" podID="80ca7107-df0d-4c7a-8bfb-9d4b0dc4f47a" containerID="19d95b0a64600e6925360124201a0608f9c1651755ef8d2d1bb5022838aa2369" exitCode=0 Oct 09 13:31:22 crc kubenswrapper[4762]: I1009 13:31:22.463760 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-22x2q" event={"ID":"80ca7107-df0d-4c7a-8bfb-9d4b0dc4f47a","Type":"ContainerDied","Data":"19d95b0a64600e6925360124201a0608f9c1651755ef8d2d1bb5022838aa2369"} Oct 09 13:31:22 crc kubenswrapper[4762]: I1009 13:31:22.463796 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-22x2q" event={"ID":"80ca7107-df0d-4c7a-8bfb-9d4b0dc4f47a","Type":"ContainerStarted","Data":"e136295a5d22fb49c481968c61f11868ac32b13d444d89245e5f01bdea5f4ca7"} Oct 09 13:31:23 crc kubenswrapper[4762]: I1009 13:31:23.471848 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-72l8h" event={"ID":"5621da56-0dc3-4cb7-867f-192959d65ddf","Type":"ContainerStarted","Data":"5c9d0f6efc73289519e4a5599bcef41d1b98a9270cbefa27ef4f45577cd17b4b"} Oct 09 13:31:23 crc kubenswrapper[4762]: I1009 13:31:23.476202 4762 generic.go:334] "Generic (PLEG): container finished" podID="31e5de4c-951e-4d10-b219-b7ba26c5e991" containerID="b6b7db52da815e5feb0e943f247de59cb27e039956ea24d0fde4791a99837682" exitCode=0 Oct 09 13:31:23 crc kubenswrapper[4762]: I1009 13:31:23.476323 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7ldxj" event={"ID":"31e5de4c-951e-4d10-b219-b7ba26c5e991","Type":"ContainerDied","Data":"b6b7db52da815e5feb0e943f247de59cb27e039956ea24d0fde4791a99837682"} Oct 09 13:31:23 crc kubenswrapper[4762]: I1009 13:31:23.476354 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7ldxj" event={"ID":"31e5de4c-951e-4d10-b219-b7ba26c5e991","Type":"ContainerStarted","Data":"61a1a64a2e8516edead1b857c068589df1d641d20599b4dbb4ba4fe669e33bcb"} Oct 09 13:31:23 crc kubenswrapper[4762]: I1009 13:31:23.478772 4762 generic.go:334] "Generic (PLEG): container finished" podID="c2063e06-c068-4526-b785-8ff55d071770" containerID="a80ce1b6fafea8c91aa6b98b042bc0fdcd86e43c1a98aee4ddbcefa1a8bf10cd" exitCode=0 Oct 09 13:31:23 crc kubenswrapper[4762]: I1009 13:31:23.478809 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2v5hs" event={"ID":"c2063e06-c068-4526-b785-8ff55d071770","Type":"ContainerDied","Data":"a80ce1b6fafea8c91aa6b98b042bc0fdcd86e43c1a98aee4ddbcefa1a8bf10cd"} Oct 09 13:31:23 crc 
kubenswrapper[4762]: I1009 13:31:23.494350 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-72l8h" podStartSLOduration=1.641199609 podStartE2EDuration="4.494327899s" podCreationTimestamp="2025-10-09 13:31:19 +0000 UTC" firstStartedPulling="2025-10-09 13:31:20.454957916 +0000 UTC m=+356.228748955" lastFinishedPulling="2025-10-09 13:31:23.308086206 +0000 UTC m=+359.081877245" observedRunningTime="2025-10-09 13:31:23.488609396 +0000 UTC m=+359.262400445" watchObservedRunningTime="2025-10-09 13:31:23.494327899 +0000 UTC m=+359.268118948" Oct 09 13:31:24 crc kubenswrapper[4762]: I1009 13:31:24.486226 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2v5hs" event={"ID":"c2063e06-c068-4526-b785-8ff55d071770","Type":"ContainerStarted","Data":"805255a12c66ac27e9c0758a238858f490868cb8e0d6f0039985dd63812d0e1a"} Oct 09 13:31:24 crc kubenswrapper[4762]: I1009 13:31:24.488667 4762 generic.go:334] "Generic (PLEG): container finished" podID="80ca7107-df0d-4c7a-8bfb-9d4b0dc4f47a" containerID="8ac19e43e81dd0a5e6bae6be166bc29bea0c878d9b8cb235fa94c091a4c04c63" exitCode=0 Oct 09 13:31:24 crc kubenswrapper[4762]: I1009 13:31:24.488816 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-22x2q" event={"ID":"80ca7107-df0d-4c7a-8bfb-9d4b0dc4f47a","Type":"ContainerDied","Data":"8ac19e43e81dd0a5e6bae6be166bc29bea0c878d9b8cb235fa94c091a4c04c63"} Oct 09 13:31:24 crc kubenswrapper[4762]: I1009 13:31:24.516342 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-2v5hs" podStartSLOduration=2.85121012 podStartE2EDuration="5.516326309s" podCreationTimestamp="2025-10-09 13:31:19 +0000 UTC" firstStartedPulling="2025-10-09 13:31:21.452572832 +0000 UTC m=+357.226363871" lastFinishedPulling="2025-10-09 13:31:24.117689021 +0000 UTC m=+359.891480060" observedRunningTime="2025-10-09 13:31:24.512220988 +0000 UTC m=+360.286012017" watchObservedRunningTime="2025-10-09 13:31:24.516326309 +0000 UTC m=+360.290117348" Oct 09 13:31:26 crc kubenswrapper[4762]: I1009 13:31:26.503913 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-22x2q" event={"ID":"80ca7107-df0d-4c7a-8bfb-9d4b0dc4f47a","Type":"ContainerStarted","Data":"019945b618b4c14c5ad790d4538ae8726d9e8033e1dd28bcbec0a5c152fdc955"} Oct 09 13:31:26 crc kubenswrapper[4762]: I1009 13:31:26.506289 4762 generic.go:334] "Generic (PLEG): container finished" podID="31e5de4c-951e-4d10-b219-b7ba26c5e991" containerID="2afbb00f0ede6dcf9c5f8b1fe81ac6e41bbdef62489109b1902347c81ded1113" exitCode=0 Oct 09 13:31:26 crc kubenswrapper[4762]: I1009 13:31:26.506339 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7ldxj" event={"ID":"31e5de4c-951e-4d10-b219-b7ba26c5e991","Type":"ContainerDied","Data":"2afbb00f0ede6dcf9c5f8b1fe81ac6e41bbdef62489109b1902347c81ded1113"} Oct 09 13:31:26 crc kubenswrapper[4762]: I1009 13:31:26.539243 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-22x2q" podStartSLOduration=2.595295306 podStartE2EDuration="5.539223921s" podCreationTimestamp="2025-10-09 13:31:21 +0000 UTC" firstStartedPulling="2025-10-09 13:31:22.466039021 +0000 UTC m=+358.239830050" lastFinishedPulling="2025-10-09 13:31:25.409967626 +0000 UTC m=+361.183758665" observedRunningTime="2025-10-09 13:31:26.533700283 
+0000 UTC m=+362.307491332" watchObservedRunningTime="2025-10-09 13:31:26.539223921 +0000 UTC m=+362.313014960" Oct 09 13:31:27 crc kubenswrapper[4762]: I1009 13:31:27.514006 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7ldxj" event={"ID":"31e5de4c-951e-4d10-b219-b7ba26c5e991","Type":"ContainerStarted","Data":"ad49cd409c4a3a240619c6d94c1abe02e5518d390e4624f070a88a033dae439f"} Oct 09 13:31:27 crc kubenswrapper[4762]: I1009 13:31:27.532460 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-7ldxj" podStartSLOduration=2.832802923 podStartE2EDuration="6.532444818s" podCreationTimestamp="2025-10-09 13:31:21 +0000 UTC" firstStartedPulling="2025-10-09 13:31:23.479379468 +0000 UTC m=+359.253170517" lastFinishedPulling="2025-10-09 13:31:27.179021373 +0000 UTC m=+362.952812412" observedRunningTime="2025-10-09 13:31:27.531498203 +0000 UTC m=+363.305289252" watchObservedRunningTime="2025-10-09 13:31:27.532444818 +0000 UTC m=+363.306235857" Oct 09 13:31:29 crc kubenswrapper[4762]: I1009 13:31:29.677554 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-72l8h" Oct 09 13:31:29 crc kubenswrapper[4762]: I1009 13:31:29.677912 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-72l8h" Oct 09 13:31:29 crc kubenswrapper[4762]: I1009 13:31:29.723215 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-72l8h" Oct 09 13:31:29 crc kubenswrapper[4762]: I1009 13:31:29.940289 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-2v5hs" Oct 09 13:31:29 crc kubenswrapper[4762]: I1009 13:31:29.940345 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-2v5hs" Oct 09 13:31:29 crc kubenswrapper[4762]: I1009 13:31:29.976957 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-2v5hs" Oct 09 13:31:30 crc kubenswrapper[4762]: I1009 13:31:30.570384 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-72l8h" Oct 09 13:31:30 crc kubenswrapper[4762]: I1009 13:31:30.570506 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-2v5hs" Oct 09 13:31:32 crc kubenswrapper[4762]: I1009 13:31:32.080128 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-22x2q" Oct 09 13:31:32 crc kubenswrapper[4762]: I1009 13:31:32.080355 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-22x2q" Oct 09 13:31:32 crc kubenswrapper[4762]: I1009 13:31:32.134752 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-22x2q" Oct 09 13:31:32 crc kubenswrapper[4762]: I1009 13:31:32.279384 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-7ldxj" Oct 09 13:31:32 crc kubenswrapper[4762]: I1009 13:31:32.279747 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-7ldxj" Oct 09 13:31:32 crc 
kubenswrapper[4762]: I1009 13:31:32.321212 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-7ldxj" Oct 09 13:31:32 crc kubenswrapper[4762]: I1009 13:31:32.582309 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-7ldxj" Oct 09 13:31:32 crc kubenswrapper[4762]: I1009 13:31:32.611890 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-22x2q" Oct 09 13:31:41 crc kubenswrapper[4762]: I1009 13:31:41.969805 4762 patch_prober.go:28] interesting pod/machine-config-daemon-5v6hv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 13:31:41 crc kubenswrapper[4762]: I1009 13:31:41.970236 4762 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 13:32:11 crc kubenswrapper[4762]: I1009 13:32:11.969527 4762 patch_prober.go:28] interesting pod/machine-config-daemon-5v6hv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 13:32:11 crc kubenswrapper[4762]: I1009 13:32:11.970082 4762 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 13:32:41 crc kubenswrapper[4762]: I1009 13:32:41.969770 4762 patch_prober.go:28] interesting pod/machine-config-daemon-5v6hv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 13:32:41 crc kubenswrapper[4762]: I1009 13:32:41.970357 4762 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 13:32:41 crc kubenswrapper[4762]: I1009 13:32:41.970404 4762 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" Oct 09 13:32:41 crc kubenswrapper[4762]: I1009 13:32:41.970971 4762 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"e0998965989e7f1018b34ffe0db01df609728cd821e59eb5e3e7fea8c3cafc4d"} pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 09 13:32:41 crc kubenswrapper[4762]: I1009 13:32:41.971037 4762 kuberuntime_container.go:808] "Killing container 
with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" containerID="cri-o://e0998965989e7f1018b34ffe0db01df609728cd821e59eb5e3e7fea8c3cafc4d" gracePeriod=600 Oct 09 13:32:42 crc kubenswrapper[4762]: I1009 13:32:42.927907 4762 generic.go:334] "Generic (PLEG): container finished" podID="366049a3-acf6-488c-9f93-4557528d6d14" containerID="e0998965989e7f1018b34ffe0db01df609728cd821e59eb5e3e7fea8c3cafc4d" exitCode=0 Oct 09 13:32:42 crc kubenswrapper[4762]: I1009 13:32:42.928035 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" event={"ID":"366049a3-acf6-488c-9f93-4557528d6d14","Type":"ContainerDied","Data":"e0998965989e7f1018b34ffe0db01df609728cd821e59eb5e3e7fea8c3cafc4d"} Oct 09 13:32:42 crc kubenswrapper[4762]: I1009 13:32:42.928614 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" event={"ID":"366049a3-acf6-488c-9f93-4557528d6d14","Type":"ContainerStarted","Data":"a3cfdbddf263d8475109c385067d1bee3767cd398e6e6fb760a08702ea253859"} Oct 09 13:32:42 crc kubenswrapper[4762]: I1009 13:32:42.928645 4762 scope.go:117] "RemoveContainer" containerID="be56bae2e58091d7381288b22608ea1d9ff05c002d923b3dc62b87fe4d4dfdc6" Oct 09 13:33:21 crc kubenswrapper[4762]: I1009 13:33:21.075225 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-z7sl4"] Oct 09 13:33:21 crc kubenswrapper[4762]: I1009 13:33:21.076540 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-z7sl4" Oct 09 13:33:21 crc kubenswrapper[4762]: I1009 13:33:21.086502 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-z7sl4"] Oct 09 13:33:21 crc kubenswrapper[4762]: I1009 13:33:21.161374 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/e8d8a05e-5ad0-4f38-80c9-18aad1fa17dc-installation-pull-secrets\") pod \"image-registry-66df7c8f76-z7sl4\" (UID: \"e8d8a05e-5ad0-4f38-80c9-18aad1fa17dc\") " pod="openshift-image-registry/image-registry-66df7c8f76-z7sl4" Oct 09 13:33:21 crc kubenswrapper[4762]: I1009 13:33:21.161432 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e8d8a05e-5ad0-4f38-80c9-18aad1fa17dc-trusted-ca\") pod \"image-registry-66df7c8f76-z7sl4\" (UID: \"e8d8a05e-5ad0-4f38-80c9-18aad1fa17dc\") " pod="openshift-image-registry/image-registry-66df7c8f76-z7sl4" Oct 09 13:33:21 crc kubenswrapper[4762]: I1009 13:33:21.161461 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/e8d8a05e-5ad0-4f38-80c9-18aad1fa17dc-registry-certificates\") pod \"image-registry-66df7c8f76-z7sl4\" (UID: \"e8d8a05e-5ad0-4f38-80c9-18aad1fa17dc\") " pod="openshift-image-registry/image-registry-66df7c8f76-z7sl4" Oct 09 13:33:21 crc kubenswrapper[4762]: I1009 13:33:21.161493 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/e8d8a05e-5ad0-4f38-80c9-18aad1fa17dc-registry-tls\") pod 
\"image-registry-66df7c8f76-z7sl4\" (UID: \"e8d8a05e-5ad0-4f38-80c9-18aad1fa17dc\") " pod="openshift-image-registry/image-registry-66df7c8f76-z7sl4" Oct 09 13:33:21 crc kubenswrapper[4762]: I1009 13:33:21.161529 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-z7sl4\" (UID: \"e8d8a05e-5ad0-4f38-80c9-18aad1fa17dc\") " pod="openshift-image-registry/image-registry-66df7c8f76-z7sl4" Oct 09 13:33:21 crc kubenswrapper[4762]: I1009 13:33:21.161566 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/e8d8a05e-5ad0-4f38-80c9-18aad1fa17dc-bound-sa-token\") pod \"image-registry-66df7c8f76-z7sl4\" (UID: \"e8d8a05e-5ad0-4f38-80c9-18aad1fa17dc\") " pod="openshift-image-registry/image-registry-66df7c8f76-z7sl4" Oct 09 13:33:21 crc kubenswrapper[4762]: I1009 13:33:21.161593 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/e8d8a05e-5ad0-4f38-80c9-18aad1fa17dc-ca-trust-extracted\") pod \"image-registry-66df7c8f76-z7sl4\" (UID: \"e8d8a05e-5ad0-4f38-80c9-18aad1fa17dc\") " pod="openshift-image-registry/image-registry-66df7c8f76-z7sl4" Oct 09 13:33:21 crc kubenswrapper[4762]: I1009 13:33:21.161613 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hrxcx\" (UniqueName: \"kubernetes.io/projected/e8d8a05e-5ad0-4f38-80c9-18aad1fa17dc-kube-api-access-hrxcx\") pod \"image-registry-66df7c8f76-z7sl4\" (UID: \"e8d8a05e-5ad0-4f38-80c9-18aad1fa17dc\") " pod="openshift-image-registry/image-registry-66df7c8f76-z7sl4" Oct 09 13:33:21 crc kubenswrapper[4762]: I1009 13:33:21.186251 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-z7sl4\" (UID: \"e8d8a05e-5ad0-4f38-80c9-18aad1fa17dc\") " pod="openshift-image-registry/image-registry-66df7c8f76-z7sl4" Oct 09 13:33:21 crc kubenswrapper[4762]: I1009 13:33:21.262479 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/e8d8a05e-5ad0-4f38-80c9-18aad1fa17dc-installation-pull-secrets\") pod \"image-registry-66df7c8f76-z7sl4\" (UID: \"e8d8a05e-5ad0-4f38-80c9-18aad1fa17dc\") " pod="openshift-image-registry/image-registry-66df7c8f76-z7sl4" Oct 09 13:33:21 crc kubenswrapper[4762]: I1009 13:33:21.262543 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e8d8a05e-5ad0-4f38-80c9-18aad1fa17dc-trusted-ca\") pod \"image-registry-66df7c8f76-z7sl4\" (UID: \"e8d8a05e-5ad0-4f38-80c9-18aad1fa17dc\") " pod="openshift-image-registry/image-registry-66df7c8f76-z7sl4" Oct 09 13:33:21 crc kubenswrapper[4762]: I1009 13:33:21.262570 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/e8d8a05e-5ad0-4f38-80c9-18aad1fa17dc-registry-certificates\") pod \"image-registry-66df7c8f76-z7sl4\" (UID: \"e8d8a05e-5ad0-4f38-80c9-18aad1fa17dc\") " 
pod="openshift-image-registry/image-registry-66df7c8f76-z7sl4" Oct 09 13:33:21 crc kubenswrapper[4762]: I1009 13:33:21.262592 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/e8d8a05e-5ad0-4f38-80c9-18aad1fa17dc-registry-tls\") pod \"image-registry-66df7c8f76-z7sl4\" (UID: \"e8d8a05e-5ad0-4f38-80c9-18aad1fa17dc\") " pod="openshift-image-registry/image-registry-66df7c8f76-z7sl4" Oct 09 13:33:21 crc kubenswrapper[4762]: I1009 13:33:21.262645 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/e8d8a05e-5ad0-4f38-80c9-18aad1fa17dc-bound-sa-token\") pod \"image-registry-66df7c8f76-z7sl4\" (UID: \"e8d8a05e-5ad0-4f38-80c9-18aad1fa17dc\") " pod="openshift-image-registry/image-registry-66df7c8f76-z7sl4" Oct 09 13:33:21 crc kubenswrapper[4762]: I1009 13:33:21.262661 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/e8d8a05e-5ad0-4f38-80c9-18aad1fa17dc-ca-trust-extracted\") pod \"image-registry-66df7c8f76-z7sl4\" (UID: \"e8d8a05e-5ad0-4f38-80c9-18aad1fa17dc\") " pod="openshift-image-registry/image-registry-66df7c8f76-z7sl4" Oct 09 13:33:21 crc kubenswrapper[4762]: I1009 13:33:21.262682 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hrxcx\" (UniqueName: \"kubernetes.io/projected/e8d8a05e-5ad0-4f38-80c9-18aad1fa17dc-kube-api-access-hrxcx\") pod \"image-registry-66df7c8f76-z7sl4\" (UID: \"e8d8a05e-5ad0-4f38-80c9-18aad1fa17dc\") " pod="openshift-image-registry/image-registry-66df7c8f76-z7sl4" Oct 09 13:33:21 crc kubenswrapper[4762]: I1009 13:33:21.263659 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/e8d8a05e-5ad0-4f38-80c9-18aad1fa17dc-ca-trust-extracted\") pod \"image-registry-66df7c8f76-z7sl4\" (UID: \"e8d8a05e-5ad0-4f38-80c9-18aad1fa17dc\") " pod="openshift-image-registry/image-registry-66df7c8f76-z7sl4" Oct 09 13:33:21 crc kubenswrapper[4762]: I1009 13:33:21.263999 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e8d8a05e-5ad0-4f38-80c9-18aad1fa17dc-trusted-ca\") pod \"image-registry-66df7c8f76-z7sl4\" (UID: \"e8d8a05e-5ad0-4f38-80c9-18aad1fa17dc\") " pod="openshift-image-registry/image-registry-66df7c8f76-z7sl4" Oct 09 13:33:21 crc kubenswrapper[4762]: I1009 13:33:21.264053 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/e8d8a05e-5ad0-4f38-80c9-18aad1fa17dc-registry-certificates\") pod \"image-registry-66df7c8f76-z7sl4\" (UID: \"e8d8a05e-5ad0-4f38-80c9-18aad1fa17dc\") " pod="openshift-image-registry/image-registry-66df7c8f76-z7sl4" Oct 09 13:33:21 crc kubenswrapper[4762]: I1009 13:33:21.274845 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/e8d8a05e-5ad0-4f38-80c9-18aad1fa17dc-registry-tls\") pod \"image-registry-66df7c8f76-z7sl4\" (UID: \"e8d8a05e-5ad0-4f38-80c9-18aad1fa17dc\") " pod="openshift-image-registry/image-registry-66df7c8f76-z7sl4" Oct 09 13:33:21 crc kubenswrapper[4762]: I1009 13:33:21.274858 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: 
\"kubernetes.io/secret/e8d8a05e-5ad0-4f38-80c9-18aad1fa17dc-installation-pull-secrets\") pod \"image-registry-66df7c8f76-z7sl4\" (UID: \"e8d8a05e-5ad0-4f38-80c9-18aad1fa17dc\") " pod="openshift-image-registry/image-registry-66df7c8f76-z7sl4" Oct 09 13:33:21 crc kubenswrapper[4762]: I1009 13:33:21.278746 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hrxcx\" (UniqueName: \"kubernetes.io/projected/e8d8a05e-5ad0-4f38-80c9-18aad1fa17dc-kube-api-access-hrxcx\") pod \"image-registry-66df7c8f76-z7sl4\" (UID: \"e8d8a05e-5ad0-4f38-80c9-18aad1fa17dc\") " pod="openshift-image-registry/image-registry-66df7c8f76-z7sl4" Oct 09 13:33:21 crc kubenswrapper[4762]: I1009 13:33:21.279225 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/e8d8a05e-5ad0-4f38-80c9-18aad1fa17dc-bound-sa-token\") pod \"image-registry-66df7c8f76-z7sl4\" (UID: \"e8d8a05e-5ad0-4f38-80c9-18aad1fa17dc\") " pod="openshift-image-registry/image-registry-66df7c8f76-z7sl4" Oct 09 13:33:21 crc kubenswrapper[4762]: I1009 13:33:21.409755 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-z7sl4" Oct 09 13:33:21 crc kubenswrapper[4762]: I1009 13:33:21.609455 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-z7sl4"] Oct 09 13:33:22 crc kubenswrapper[4762]: I1009 13:33:22.139307 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-z7sl4" event={"ID":"e8d8a05e-5ad0-4f38-80c9-18aad1fa17dc","Type":"ContainerStarted","Data":"095cb6d25988bea2a06163cdc732af51e425ccfbc0abc299223cf35e304fa880"} Oct 09 13:33:22 crc kubenswrapper[4762]: I1009 13:33:22.139616 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-z7sl4" event={"ID":"e8d8a05e-5ad0-4f38-80c9-18aad1fa17dc","Type":"ContainerStarted","Data":"a5df0bb564a8cc36afa03a31b8bccff060263cd28208258d963b94329302da39"} Oct 09 13:33:22 crc kubenswrapper[4762]: I1009 13:33:22.139818 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-z7sl4" Oct 09 13:33:22 crc kubenswrapper[4762]: I1009 13:33:22.156765 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-z7sl4" podStartSLOduration=1.156743471 podStartE2EDuration="1.156743471s" podCreationTimestamp="2025-10-09 13:33:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:33:22.155354625 +0000 UTC m=+477.929145684" watchObservedRunningTime="2025-10-09 13:33:22.156743471 +0000 UTC m=+477.930534510" Oct 09 13:33:41 crc kubenswrapper[4762]: I1009 13:33:41.414942 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-z7sl4" Oct 09 13:33:41 crc kubenswrapper[4762]: I1009 13:33:41.489930 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-2gj5n"] Oct 09 13:34:06 crc kubenswrapper[4762]: I1009 13:34:06.552656 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-2gj5n" podUID="913065b9-29bd-4c9a-8d6e-e319bf91efbd" containerName="registry" 
containerID="cri-o://c01b219cf3ac714381c9c2eb7c79ff922b5308decb4e40bc5d2023634227f34c" gracePeriod=30 Oct 09 13:34:06 crc kubenswrapper[4762]: I1009 13:34:06.907378 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-2gj5n" Oct 09 13:34:07 crc kubenswrapper[4762]: I1009 13:34:07.065415 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gjqgq\" (UniqueName: \"kubernetes.io/projected/913065b9-29bd-4c9a-8d6e-e319bf91efbd-kube-api-access-gjqgq\") pod \"913065b9-29bd-4c9a-8d6e-e319bf91efbd\" (UID: \"913065b9-29bd-4c9a-8d6e-e319bf91efbd\") " Oct 09 13:34:07 crc kubenswrapper[4762]: I1009 13:34:07.065471 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/913065b9-29bd-4c9a-8d6e-e319bf91efbd-registry-tls\") pod \"913065b9-29bd-4c9a-8d6e-e319bf91efbd\" (UID: \"913065b9-29bd-4c9a-8d6e-e319bf91efbd\") " Oct 09 13:34:07 crc kubenswrapper[4762]: I1009 13:34:07.065529 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/913065b9-29bd-4c9a-8d6e-e319bf91efbd-installation-pull-secrets\") pod \"913065b9-29bd-4c9a-8d6e-e319bf91efbd\" (UID: \"913065b9-29bd-4c9a-8d6e-e319bf91efbd\") " Oct 09 13:34:07 crc kubenswrapper[4762]: I1009 13:34:07.065720 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"913065b9-29bd-4c9a-8d6e-e319bf91efbd\" (UID: \"913065b9-29bd-4c9a-8d6e-e319bf91efbd\") " Oct 09 13:34:07 crc kubenswrapper[4762]: I1009 13:34:07.065748 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/913065b9-29bd-4c9a-8d6e-e319bf91efbd-trusted-ca\") pod \"913065b9-29bd-4c9a-8d6e-e319bf91efbd\" (UID: \"913065b9-29bd-4c9a-8d6e-e319bf91efbd\") " Oct 09 13:34:07 crc kubenswrapper[4762]: I1009 13:34:07.065767 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/913065b9-29bd-4c9a-8d6e-e319bf91efbd-bound-sa-token\") pod \"913065b9-29bd-4c9a-8d6e-e319bf91efbd\" (UID: \"913065b9-29bd-4c9a-8d6e-e319bf91efbd\") " Oct 09 13:34:07 crc kubenswrapper[4762]: I1009 13:34:07.065817 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/913065b9-29bd-4c9a-8d6e-e319bf91efbd-ca-trust-extracted\") pod \"913065b9-29bd-4c9a-8d6e-e319bf91efbd\" (UID: \"913065b9-29bd-4c9a-8d6e-e319bf91efbd\") " Oct 09 13:34:07 crc kubenswrapper[4762]: I1009 13:34:07.066272 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/913065b9-29bd-4c9a-8d6e-e319bf91efbd-registry-certificates\") pod \"913065b9-29bd-4c9a-8d6e-e319bf91efbd\" (UID: \"913065b9-29bd-4c9a-8d6e-e319bf91efbd\") " Oct 09 13:34:07 crc kubenswrapper[4762]: I1009 13:34:07.066512 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/913065b9-29bd-4c9a-8d6e-e319bf91efbd-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "913065b9-29bd-4c9a-8d6e-e319bf91efbd" (UID: "913065b9-29bd-4c9a-8d6e-e319bf91efbd"). 
InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:34:07 crc kubenswrapper[4762]: I1009 13:34:07.070198 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/913065b9-29bd-4c9a-8d6e-e319bf91efbd-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "913065b9-29bd-4c9a-8d6e-e319bf91efbd" (UID: "913065b9-29bd-4c9a-8d6e-e319bf91efbd"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:34:07 crc kubenswrapper[4762]: I1009 13:34:07.072785 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/913065b9-29bd-4c9a-8d6e-e319bf91efbd-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "913065b9-29bd-4c9a-8d6e-e319bf91efbd" (UID: "913065b9-29bd-4c9a-8d6e-e319bf91efbd"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:34:07 crc kubenswrapper[4762]: I1009 13:34:07.072894 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/913065b9-29bd-4c9a-8d6e-e319bf91efbd-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "913065b9-29bd-4c9a-8d6e-e319bf91efbd" (UID: "913065b9-29bd-4c9a-8d6e-e319bf91efbd"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:34:07 crc kubenswrapper[4762]: I1009 13:34:07.073108 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/913065b9-29bd-4c9a-8d6e-e319bf91efbd-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "913065b9-29bd-4c9a-8d6e-e319bf91efbd" (UID: "913065b9-29bd-4c9a-8d6e-e319bf91efbd"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:34:07 crc kubenswrapper[4762]: I1009 13:34:07.073489 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/913065b9-29bd-4c9a-8d6e-e319bf91efbd-kube-api-access-gjqgq" (OuterVolumeSpecName: "kube-api-access-gjqgq") pod "913065b9-29bd-4c9a-8d6e-e319bf91efbd" (UID: "913065b9-29bd-4c9a-8d6e-e319bf91efbd"). InnerVolumeSpecName "kube-api-access-gjqgq". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:34:07 crc kubenswrapper[4762]: I1009 13:34:07.074626 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "913065b9-29bd-4c9a-8d6e-e319bf91efbd" (UID: "913065b9-29bd-4c9a-8d6e-e319bf91efbd"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Oct 09 13:34:07 crc kubenswrapper[4762]: I1009 13:34:07.089816 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/913065b9-29bd-4c9a-8d6e-e319bf91efbd-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "913065b9-29bd-4c9a-8d6e-e319bf91efbd" (UID: "913065b9-29bd-4c9a-8d6e-e319bf91efbd"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 13:34:07 crc kubenswrapper[4762]: I1009 13:34:07.168449 4762 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/913065b9-29bd-4c9a-8d6e-e319bf91efbd-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Oct 09 13:34:07 crc kubenswrapper[4762]: I1009 13:34:07.168516 4762 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/913065b9-29bd-4c9a-8d6e-e319bf91efbd-registry-certificates\") on node \"crc\" DevicePath \"\"" Oct 09 13:34:07 crc kubenswrapper[4762]: I1009 13:34:07.168540 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gjqgq\" (UniqueName: \"kubernetes.io/projected/913065b9-29bd-4c9a-8d6e-e319bf91efbd-kube-api-access-gjqgq\") on node \"crc\" DevicePath \"\"" Oct 09 13:34:07 crc kubenswrapper[4762]: I1009 13:34:07.168561 4762 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/913065b9-29bd-4c9a-8d6e-e319bf91efbd-registry-tls\") on node \"crc\" DevicePath \"\"" Oct 09 13:34:07 crc kubenswrapper[4762]: I1009 13:34:07.168581 4762 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/913065b9-29bd-4c9a-8d6e-e319bf91efbd-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Oct 09 13:34:07 crc kubenswrapper[4762]: I1009 13:34:07.168599 4762 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/913065b9-29bd-4c9a-8d6e-e319bf91efbd-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 09 13:34:07 crc kubenswrapper[4762]: I1009 13:34:07.168619 4762 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/913065b9-29bd-4c9a-8d6e-e319bf91efbd-bound-sa-token\") on node \"crc\" DevicePath \"\"" Oct 09 13:34:07 crc kubenswrapper[4762]: I1009 13:34:07.384589 4762 generic.go:334] "Generic (PLEG): container finished" podID="913065b9-29bd-4c9a-8d6e-e319bf91efbd" containerID="c01b219cf3ac714381c9c2eb7c79ff922b5308decb4e40bc5d2023634227f34c" exitCode=0 Oct 09 13:34:07 crc kubenswrapper[4762]: I1009 13:34:07.384684 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-2gj5n" event={"ID":"913065b9-29bd-4c9a-8d6e-e319bf91efbd","Type":"ContainerDied","Data":"c01b219cf3ac714381c9c2eb7c79ff922b5308decb4e40bc5d2023634227f34c"} Oct 09 13:34:07 crc kubenswrapper[4762]: I1009 13:34:07.384770 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-2gj5n" Oct 09 13:34:07 crc kubenswrapper[4762]: I1009 13:34:07.385494 4762 scope.go:117] "RemoveContainer" containerID="c01b219cf3ac714381c9c2eb7c79ff922b5308decb4e40bc5d2023634227f34c" Oct 09 13:34:07 crc kubenswrapper[4762]: I1009 13:34:07.385291 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-2gj5n" event={"ID":"913065b9-29bd-4c9a-8d6e-e319bf91efbd","Type":"ContainerDied","Data":"0fb8d0a740eafaa490823a38c4c92a4bd7d2e3aecf8358ea7c4c8ff66421bda8"} Oct 09 13:34:07 crc kubenswrapper[4762]: I1009 13:34:07.425693 4762 scope.go:117] "RemoveContainer" containerID="c01b219cf3ac714381c9c2eb7c79ff922b5308decb4e40bc5d2023634227f34c" Oct 09 13:34:07 crc kubenswrapper[4762]: E1009 13:34:07.426286 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c01b219cf3ac714381c9c2eb7c79ff922b5308decb4e40bc5d2023634227f34c\": container with ID starting with c01b219cf3ac714381c9c2eb7c79ff922b5308decb4e40bc5d2023634227f34c not found: ID does not exist" containerID="c01b219cf3ac714381c9c2eb7c79ff922b5308decb4e40bc5d2023634227f34c" Oct 09 13:34:07 crc kubenswrapper[4762]: I1009 13:34:07.426354 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c01b219cf3ac714381c9c2eb7c79ff922b5308decb4e40bc5d2023634227f34c"} err="failed to get container status \"c01b219cf3ac714381c9c2eb7c79ff922b5308decb4e40bc5d2023634227f34c\": rpc error: code = NotFound desc = could not find container \"c01b219cf3ac714381c9c2eb7c79ff922b5308decb4e40bc5d2023634227f34c\": container with ID starting with c01b219cf3ac714381c9c2eb7c79ff922b5308decb4e40bc5d2023634227f34c not found: ID does not exist" Oct 09 13:34:07 crc kubenswrapper[4762]: I1009 13:34:07.443919 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-2gj5n"] Oct 09 13:34:07 crc kubenswrapper[4762]: I1009 13:34:07.452857 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-2gj5n"] Oct 09 13:34:08 crc kubenswrapper[4762]: I1009 13:34:08.974760 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="913065b9-29bd-4c9a-8d6e-e319bf91efbd" path="/var/lib/kubelet/pods/913065b9-29bd-4c9a-8d6e-e319bf91efbd/volumes" Oct 09 13:35:11 crc kubenswrapper[4762]: I1009 13:35:11.969395 4762 patch_prober.go:28] interesting pod/machine-config-daemon-5v6hv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 13:35:11 crc kubenswrapper[4762]: I1009 13:35:11.970105 4762 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 13:35:41 crc kubenswrapper[4762]: I1009 13:35:41.969980 4762 patch_prober.go:28] interesting pod/machine-config-daemon-5v6hv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 
09 13:35:41 crc kubenswrapper[4762]: I1009 13:35:41.970806 4762 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 13:36:11 crc kubenswrapper[4762]: I1009 13:36:11.969786 4762 patch_prober.go:28] interesting pod/machine-config-daemon-5v6hv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 13:36:11 crc kubenswrapper[4762]: I1009 13:36:11.970553 4762 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 13:36:11 crc kubenswrapper[4762]: I1009 13:36:11.970620 4762 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" Oct 09 13:36:11 crc kubenswrapper[4762]: I1009 13:36:11.971519 4762 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"a3cfdbddf263d8475109c385067d1bee3767cd398e6e6fb760a08702ea253859"} pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 09 13:36:11 crc kubenswrapper[4762]: I1009 13:36:11.971599 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" containerID="cri-o://a3cfdbddf263d8475109c385067d1bee3767cd398e6e6fb760a08702ea253859" gracePeriod=600 Oct 09 13:36:12 crc kubenswrapper[4762]: I1009 13:36:12.126902 4762 generic.go:334] "Generic (PLEG): container finished" podID="366049a3-acf6-488c-9f93-4557528d6d14" containerID="a3cfdbddf263d8475109c385067d1bee3767cd398e6e6fb760a08702ea253859" exitCode=0 Oct 09 13:36:12 crc kubenswrapper[4762]: I1009 13:36:12.126975 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" event={"ID":"366049a3-acf6-488c-9f93-4557528d6d14","Type":"ContainerDied","Data":"a3cfdbddf263d8475109c385067d1bee3767cd398e6e6fb760a08702ea253859"} Oct 09 13:36:12 crc kubenswrapper[4762]: I1009 13:36:12.127308 4762 scope.go:117] "RemoveContainer" containerID="e0998965989e7f1018b34ffe0db01df609728cd821e59eb5e3e7fea8c3cafc4d" Oct 09 13:36:13 crc kubenswrapper[4762]: I1009 13:36:13.135739 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" event={"ID":"366049a3-acf6-488c-9f93-4557528d6d14","Type":"ContainerStarted","Data":"b3e2931f38d6c24f318bca2e81d69458e7536a5e2c15ed2699be45822005a52b"} Oct 09 13:37:59 crc kubenswrapper[4762]: I1009 13:37:59.714359 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-xzd8z"] Oct 09 13:37:59 crc kubenswrapper[4762]: I1009 13:37:59.715428 4762 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-xzd8z" podUID="5be8bc93-cb89-4cc7-822d-739708bab8a9" containerName="controller-manager" containerID="cri-o://7ca26ecab799b3f8f8aeb980f91d3f8d8e99ab7a7d1b77db45bd21f7f598c05c" gracePeriod=30 Oct 09 13:37:59 crc kubenswrapper[4762]: I1009 13:37:59.820868 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-c4rs8"] Oct 09 13:37:59 crc kubenswrapper[4762]: I1009 13:37:59.821358 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-c4rs8" podUID="414ea71c-9878-4a8a-825f-3d18ff8460d9" containerName="route-controller-manager" containerID="cri-o://5cb3bd0a707bb90f57c922a217d35071deefb8ae38e8d921ef58c043db6c262f" gracePeriod=30 Oct 09 13:38:00 crc kubenswrapper[4762]: I1009 13:38:00.038530 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-xzd8z" Oct 09 13:38:00 crc kubenswrapper[4762]: I1009 13:38:00.161446 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-c4rs8" Oct 09 13:38:00 crc kubenswrapper[4762]: I1009 13:38:00.217802 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5be8bc93-cb89-4cc7-822d-739708bab8a9-serving-cert\") pod \"5be8bc93-cb89-4cc7-822d-739708bab8a9\" (UID: \"5be8bc93-cb89-4cc7-822d-739708bab8a9\") " Oct 09 13:38:00 crc kubenswrapper[4762]: I1009 13:38:00.218040 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qq5qs\" (UniqueName: \"kubernetes.io/projected/5be8bc93-cb89-4cc7-822d-739708bab8a9-kube-api-access-qq5qs\") pod \"5be8bc93-cb89-4cc7-822d-739708bab8a9\" (UID: \"5be8bc93-cb89-4cc7-822d-739708bab8a9\") " Oct 09 13:38:00 crc kubenswrapper[4762]: I1009 13:38:00.218090 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5be8bc93-cb89-4cc7-822d-739708bab8a9-client-ca\") pod \"5be8bc93-cb89-4cc7-822d-739708bab8a9\" (UID: \"5be8bc93-cb89-4cc7-822d-739708bab8a9\") " Oct 09 13:38:00 crc kubenswrapper[4762]: I1009 13:38:00.218118 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5be8bc93-cb89-4cc7-822d-739708bab8a9-proxy-ca-bundles\") pod \"5be8bc93-cb89-4cc7-822d-739708bab8a9\" (UID: \"5be8bc93-cb89-4cc7-822d-739708bab8a9\") " Oct 09 13:38:00 crc kubenswrapper[4762]: I1009 13:38:00.218135 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tknm4\" (UniqueName: \"kubernetes.io/projected/414ea71c-9878-4a8a-825f-3d18ff8460d9-kube-api-access-tknm4\") pod \"414ea71c-9878-4a8a-825f-3d18ff8460d9\" (UID: \"414ea71c-9878-4a8a-825f-3d18ff8460d9\") " Oct 09 13:38:00 crc kubenswrapper[4762]: I1009 13:38:00.218160 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5be8bc93-cb89-4cc7-822d-739708bab8a9-config\") pod \"5be8bc93-cb89-4cc7-822d-739708bab8a9\" (UID: \"5be8bc93-cb89-4cc7-822d-739708bab8a9\") " Oct 09 13:38:00 crc kubenswrapper[4762]: I1009 
13:38:00.218206 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/414ea71c-9878-4a8a-825f-3d18ff8460d9-client-ca\") pod \"414ea71c-9878-4a8a-825f-3d18ff8460d9\" (UID: \"414ea71c-9878-4a8a-825f-3d18ff8460d9\") " Oct 09 13:38:00 crc kubenswrapper[4762]: I1009 13:38:00.218224 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/414ea71c-9878-4a8a-825f-3d18ff8460d9-serving-cert\") pod \"414ea71c-9878-4a8a-825f-3d18ff8460d9\" (UID: \"414ea71c-9878-4a8a-825f-3d18ff8460d9\") " Oct 09 13:38:00 crc kubenswrapper[4762]: I1009 13:38:00.218687 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5be8bc93-cb89-4cc7-822d-739708bab8a9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5be8bc93-cb89-4cc7-822d-739708bab8a9" (UID: "5be8bc93-cb89-4cc7-822d-739708bab8a9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:38:00 crc kubenswrapper[4762]: I1009 13:38:00.218710 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5be8bc93-cb89-4cc7-822d-739708bab8a9-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "5be8bc93-cb89-4cc7-822d-739708bab8a9" (UID: "5be8bc93-cb89-4cc7-822d-739708bab8a9"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:38:00 crc kubenswrapper[4762]: I1009 13:38:00.219042 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/414ea71c-9878-4a8a-825f-3d18ff8460d9-client-ca" (OuterVolumeSpecName: "client-ca") pod "414ea71c-9878-4a8a-825f-3d18ff8460d9" (UID: "414ea71c-9878-4a8a-825f-3d18ff8460d9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:38:00 crc kubenswrapper[4762]: I1009 13:38:00.219117 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5be8bc93-cb89-4cc7-822d-739708bab8a9-config" (OuterVolumeSpecName: "config") pod "5be8bc93-cb89-4cc7-822d-739708bab8a9" (UID: "5be8bc93-cb89-4cc7-822d-739708bab8a9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:38:00 crc kubenswrapper[4762]: I1009 13:38:00.229915 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/414ea71c-9878-4a8a-825f-3d18ff8460d9-kube-api-access-tknm4" (OuterVolumeSpecName: "kube-api-access-tknm4") pod "414ea71c-9878-4a8a-825f-3d18ff8460d9" (UID: "414ea71c-9878-4a8a-825f-3d18ff8460d9"). InnerVolumeSpecName "kube-api-access-tknm4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:38:00 crc kubenswrapper[4762]: I1009 13:38:00.229965 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/414ea71c-9878-4a8a-825f-3d18ff8460d9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "414ea71c-9878-4a8a-825f-3d18ff8460d9" (UID: "414ea71c-9878-4a8a-825f-3d18ff8460d9"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:38:00 crc kubenswrapper[4762]: I1009 13:38:00.231023 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5be8bc93-cb89-4cc7-822d-739708bab8a9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5be8bc93-cb89-4cc7-822d-739708bab8a9" (UID: "5be8bc93-cb89-4cc7-822d-739708bab8a9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:38:00 crc kubenswrapper[4762]: I1009 13:38:00.231231 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5be8bc93-cb89-4cc7-822d-739708bab8a9-kube-api-access-qq5qs" (OuterVolumeSpecName: "kube-api-access-qq5qs") pod "5be8bc93-cb89-4cc7-822d-739708bab8a9" (UID: "5be8bc93-cb89-4cc7-822d-739708bab8a9"). InnerVolumeSpecName "kube-api-access-qq5qs". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:38:00 crc kubenswrapper[4762]: I1009 13:38:00.319335 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/414ea71c-9878-4a8a-825f-3d18ff8460d9-config\") pod \"414ea71c-9878-4a8a-825f-3d18ff8460d9\" (UID: \"414ea71c-9878-4a8a-825f-3d18ff8460d9\") " Oct 09 13:38:00 crc kubenswrapper[4762]: I1009 13:38:00.319694 4762 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5be8bc93-cb89-4cc7-822d-739708bab8a9-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 09 13:38:00 crc kubenswrapper[4762]: I1009 13:38:00.319717 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qq5qs\" (UniqueName: \"kubernetes.io/projected/5be8bc93-cb89-4cc7-822d-739708bab8a9-kube-api-access-qq5qs\") on node \"crc\" DevicePath \"\"" Oct 09 13:38:00 crc kubenswrapper[4762]: I1009 13:38:00.319731 4762 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5be8bc93-cb89-4cc7-822d-739708bab8a9-client-ca\") on node \"crc\" DevicePath \"\"" Oct 09 13:38:00 crc kubenswrapper[4762]: I1009 13:38:00.319742 4762 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5be8bc93-cb89-4cc7-822d-739708bab8a9-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 09 13:38:00 crc kubenswrapper[4762]: I1009 13:38:00.319753 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tknm4\" (UniqueName: \"kubernetes.io/projected/414ea71c-9878-4a8a-825f-3d18ff8460d9-kube-api-access-tknm4\") on node \"crc\" DevicePath \"\"" Oct 09 13:38:00 crc kubenswrapper[4762]: I1009 13:38:00.319764 4762 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5be8bc93-cb89-4cc7-822d-739708bab8a9-config\") on node \"crc\" DevicePath \"\"" Oct 09 13:38:00 crc kubenswrapper[4762]: I1009 13:38:00.319774 4762 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/414ea71c-9878-4a8a-825f-3d18ff8460d9-client-ca\") on node \"crc\" DevicePath \"\"" Oct 09 13:38:00 crc kubenswrapper[4762]: I1009 13:38:00.319785 4762 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/414ea71c-9878-4a8a-825f-3d18ff8460d9-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 09 13:38:00 crc kubenswrapper[4762]: I1009 13:38:00.320510 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/configmap/414ea71c-9878-4a8a-825f-3d18ff8460d9-config" (OuterVolumeSpecName: "config") pod "414ea71c-9878-4a8a-825f-3d18ff8460d9" (UID: "414ea71c-9878-4a8a-825f-3d18ff8460d9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:38:00 crc kubenswrapper[4762]: I1009 13:38:00.420671 4762 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/414ea71c-9878-4a8a-825f-3d18ff8460d9-config\") on node \"crc\" DevicePath \"\"" Oct 09 13:38:00 crc kubenswrapper[4762]: I1009 13:38:00.793425 4762 generic.go:334] "Generic (PLEG): container finished" podID="414ea71c-9878-4a8a-825f-3d18ff8460d9" containerID="5cb3bd0a707bb90f57c922a217d35071deefb8ae38e8d921ef58c043db6c262f" exitCode=0 Oct 09 13:38:00 crc kubenswrapper[4762]: I1009 13:38:00.793491 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-c4rs8" Oct 09 13:38:00 crc kubenswrapper[4762]: I1009 13:38:00.793480 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-c4rs8" event={"ID":"414ea71c-9878-4a8a-825f-3d18ff8460d9","Type":"ContainerDied","Data":"5cb3bd0a707bb90f57c922a217d35071deefb8ae38e8d921ef58c043db6c262f"} Oct 09 13:38:00 crc kubenswrapper[4762]: I1009 13:38:00.793609 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-c4rs8" event={"ID":"414ea71c-9878-4a8a-825f-3d18ff8460d9","Type":"ContainerDied","Data":"fd528e46b687a21b47574fffcd2e91e21b67d3cc31a2f9dbb5d240be3c29e0fc"} Oct 09 13:38:00 crc kubenswrapper[4762]: I1009 13:38:00.793665 4762 scope.go:117] "RemoveContainer" containerID="5cb3bd0a707bb90f57c922a217d35071deefb8ae38e8d921ef58c043db6c262f" Oct 09 13:38:00 crc kubenswrapper[4762]: I1009 13:38:00.796031 4762 generic.go:334] "Generic (PLEG): container finished" podID="5be8bc93-cb89-4cc7-822d-739708bab8a9" containerID="7ca26ecab799b3f8f8aeb980f91d3f8d8e99ab7a7d1b77db45bd21f7f598c05c" exitCode=0 Oct 09 13:38:00 crc kubenswrapper[4762]: I1009 13:38:00.796056 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-xzd8z" event={"ID":"5be8bc93-cb89-4cc7-822d-739708bab8a9","Type":"ContainerDied","Data":"7ca26ecab799b3f8f8aeb980f91d3f8d8e99ab7a7d1b77db45bd21f7f598c05c"} Oct 09 13:38:00 crc kubenswrapper[4762]: I1009 13:38:00.796072 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-xzd8z" event={"ID":"5be8bc93-cb89-4cc7-822d-739708bab8a9","Type":"ContainerDied","Data":"2a35dda04c99c041fd7d16fd1fbf447fb488e68d292462e81cb60aa72e30f0e2"} Oct 09 13:38:00 crc kubenswrapper[4762]: I1009 13:38:00.796113 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-xzd8z" Oct 09 13:38:00 crc kubenswrapper[4762]: I1009 13:38:00.809582 4762 scope.go:117] "RemoveContainer" containerID="5cb3bd0a707bb90f57c922a217d35071deefb8ae38e8d921ef58c043db6c262f" Oct 09 13:38:00 crc kubenswrapper[4762]: E1009 13:38:00.810993 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5cb3bd0a707bb90f57c922a217d35071deefb8ae38e8d921ef58c043db6c262f\": container with ID starting with 5cb3bd0a707bb90f57c922a217d35071deefb8ae38e8d921ef58c043db6c262f not found: ID does not exist" containerID="5cb3bd0a707bb90f57c922a217d35071deefb8ae38e8d921ef58c043db6c262f" Oct 09 13:38:00 crc kubenswrapper[4762]: I1009 13:38:00.811030 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5cb3bd0a707bb90f57c922a217d35071deefb8ae38e8d921ef58c043db6c262f"} err="failed to get container status \"5cb3bd0a707bb90f57c922a217d35071deefb8ae38e8d921ef58c043db6c262f\": rpc error: code = NotFound desc = could not find container \"5cb3bd0a707bb90f57c922a217d35071deefb8ae38e8d921ef58c043db6c262f\": container with ID starting with 5cb3bd0a707bb90f57c922a217d35071deefb8ae38e8d921ef58c043db6c262f not found: ID does not exist" Oct 09 13:38:00 crc kubenswrapper[4762]: I1009 13:38:00.811057 4762 scope.go:117] "RemoveContainer" containerID="7ca26ecab799b3f8f8aeb980f91d3f8d8e99ab7a7d1b77db45bd21f7f598c05c" Oct 09 13:38:00 crc kubenswrapper[4762]: I1009 13:38:00.822154 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-xzd8z"] Oct 09 13:38:00 crc kubenswrapper[4762]: I1009 13:38:00.825965 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-xzd8z"] Oct 09 13:38:00 crc kubenswrapper[4762]: I1009 13:38:00.829208 4762 scope.go:117] "RemoveContainer" containerID="7ca26ecab799b3f8f8aeb980f91d3f8d8e99ab7a7d1b77db45bd21f7f598c05c" Oct 09 13:38:00 crc kubenswrapper[4762]: E1009 13:38:00.829694 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7ca26ecab799b3f8f8aeb980f91d3f8d8e99ab7a7d1b77db45bd21f7f598c05c\": container with ID starting with 7ca26ecab799b3f8f8aeb980f91d3f8d8e99ab7a7d1b77db45bd21f7f598c05c not found: ID does not exist" containerID="7ca26ecab799b3f8f8aeb980f91d3f8d8e99ab7a7d1b77db45bd21f7f598c05c" Oct 09 13:38:00 crc kubenswrapper[4762]: I1009 13:38:00.829729 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7ca26ecab799b3f8f8aeb980f91d3f8d8e99ab7a7d1b77db45bd21f7f598c05c"} err="failed to get container status \"7ca26ecab799b3f8f8aeb980f91d3f8d8e99ab7a7d1b77db45bd21f7f598c05c\": rpc error: code = NotFound desc = could not find container \"7ca26ecab799b3f8f8aeb980f91d3f8d8e99ab7a7d1b77db45bd21f7f598c05c\": container with ID starting with 7ca26ecab799b3f8f8aeb980f91d3f8d8e99ab7a7d1b77db45bd21f7f598c05c not found: ID does not exist" Oct 09 13:38:00 crc kubenswrapper[4762]: I1009 13:38:00.834345 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-c4rs8"] Oct 09 13:38:00 crc kubenswrapper[4762]: I1009 13:38:00.837582 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-c4rs8"] Oct 09 13:38:00 crc 
kubenswrapper[4762]: I1009 13:38:00.971480 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="414ea71c-9878-4a8a-825f-3d18ff8460d9" path="/var/lib/kubelet/pods/414ea71c-9878-4a8a-825f-3d18ff8460d9/volumes" Oct 09 13:38:00 crc kubenswrapper[4762]: I1009 13:38:00.971989 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5be8bc93-cb89-4cc7-822d-739708bab8a9" path="/var/lib/kubelet/pods/5be8bc93-cb89-4cc7-822d-739708bab8a9/volumes" Oct 09 13:38:01 crc kubenswrapper[4762]: I1009 13:38:01.013738 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5b97984b67-shpj2"] Oct 09 13:38:01 crc kubenswrapper[4762]: E1009 13:38:01.014053 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5be8bc93-cb89-4cc7-822d-739708bab8a9" containerName="controller-manager" Oct 09 13:38:01 crc kubenswrapper[4762]: I1009 13:38:01.014075 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="5be8bc93-cb89-4cc7-822d-739708bab8a9" containerName="controller-manager" Oct 09 13:38:01 crc kubenswrapper[4762]: E1009 13:38:01.014095 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="414ea71c-9878-4a8a-825f-3d18ff8460d9" containerName="route-controller-manager" Oct 09 13:38:01 crc kubenswrapper[4762]: I1009 13:38:01.014108 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="414ea71c-9878-4a8a-825f-3d18ff8460d9" containerName="route-controller-manager" Oct 09 13:38:01 crc kubenswrapper[4762]: E1009 13:38:01.014136 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="913065b9-29bd-4c9a-8d6e-e319bf91efbd" containerName="registry" Oct 09 13:38:01 crc kubenswrapper[4762]: I1009 13:38:01.014146 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="913065b9-29bd-4c9a-8d6e-e319bf91efbd" containerName="registry" Oct 09 13:38:01 crc kubenswrapper[4762]: I1009 13:38:01.014315 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="5be8bc93-cb89-4cc7-822d-739708bab8a9" containerName="controller-manager" Oct 09 13:38:01 crc kubenswrapper[4762]: I1009 13:38:01.014345 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="414ea71c-9878-4a8a-825f-3d18ff8460d9" containerName="route-controller-manager" Oct 09 13:38:01 crc kubenswrapper[4762]: I1009 13:38:01.014360 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="913065b9-29bd-4c9a-8d6e-e319bf91efbd" containerName="registry" Oct 09 13:38:01 crc kubenswrapper[4762]: I1009 13:38:01.014827 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-76cf5dccfb-k4fh9"] Oct 09 13:38:01 crc kubenswrapper[4762]: I1009 13:38:01.015103 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5b97984b67-shpj2" Oct 09 13:38:01 crc kubenswrapper[4762]: I1009 13:38:01.015531 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-76cf5dccfb-k4fh9" Oct 09 13:38:01 crc kubenswrapper[4762]: I1009 13:38:01.017681 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Oct 09 13:38:01 crc kubenswrapper[4762]: I1009 13:38:01.018109 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Oct 09 13:38:01 crc kubenswrapper[4762]: I1009 13:38:01.018527 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Oct 09 13:38:01 crc kubenswrapper[4762]: I1009 13:38:01.018545 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Oct 09 13:38:01 crc kubenswrapper[4762]: I1009 13:38:01.019123 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Oct 09 13:38:01 crc kubenswrapper[4762]: I1009 13:38:01.026982 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Oct 09 13:38:01 crc kubenswrapper[4762]: I1009 13:38:01.027039 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Oct 09 13:38:01 crc kubenswrapper[4762]: I1009 13:38:01.027045 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Oct 09 13:38:01 crc kubenswrapper[4762]: I1009 13:38:01.028974 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Oct 09 13:38:01 crc kubenswrapper[4762]: I1009 13:38:01.029210 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0626d087-633d-4a90-aee0-8ec33ec5742b-serving-cert\") pod \"controller-manager-76cf5dccfb-k4fh9\" (UID: \"0626d087-633d-4a90-aee0-8ec33ec5742b\") " pod="openshift-controller-manager/controller-manager-76cf5dccfb-k4fh9" Oct 09 13:38:01 crc kubenswrapper[4762]: I1009 13:38:01.029264 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ae9d2d9f-3b35-46c7-8924-cb4771b78ea3-config\") pod \"route-controller-manager-5b97984b67-shpj2\" (UID: \"ae9d2d9f-3b35-46c7-8924-cb4771b78ea3\") " pod="openshift-route-controller-manager/route-controller-manager-5b97984b67-shpj2" Oct 09 13:38:01 crc kubenswrapper[4762]: I1009 13:38:01.029304 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/0626d087-633d-4a90-aee0-8ec33ec5742b-proxy-ca-bundles\") pod \"controller-manager-76cf5dccfb-k4fh9\" (UID: \"0626d087-633d-4a90-aee0-8ec33ec5742b\") " pod="openshift-controller-manager/controller-manager-76cf5dccfb-k4fh9" Oct 09 13:38:01 crc kubenswrapper[4762]: I1009 13:38:01.029348 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ae9d2d9f-3b35-46c7-8924-cb4771b78ea3-client-ca\") pod \"route-controller-manager-5b97984b67-shpj2\" (UID: \"ae9d2d9f-3b35-46c7-8924-cb4771b78ea3\") " 
pod="openshift-route-controller-manager/route-controller-manager-5b97984b67-shpj2" Oct 09 13:38:01 crc kubenswrapper[4762]: I1009 13:38:01.029391 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0626d087-633d-4a90-aee0-8ec33ec5742b-client-ca\") pod \"controller-manager-76cf5dccfb-k4fh9\" (UID: \"0626d087-633d-4a90-aee0-8ec33ec5742b\") " pod="openshift-controller-manager/controller-manager-76cf5dccfb-k4fh9" Oct 09 13:38:01 crc kubenswrapper[4762]: I1009 13:38:01.029422 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-56mg2\" (UniqueName: \"kubernetes.io/projected/ae9d2d9f-3b35-46c7-8924-cb4771b78ea3-kube-api-access-56mg2\") pod \"route-controller-manager-5b97984b67-shpj2\" (UID: \"ae9d2d9f-3b35-46c7-8924-cb4771b78ea3\") " pod="openshift-route-controller-manager/route-controller-manager-5b97984b67-shpj2" Oct 09 13:38:01 crc kubenswrapper[4762]: I1009 13:38:01.029523 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0626d087-633d-4a90-aee0-8ec33ec5742b-config\") pod \"controller-manager-76cf5dccfb-k4fh9\" (UID: \"0626d087-633d-4a90-aee0-8ec33ec5742b\") " pod="openshift-controller-manager/controller-manager-76cf5dccfb-k4fh9" Oct 09 13:38:01 crc kubenswrapper[4762]: I1009 13:38:01.029692 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Oct 09 13:38:01 crc kubenswrapper[4762]: I1009 13:38:01.029722 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Oct 09 13:38:01 crc kubenswrapper[4762]: I1009 13:38:01.029735 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Oct 09 13:38:01 crc kubenswrapper[4762]: I1009 13:38:01.029758 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Oct 09 13:38:01 crc kubenswrapper[4762]: I1009 13:38:01.029771 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6hd2n\" (UniqueName: \"kubernetes.io/projected/0626d087-633d-4a90-aee0-8ec33ec5742b-kube-api-access-6hd2n\") pod \"controller-manager-76cf5dccfb-k4fh9\" (UID: \"0626d087-633d-4a90-aee0-8ec33ec5742b\") " pod="openshift-controller-manager/controller-manager-76cf5dccfb-k4fh9" Oct 09 13:38:01 crc kubenswrapper[4762]: I1009 13:38:01.029839 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ae9d2d9f-3b35-46c7-8924-cb4771b78ea3-serving-cert\") pod \"route-controller-manager-5b97984b67-shpj2\" (UID: \"ae9d2d9f-3b35-46c7-8924-cb4771b78ea3\") " pod="openshift-route-controller-manager/route-controller-manager-5b97984b67-shpj2" Oct 09 13:38:01 crc kubenswrapper[4762]: I1009 13:38:01.031446 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5b97984b67-shpj2"] Oct 09 13:38:01 crc kubenswrapper[4762]: I1009 13:38:01.037145 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-76cf5dccfb-k4fh9"] Oct 09 13:38:01 crc kubenswrapper[4762]: I1009 13:38:01.130132 4762 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0626d087-633d-4a90-aee0-8ec33ec5742b-config\") pod \"controller-manager-76cf5dccfb-k4fh9\" (UID: \"0626d087-633d-4a90-aee0-8ec33ec5742b\") " pod="openshift-controller-manager/controller-manager-76cf5dccfb-k4fh9" Oct 09 13:38:01 crc kubenswrapper[4762]: I1009 13:38:01.130184 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6hd2n\" (UniqueName: \"kubernetes.io/projected/0626d087-633d-4a90-aee0-8ec33ec5742b-kube-api-access-6hd2n\") pod \"controller-manager-76cf5dccfb-k4fh9\" (UID: \"0626d087-633d-4a90-aee0-8ec33ec5742b\") " pod="openshift-controller-manager/controller-manager-76cf5dccfb-k4fh9" Oct 09 13:38:01 crc kubenswrapper[4762]: I1009 13:38:01.130205 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ae9d2d9f-3b35-46c7-8924-cb4771b78ea3-serving-cert\") pod \"route-controller-manager-5b97984b67-shpj2\" (UID: \"ae9d2d9f-3b35-46c7-8924-cb4771b78ea3\") " pod="openshift-route-controller-manager/route-controller-manager-5b97984b67-shpj2" Oct 09 13:38:01 crc kubenswrapper[4762]: I1009 13:38:01.130236 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0626d087-633d-4a90-aee0-8ec33ec5742b-serving-cert\") pod \"controller-manager-76cf5dccfb-k4fh9\" (UID: \"0626d087-633d-4a90-aee0-8ec33ec5742b\") " pod="openshift-controller-manager/controller-manager-76cf5dccfb-k4fh9" Oct 09 13:38:01 crc kubenswrapper[4762]: I1009 13:38:01.130250 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ae9d2d9f-3b35-46c7-8924-cb4771b78ea3-config\") pod \"route-controller-manager-5b97984b67-shpj2\" (UID: \"ae9d2d9f-3b35-46c7-8924-cb4771b78ea3\") " pod="openshift-route-controller-manager/route-controller-manager-5b97984b67-shpj2" Oct 09 13:38:01 crc kubenswrapper[4762]: I1009 13:38:01.130266 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/0626d087-633d-4a90-aee0-8ec33ec5742b-proxy-ca-bundles\") pod \"controller-manager-76cf5dccfb-k4fh9\" (UID: \"0626d087-633d-4a90-aee0-8ec33ec5742b\") " pod="openshift-controller-manager/controller-manager-76cf5dccfb-k4fh9" Oct 09 13:38:01 crc kubenswrapper[4762]: I1009 13:38:01.130294 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0626d087-633d-4a90-aee0-8ec33ec5742b-client-ca\") pod \"controller-manager-76cf5dccfb-k4fh9\" (UID: \"0626d087-633d-4a90-aee0-8ec33ec5742b\") " pod="openshift-controller-manager/controller-manager-76cf5dccfb-k4fh9" Oct 09 13:38:01 crc kubenswrapper[4762]: I1009 13:38:01.130308 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ae9d2d9f-3b35-46c7-8924-cb4771b78ea3-client-ca\") pod \"route-controller-manager-5b97984b67-shpj2\" (UID: \"ae9d2d9f-3b35-46c7-8924-cb4771b78ea3\") " pod="openshift-route-controller-manager/route-controller-manager-5b97984b67-shpj2" Oct 09 13:38:01 crc kubenswrapper[4762]: I1009 13:38:01.130325 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-56mg2\" (UniqueName: 
\"kubernetes.io/projected/ae9d2d9f-3b35-46c7-8924-cb4771b78ea3-kube-api-access-56mg2\") pod \"route-controller-manager-5b97984b67-shpj2\" (UID: \"ae9d2d9f-3b35-46c7-8924-cb4771b78ea3\") " pod="openshift-route-controller-manager/route-controller-manager-5b97984b67-shpj2" Oct 09 13:38:01 crc kubenswrapper[4762]: I1009 13:38:01.133099 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0626d087-633d-4a90-aee0-8ec33ec5742b-config\") pod \"controller-manager-76cf5dccfb-k4fh9\" (UID: \"0626d087-633d-4a90-aee0-8ec33ec5742b\") " pod="openshift-controller-manager/controller-manager-76cf5dccfb-k4fh9" Oct 09 13:38:01 crc kubenswrapper[4762]: I1009 13:38:01.133700 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0626d087-633d-4a90-aee0-8ec33ec5742b-client-ca\") pod \"controller-manager-76cf5dccfb-k4fh9\" (UID: \"0626d087-633d-4a90-aee0-8ec33ec5742b\") " pod="openshift-controller-manager/controller-manager-76cf5dccfb-k4fh9" Oct 09 13:38:01 crc kubenswrapper[4762]: I1009 13:38:01.133922 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/0626d087-633d-4a90-aee0-8ec33ec5742b-proxy-ca-bundles\") pod \"controller-manager-76cf5dccfb-k4fh9\" (UID: \"0626d087-633d-4a90-aee0-8ec33ec5742b\") " pod="openshift-controller-manager/controller-manager-76cf5dccfb-k4fh9" Oct 09 13:38:01 crc kubenswrapper[4762]: I1009 13:38:01.134419 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ae9d2d9f-3b35-46c7-8924-cb4771b78ea3-client-ca\") pod \"route-controller-manager-5b97984b67-shpj2\" (UID: \"ae9d2d9f-3b35-46c7-8924-cb4771b78ea3\") " pod="openshift-route-controller-manager/route-controller-manager-5b97984b67-shpj2" Oct 09 13:38:01 crc kubenswrapper[4762]: I1009 13:38:01.137111 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ae9d2d9f-3b35-46c7-8924-cb4771b78ea3-config\") pod \"route-controller-manager-5b97984b67-shpj2\" (UID: \"ae9d2d9f-3b35-46c7-8924-cb4771b78ea3\") " pod="openshift-route-controller-manager/route-controller-manager-5b97984b67-shpj2" Oct 09 13:38:01 crc kubenswrapper[4762]: I1009 13:38:01.142672 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ae9d2d9f-3b35-46c7-8924-cb4771b78ea3-serving-cert\") pod \"route-controller-manager-5b97984b67-shpj2\" (UID: \"ae9d2d9f-3b35-46c7-8924-cb4771b78ea3\") " pod="openshift-route-controller-manager/route-controller-manager-5b97984b67-shpj2" Oct 09 13:38:01 crc kubenswrapper[4762]: I1009 13:38:01.142907 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0626d087-633d-4a90-aee0-8ec33ec5742b-serving-cert\") pod \"controller-manager-76cf5dccfb-k4fh9\" (UID: \"0626d087-633d-4a90-aee0-8ec33ec5742b\") " pod="openshift-controller-manager/controller-manager-76cf5dccfb-k4fh9" Oct 09 13:38:01 crc kubenswrapper[4762]: I1009 13:38:01.148772 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6hd2n\" (UniqueName: \"kubernetes.io/projected/0626d087-633d-4a90-aee0-8ec33ec5742b-kube-api-access-6hd2n\") pod \"controller-manager-76cf5dccfb-k4fh9\" (UID: \"0626d087-633d-4a90-aee0-8ec33ec5742b\") " 
pod="openshift-controller-manager/controller-manager-76cf5dccfb-k4fh9" Oct 09 13:38:01 crc kubenswrapper[4762]: I1009 13:38:01.152745 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-56mg2\" (UniqueName: \"kubernetes.io/projected/ae9d2d9f-3b35-46c7-8924-cb4771b78ea3-kube-api-access-56mg2\") pod \"route-controller-manager-5b97984b67-shpj2\" (UID: \"ae9d2d9f-3b35-46c7-8924-cb4771b78ea3\") " pod="openshift-route-controller-manager/route-controller-manager-5b97984b67-shpj2" Oct 09 13:38:01 crc kubenswrapper[4762]: I1009 13:38:01.276168 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-76cf5dccfb-k4fh9"] Oct 09 13:38:01 crc kubenswrapper[4762]: I1009 13:38:01.276581 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-76cf5dccfb-k4fh9" Oct 09 13:38:01 crc kubenswrapper[4762]: I1009 13:38:01.356336 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5b97984b67-shpj2" Oct 09 13:38:01 crc kubenswrapper[4762]: I1009 13:38:01.526100 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-76cf5dccfb-k4fh9"] Oct 09 13:38:01 crc kubenswrapper[4762]: I1009 13:38:01.566811 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5b97984b67-shpj2"] Oct 09 13:38:01 crc kubenswrapper[4762]: W1009 13:38:01.571082 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podae9d2d9f_3b35_46c7_8924_cb4771b78ea3.slice/crio-ee0c9c5419e312c06e280ab09c09dbd75f4f4a86a02d096264361a1b428c4cfb WatchSource:0}: Error finding container ee0c9c5419e312c06e280ab09c09dbd75f4f4a86a02d096264361a1b428c4cfb: Status 404 returned error can't find the container with id ee0c9c5419e312c06e280ab09c09dbd75f4f4a86a02d096264361a1b428c4cfb Oct 09 13:38:01 crc kubenswrapper[4762]: I1009 13:38:01.804614 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-76cf5dccfb-k4fh9" event={"ID":"0626d087-633d-4a90-aee0-8ec33ec5742b","Type":"ContainerStarted","Data":"062161fdc3a007314abc57423f44c0c73649569f6b010b9ca9bc4427feefce4a"} Oct 09 13:38:01 crc kubenswrapper[4762]: I1009 13:38:01.806262 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-76cf5dccfb-k4fh9" event={"ID":"0626d087-633d-4a90-aee0-8ec33ec5742b","Type":"ContainerStarted","Data":"8cbc1509feb02013ee4b5548d559c21a16301e960424b19197be7b71e95866cb"} Oct 09 13:38:01 crc kubenswrapper[4762]: I1009 13:38:01.806299 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-76cf5dccfb-k4fh9" Oct 09 13:38:01 crc kubenswrapper[4762]: I1009 13:38:01.804785 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-76cf5dccfb-k4fh9" podUID="0626d087-633d-4a90-aee0-8ec33ec5742b" containerName="controller-manager" containerID="cri-o://062161fdc3a007314abc57423f44c0c73649569f6b010b9ca9bc4427feefce4a" gracePeriod=30 Oct 09 13:38:01 crc kubenswrapper[4762]: I1009 13:38:01.806962 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5b97984b67-shpj2" 
event={"ID":"ae9d2d9f-3b35-46c7-8924-cb4771b78ea3","Type":"ContainerStarted","Data":"61afc3c87b3258f4b561377d7a459723ec72d93a2ffb501a403b997cb18a08de"} Oct 09 13:38:01 crc kubenswrapper[4762]: I1009 13:38:01.807033 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5b97984b67-shpj2" event={"ID":"ae9d2d9f-3b35-46c7-8924-cb4771b78ea3","Type":"ContainerStarted","Data":"ee0c9c5419e312c06e280ab09c09dbd75f4f4a86a02d096264361a1b428c4cfb"} Oct 09 13:38:01 crc kubenswrapper[4762]: I1009 13:38:01.807672 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-5b97984b67-shpj2" Oct 09 13:38:01 crc kubenswrapper[4762]: I1009 13:38:01.812717 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-76cf5dccfb-k4fh9" Oct 09 13:38:01 crc kubenswrapper[4762]: I1009 13:38:01.821324 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-76cf5dccfb-k4fh9" podStartSLOduration=2.821308025 podStartE2EDuration="2.821308025s" podCreationTimestamp="2025-10-09 13:37:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:38:01.819759785 +0000 UTC m=+757.593550824" watchObservedRunningTime="2025-10-09 13:38:01.821308025 +0000 UTC m=+757.595099064" Oct 09 13:38:01 crc kubenswrapper[4762]: I1009 13:38:01.841598 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-5b97984b67-shpj2" podStartSLOduration=2.841574361 podStartE2EDuration="2.841574361s" podCreationTimestamp="2025-10-09 13:37:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:38:01.840141984 +0000 UTC m=+757.613933033" watchObservedRunningTime="2025-10-09 13:38:01.841574361 +0000 UTC m=+757.615365400" Oct 09 13:38:01 crc kubenswrapper[4762]: I1009 13:38:01.893686 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-5b97984b67-shpj2" Oct 09 13:38:02 crc kubenswrapper[4762]: I1009 13:38:02.168115 4762 util.go:48] "No ready sandbox for pod can be found. 
Oct 09 13:38:02 crc kubenswrapper[4762]: I1009 13:38:02.168115 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-76cf5dccfb-k4fh9"
Oct 09 13:38:02 crc kubenswrapper[4762]: I1009 13:38:02.344501 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6hd2n\" (UniqueName: \"kubernetes.io/projected/0626d087-633d-4a90-aee0-8ec33ec5742b-kube-api-access-6hd2n\") pod \"0626d087-633d-4a90-aee0-8ec33ec5742b\" (UID: \"0626d087-633d-4a90-aee0-8ec33ec5742b\") "
Oct 09 13:38:02 crc kubenswrapper[4762]: I1009 13:38:02.344565 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0626d087-633d-4a90-aee0-8ec33ec5742b-serving-cert\") pod \"0626d087-633d-4a90-aee0-8ec33ec5742b\" (UID: \"0626d087-633d-4a90-aee0-8ec33ec5742b\") "
Oct 09 13:38:02 crc kubenswrapper[4762]: I1009 13:38:02.344644 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0626d087-633d-4a90-aee0-8ec33ec5742b-config\") pod \"0626d087-633d-4a90-aee0-8ec33ec5742b\" (UID: \"0626d087-633d-4a90-aee0-8ec33ec5742b\") "
Oct 09 13:38:02 crc kubenswrapper[4762]: I1009 13:38:02.344678 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/0626d087-633d-4a90-aee0-8ec33ec5742b-proxy-ca-bundles\") pod \"0626d087-633d-4a90-aee0-8ec33ec5742b\" (UID: \"0626d087-633d-4a90-aee0-8ec33ec5742b\") "
Oct 09 13:38:02 crc kubenswrapper[4762]: I1009 13:38:02.344716 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0626d087-633d-4a90-aee0-8ec33ec5742b-client-ca\") pod \"0626d087-633d-4a90-aee0-8ec33ec5742b\" (UID: \"0626d087-633d-4a90-aee0-8ec33ec5742b\") "
Oct 09 13:38:02 crc kubenswrapper[4762]: I1009 13:38:02.345528 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0626d087-633d-4a90-aee0-8ec33ec5742b-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "0626d087-633d-4a90-aee0-8ec33ec5742b" (UID: "0626d087-633d-4a90-aee0-8ec33ec5742b"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 09 13:38:02 crc kubenswrapper[4762]: I1009 13:38:02.345629 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0626d087-633d-4a90-aee0-8ec33ec5742b-config" (OuterVolumeSpecName: "config") pod "0626d087-633d-4a90-aee0-8ec33ec5742b" (UID: "0626d087-633d-4a90-aee0-8ec33ec5742b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 09 13:38:02 crc kubenswrapper[4762]: I1009 13:38:02.345700 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0626d087-633d-4a90-aee0-8ec33ec5742b-client-ca" (OuterVolumeSpecName: "client-ca") pod "0626d087-633d-4a90-aee0-8ec33ec5742b" (UID: "0626d087-633d-4a90-aee0-8ec33ec5742b"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 09 13:38:02 crc kubenswrapper[4762]: I1009 13:38:02.356874 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0626d087-633d-4a90-aee0-8ec33ec5742b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0626d087-633d-4a90-aee0-8ec33ec5742b" (UID: "0626d087-633d-4a90-aee0-8ec33ec5742b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
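[Editor's note] The burst of operationExecutor.UnmountVolume / UnmountVolume.TearDown entries above is the kubelet's volume manager reconciling desired against actual state: the pod has been deleted, so every volume still mounted for it must come down, and for ConfigMap-, Secret-, and projected-backed volumes TearDown amounts to removing the pod-local directory. A toy sketch of that diff loop, with illustrative types rather than the kubelet's real ones:

    // Sketch of the reconcile step behind the "UnmountVolume started"
    // entries above: any volume present in actual state but absent from
    // desired state gets unmounted. Types and names are illustrative.
    package main

    import "fmt"

    type volumeKey struct{ podUID, name string }

    func reconcile(desired, actual map[volumeKey]bool, unmount func(volumeKey)) {
        for v := range actual {
            if !desired[v] {
                unmount(v) // corresponds to UnmountVolume.TearDown in the log
            }
        }
    }

    func main() {
        actual := map[volumeKey]bool{
            {"0626d087-633d-4a90-aee0-8ec33ec5742b", "config"}:    true,
            {"0626d087-633d-4a90-aee0-8ec33ec5742b", "client-ca"}: true,
        }
        desired := map[volumeKey]bool{} // pod deleted: nothing is desired
        reconcile(desired, actual, func(v volumeKey) {
            fmt.Printf("unmount %s/%s\n", v.podUID, v.name)
        })
    }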
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:38:02 crc kubenswrapper[4762]: I1009 13:38:02.356934 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0626d087-633d-4a90-aee0-8ec33ec5742b-kube-api-access-6hd2n" (OuterVolumeSpecName: "kube-api-access-6hd2n") pod "0626d087-633d-4a90-aee0-8ec33ec5742b" (UID: "0626d087-633d-4a90-aee0-8ec33ec5742b"). InnerVolumeSpecName "kube-api-access-6hd2n". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:38:02 crc kubenswrapper[4762]: I1009 13:38:02.445746 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6hd2n\" (UniqueName: \"kubernetes.io/projected/0626d087-633d-4a90-aee0-8ec33ec5742b-kube-api-access-6hd2n\") on node \"crc\" DevicePath \"\"" Oct 09 13:38:02 crc kubenswrapper[4762]: I1009 13:38:02.445788 4762 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0626d087-633d-4a90-aee0-8ec33ec5742b-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 09 13:38:02 crc kubenswrapper[4762]: I1009 13:38:02.445805 4762 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0626d087-633d-4a90-aee0-8ec33ec5742b-config\") on node \"crc\" DevicePath \"\"" Oct 09 13:38:02 crc kubenswrapper[4762]: I1009 13:38:02.445817 4762 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/0626d087-633d-4a90-aee0-8ec33ec5742b-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 09 13:38:02 crc kubenswrapper[4762]: I1009 13:38:02.445829 4762 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0626d087-633d-4a90-aee0-8ec33ec5742b-client-ca\") on node \"crc\" DevicePath \"\"" Oct 09 13:38:02 crc kubenswrapper[4762]: I1009 13:38:02.819680 4762 generic.go:334] "Generic (PLEG): container finished" podID="0626d087-633d-4a90-aee0-8ec33ec5742b" containerID="062161fdc3a007314abc57423f44c0c73649569f6b010b9ca9bc4427feefce4a" exitCode=0 Oct 09 13:38:02 crc kubenswrapper[4762]: I1009 13:38:02.819744 4762 util.go:48] "No ready sandbox for pod can be found. 
Oct 09 13:38:02 crc kubenswrapper[4762]: I1009 13:38:02.819744 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-76cf5dccfb-k4fh9"
Oct 09 13:38:02 crc kubenswrapper[4762]: I1009 13:38:02.819798 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-76cf5dccfb-k4fh9" event={"ID":"0626d087-633d-4a90-aee0-8ec33ec5742b","Type":"ContainerDied","Data":"062161fdc3a007314abc57423f44c0c73649569f6b010b9ca9bc4427feefce4a"}
Oct 09 13:38:02 crc kubenswrapper[4762]: I1009 13:38:02.819835 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-76cf5dccfb-k4fh9" event={"ID":"0626d087-633d-4a90-aee0-8ec33ec5742b","Type":"ContainerDied","Data":"8cbc1509feb02013ee4b5548d559c21a16301e960424b19197be7b71e95866cb"}
Oct 09 13:38:02 crc kubenswrapper[4762]: I1009 13:38:02.819857 4762 scope.go:117] "RemoveContainer" containerID="062161fdc3a007314abc57423f44c0c73649569f6b010b9ca9bc4427feefce4a"
Oct 09 13:38:02 crc kubenswrapper[4762]: I1009 13:38:02.864313 4762 scope.go:117] "RemoveContainer" containerID="062161fdc3a007314abc57423f44c0c73649569f6b010b9ca9bc4427feefce4a"
Oct 09 13:38:02 crc kubenswrapper[4762]: I1009 13:38:02.864410 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-76cf5dccfb-k4fh9"]
Oct 09 13:38:02 crc kubenswrapper[4762]: E1009 13:38:02.864740 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"062161fdc3a007314abc57423f44c0c73649569f6b010b9ca9bc4427feefce4a\": container with ID starting with 062161fdc3a007314abc57423f44c0c73649569f6b010b9ca9bc4427feefce4a not found: ID does not exist" containerID="062161fdc3a007314abc57423f44c0c73649569f6b010b9ca9bc4427feefce4a"
Oct 09 13:38:02 crc kubenswrapper[4762]: I1009 13:38:02.864786 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"062161fdc3a007314abc57423f44c0c73649569f6b010b9ca9bc4427feefce4a"} err="failed to get container status \"062161fdc3a007314abc57423f44c0c73649569f6b010b9ca9bc4427feefce4a\": rpc error: code = NotFound desc = could not find container \"062161fdc3a007314abc57423f44c0c73649569f6b010b9ca9bc4427feefce4a\": container with ID starting with 062161fdc3a007314abc57423f44c0c73649569f6b010b9ca9bc4427feefce4a not found: ID does not exist"
Oct 09 13:38:02 crc kubenswrapper[4762]: I1009 13:38:02.866455 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-76cf5dccfb-k4fh9"]
Oct 09 13:38:02 crc kubenswrapper[4762]: I1009 13:38:02.974975 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0626d087-633d-4a90-aee0-8ec33ec5742b" path="/var/lib/kubelet/pods/0626d087-633d-4a90-aee0-8ec33ec5742b/volumes"
Oct 09 13:38:03 crc kubenswrapper[4762]: I1009 13:38:03.012370 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-69967674d-b2glg"]
Oct 09 13:38:03 crc kubenswrapper[4762]: E1009 13:38:03.012568 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0626d087-633d-4a90-aee0-8ec33ec5742b" containerName="controller-manager"
Oct 09 13:38:03 crc kubenswrapper[4762]: I1009 13:38:03.012584 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="0626d087-633d-4a90-aee0-8ec33ec5742b" containerName="controller-manager"
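[Editor's note] The NotFound error and the "DeleteContainer returned error" entry above are benign: the container had already been removed by the time the kubelet re-queried it, so removal is treated as idempotent. The same check-then-remove pattern against the CRI runtime service looks roughly like this (the gRPC types are the real k8s.io/cri-api ones; the CRI-O socket path is an assumption):

    // Idempotent container removal over CRI, mirroring the log above:
    // a NotFound status on lookup means the container is already gone,
    // which counts as success.
    package main

    import (
        "context"
        "fmt"

        "google.golang.org/grpc"
        "google.golang.org/grpc/codes"
        "google.golang.org/grpc/credentials/insecure"
        "google.golang.org/grpc/status"
        runtimeapi "k8s.io/cri-api/pkg/apis/runtime/v1"
    )

    func ensureGone(ctx context.Context, rt runtimeapi.RuntimeServiceClient, id string) error {
        _, err := rt.ContainerStatus(ctx, &runtimeapi.ContainerStatusRequest{ContainerId: id})
        if status.Code(err) == codes.NotFound {
            return nil // already removed: success, as in the log entries above
        }
        if err != nil {
            return err
        }
        _, err = rt.RemoveContainer(ctx, &runtimeapi.RemoveContainerRequest{ContainerId: id})
        return err
    }

    func main() {
        // Socket path is an assumption for a CRI-O node like this one.
        conn, err := grpc.NewClient("unix:///var/run/crio/crio.sock",
            grpc.WithTransportCredentials(insecure.NewCredentials()))
        if err != nil {
            panic(err)
        }
        defer conn.Close()
        rt := runtimeapi.NewRuntimeServiceClient(conn)
        fmt.Println(ensureGone(context.Background(),
            rt, "062161fdc3a007314abc57423f44c0c73649569f6b010b9ca9bc4427feefce4a"))
    }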
podUID="0626d087-633d-4a90-aee0-8ec33ec5742b" containerName="controller-manager" Oct 09 13:38:03 crc kubenswrapper[4762]: I1009 13:38:03.013126 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-69967674d-b2glg" Oct 09 13:38:03 crc kubenswrapper[4762]: I1009 13:38:03.015377 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Oct 09 13:38:03 crc kubenswrapper[4762]: I1009 13:38:03.015550 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Oct 09 13:38:03 crc kubenswrapper[4762]: I1009 13:38:03.015585 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Oct 09 13:38:03 crc kubenswrapper[4762]: I1009 13:38:03.015997 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Oct 09 13:38:03 crc kubenswrapper[4762]: I1009 13:38:03.016067 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Oct 09 13:38:03 crc kubenswrapper[4762]: I1009 13:38:03.016170 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Oct 09 13:38:03 crc kubenswrapper[4762]: I1009 13:38:03.023148 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-69967674d-b2glg"] Oct 09 13:38:03 crc kubenswrapper[4762]: I1009 13:38:03.023852 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Oct 09 13:38:03 crc kubenswrapper[4762]: I1009 13:38:03.153971 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/06c3a063-f61e-4845-b1e6-73e5d650de21-client-ca\") pod \"controller-manager-69967674d-b2glg\" (UID: \"06c3a063-f61e-4845-b1e6-73e5d650de21\") " pod="openshift-controller-manager/controller-manager-69967674d-b2glg" Oct 09 13:38:03 crc kubenswrapper[4762]: I1009 13:38:03.154083 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ljkxp\" (UniqueName: \"kubernetes.io/projected/06c3a063-f61e-4845-b1e6-73e5d650de21-kube-api-access-ljkxp\") pod \"controller-manager-69967674d-b2glg\" (UID: \"06c3a063-f61e-4845-b1e6-73e5d650de21\") " pod="openshift-controller-manager/controller-manager-69967674d-b2glg" Oct 09 13:38:03 crc kubenswrapper[4762]: I1009 13:38:03.154226 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/06c3a063-f61e-4845-b1e6-73e5d650de21-serving-cert\") pod \"controller-manager-69967674d-b2glg\" (UID: \"06c3a063-f61e-4845-b1e6-73e5d650de21\") " pod="openshift-controller-manager/controller-manager-69967674d-b2glg" Oct 09 13:38:03 crc kubenswrapper[4762]: I1009 13:38:03.154275 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/06c3a063-f61e-4845-b1e6-73e5d650de21-config\") pod \"controller-manager-69967674d-b2glg\" (UID: \"06c3a063-f61e-4845-b1e6-73e5d650de21\") " pod="openshift-controller-manager/controller-manager-69967674d-b2glg" Oct 09 13:38:03 crc kubenswrapper[4762]: 
I1009 13:38:03.154332 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/06c3a063-f61e-4845-b1e6-73e5d650de21-proxy-ca-bundles\") pod \"controller-manager-69967674d-b2glg\" (UID: \"06c3a063-f61e-4845-b1e6-73e5d650de21\") " pod="openshift-controller-manager/controller-manager-69967674d-b2glg" Oct 09 13:38:03 crc kubenswrapper[4762]: I1009 13:38:03.255555 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/06c3a063-f61e-4845-b1e6-73e5d650de21-client-ca\") pod \"controller-manager-69967674d-b2glg\" (UID: \"06c3a063-f61e-4845-b1e6-73e5d650de21\") " pod="openshift-controller-manager/controller-manager-69967674d-b2glg" Oct 09 13:38:03 crc kubenswrapper[4762]: I1009 13:38:03.255615 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ljkxp\" (UniqueName: \"kubernetes.io/projected/06c3a063-f61e-4845-b1e6-73e5d650de21-kube-api-access-ljkxp\") pod \"controller-manager-69967674d-b2glg\" (UID: \"06c3a063-f61e-4845-b1e6-73e5d650de21\") " pod="openshift-controller-manager/controller-manager-69967674d-b2glg" Oct 09 13:38:03 crc kubenswrapper[4762]: I1009 13:38:03.255689 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/06c3a063-f61e-4845-b1e6-73e5d650de21-serving-cert\") pod \"controller-manager-69967674d-b2glg\" (UID: \"06c3a063-f61e-4845-b1e6-73e5d650de21\") " pod="openshift-controller-manager/controller-manager-69967674d-b2glg" Oct 09 13:38:03 crc kubenswrapper[4762]: I1009 13:38:03.255712 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/06c3a063-f61e-4845-b1e6-73e5d650de21-config\") pod \"controller-manager-69967674d-b2glg\" (UID: \"06c3a063-f61e-4845-b1e6-73e5d650de21\") " pod="openshift-controller-manager/controller-manager-69967674d-b2glg" Oct 09 13:38:03 crc kubenswrapper[4762]: I1009 13:38:03.255739 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/06c3a063-f61e-4845-b1e6-73e5d650de21-proxy-ca-bundles\") pod \"controller-manager-69967674d-b2glg\" (UID: \"06c3a063-f61e-4845-b1e6-73e5d650de21\") " pod="openshift-controller-manager/controller-manager-69967674d-b2glg" Oct 09 13:38:03 crc kubenswrapper[4762]: I1009 13:38:03.256793 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/06c3a063-f61e-4845-b1e6-73e5d650de21-proxy-ca-bundles\") pod \"controller-manager-69967674d-b2glg\" (UID: \"06c3a063-f61e-4845-b1e6-73e5d650de21\") " pod="openshift-controller-manager/controller-manager-69967674d-b2glg" Oct 09 13:38:03 crc kubenswrapper[4762]: I1009 13:38:03.256796 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/06c3a063-f61e-4845-b1e6-73e5d650de21-client-ca\") pod \"controller-manager-69967674d-b2glg\" (UID: \"06c3a063-f61e-4845-b1e6-73e5d650de21\") " pod="openshift-controller-manager/controller-manager-69967674d-b2glg" Oct 09 13:38:03 crc kubenswrapper[4762]: I1009 13:38:03.258440 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/06c3a063-f61e-4845-b1e6-73e5d650de21-config\") pod 
\"controller-manager-69967674d-b2glg\" (UID: \"06c3a063-f61e-4845-b1e6-73e5d650de21\") " pod="openshift-controller-manager/controller-manager-69967674d-b2glg" Oct 09 13:38:03 crc kubenswrapper[4762]: I1009 13:38:03.265615 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/06c3a063-f61e-4845-b1e6-73e5d650de21-serving-cert\") pod \"controller-manager-69967674d-b2glg\" (UID: \"06c3a063-f61e-4845-b1e6-73e5d650de21\") " pod="openshift-controller-manager/controller-manager-69967674d-b2glg" Oct 09 13:38:03 crc kubenswrapper[4762]: I1009 13:38:03.275191 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ljkxp\" (UniqueName: \"kubernetes.io/projected/06c3a063-f61e-4845-b1e6-73e5d650de21-kube-api-access-ljkxp\") pod \"controller-manager-69967674d-b2glg\" (UID: \"06c3a063-f61e-4845-b1e6-73e5d650de21\") " pod="openshift-controller-manager/controller-manager-69967674d-b2glg" Oct 09 13:38:03 crc kubenswrapper[4762]: I1009 13:38:03.330813 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-69967674d-b2glg" Oct 09 13:38:03 crc kubenswrapper[4762]: I1009 13:38:03.502843 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-69967674d-b2glg"] Oct 09 13:38:03 crc kubenswrapper[4762]: W1009 13:38:03.516472 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod06c3a063_f61e_4845_b1e6_73e5d650de21.slice/crio-ebf234e5603bc9855629f9c5a5481c7309778dbfedb2ee179b94e4d73fc9aa53 WatchSource:0}: Error finding container ebf234e5603bc9855629f9c5a5481c7309778dbfedb2ee179b94e4d73fc9aa53: Status 404 returned error can't find the container with id ebf234e5603bc9855629f9c5a5481c7309778dbfedb2ee179b94e4d73fc9aa53 Oct 09 13:38:03 crc kubenswrapper[4762]: I1009 13:38:03.827060 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-69967674d-b2glg" event={"ID":"06c3a063-f61e-4845-b1e6-73e5d650de21","Type":"ContainerStarted","Data":"0d817d8aced6b217573feec8cf3a16aec6f33b31c33a9548cf9a9a0fc3623cf2"} Oct 09 13:38:03 crc kubenswrapper[4762]: I1009 13:38:03.827115 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-69967674d-b2glg" event={"ID":"06c3a063-f61e-4845-b1e6-73e5d650de21","Type":"ContainerStarted","Data":"ebf234e5603bc9855629f9c5a5481c7309778dbfedb2ee179b94e4d73fc9aa53"} Oct 09 13:38:03 crc kubenswrapper[4762]: I1009 13:38:03.827570 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-69967674d-b2glg" Oct 09 13:38:03 crc kubenswrapper[4762]: I1009 13:38:03.836851 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-69967674d-b2glg" Oct 09 13:38:03 crc kubenswrapper[4762]: I1009 13:38:03.855745 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-69967674d-b2glg" podStartSLOduration=2.855723064 podStartE2EDuration="2.855723064s" podCreationTimestamp="2025-10-09 13:38:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:38:03.855299423 +0000 UTC m=+759.629090472" 
watchObservedRunningTime="2025-10-09 13:38:03.855723064 +0000 UTC m=+759.629514103" Oct 09 13:38:08 crc kubenswrapper[4762]: I1009 13:38:08.606372 4762 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.101213 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-jl67g"] Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.102402 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" podUID="92662de9-9784-432a-92d2-a668f815e8fd" containerName="ovn-controller" containerID="cri-o://4a6b026d474235bbb7b31530b4628a10c35b22baf4ee49759f28a9beb8177989" gracePeriod=30 Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.102956 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" podUID="92662de9-9784-432a-92d2-a668f815e8fd" containerName="sbdb" containerID="cri-o://8f5be977653547c33b4d2d5184688120b32866045e3b18a08be1c7c406d6b498" gracePeriod=30 Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.103027 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" podUID="92662de9-9784-432a-92d2-a668f815e8fd" containerName="nbdb" containerID="cri-o://1a6b9ddcf6f9632e0ab1ac7f145c90d4c1e404b44f6e4fdc547fa42a4736448d" gracePeriod=30 Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.103082 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" podUID="92662de9-9784-432a-92d2-a668f815e8fd" containerName="northd" containerID="cri-o://e1fe15b6fa2a4089c0ef0b19180a44b570bf28aeb719e8fb5c960c16f3bc3ee5" gracePeriod=30 Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.103137 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" podUID="92662de9-9784-432a-92d2-a668f815e8fd" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://9dbd3d536f2ff0e46947e1516b3b9def208d490f5e62bbde5bebf37690d26ac0" gracePeriod=30 Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.103238 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" podUID="92662de9-9784-432a-92d2-a668f815e8fd" containerName="kube-rbac-proxy-node" containerID="cri-o://5c409df5880861cef6885822a19dc9bbe481342a849c18ac11c85a60fcee0f15" gracePeriod=30 Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.103309 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" podUID="92662de9-9784-432a-92d2-a668f815e8fd" containerName="ovn-acl-logging" containerID="cri-o://443d1d116c58f5d8b2c5fc9051baf914244cb0776b1f912d11fe4316a0ec0567" gracePeriod=30 Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.159701 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" podUID="92662de9-9784-432a-92d2-a668f815e8fd" containerName="ovnkube-controller" containerID="cri-o://8cf0a81a9603a5e2e4e71e6b43b705cc16135ef584be6e1235de4285f9d14d36" gracePeriod=30 Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.378682 4762 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-jl67g_92662de9-9784-432a-92d2-a668f815e8fd/ovnkube-controller/3.log" Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.381200 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-jl67g_92662de9-9784-432a-92d2-a668f815e8fd/ovn-acl-logging/0.log" Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.381650 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-jl67g_92662de9-9784-432a-92d2-a668f815e8fd/ovn-controller/0.log" Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.382070 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.428031 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-j4f8g"] Oct 09 13:38:22 crc kubenswrapper[4762]: E1009 13:38:22.428457 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="92662de9-9784-432a-92d2-a668f815e8fd" containerName="kube-rbac-proxy-node" Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.428476 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="92662de9-9784-432a-92d2-a668f815e8fd" containerName="kube-rbac-proxy-node" Oct 09 13:38:22 crc kubenswrapper[4762]: E1009 13:38:22.428491 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="92662de9-9784-432a-92d2-a668f815e8fd" containerName="kube-rbac-proxy-ovn-metrics" Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.428500 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="92662de9-9784-432a-92d2-a668f815e8fd" containerName="kube-rbac-proxy-ovn-metrics" Oct 09 13:38:22 crc kubenswrapper[4762]: E1009 13:38:22.428511 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="92662de9-9784-432a-92d2-a668f815e8fd" containerName="sbdb" Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.428519 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="92662de9-9784-432a-92d2-a668f815e8fd" containerName="sbdb" Oct 09 13:38:22 crc kubenswrapper[4762]: E1009 13:38:22.428527 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="92662de9-9784-432a-92d2-a668f815e8fd" containerName="ovn-controller" Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.428534 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="92662de9-9784-432a-92d2-a668f815e8fd" containerName="ovn-controller" Oct 09 13:38:22 crc kubenswrapper[4762]: E1009 13:38:22.428545 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="92662de9-9784-432a-92d2-a668f815e8fd" containerName="ovnkube-controller" Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.428552 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="92662de9-9784-432a-92d2-a668f815e8fd" containerName="ovnkube-controller" Oct 09 13:38:22 crc kubenswrapper[4762]: E1009 13:38:22.428562 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="92662de9-9784-432a-92d2-a668f815e8fd" containerName="ovnkube-controller" Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.428570 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="92662de9-9784-432a-92d2-a668f815e8fd" containerName="ovnkube-controller" Oct 09 13:38:22 crc kubenswrapper[4762]: E1009 13:38:22.428583 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="92662de9-9784-432a-92d2-a668f815e8fd" containerName="northd" Oct 
09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.428591 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="92662de9-9784-432a-92d2-a668f815e8fd" containerName="northd"
Oct 09 13:38:22 crc kubenswrapper[4762]: E1009 13:38:22.428599 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="92662de9-9784-432a-92d2-a668f815e8fd" containerName="nbdb"
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.428606 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="92662de9-9784-432a-92d2-a668f815e8fd" containerName="nbdb"
Oct 09 13:38:22 crc kubenswrapper[4762]: E1009 13:38:22.428615 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="92662de9-9784-432a-92d2-a668f815e8fd" containerName="kubecfg-setup"
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.428623 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="92662de9-9784-432a-92d2-a668f815e8fd" containerName="kubecfg-setup"
Oct 09 13:38:22 crc kubenswrapper[4762]: E1009 13:38:22.428653 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="92662de9-9784-432a-92d2-a668f815e8fd" containerName="ovnkube-controller"
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.428661 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="92662de9-9784-432a-92d2-a668f815e8fd" containerName="ovnkube-controller"
Oct 09 13:38:22 crc kubenswrapper[4762]: E1009 13:38:22.428671 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="92662de9-9784-432a-92d2-a668f815e8fd" containerName="ovn-acl-logging"
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.428709 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="92662de9-9784-432a-92d2-a668f815e8fd" containerName="ovn-acl-logging"
Oct 09 13:38:22 crc kubenswrapper[4762]: E1009 13:38:22.428718 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="92662de9-9784-432a-92d2-a668f815e8fd" containerName="ovnkube-controller"
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.428726 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="92662de9-9784-432a-92d2-a668f815e8fd" containerName="ovnkube-controller"
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.428843 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="92662de9-9784-432a-92d2-a668f815e8fd" containerName="northd"
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.428860 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="92662de9-9784-432a-92d2-a668f815e8fd" containerName="kube-rbac-proxy-ovn-metrics"
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.428871 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="92662de9-9784-432a-92d2-a668f815e8fd" containerName="ovnkube-controller"
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.428880 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="92662de9-9784-432a-92d2-a668f815e8fd" containerName="ovnkube-controller"
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.428891 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="92662de9-9784-432a-92d2-a668f815e8fd" containerName="ovnkube-controller"
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.428900 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="92662de9-9784-432a-92d2-a668f815e8fd" containerName="nbdb"
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.428908 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="92662de9-9784-432a-92d2-a668f815e8fd" containerName="ovnkube-controller"
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.428919 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="92662de9-9784-432a-92d2-a668f815e8fd" containerName="ovnkube-controller"
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.428931 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="92662de9-9784-432a-92d2-a668f815e8fd" containerName="sbdb"
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.428944 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="92662de9-9784-432a-92d2-a668f815e8fd" containerName="ovn-acl-logging"
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.428951 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="92662de9-9784-432a-92d2-a668f815e8fd" containerName="ovn-controller"
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.428961 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="92662de9-9784-432a-92d2-a668f815e8fd" containerName="kube-rbac-proxy-node"
Oct 09 13:38:22 crc kubenswrapper[4762]: E1009 13:38:22.429100 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="92662de9-9784-432a-92d2-a668f815e8fd" containerName="ovnkube-controller"
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.429108 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="92662de9-9784-432a-92d2-a668f815e8fd" containerName="ovnkube-controller"
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.430977 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-j4f8g"
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.455971 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/92662de9-9784-432a-92d2-a668f815e8fd-host-cni-netd\") pod \"92662de9-9784-432a-92d2-a668f815e8fd\" (UID: \"92662de9-9784-432a-92d2-a668f815e8fd\") "
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.456036 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kg6s6\" (UniqueName: \"kubernetes.io/projected/92662de9-9784-432a-92d2-a668f815e8fd-kube-api-access-kg6s6\") pod \"92662de9-9784-432a-92d2-a668f815e8fd\" (UID: \"92662de9-9784-432a-92d2-a668f815e8fd\") "
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.456062 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/92662de9-9784-432a-92d2-a668f815e8fd-host-run-netns\") pod \"92662de9-9784-432a-92d2-a668f815e8fd\" (UID: \"92662de9-9784-432a-92d2-a668f815e8fd\") "
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.456082 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/92662de9-9784-432a-92d2-a668f815e8fd-systemd-units\") pod \"92662de9-9784-432a-92d2-a668f815e8fd\" (UID: \"92662de9-9784-432a-92d2-a668f815e8fd\") "
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.456098 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/92662de9-9784-432a-92d2-a668f815e8fd-run-ovn\") pod \"92662de9-9784-432a-92d2-a668f815e8fd\" (UID: \"92662de9-9784-432a-92d2-a668f815e8fd\") "
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.456113 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/92662de9-9784-432a-92d2-a668f815e8fd-host-run-ovn-kubernetes\") pod \"92662de9-9784-432a-92d2-a668f815e8fd\" (UID: \"92662de9-9784-432a-92d2-a668f815e8fd\") "
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.456134 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/92662de9-9784-432a-92d2-a668f815e8fd-log-socket\") pod \"92662de9-9784-432a-92d2-a668f815e8fd\" (UID: \"92662de9-9784-432a-92d2-a668f815e8fd\") "
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.456155 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/92662de9-9784-432a-92d2-a668f815e8fd-etc-openvswitch\") pod \"92662de9-9784-432a-92d2-a668f815e8fd\" (UID: \"92662de9-9784-432a-92d2-a668f815e8fd\") "
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.456171 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/92662de9-9784-432a-92d2-a668f815e8fd-host-var-lib-cni-networks-ovn-kubernetes\") pod \"92662de9-9784-432a-92d2-a668f815e8fd\" (UID: \"92662de9-9784-432a-92d2-a668f815e8fd\") "
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.456191 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/92662de9-9784-432a-92d2-a668f815e8fd-env-overrides\") pod \"92662de9-9784-432a-92d2-a668f815e8fd\" (UID: \"92662de9-9784-432a-92d2-a668f815e8fd\") "
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.456207 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/92662de9-9784-432a-92d2-a668f815e8fd-run-systemd\") pod \"92662de9-9784-432a-92d2-a668f815e8fd\" (UID: \"92662de9-9784-432a-92d2-a668f815e8fd\") "
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.456225 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/92662de9-9784-432a-92d2-a668f815e8fd-host-slash\") pod \"92662de9-9784-432a-92d2-a668f815e8fd\" (UID: \"92662de9-9784-432a-92d2-a668f815e8fd\") "
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.456254 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/92662de9-9784-432a-92d2-a668f815e8fd-host-cni-bin\") pod \"92662de9-9784-432a-92d2-a668f815e8fd\" (UID: \"92662de9-9784-432a-92d2-a668f815e8fd\") "
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.456272 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/92662de9-9784-432a-92d2-a668f815e8fd-host-kubelet\") pod \"92662de9-9784-432a-92d2-a668f815e8fd\" (UID: \"92662de9-9784-432a-92d2-a668f815e8fd\") "
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.456285 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/92662de9-9784-432a-92d2-a668f815e8fd-node-log\") pod \"92662de9-9784-432a-92d2-a668f815e8fd\" (UID: \"92662de9-9784-432a-92d2-a668f815e8fd\") "
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.456299 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/92662de9-9784-432a-92d2-a668f815e8fd-run-openvswitch\") pod \"92662de9-9784-432a-92d2-a668f815e8fd\" (UID: \"92662de9-9784-432a-92d2-a668f815e8fd\") "
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.456318 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/92662de9-9784-432a-92d2-a668f815e8fd-var-lib-openvswitch\") pod \"92662de9-9784-432a-92d2-a668f815e8fd\" (UID: \"92662de9-9784-432a-92d2-a668f815e8fd\") "
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.456341 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/92662de9-9784-432a-92d2-a668f815e8fd-ovnkube-script-lib\") pod \"92662de9-9784-432a-92d2-a668f815e8fd\" (UID: \"92662de9-9784-432a-92d2-a668f815e8fd\") "
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.456363 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/92662de9-9784-432a-92d2-a668f815e8fd-ovnkube-config\") pod \"92662de9-9784-432a-92d2-a668f815e8fd\" (UID: \"92662de9-9784-432a-92d2-a668f815e8fd\") "
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.456387 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/92662de9-9784-432a-92d2-a668f815e8fd-ovn-node-metrics-cert\") pod \"92662de9-9784-432a-92d2-a668f815e8fd\" (UID: \"92662de9-9784-432a-92d2-a668f815e8fd\") "
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.456901 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/92662de9-9784-432a-92d2-a668f815e8fd-node-log" (OuterVolumeSpecName: "node-log") pod "92662de9-9784-432a-92d2-a668f815e8fd" (UID: "92662de9-9784-432a-92d2-a668f815e8fd"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.457024 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/92662de9-9784-432a-92d2-a668f815e8fd-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "92662de9-9784-432a-92d2-a668f815e8fd" (UID: "92662de9-9784-432a-92d2-a668f815e8fd"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.457069 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/92662de9-9784-432a-92d2-a668f815e8fd-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "92662de9-9784-432a-92d2-a668f815e8fd" (UID: "92662de9-9784-432a-92d2-a668f815e8fd"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.457443 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/92662de9-9784-432a-92d2-a668f815e8fd-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "92662de9-9784-432a-92d2-a668f815e8fd" (UID: "92662de9-9784-432a-92d2-a668f815e8fd"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.457510 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/92662de9-9784-432a-92d2-a668f815e8fd-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "92662de9-9784-432a-92d2-a668f815e8fd" (UID: "92662de9-9784-432a-92d2-a668f815e8fd"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.457537 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/92662de9-9784-432a-92d2-a668f815e8fd-log-socket" (OuterVolumeSpecName: "log-socket") pod "92662de9-9784-432a-92d2-a668f815e8fd" (UID: "92662de9-9784-432a-92d2-a668f815e8fd"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.457564 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/92662de9-9784-432a-92d2-a668f815e8fd-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "92662de9-9784-432a-92d2-a668f815e8fd" (UID: "92662de9-9784-432a-92d2-a668f815e8fd"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.457594 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/92662de9-9784-432a-92d2-a668f815e8fd-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "92662de9-9784-432a-92d2-a668f815e8fd" (UID: "92662de9-9784-432a-92d2-a668f815e8fd"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.457601 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/92662de9-9784-432a-92d2-a668f815e8fd-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "92662de9-9784-432a-92d2-a668f815e8fd" (UID: "92662de9-9784-432a-92d2-a668f815e8fd"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.457626 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/92662de9-9784-432a-92d2-a668f815e8fd-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "92662de9-9784-432a-92d2-a668f815e8fd" (UID: "92662de9-9784-432a-92d2-a668f815e8fd"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.457652 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/92662de9-9784-432a-92d2-a668f815e8fd-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "92662de9-9784-432a-92d2-a668f815e8fd" (UID: "92662de9-9784-432a-92d2-a668f815e8fd"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.457665 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/92662de9-9784-432a-92d2-a668f815e8fd-host-slash" (OuterVolumeSpecName: "host-slash") pod "92662de9-9784-432a-92d2-a668f815e8fd" (UID: "92662de9-9784-432a-92d2-a668f815e8fd"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.457683 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/92662de9-9784-432a-92d2-a668f815e8fd-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "92662de9-9784-432a-92d2-a668f815e8fd" (UID: "92662de9-9784-432a-92d2-a668f815e8fd"). InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.457683 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/92662de9-9784-432a-92d2-a668f815e8fd-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "92662de9-9784-432a-92d2-a668f815e8fd" (UID: "92662de9-9784-432a-92d2-a668f815e8fd"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.457721 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/92662de9-9784-432a-92d2-a668f815e8fd-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "92662de9-9784-432a-92d2-a668f815e8fd" (UID: "92662de9-9784-432a-92d2-a668f815e8fd"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.458070 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/92662de9-9784-432a-92d2-a668f815e8fd-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "92662de9-9784-432a-92d2-a668f815e8fd" (UID: "92662de9-9784-432a-92d2-a668f815e8fd"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.458092 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/92662de9-9784-432a-92d2-a668f815e8fd-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "92662de9-9784-432a-92d2-a668f815e8fd" (UID: "92662de9-9784-432a-92d2-a668f815e8fd"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.464027 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/92662de9-9784-432a-92d2-a668f815e8fd-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "92662de9-9784-432a-92d2-a668f815e8fd" (UID: "92662de9-9784-432a-92d2-a668f815e8fd"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.464046 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/92662de9-9784-432a-92d2-a668f815e8fd-kube-api-access-kg6s6" (OuterVolumeSpecName: "kube-api-access-kg6s6") pod "92662de9-9784-432a-92d2-a668f815e8fd" (UID: "92662de9-9784-432a-92d2-a668f815e8fd"). InnerVolumeSpecName "kube-api-access-kg6s6". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.471951 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/92662de9-9784-432a-92d2-a668f815e8fd-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "92662de9-9784-432a-92d2-a668f815e8fd" (UID: "92662de9-9784-432a-92d2-a668f815e8fd"). InnerVolumeSpecName "run-systemd". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.558189 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/bd53b7b2-b8dd-4775-b9bc-d49856699640-host-cni-netd\") pod \"ovnkube-node-j4f8g\" (UID: \"bd53b7b2-b8dd-4775-b9bc-d49856699640\") " pod="openshift-ovn-kubernetes/ovnkube-node-j4f8g"
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.558231 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/bd53b7b2-b8dd-4775-b9bc-d49856699640-run-systemd\") pod \"ovnkube-node-j4f8g\" (UID: \"bd53b7b2-b8dd-4775-b9bc-d49856699640\") " pod="openshift-ovn-kubernetes/ovnkube-node-j4f8g"
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.558249 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/bd53b7b2-b8dd-4775-b9bc-d49856699640-host-cni-bin\") pod \"ovnkube-node-j4f8g\" (UID: \"bd53b7b2-b8dd-4775-b9bc-d49856699640\") " pod="openshift-ovn-kubernetes/ovnkube-node-j4f8g"
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.558266 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/bd53b7b2-b8dd-4775-b9bc-d49856699640-ovnkube-script-lib\") pod \"ovnkube-node-j4f8g\" (UID: \"bd53b7b2-b8dd-4775-b9bc-d49856699640\") " pod="openshift-ovn-kubernetes/ovnkube-node-j4f8g"
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.558309 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/bd53b7b2-b8dd-4775-b9bc-d49856699640-ovnkube-config\") pod \"ovnkube-node-j4f8g\" (UID: \"bd53b7b2-b8dd-4775-b9bc-d49856699640\") " pod="openshift-ovn-kubernetes/ovnkube-node-j4f8g"
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.558441 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/bd53b7b2-b8dd-4775-b9bc-d49856699640-env-overrides\") pod \"ovnkube-node-j4f8g\" (UID: \"bd53b7b2-b8dd-4775-b9bc-d49856699640\") " pod="openshift-ovn-kubernetes/ovnkube-node-j4f8g"
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.558509 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/bd53b7b2-b8dd-4775-b9bc-d49856699640-var-lib-openvswitch\") pod \"ovnkube-node-j4f8g\" (UID: \"bd53b7b2-b8dd-4775-b9bc-d49856699640\") " pod="openshift-ovn-kubernetes/ovnkube-node-j4f8g"
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.558554 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/bd53b7b2-b8dd-4775-b9bc-d49856699640-systemd-units\") pod \"ovnkube-node-j4f8g\" (UID: \"bd53b7b2-b8dd-4775-b9bc-d49856699640\") " pod="openshift-ovn-kubernetes/ovnkube-node-j4f8g"
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.558582 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kmrp6\" (UniqueName: \"kubernetes.io/projected/bd53b7b2-b8dd-4775-b9bc-d49856699640-kube-api-access-kmrp6\") pod \"ovnkube-node-j4f8g\" (UID: \"bd53b7b2-b8dd-4775-b9bc-d49856699640\") " pod="openshift-ovn-kubernetes/ovnkube-node-j4f8g"
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.558600 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/bd53b7b2-b8dd-4775-b9bc-d49856699640-run-ovn\") pod \"ovnkube-node-j4f8g\" (UID: \"bd53b7b2-b8dd-4775-b9bc-d49856699640\") " pod="openshift-ovn-kubernetes/ovnkube-node-j4f8g"
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.558615 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/bd53b7b2-b8dd-4775-b9bc-d49856699640-node-log\") pod \"ovnkube-node-j4f8g\" (UID: \"bd53b7b2-b8dd-4775-b9bc-d49856699640\") " pod="openshift-ovn-kubernetes/ovnkube-node-j4f8g"
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.558676 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/bd53b7b2-b8dd-4775-b9bc-d49856699640-host-kubelet\") pod \"ovnkube-node-j4f8g\" (UID: \"bd53b7b2-b8dd-4775-b9bc-d49856699640\") " pod="openshift-ovn-kubernetes/ovnkube-node-j4f8g"
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.558741 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/bd53b7b2-b8dd-4775-b9bc-d49856699640-etc-openvswitch\") pod \"ovnkube-node-j4f8g\" (UID: \"bd53b7b2-b8dd-4775-b9bc-d49856699640\") " pod="openshift-ovn-kubernetes/ovnkube-node-j4f8g"
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.558763 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/bd53b7b2-b8dd-4775-b9bc-d49856699640-run-openvswitch\") pod \"ovnkube-node-j4f8g\" (UID: \"bd53b7b2-b8dd-4775-b9bc-d49856699640\") " pod="openshift-ovn-kubernetes/ovnkube-node-j4f8g"
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.558780 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/bd53b7b2-b8dd-4775-b9bc-d49856699640-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-j4f8g\" (UID: \"bd53b7b2-b8dd-4775-b9bc-d49856699640\") " pod="openshift-ovn-kubernetes/ovnkube-node-j4f8g"
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.558801 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/bd53b7b2-b8dd-4775-b9bc-d49856699640-ovn-node-metrics-cert\") pod \"ovnkube-node-j4f8g\" (UID: \"bd53b7b2-b8dd-4775-b9bc-d49856699640\") " pod="openshift-ovn-kubernetes/ovnkube-node-j4f8g"
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.558826 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/bd53b7b2-b8dd-4775-b9bc-d49856699640-host-run-ovn-kubernetes\") pod \"ovnkube-node-j4f8g\" (UID: \"bd53b7b2-b8dd-4775-b9bc-d49856699640\") " pod="openshift-ovn-kubernetes/ovnkube-node-j4f8g"
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.558841 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/bd53b7b2-b8dd-4775-b9bc-d49856699640-host-slash\") pod \"ovnkube-node-j4f8g\" (UID: \"bd53b7b2-b8dd-4775-b9bc-d49856699640\") " pod="openshift-ovn-kubernetes/ovnkube-node-j4f8g"
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.558857 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/bd53b7b2-b8dd-4775-b9bc-d49856699640-log-socket\") pod \"ovnkube-node-j4f8g\" (UID: \"bd53b7b2-b8dd-4775-b9bc-d49856699640\") " pod="openshift-ovn-kubernetes/ovnkube-node-j4f8g"
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.558881 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/bd53b7b2-b8dd-4775-b9bc-d49856699640-host-run-netns\") pod \"ovnkube-node-j4f8g\" (UID: \"bd53b7b2-b8dd-4775-b9bc-d49856699640\") " pod="openshift-ovn-kubernetes/ovnkube-node-j4f8g"
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.558919 4762 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/92662de9-9784-432a-92d2-a668f815e8fd-ovnkube-script-lib\") on node \"crc\" DevicePath \"\""
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.558929 4762 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/92662de9-9784-432a-92d2-a668f815e8fd-ovnkube-config\") on node \"crc\" DevicePath \"\""
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.558938 4762 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/92662de9-9784-432a-92d2-a668f815e8fd-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\""
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.558947 4762 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/92662de9-9784-432a-92d2-a668f815e8fd-host-cni-netd\") on node \"crc\" DevicePath \"\""
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.558957 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kg6s6\" (UniqueName: \"kubernetes.io/projected/92662de9-9784-432a-92d2-a668f815e8fd-kube-api-access-kg6s6\") on node \"crc\" DevicePath \"\""
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.558967 4762 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/92662de9-9784-432a-92d2-a668f815e8fd-host-run-netns\") on node \"crc\" DevicePath \"\""
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.558975 4762 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/92662de9-9784-432a-92d2-a668f815e8fd-systemd-units\") on node \"crc\" DevicePath \"\""
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.558983 4762 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/92662de9-9784-432a-92d2-a668f815e8fd-run-ovn\") on node \"crc\" DevicePath \"\""
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.558992 4762 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/92662de9-9784-432a-92d2-a668f815e8fd-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\""
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.559001 4762 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/92662de9-9784-432a-92d2-a668f815e8fd-log-socket\") on node \"crc\" DevicePath \"\""
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.559009 4762 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/92662de9-9784-432a-92d2-a668f815e8fd-etc-openvswitch\") on node \"crc\" DevicePath \"\""
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.559018 4762 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/92662de9-9784-432a-92d2-a668f815e8fd-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\""
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.559025 4762 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/92662de9-9784-432a-92d2-a668f815e8fd-env-overrides\") on node \"crc\" DevicePath \"\""
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.559035 4762 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/92662de9-9784-432a-92d2-a668f815e8fd-run-systemd\") on node \"crc\" DevicePath \"\""
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.559043 4762 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/92662de9-9784-432a-92d2-a668f815e8fd-host-slash\") on node \"crc\" DevicePath \"\""
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.559051 4762 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/92662de9-9784-432a-92d2-a668f815e8fd-host-cni-bin\") on node \"crc\" DevicePath \"\""
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.559058 4762 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/92662de9-9784-432a-92d2-a668f815e8fd-host-kubelet\") on node \"crc\" DevicePath \"\""
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.559066 4762 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/92662de9-9784-432a-92d2-a668f815e8fd-node-log\") on node \"crc\" DevicePath \"\""
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.559073 4762 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/92662de9-9784-432a-92d2-a668f815e8fd-run-openvswitch\") on node \"crc\" DevicePath \"\""
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.559081 4762 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/92662de9-9784-432a-92d2-a668f815e8fd-var-lib-openvswitch\") on node \"crc\" DevicePath \"\""
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.659921 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/bd53b7b2-b8dd-4775-b9bc-d49856699640-ovnkube-config\") pod \"ovnkube-node-j4f8g\" (UID: \"bd53b7b2-b8dd-4775-b9bc-d49856699640\") " pod="openshift-ovn-kubernetes/ovnkube-node-j4f8g"
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.659997 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/bd53b7b2-b8dd-4775-b9bc-d49856699640-env-overrides\") pod \"ovnkube-node-j4f8g\" (UID: \"bd53b7b2-b8dd-4775-b9bc-d49856699640\") " pod="openshift-ovn-kubernetes/ovnkube-node-j4f8g"
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.660040 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/bd53b7b2-b8dd-4775-b9bc-d49856699640-systemd-units\") pod \"ovnkube-node-j4f8g\" (UID: \"bd53b7b2-b8dd-4775-b9bc-d49856699640\") " pod="openshift-ovn-kubernetes/ovnkube-node-j4f8g"
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.660071 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/bd53b7b2-b8dd-4775-b9bc-d49856699640-var-lib-openvswitch\") pod \"ovnkube-node-j4f8g\" (UID: \"bd53b7b2-b8dd-4775-b9bc-d49856699640\") " pod="openshift-ovn-kubernetes/ovnkube-node-j4f8g"
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.660106 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kmrp6\" (UniqueName: \"kubernetes.io/projected/bd53b7b2-b8dd-4775-b9bc-d49856699640-kube-api-access-kmrp6\") pod \"ovnkube-node-j4f8g\" (UID: \"bd53b7b2-b8dd-4775-b9bc-d49856699640\") " pod="openshift-ovn-kubernetes/ovnkube-node-j4f8g"
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.660142 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/bd53b7b2-b8dd-4775-b9bc-d49856699640-run-ovn\") pod \"ovnkube-node-j4f8g\" (UID: \"bd53b7b2-b8dd-4775-b9bc-d49856699640\") " pod="openshift-ovn-kubernetes/ovnkube-node-j4f8g"
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.660172 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/bd53b7b2-b8dd-4775-b9bc-d49856699640-node-log\") pod \"ovnkube-node-j4f8g\" (UID: \"bd53b7b2-b8dd-4775-b9bc-d49856699640\") " pod="openshift-ovn-kubernetes/ovnkube-node-j4f8g"
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.660215 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/bd53b7b2-b8dd-4775-b9bc-d49856699640-host-kubelet\") pod \"ovnkube-node-j4f8g\" (UID: \"bd53b7b2-b8dd-4775-b9bc-d49856699640\") " pod="openshift-ovn-kubernetes/ovnkube-node-j4f8g"
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.660251 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/bd53b7b2-b8dd-4775-b9bc-d49856699640-etc-openvswitch\") pod \"ovnkube-node-j4f8g\" (UID: \"bd53b7b2-b8dd-4775-b9bc-d49856699640\") " pod="openshift-ovn-kubernetes/ovnkube-node-j4f8g"
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.660282 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/bd53b7b2-b8dd-4775-b9bc-d49856699640-run-openvswitch\") pod \"ovnkube-node-j4f8g\" (UID: \"bd53b7b2-b8dd-4775-b9bc-d49856699640\") " pod="openshift-ovn-kubernetes/ovnkube-node-j4f8g"
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.660314 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/bd53b7b2-b8dd-4775-b9bc-d49856699640-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-j4f8g\" (UID: \"bd53b7b2-b8dd-4775-b9bc-d49856699640\") " pod="openshift-ovn-kubernetes/ovnkube-node-j4f8g"
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.660357 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/bd53b7b2-b8dd-4775-b9bc-d49856699640-ovn-node-metrics-cert\") pod \"ovnkube-node-j4f8g\" (UID: \"bd53b7b2-b8dd-4775-b9bc-d49856699640\") " pod="openshift-ovn-kubernetes/ovnkube-node-j4f8g"
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.660405 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/bd53b7b2-b8dd-4775-b9bc-d49856699640-host-slash\") pod \"ovnkube-node-j4f8g\" (UID: \"bd53b7b2-b8dd-4775-b9bc-d49856699640\") " pod="openshift-ovn-kubernetes/ovnkube-node-j4f8g"
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.660432 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/bd53b7b2-b8dd-4775-b9bc-d49856699640-log-socket\") pod \"ovnkube-node-j4f8g\" (UID: \"bd53b7b2-b8dd-4775-b9bc-d49856699640\") " pod="openshift-ovn-kubernetes/ovnkube-node-j4f8g"
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.660459 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/bd53b7b2-b8dd-4775-b9bc-d49856699640-host-run-ovn-kubernetes\") pod \"ovnkube-node-j4f8g\" (UID: \"bd53b7b2-b8dd-4775-b9bc-d49856699640\") " pod="openshift-ovn-kubernetes/ovnkube-node-j4f8g"
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.660491 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/bd53b7b2-b8dd-4775-b9bc-d49856699640-host-run-netns\") pod \"ovnkube-node-j4f8g\" (UID: \"bd53b7b2-b8dd-4775-b9bc-d49856699640\") " pod="openshift-ovn-kubernetes/ovnkube-node-j4f8g"
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.660522 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/bd53b7b2-b8dd-4775-b9bc-d49856699640-host-cni-netd\") pod \"ovnkube-node-j4f8g\" (UID: \"bd53b7b2-b8dd-4775-b9bc-d49856699640\") " pod="openshift-ovn-kubernetes/ovnkube-node-j4f8g"
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.660553 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/bd53b7b2-b8dd-4775-b9bc-d49856699640-run-systemd\") pod \"ovnkube-node-j4f8g\" (UID: \"bd53b7b2-b8dd-4775-b9bc-d49856699640\") " pod="openshift-ovn-kubernetes/ovnkube-node-j4f8g"
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.660583 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/bd53b7b2-b8dd-4775-b9bc-d49856699640-host-cni-bin\") pod \"ovnkube-node-j4f8g\" (UID: \"bd53b7b2-b8dd-4775-b9bc-d49856699640\") " pod="openshift-ovn-kubernetes/ovnkube-node-j4f8g"
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.660614 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/bd53b7b2-b8dd-4775-b9bc-d49856699640-ovnkube-script-lib\") pod \"ovnkube-node-j4f8g\" (UID: \"bd53b7b2-b8dd-4775-b9bc-d49856699640\") " pod="openshift-ovn-kubernetes/ovnkube-node-j4f8g"
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.660772 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/bd53b7b2-b8dd-4775-b9bc-d49856699640-run-ovn\") pod \"ovnkube-node-j4f8g\" (UID: \"bd53b7b2-b8dd-4775-b9bc-d49856699640\") " pod="openshift-ovn-kubernetes/ovnkube-node-j4f8g"
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.660821 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/bd53b7b2-b8dd-4775-b9bc-d49856699640-host-cni-netd\") pod \"ovnkube-node-j4f8g\" (UID: \"bd53b7b2-b8dd-4775-b9bc-d49856699640\") " pod="openshift-ovn-kubernetes/ovnkube-node-j4f8g"
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.660864 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/bd53b7b2-b8dd-4775-b9bc-d49856699640-systemd-units\") pod \"ovnkube-node-j4f8g\" (UID: \"bd53b7b2-b8dd-4775-b9bc-d49856699640\") " pod="openshift-ovn-kubernetes/ovnkube-node-j4f8g"
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.660829 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/bd53b7b2-b8dd-4775-b9bc-d49856699640-var-lib-openvswitch\") pod \"ovnkube-node-j4f8g\" (UID: \"bd53b7b2-b8dd-4775-b9bc-d49856699640\") " pod="openshift-ovn-kubernetes/ovnkube-node-j4f8g"
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.660906 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/bd53b7b2-b8dd-4775-b9bc-d49856699640-host-kubelet\") pod \"ovnkube-node-j4f8g\" (UID: \"bd53b7b2-b8dd-4775-b9bc-d49856699640\") " pod="openshift-ovn-kubernetes/ovnkube-node-j4f8g"
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.660925 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/bd53b7b2-b8dd-4775-b9bc-d49856699640-host-run-netns\") pod \"ovnkube-node-j4f8g\" (UID: \"bd53b7b2-b8dd-4775-b9bc-d49856699640\") " pod="openshift-ovn-kubernetes/ovnkube-node-j4f8g"
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.660956 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/bd53b7b2-b8dd-4775-b9bc-d49856699640-run-systemd\") pod \"ovnkube-node-j4f8g\" (UID: \"bd53b7b2-b8dd-4775-b9bc-d49856699640\") " pod="openshift-ovn-kubernetes/ovnkube-node-j4f8g"
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.660992 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/bd53b7b2-b8dd-4775-b9bc-d49856699640-node-log\") pod \"ovnkube-node-j4f8g\" (UID: \"bd53b7b2-b8dd-4775-b9bc-d49856699640\") " pod="openshift-ovn-kubernetes/ovnkube-node-j4f8g"
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.661034 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/bd53b7b2-b8dd-4775-b9bc-d49856699640-host-cni-bin\") pod \"ovnkube-node-j4f8g\" (UID: \"bd53b7b2-b8dd-4775-b9bc-d49856699640\") " pod="openshift-ovn-kubernetes/ovnkube-node-j4f8g"
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.661058 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/bd53b7b2-b8dd-4775-b9bc-d49856699640-host-slash\") pod \"ovnkube-node-j4f8g\" (UID: \"bd53b7b2-b8dd-4775-b9bc-d49856699640\") " pod="openshift-ovn-kubernetes/ovnkube-node-j4f8g"
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.661058 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/bd53b7b2-b8dd-4775-b9bc-d49856699640-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-j4f8g\" (UID: \"bd53b7b2-b8dd-4775-b9bc-d49856699640\") " pod="openshift-ovn-kubernetes/ovnkube-node-j4f8g"
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.661061 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/bd53b7b2-b8dd-4775-b9bc-d49856699640-etc-openvswitch\") pod \"ovnkube-node-j4f8g\" (UID: \"bd53b7b2-b8dd-4775-b9bc-d49856699640\") " pod="openshift-ovn-kubernetes/ovnkube-node-j4f8g"
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.661075 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/bd53b7b2-b8dd-4775-b9bc-d49856699640-log-socket\") pod \"ovnkube-node-j4f8g\" (UID: \"bd53b7b2-b8dd-4775-b9bc-d49856699640\") " pod="openshift-ovn-kubernetes/ovnkube-node-j4f8g"
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.661086 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/bd53b7b2-b8dd-4775-b9bc-d49856699640-host-run-ovn-kubernetes\") pod \"ovnkube-node-j4f8g\" (UID: \"bd53b7b2-b8dd-4775-b9bc-d49856699640\") " pod="openshift-ovn-kubernetes/ovnkube-node-j4f8g"
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.661179 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/bd53b7b2-b8dd-4775-b9bc-d49856699640-env-overrides\") pod \"ovnkube-node-j4f8g\" (UID: \"bd53b7b2-b8dd-4775-b9bc-d49856699640\") " pod="openshift-ovn-kubernetes/ovnkube-node-j4f8g"
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.661479 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/bd53b7b2-b8dd-4775-b9bc-d49856699640-ovnkube-config\") pod \"ovnkube-node-j4f8g\" (UID: \"bd53b7b2-b8dd-4775-b9bc-d49856699640\") " pod="openshift-ovn-kubernetes/ovnkube-node-j4f8g"
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.661610 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/bd53b7b2-b8dd-4775-b9bc-d49856699640-run-openvswitch\") pod \"ovnkube-node-j4f8g\" (UID: \"bd53b7b2-b8dd-4775-b9bc-d49856699640\") " pod="openshift-ovn-kubernetes/ovnkube-node-j4f8g"
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.661851 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/bd53b7b2-b8dd-4775-b9bc-d49856699640-ovnkube-script-lib\") pod \"ovnkube-node-j4f8g\" (UID: \"bd53b7b2-b8dd-4775-b9bc-d49856699640\") " pod="openshift-ovn-kubernetes/ovnkube-node-j4f8g"
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.665449 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/bd53b7b2-b8dd-4775-b9bc-d49856699640-ovn-node-metrics-cert\") pod \"ovnkube-node-j4f8g\" (UID: \"bd53b7b2-b8dd-4775-b9bc-d49856699640\") " pod="openshift-ovn-kubernetes/ovnkube-node-j4f8g"
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.692305 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kmrp6\" (UniqueName: \"kubernetes.io/projected/bd53b7b2-b8dd-4775-b9bc-d49856699640-kube-api-access-kmrp6\") pod \"ovnkube-node-j4f8g\" (UID: \"bd53b7b2-b8dd-4775-b9bc-d49856699640\") " pod="openshift-ovn-kubernetes/ovnkube-node-j4f8g"
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.765236 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-j4f8g"
Oct 09 13:38:22 crc kubenswrapper[4762]: W1009 13:38:22.783414 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbd53b7b2_b8dd_4775_b9bc_d49856699640.slice/crio-53eefc4cc5d5ccd2d52ce19cf2599512499ae3e5bf88d800bc401974947484b9 WatchSource:0}: Error finding container 53eefc4cc5d5ccd2d52ce19cf2599512499ae3e5bf88d800bc401974947484b9: Status 404 returned error can't find the container with id 53eefc4cc5d5ccd2d52ce19cf2599512499ae3e5bf88d800bc401974947484b9
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.925050 4762 generic.go:334] "Generic (PLEG): container finished" podID="bd53b7b2-b8dd-4775-b9bc-d49856699640" containerID="2fb15884bbb1a92028303da9f9b93c8913ec7e647e49012d1356c2f8e6e1165a" exitCode=0
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.925128 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-j4f8g" event={"ID":"bd53b7b2-b8dd-4775-b9bc-d49856699640","Type":"ContainerDied","Data":"2fb15884bbb1a92028303da9f9b93c8913ec7e647e49012d1356c2f8e6e1165a"}
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.925418 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-j4f8g" event={"ID":"bd53b7b2-b8dd-4775-b9bc-d49856699640","Type":"ContainerStarted","Data":"53eefc4cc5d5ccd2d52ce19cf2599512499ae3e5bf88d800bc401974947484b9"}
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.930322 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-jl67g_92662de9-9784-432a-92d2-a668f815e8fd/ovnkube-controller/3.log"
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.933714 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-jl67g_92662de9-9784-432a-92d2-a668f815e8fd/ovn-acl-logging/0.log"
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.934189 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-jl67g_92662de9-9784-432a-92d2-a668f815e8fd/ovn-controller/0.log"
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.934676 4762 generic.go:334] "Generic (PLEG): container finished" podID="92662de9-9784-432a-92d2-a668f815e8fd" containerID="8cf0a81a9603a5e2e4e71e6b43b705cc16135ef584be6e1235de4285f9d14d36" exitCode=0
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.934706 4762 generic.go:334] "Generic (PLEG): container finished" podID="92662de9-9784-432a-92d2-a668f815e8fd" containerID="8f5be977653547c33b4d2d5184688120b32866045e3b18a08be1c7c406d6b498" exitCode=0
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.934714 4762 generic.go:334] "Generic (PLEG): container finished" podID="92662de9-9784-432a-92d2-a668f815e8fd" containerID="1a6b9ddcf6f9632e0ab1ac7f145c90d4c1e404b44f6e4fdc547fa42a4736448d" exitCode=0
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.934723 4762 generic.go:334] "Generic (PLEG): container finished" podID="92662de9-9784-432a-92d2-a668f815e8fd" containerID="e1fe15b6fa2a4089c0ef0b19180a44b570bf28aeb719e8fb5c960c16f3bc3ee5" exitCode=0
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.934730 4762 generic.go:334] "Generic (PLEG): container finished" podID="92662de9-9784-432a-92d2-a668f815e8fd" containerID="9dbd3d536f2ff0e46947e1516b3b9def208d490f5e62bbde5bebf37690d26ac0" exitCode=0
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.934740 4762 generic.go:334] "Generic (PLEG): container finished" podID="92662de9-9784-432a-92d2-a668f815e8fd" containerID="5c409df5880861cef6885822a19dc9bbe481342a849c18ac11c85a60fcee0f15" exitCode=0
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.934747 4762 generic.go:334] "Generic (PLEG): container finished" podID="92662de9-9784-432a-92d2-a668f815e8fd" containerID="443d1d116c58f5d8b2c5fc9051baf914244cb0776b1f912d11fe4316a0ec0567" exitCode=143
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.934757 4762 generic.go:334] "Generic (PLEG): container finished" podID="92662de9-9784-432a-92d2-a668f815e8fd" containerID="4a6b026d474235bbb7b31530b4628a10c35b22baf4ee49759f28a9beb8177989" exitCode=143
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.934749 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" event={"ID":"92662de9-9784-432a-92d2-a668f815e8fd","Type":"ContainerDied","Data":"8cf0a81a9603a5e2e4e71e6b43b705cc16135ef584be6e1235de4285f9d14d36"}
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.934792 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-jl67g"
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.934815 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" event={"ID":"92662de9-9784-432a-92d2-a668f815e8fd","Type":"ContainerDied","Data":"8f5be977653547c33b4d2d5184688120b32866045e3b18a08be1c7c406d6b498"}
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.934831 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" event={"ID":"92662de9-9784-432a-92d2-a668f815e8fd","Type":"ContainerDied","Data":"1a6b9ddcf6f9632e0ab1ac7f145c90d4c1e404b44f6e4fdc547fa42a4736448d"}
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.934843 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" event={"ID":"92662de9-9784-432a-92d2-a668f815e8fd","Type":"ContainerDied","Data":"e1fe15b6fa2a4089c0ef0b19180a44b570bf28aeb719e8fb5c960c16f3bc3ee5"}
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.934854 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" event={"ID":"92662de9-9784-432a-92d2-a668f815e8fd","Type":"ContainerDied","Data":"9dbd3d536f2ff0e46947e1516b3b9def208d490f5e62bbde5bebf37690d26ac0"}
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.934867 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" event={"ID":"92662de9-9784-432a-92d2-a668f815e8fd","Type":"ContainerDied","Data":"5c409df5880861cef6885822a19dc9bbe481342a849c18ac11c85a60fcee0f15"}
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.934880 4762 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"25146c86fc625c912a6b7aab957ed31039aeede7d329cce8f28f0dd01f5ff39c"}
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.934892 4762 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8f5be977653547c33b4d2d5184688120b32866045e3b18a08be1c7c406d6b498"}
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.934898 4762 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"1a6b9ddcf6f9632e0ab1ac7f145c90d4c1e404b44f6e4fdc547fa42a4736448d"}
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.934905 4762 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e1fe15b6fa2a4089c0ef0b19180a44b570bf28aeb719e8fb5c960c16f3bc3ee5"}
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.934913 4762 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9dbd3d536f2ff0e46947e1516b3b9def208d490f5e62bbde5bebf37690d26ac0"}
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.934919 4762 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5c409df5880861cef6885822a19dc9bbe481342a849c18ac11c85a60fcee0f15"}
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.934926 4762 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"443d1d116c58f5d8b2c5fc9051baf914244cb0776b1f912d11fe4316a0ec0567"}
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.934933 4762 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4a6b026d474235bbb7b31530b4628a10c35b22baf4ee49759f28a9beb8177989"}
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.934890 4762 scope.go:117] "RemoveContainer" containerID="8cf0a81a9603a5e2e4e71e6b43b705cc16135ef584be6e1235de4285f9d14d36"
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.934940 4762 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"bd03e6b064630a1cd71d2e88a4e99d513b30d0f225516ce8030cba879fe2422e"}
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.935038 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" event={"ID":"92662de9-9784-432a-92d2-a668f815e8fd","Type":"ContainerDied","Data":"443d1d116c58f5d8b2c5fc9051baf914244cb0776b1f912d11fe4316a0ec0567"}
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.935069 4762 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8cf0a81a9603a5e2e4e71e6b43b705cc16135ef584be6e1235de4285f9d14d36"}
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.935078 4762 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"25146c86fc625c912a6b7aab957ed31039aeede7d329cce8f28f0dd01f5ff39c"}
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.935083 4762 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8f5be977653547c33b4d2d5184688120b32866045e3b18a08be1c7c406d6b498"}
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.935089 4762 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"1a6b9ddcf6f9632e0ab1ac7f145c90d4c1e404b44f6e4fdc547fa42a4736448d"}
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.935093 4762 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e1fe15b6fa2a4089c0ef0b19180a44b570bf28aeb719e8fb5c960c16f3bc3ee5"}
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.935098 4762 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9dbd3d536f2ff0e46947e1516b3b9def208d490f5e62bbde5bebf37690d26ac0"}
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.935103 4762 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5c409df5880861cef6885822a19dc9bbe481342a849c18ac11c85a60fcee0f15"}
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.935110 4762 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"443d1d116c58f5d8b2c5fc9051baf914244cb0776b1f912d11fe4316a0ec0567"}
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.935115 4762 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4a6b026d474235bbb7b31530b4628a10c35b22baf4ee49759f28a9beb8177989"}
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.935120 4762 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"bd03e6b064630a1cd71d2e88a4e99d513b30d0f225516ce8030cba879fe2422e"}
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.935127 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" event={"ID":"92662de9-9784-432a-92d2-a668f815e8fd","Type":"ContainerDied","Data":"4a6b026d474235bbb7b31530b4628a10c35b22baf4ee49759f28a9beb8177989"}
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.935136 4762 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8cf0a81a9603a5e2e4e71e6b43b705cc16135ef584be6e1235de4285f9d14d36"}
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.935142 4762 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"25146c86fc625c912a6b7aab957ed31039aeede7d329cce8f28f0dd01f5ff39c"}
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.935147 4762 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8f5be977653547c33b4d2d5184688120b32866045e3b18a08be1c7c406d6b498"}
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.935152 4762 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"1a6b9ddcf6f9632e0ab1ac7f145c90d4c1e404b44f6e4fdc547fa42a4736448d"}
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.935158 4762 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e1fe15b6fa2a4089c0ef0b19180a44b570bf28aeb719e8fb5c960c16f3bc3ee5"}
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.935162 4762 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9dbd3d536f2ff0e46947e1516b3b9def208d490f5e62bbde5bebf37690d26ac0"}
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.935168 4762 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5c409df5880861cef6885822a19dc9bbe481342a849c18ac11c85a60fcee0f15"}
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.935174 4762 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"443d1d116c58f5d8b2c5fc9051baf914244cb0776b1f912d11fe4316a0ec0567"}
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.935179 4762 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4a6b026d474235bbb7b31530b4628a10c35b22baf4ee49759f28a9beb8177989"}
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.935184 4762 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"bd03e6b064630a1cd71d2e88a4e99d513b30d0f225516ce8030cba879fe2422e"}
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.935193 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jl67g" event={"ID":"92662de9-9784-432a-92d2-a668f815e8fd","Type":"ContainerDied","Data":"f408a16ef88e2fa16ef441be0512eb3c8832da8b38734f105b06f7f0682d5668"}
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.935202 4762 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8cf0a81a9603a5e2e4e71e6b43b705cc16135ef584be6e1235de4285f9d14d36"}
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.935209 4762 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"25146c86fc625c912a6b7aab957ed31039aeede7d329cce8f28f0dd01f5ff39c"}
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.935216 4762 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8f5be977653547c33b4d2d5184688120b32866045e3b18a08be1c7c406d6b498"}
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.935223 4762 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"1a6b9ddcf6f9632e0ab1ac7f145c90d4c1e404b44f6e4fdc547fa42a4736448d"}
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.935229 4762 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e1fe15b6fa2a4089c0ef0b19180a44b570bf28aeb719e8fb5c960c16f3bc3ee5"}
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.935236 4762 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9dbd3d536f2ff0e46947e1516b3b9def208d490f5e62bbde5bebf37690d26ac0"}
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.935242 4762 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5c409df5880861cef6885822a19dc9bbe481342a849c18ac11c85a60fcee0f15"}
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.935249 4762 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"443d1d116c58f5d8b2c5fc9051baf914244cb0776b1f912d11fe4316a0ec0567"}
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.935257 4762 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4a6b026d474235bbb7b31530b4628a10c35b22baf4ee49759f28a9beb8177989"}
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.935264 4762 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"bd03e6b064630a1cd71d2e88a4e99d513b30d0f225516ce8030cba879fe2422e"}
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.938326 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-9wtqb_c847aae6-277a-45dc-86d0-9b175f7e8177/kube-multus/2.log"
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.938948 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-9wtqb_c847aae6-277a-45dc-86d0-9b175f7e8177/kube-multus/1.log"
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.938997 4762 generic.go:334] "Generic (PLEG): container finished" podID="c847aae6-277a-45dc-86d0-9b175f7e8177" containerID="c4e90ed0cd948905cef0fd260cd724a92fc07f4002a7151e2fb955ec9ae0bb6f" exitCode=2
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.939026 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-9wtqb" event={"ID":"c847aae6-277a-45dc-86d0-9b175f7e8177","Type":"ContainerDied","Data":"c4e90ed0cd948905cef0fd260cd724a92fc07f4002a7151e2fb955ec9ae0bb6f"}
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.939042 4762 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0f95093a278093400b0692e3f8cbe050c5586fbcf1ad29c7b758613ab6ac76af"}
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.939500 4762 scope.go:117] "RemoveContainer" containerID="c4e90ed0cd948905cef0fd260cd724a92fc07f4002a7151e2fb955ec9ae0bb6f"
Oct 09 13:38:22 crc kubenswrapper[4762]: I1009 13:38:22.975211 4762 scope.go:117] "RemoveContainer" containerID="25146c86fc625c912a6b7aab957ed31039aeede7d329cce8f28f0dd01f5ff39c"
Oct 09 13:38:23 crc kubenswrapper[4762]: I1009 13:38:23.011791 4762
kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-jl67g"] Oct 09 13:38:23 crc kubenswrapper[4762]: I1009 13:38:23.016885 4762 scope.go:117] "RemoveContainer" containerID="8f5be977653547c33b4d2d5184688120b32866045e3b18a08be1c7c406d6b498" Oct 09 13:38:23 crc kubenswrapper[4762]: I1009 13:38:23.021666 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-jl67g"] Oct 09 13:38:23 crc kubenswrapper[4762]: I1009 13:38:23.033921 4762 scope.go:117] "RemoveContainer" containerID="1a6b9ddcf6f9632e0ab1ac7f145c90d4c1e404b44f6e4fdc547fa42a4736448d" Oct 09 13:38:23 crc kubenswrapper[4762]: I1009 13:38:23.054806 4762 scope.go:117] "RemoveContainer" containerID="e1fe15b6fa2a4089c0ef0b19180a44b570bf28aeb719e8fb5c960c16f3bc3ee5" Oct 09 13:38:23 crc kubenswrapper[4762]: I1009 13:38:23.066760 4762 scope.go:117] "RemoveContainer" containerID="9dbd3d536f2ff0e46947e1516b3b9def208d490f5e62bbde5bebf37690d26ac0" Oct 09 13:38:23 crc kubenswrapper[4762]: I1009 13:38:23.090988 4762 scope.go:117] "RemoveContainer" containerID="5c409df5880861cef6885822a19dc9bbe481342a849c18ac11c85a60fcee0f15" Oct 09 13:38:23 crc kubenswrapper[4762]: I1009 13:38:23.109734 4762 scope.go:117] "RemoveContainer" containerID="443d1d116c58f5d8b2c5fc9051baf914244cb0776b1f912d11fe4316a0ec0567" Oct 09 13:38:23 crc kubenswrapper[4762]: I1009 13:38:23.130411 4762 scope.go:117] "RemoveContainer" containerID="4a6b026d474235bbb7b31530b4628a10c35b22baf4ee49759f28a9beb8177989" Oct 09 13:38:23 crc kubenswrapper[4762]: I1009 13:38:23.145523 4762 scope.go:117] "RemoveContainer" containerID="bd03e6b064630a1cd71d2e88a4e99d513b30d0f225516ce8030cba879fe2422e" Oct 09 13:38:23 crc kubenswrapper[4762]: I1009 13:38:23.176687 4762 scope.go:117] "RemoveContainer" containerID="8cf0a81a9603a5e2e4e71e6b43b705cc16135ef584be6e1235de4285f9d14d36" Oct 09 13:38:23 crc kubenswrapper[4762]: E1009 13:38:23.177183 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8cf0a81a9603a5e2e4e71e6b43b705cc16135ef584be6e1235de4285f9d14d36\": container with ID starting with 8cf0a81a9603a5e2e4e71e6b43b705cc16135ef584be6e1235de4285f9d14d36 not found: ID does not exist" containerID="8cf0a81a9603a5e2e4e71e6b43b705cc16135ef584be6e1235de4285f9d14d36" Oct 09 13:38:23 crc kubenswrapper[4762]: I1009 13:38:23.177226 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8cf0a81a9603a5e2e4e71e6b43b705cc16135ef584be6e1235de4285f9d14d36"} err="failed to get container status \"8cf0a81a9603a5e2e4e71e6b43b705cc16135ef584be6e1235de4285f9d14d36\": rpc error: code = NotFound desc = could not find container \"8cf0a81a9603a5e2e4e71e6b43b705cc16135ef584be6e1235de4285f9d14d36\": container with ID starting with 8cf0a81a9603a5e2e4e71e6b43b705cc16135ef584be6e1235de4285f9d14d36 not found: ID does not exist" Oct 09 13:38:23 crc kubenswrapper[4762]: I1009 13:38:23.177251 4762 scope.go:117] "RemoveContainer" containerID="25146c86fc625c912a6b7aab957ed31039aeede7d329cce8f28f0dd01f5ff39c" Oct 09 13:38:23 crc kubenswrapper[4762]: E1009 13:38:23.177683 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"25146c86fc625c912a6b7aab957ed31039aeede7d329cce8f28f0dd01f5ff39c\": container with ID starting with 25146c86fc625c912a6b7aab957ed31039aeede7d329cce8f28f0dd01f5ff39c not found: ID does not exist" 
containerID="25146c86fc625c912a6b7aab957ed31039aeede7d329cce8f28f0dd01f5ff39c" Oct 09 13:38:23 crc kubenswrapper[4762]: I1009 13:38:23.177726 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"25146c86fc625c912a6b7aab957ed31039aeede7d329cce8f28f0dd01f5ff39c"} err="failed to get container status \"25146c86fc625c912a6b7aab957ed31039aeede7d329cce8f28f0dd01f5ff39c\": rpc error: code = NotFound desc = could not find container \"25146c86fc625c912a6b7aab957ed31039aeede7d329cce8f28f0dd01f5ff39c\": container with ID starting with 25146c86fc625c912a6b7aab957ed31039aeede7d329cce8f28f0dd01f5ff39c not found: ID does not exist" Oct 09 13:38:23 crc kubenswrapper[4762]: I1009 13:38:23.177758 4762 scope.go:117] "RemoveContainer" containerID="8f5be977653547c33b4d2d5184688120b32866045e3b18a08be1c7c406d6b498" Oct 09 13:38:23 crc kubenswrapper[4762]: E1009 13:38:23.178775 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8f5be977653547c33b4d2d5184688120b32866045e3b18a08be1c7c406d6b498\": container with ID starting with 8f5be977653547c33b4d2d5184688120b32866045e3b18a08be1c7c406d6b498 not found: ID does not exist" containerID="8f5be977653547c33b4d2d5184688120b32866045e3b18a08be1c7c406d6b498" Oct 09 13:38:23 crc kubenswrapper[4762]: I1009 13:38:23.178834 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8f5be977653547c33b4d2d5184688120b32866045e3b18a08be1c7c406d6b498"} err="failed to get container status \"8f5be977653547c33b4d2d5184688120b32866045e3b18a08be1c7c406d6b498\": rpc error: code = NotFound desc = could not find container \"8f5be977653547c33b4d2d5184688120b32866045e3b18a08be1c7c406d6b498\": container with ID starting with 8f5be977653547c33b4d2d5184688120b32866045e3b18a08be1c7c406d6b498 not found: ID does not exist" Oct 09 13:38:23 crc kubenswrapper[4762]: I1009 13:38:23.178873 4762 scope.go:117] "RemoveContainer" containerID="1a6b9ddcf6f9632e0ab1ac7f145c90d4c1e404b44f6e4fdc547fa42a4736448d" Oct 09 13:38:23 crc kubenswrapper[4762]: E1009 13:38:23.179205 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1a6b9ddcf6f9632e0ab1ac7f145c90d4c1e404b44f6e4fdc547fa42a4736448d\": container with ID starting with 1a6b9ddcf6f9632e0ab1ac7f145c90d4c1e404b44f6e4fdc547fa42a4736448d not found: ID does not exist" containerID="1a6b9ddcf6f9632e0ab1ac7f145c90d4c1e404b44f6e4fdc547fa42a4736448d" Oct 09 13:38:23 crc kubenswrapper[4762]: I1009 13:38:23.179231 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1a6b9ddcf6f9632e0ab1ac7f145c90d4c1e404b44f6e4fdc547fa42a4736448d"} err="failed to get container status \"1a6b9ddcf6f9632e0ab1ac7f145c90d4c1e404b44f6e4fdc547fa42a4736448d\": rpc error: code = NotFound desc = could not find container \"1a6b9ddcf6f9632e0ab1ac7f145c90d4c1e404b44f6e4fdc547fa42a4736448d\": container with ID starting with 1a6b9ddcf6f9632e0ab1ac7f145c90d4c1e404b44f6e4fdc547fa42a4736448d not found: ID does not exist" Oct 09 13:38:23 crc kubenswrapper[4762]: I1009 13:38:23.179246 4762 scope.go:117] "RemoveContainer" containerID="e1fe15b6fa2a4089c0ef0b19180a44b570bf28aeb719e8fb5c960c16f3bc3ee5" Oct 09 13:38:23 crc kubenswrapper[4762]: E1009 13:38:23.179510 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"e1fe15b6fa2a4089c0ef0b19180a44b570bf28aeb719e8fb5c960c16f3bc3ee5\": container with ID starting with e1fe15b6fa2a4089c0ef0b19180a44b570bf28aeb719e8fb5c960c16f3bc3ee5 not found: ID does not exist" containerID="e1fe15b6fa2a4089c0ef0b19180a44b570bf28aeb719e8fb5c960c16f3bc3ee5" Oct 09 13:38:23 crc kubenswrapper[4762]: I1009 13:38:23.179546 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e1fe15b6fa2a4089c0ef0b19180a44b570bf28aeb719e8fb5c960c16f3bc3ee5"} err="failed to get container status \"e1fe15b6fa2a4089c0ef0b19180a44b570bf28aeb719e8fb5c960c16f3bc3ee5\": rpc error: code = NotFound desc = could not find container \"e1fe15b6fa2a4089c0ef0b19180a44b570bf28aeb719e8fb5c960c16f3bc3ee5\": container with ID starting with e1fe15b6fa2a4089c0ef0b19180a44b570bf28aeb719e8fb5c960c16f3bc3ee5 not found: ID does not exist" Oct 09 13:38:23 crc kubenswrapper[4762]: I1009 13:38:23.179569 4762 scope.go:117] "RemoveContainer" containerID="9dbd3d536f2ff0e46947e1516b3b9def208d490f5e62bbde5bebf37690d26ac0" Oct 09 13:38:23 crc kubenswrapper[4762]: E1009 13:38:23.179924 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9dbd3d536f2ff0e46947e1516b3b9def208d490f5e62bbde5bebf37690d26ac0\": container with ID starting with 9dbd3d536f2ff0e46947e1516b3b9def208d490f5e62bbde5bebf37690d26ac0 not found: ID does not exist" containerID="9dbd3d536f2ff0e46947e1516b3b9def208d490f5e62bbde5bebf37690d26ac0" Oct 09 13:38:23 crc kubenswrapper[4762]: I1009 13:38:23.179957 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9dbd3d536f2ff0e46947e1516b3b9def208d490f5e62bbde5bebf37690d26ac0"} err="failed to get container status \"9dbd3d536f2ff0e46947e1516b3b9def208d490f5e62bbde5bebf37690d26ac0\": rpc error: code = NotFound desc = could not find container \"9dbd3d536f2ff0e46947e1516b3b9def208d490f5e62bbde5bebf37690d26ac0\": container with ID starting with 9dbd3d536f2ff0e46947e1516b3b9def208d490f5e62bbde5bebf37690d26ac0 not found: ID does not exist" Oct 09 13:38:23 crc kubenswrapper[4762]: I1009 13:38:23.179977 4762 scope.go:117] "RemoveContainer" containerID="5c409df5880861cef6885822a19dc9bbe481342a849c18ac11c85a60fcee0f15" Oct 09 13:38:23 crc kubenswrapper[4762]: E1009 13:38:23.180252 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5c409df5880861cef6885822a19dc9bbe481342a849c18ac11c85a60fcee0f15\": container with ID starting with 5c409df5880861cef6885822a19dc9bbe481342a849c18ac11c85a60fcee0f15 not found: ID does not exist" containerID="5c409df5880861cef6885822a19dc9bbe481342a849c18ac11c85a60fcee0f15" Oct 09 13:38:23 crc kubenswrapper[4762]: I1009 13:38:23.180285 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5c409df5880861cef6885822a19dc9bbe481342a849c18ac11c85a60fcee0f15"} err="failed to get container status \"5c409df5880861cef6885822a19dc9bbe481342a849c18ac11c85a60fcee0f15\": rpc error: code = NotFound desc = could not find container \"5c409df5880861cef6885822a19dc9bbe481342a849c18ac11c85a60fcee0f15\": container with ID starting with 5c409df5880861cef6885822a19dc9bbe481342a849c18ac11c85a60fcee0f15 not found: ID does not exist" Oct 09 13:38:23 crc kubenswrapper[4762]: I1009 13:38:23.180304 4762 scope.go:117] "RemoveContainer" containerID="443d1d116c58f5d8b2c5fc9051baf914244cb0776b1f912d11fe4316a0ec0567" Oct 09 13:38:23 crc 
kubenswrapper[4762]: E1009 13:38:23.180564 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"443d1d116c58f5d8b2c5fc9051baf914244cb0776b1f912d11fe4316a0ec0567\": container with ID starting with 443d1d116c58f5d8b2c5fc9051baf914244cb0776b1f912d11fe4316a0ec0567 not found: ID does not exist" containerID="443d1d116c58f5d8b2c5fc9051baf914244cb0776b1f912d11fe4316a0ec0567" Oct 09 13:38:23 crc kubenswrapper[4762]: I1009 13:38:23.180587 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"443d1d116c58f5d8b2c5fc9051baf914244cb0776b1f912d11fe4316a0ec0567"} err="failed to get container status \"443d1d116c58f5d8b2c5fc9051baf914244cb0776b1f912d11fe4316a0ec0567\": rpc error: code = NotFound desc = could not find container \"443d1d116c58f5d8b2c5fc9051baf914244cb0776b1f912d11fe4316a0ec0567\": container with ID starting with 443d1d116c58f5d8b2c5fc9051baf914244cb0776b1f912d11fe4316a0ec0567 not found: ID does not exist" Oct 09 13:38:23 crc kubenswrapper[4762]: I1009 13:38:23.180601 4762 scope.go:117] "RemoveContainer" containerID="4a6b026d474235bbb7b31530b4628a10c35b22baf4ee49759f28a9beb8177989" Oct 09 13:38:23 crc kubenswrapper[4762]: E1009 13:38:23.180842 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4a6b026d474235bbb7b31530b4628a10c35b22baf4ee49759f28a9beb8177989\": container with ID starting with 4a6b026d474235bbb7b31530b4628a10c35b22baf4ee49759f28a9beb8177989 not found: ID does not exist" containerID="4a6b026d474235bbb7b31530b4628a10c35b22baf4ee49759f28a9beb8177989" Oct 09 13:38:23 crc kubenswrapper[4762]: I1009 13:38:23.180868 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4a6b026d474235bbb7b31530b4628a10c35b22baf4ee49759f28a9beb8177989"} err="failed to get container status \"4a6b026d474235bbb7b31530b4628a10c35b22baf4ee49759f28a9beb8177989\": rpc error: code = NotFound desc = could not find container \"4a6b026d474235bbb7b31530b4628a10c35b22baf4ee49759f28a9beb8177989\": container with ID starting with 4a6b026d474235bbb7b31530b4628a10c35b22baf4ee49759f28a9beb8177989 not found: ID does not exist" Oct 09 13:38:23 crc kubenswrapper[4762]: I1009 13:38:23.180886 4762 scope.go:117] "RemoveContainer" containerID="bd03e6b064630a1cd71d2e88a4e99d513b30d0f225516ce8030cba879fe2422e" Oct 09 13:38:23 crc kubenswrapper[4762]: E1009 13:38:23.181137 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bd03e6b064630a1cd71d2e88a4e99d513b30d0f225516ce8030cba879fe2422e\": container with ID starting with bd03e6b064630a1cd71d2e88a4e99d513b30d0f225516ce8030cba879fe2422e not found: ID does not exist" containerID="bd03e6b064630a1cd71d2e88a4e99d513b30d0f225516ce8030cba879fe2422e" Oct 09 13:38:23 crc kubenswrapper[4762]: I1009 13:38:23.181168 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bd03e6b064630a1cd71d2e88a4e99d513b30d0f225516ce8030cba879fe2422e"} err="failed to get container status \"bd03e6b064630a1cd71d2e88a4e99d513b30d0f225516ce8030cba879fe2422e\": rpc error: code = NotFound desc = could not find container \"bd03e6b064630a1cd71d2e88a4e99d513b30d0f225516ce8030cba879fe2422e\": container with ID starting with bd03e6b064630a1cd71d2e88a4e99d513b30d0f225516ce8030cba879fe2422e not found: ID does not exist" Oct 09 13:38:23 crc kubenswrapper[4762]: 
I1009 13:38:23.181187 4762 scope.go:117] "RemoveContainer" containerID="8cf0a81a9603a5e2e4e71e6b43b705cc16135ef584be6e1235de4285f9d14d36" Oct 09 13:38:23 crc kubenswrapper[4762]: I1009 13:38:23.181423 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8cf0a81a9603a5e2e4e71e6b43b705cc16135ef584be6e1235de4285f9d14d36"} err="failed to get container status \"8cf0a81a9603a5e2e4e71e6b43b705cc16135ef584be6e1235de4285f9d14d36\": rpc error: code = NotFound desc = could not find container \"8cf0a81a9603a5e2e4e71e6b43b705cc16135ef584be6e1235de4285f9d14d36\": container with ID starting with 8cf0a81a9603a5e2e4e71e6b43b705cc16135ef584be6e1235de4285f9d14d36 not found: ID does not exist" Oct 09 13:38:23 crc kubenswrapper[4762]: I1009 13:38:23.181441 4762 scope.go:117] "RemoveContainer" containerID="25146c86fc625c912a6b7aab957ed31039aeede7d329cce8f28f0dd01f5ff39c" Oct 09 13:38:23 crc kubenswrapper[4762]: I1009 13:38:23.181699 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"25146c86fc625c912a6b7aab957ed31039aeede7d329cce8f28f0dd01f5ff39c"} err="failed to get container status \"25146c86fc625c912a6b7aab957ed31039aeede7d329cce8f28f0dd01f5ff39c\": rpc error: code = NotFound desc = could not find container \"25146c86fc625c912a6b7aab957ed31039aeede7d329cce8f28f0dd01f5ff39c\": container with ID starting with 25146c86fc625c912a6b7aab957ed31039aeede7d329cce8f28f0dd01f5ff39c not found: ID does not exist" Oct 09 13:38:23 crc kubenswrapper[4762]: I1009 13:38:23.181729 4762 scope.go:117] "RemoveContainer" containerID="8f5be977653547c33b4d2d5184688120b32866045e3b18a08be1c7c406d6b498" Oct 09 13:38:23 crc kubenswrapper[4762]: I1009 13:38:23.181932 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8f5be977653547c33b4d2d5184688120b32866045e3b18a08be1c7c406d6b498"} err="failed to get container status \"8f5be977653547c33b4d2d5184688120b32866045e3b18a08be1c7c406d6b498\": rpc error: code = NotFound desc = could not find container \"8f5be977653547c33b4d2d5184688120b32866045e3b18a08be1c7c406d6b498\": container with ID starting with 8f5be977653547c33b4d2d5184688120b32866045e3b18a08be1c7c406d6b498 not found: ID does not exist" Oct 09 13:38:23 crc kubenswrapper[4762]: I1009 13:38:23.181961 4762 scope.go:117] "RemoveContainer" containerID="1a6b9ddcf6f9632e0ab1ac7f145c90d4c1e404b44f6e4fdc547fa42a4736448d" Oct 09 13:38:23 crc kubenswrapper[4762]: I1009 13:38:23.182179 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1a6b9ddcf6f9632e0ab1ac7f145c90d4c1e404b44f6e4fdc547fa42a4736448d"} err="failed to get container status \"1a6b9ddcf6f9632e0ab1ac7f145c90d4c1e404b44f6e4fdc547fa42a4736448d\": rpc error: code = NotFound desc = could not find container \"1a6b9ddcf6f9632e0ab1ac7f145c90d4c1e404b44f6e4fdc547fa42a4736448d\": container with ID starting with 1a6b9ddcf6f9632e0ab1ac7f145c90d4c1e404b44f6e4fdc547fa42a4736448d not found: ID does not exist" Oct 09 13:38:23 crc kubenswrapper[4762]: I1009 13:38:23.182207 4762 scope.go:117] "RemoveContainer" containerID="e1fe15b6fa2a4089c0ef0b19180a44b570bf28aeb719e8fb5c960c16f3bc3ee5" Oct 09 13:38:23 crc kubenswrapper[4762]: I1009 13:38:23.182475 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e1fe15b6fa2a4089c0ef0b19180a44b570bf28aeb719e8fb5c960c16f3bc3ee5"} err="failed to get container status 
\"e1fe15b6fa2a4089c0ef0b19180a44b570bf28aeb719e8fb5c960c16f3bc3ee5\": rpc error: code = NotFound desc = could not find container \"e1fe15b6fa2a4089c0ef0b19180a44b570bf28aeb719e8fb5c960c16f3bc3ee5\": container with ID starting with e1fe15b6fa2a4089c0ef0b19180a44b570bf28aeb719e8fb5c960c16f3bc3ee5 not found: ID does not exist" Oct 09 13:38:23 crc kubenswrapper[4762]: I1009 13:38:23.182531 4762 scope.go:117] "RemoveContainer" containerID="9dbd3d536f2ff0e46947e1516b3b9def208d490f5e62bbde5bebf37690d26ac0" Oct 09 13:38:23 crc kubenswrapper[4762]: I1009 13:38:23.183230 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9dbd3d536f2ff0e46947e1516b3b9def208d490f5e62bbde5bebf37690d26ac0"} err="failed to get container status \"9dbd3d536f2ff0e46947e1516b3b9def208d490f5e62bbde5bebf37690d26ac0\": rpc error: code = NotFound desc = could not find container \"9dbd3d536f2ff0e46947e1516b3b9def208d490f5e62bbde5bebf37690d26ac0\": container with ID starting with 9dbd3d536f2ff0e46947e1516b3b9def208d490f5e62bbde5bebf37690d26ac0 not found: ID does not exist" Oct 09 13:38:23 crc kubenswrapper[4762]: I1009 13:38:23.183260 4762 scope.go:117] "RemoveContainer" containerID="5c409df5880861cef6885822a19dc9bbe481342a849c18ac11c85a60fcee0f15" Oct 09 13:38:23 crc kubenswrapper[4762]: I1009 13:38:23.183890 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5c409df5880861cef6885822a19dc9bbe481342a849c18ac11c85a60fcee0f15"} err="failed to get container status \"5c409df5880861cef6885822a19dc9bbe481342a849c18ac11c85a60fcee0f15\": rpc error: code = NotFound desc = could not find container \"5c409df5880861cef6885822a19dc9bbe481342a849c18ac11c85a60fcee0f15\": container with ID starting with 5c409df5880861cef6885822a19dc9bbe481342a849c18ac11c85a60fcee0f15 not found: ID does not exist" Oct 09 13:38:23 crc kubenswrapper[4762]: I1009 13:38:23.183917 4762 scope.go:117] "RemoveContainer" containerID="443d1d116c58f5d8b2c5fc9051baf914244cb0776b1f912d11fe4316a0ec0567" Oct 09 13:38:23 crc kubenswrapper[4762]: I1009 13:38:23.184168 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"443d1d116c58f5d8b2c5fc9051baf914244cb0776b1f912d11fe4316a0ec0567"} err="failed to get container status \"443d1d116c58f5d8b2c5fc9051baf914244cb0776b1f912d11fe4316a0ec0567\": rpc error: code = NotFound desc = could not find container \"443d1d116c58f5d8b2c5fc9051baf914244cb0776b1f912d11fe4316a0ec0567\": container with ID starting with 443d1d116c58f5d8b2c5fc9051baf914244cb0776b1f912d11fe4316a0ec0567 not found: ID does not exist" Oct 09 13:38:23 crc kubenswrapper[4762]: I1009 13:38:23.184195 4762 scope.go:117] "RemoveContainer" containerID="4a6b026d474235bbb7b31530b4628a10c35b22baf4ee49759f28a9beb8177989" Oct 09 13:38:23 crc kubenswrapper[4762]: I1009 13:38:23.184431 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4a6b026d474235bbb7b31530b4628a10c35b22baf4ee49759f28a9beb8177989"} err="failed to get container status \"4a6b026d474235bbb7b31530b4628a10c35b22baf4ee49759f28a9beb8177989\": rpc error: code = NotFound desc = could not find container \"4a6b026d474235bbb7b31530b4628a10c35b22baf4ee49759f28a9beb8177989\": container with ID starting with 4a6b026d474235bbb7b31530b4628a10c35b22baf4ee49759f28a9beb8177989 not found: ID does not exist" Oct 09 13:38:23 crc kubenswrapper[4762]: I1009 13:38:23.184451 4762 scope.go:117] "RemoveContainer" 
containerID="bd03e6b064630a1cd71d2e88a4e99d513b30d0f225516ce8030cba879fe2422e" Oct 09 13:38:23 crc kubenswrapper[4762]: I1009 13:38:23.184723 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bd03e6b064630a1cd71d2e88a4e99d513b30d0f225516ce8030cba879fe2422e"} err="failed to get container status \"bd03e6b064630a1cd71d2e88a4e99d513b30d0f225516ce8030cba879fe2422e\": rpc error: code = NotFound desc = could not find container \"bd03e6b064630a1cd71d2e88a4e99d513b30d0f225516ce8030cba879fe2422e\": container with ID starting with bd03e6b064630a1cd71d2e88a4e99d513b30d0f225516ce8030cba879fe2422e not found: ID does not exist" Oct 09 13:38:23 crc kubenswrapper[4762]: I1009 13:38:23.184765 4762 scope.go:117] "RemoveContainer" containerID="8cf0a81a9603a5e2e4e71e6b43b705cc16135ef584be6e1235de4285f9d14d36" Oct 09 13:38:23 crc kubenswrapper[4762]: I1009 13:38:23.184975 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8cf0a81a9603a5e2e4e71e6b43b705cc16135ef584be6e1235de4285f9d14d36"} err="failed to get container status \"8cf0a81a9603a5e2e4e71e6b43b705cc16135ef584be6e1235de4285f9d14d36\": rpc error: code = NotFound desc = could not find container \"8cf0a81a9603a5e2e4e71e6b43b705cc16135ef584be6e1235de4285f9d14d36\": container with ID starting with 8cf0a81a9603a5e2e4e71e6b43b705cc16135ef584be6e1235de4285f9d14d36 not found: ID does not exist" Oct 09 13:38:23 crc kubenswrapper[4762]: I1009 13:38:23.185001 4762 scope.go:117] "RemoveContainer" containerID="25146c86fc625c912a6b7aab957ed31039aeede7d329cce8f28f0dd01f5ff39c" Oct 09 13:38:23 crc kubenswrapper[4762]: I1009 13:38:23.185226 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"25146c86fc625c912a6b7aab957ed31039aeede7d329cce8f28f0dd01f5ff39c"} err="failed to get container status \"25146c86fc625c912a6b7aab957ed31039aeede7d329cce8f28f0dd01f5ff39c\": rpc error: code = NotFound desc = could not find container \"25146c86fc625c912a6b7aab957ed31039aeede7d329cce8f28f0dd01f5ff39c\": container with ID starting with 25146c86fc625c912a6b7aab957ed31039aeede7d329cce8f28f0dd01f5ff39c not found: ID does not exist" Oct 09 13:38:23 crc kubenswrapper[4762]: I1009 13:38:23.185245 4762 scope.go:117] "RemoveContainer" containerID="8f5be977653547c33b4d2d5184688120b32866045e3b18a08be1c7c406d6b498" Oct 09 13:38:23 crc kubenswrapper[4762]: I1009 13:38:23.185444 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8f5be977653547c33b4d2d5184688120b32866045e3b18a08be1c7c406d6b498"} err="failed to get container status \"8f5be977653547c33b4d2d5184688120b32866045e3b18a08be1c7c406d6b498\": rpc error: code = NotFound desc = could not find container \"8f5be977653547c33b4d2d5184688120b32866045e3b18a08be1c7c406d6b498\": container with ID starting with 8f5be977653547c33b4d2d5184688120b32866045e3b18a08be1c7c406d6b498 not found: ID does not exist" Oct 09 13:38:23 crc kubenswrapper[4762]: I1009 13:38:23.185463 4762 scope.go:117] "RemoveContainer" containerID="1a6b9ddcf6f9632e0ab1ac7f145c90d4c1e404b44f6e4fdc547fa42a4736448d" Oct 09 13:38:23 crc kubenswrapper[4762]: I1009 13:38:23.185657 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1a6b9ddcf6f9632e0ab1ac7f145c90d4c1e404b44f6e4fdc547fa42a4736448d"} err="failed to get container status \"1a6b9ddcf6f9632e0ab1ac7f145c90d4c1e404b44f6e4fdc547fa42a4736448d\": rpc error: code = NotFound desc = could not find 
container \"1a6b9ddcf6f9632e0ab1ac7f145c90d4c1e404b44f6e4fdc547fa42a4736448d\": container with ID starting with 1a6b9ddcf6f9632e0ab1ac7f145c90d4c1e404b44f6e4fdc547fa42a4736448d not found: ID does not exist" Oct 09 13:38:23 crc kubenswrapper[4762]: I1009 13:38:23.185680 4762 scope.go:117] "RemoveContainer" containerID="e1fe15b6fa2a4089c0ef0b19180a44b570bf28aeb719e8fb5c960c16f3bc3ee5" Oct 09 13:38:23 crc kubenswrapper[4762]: I1009 13:38:23.185877 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e1fe15b6fa2a4089c0ef0b19180a44b570bf28aeb719e8fb5c960c16f3bc3ee5"} err="failed to get container status \"e1fe15b6fa2a4089c0ef0b19180a44b570bf28aeb719e8fb5c960c16f3bc3ee5\": rpc error: code = NotFound desc = could not find container \"e1fe15b6fa2a4089c0ef0b19180a44b570bf28aeb719e8fb5c960c16f3bc3ee5\": container with ID starting with e1fe15b6fa2a4089c0ef0b19180a44b570bf28aeb719e8fb5c960c16f3bc3ee5 not found: ID does not exist" Oct 09 13:38:23 crc kubenswrapper[4762]: I1009 13:38:23.185899 4762 scope.go:117] "RemoveContainer" containerID="9dbd3d536f2ff0e46947e1516b3b9def208d490f5e62bbde5bebf37690d26ac0" Oct 09 13:38:23 crc kubenswrapper[4762]: I1009 13:38:23.186078 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9dbd3d536f2ff0e46947e1516b3b9def208d490f5e62bbde5bebf37690d26ac0"} err="failed to get container status \"9dbd3d536f2ff0e46947e1516b3b9def208d490f5e62bbde5bebf37690d26ac0\": rpc error: code = NotFound desc = could not find container \"9dbd3d536f2ff0e46947e1516b3b9def208d490f5e62bbde5bebf37690d26ac0\": container with ID starting with 9dbd3d536f2ff0e46947e1516b3b9def208d490f5e62bbde5bebf37690d26ac0 not found: ID does not exist" Oct 09 13:38:23 crc kubenswrapper[4762]: I1009 13:38:23.186100 4762 scope.go:117] "RemoveContainer" containerID="5c409df5880861cef6885822a19dc9bbe481342a849c18ac11c85a60fcee0f15" Oct 09 13:38:23 crc kubenswrapper[4762]: I1009 13:38:23.186265 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5c409df5880861cef6885822a19dc9bbe481342a849c18ac11c85a60fcee0f15"} err="failed to get container status \"5c409df5880861cef6885822a19dc9bbe481342a849c18ac11c85a60fcee0f15\": rpc error: code = NotFound desc = could not find container \"5c409df5880861cef6885822a19dc9bbe481342a849c18ac11c85a60fcee0f15\": container with ID starting with 5c409df5880861cef6885822a19dc9bbe481342a849c18ac11c85a60fcee0f15 not found: ID does not exist" Oct 09 13:38:23 crc kubenswrapper[4762]: I1009 13:38:23.186287 4762 scope.go:117] "RemoveContainer" containerID="443d1d116c58f5d8b2c5fc9051baf914244cb0776b1f912d11fe4316a0ec0567" Oct 09 13:38:23 crc kubenswrapper[4762]: I1009 13:38:23.186462 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"443d1d116c58f5d8b2c5fc9051baf914244cb0776b1f912d11fe4316a0ec0567"} err="failed to get container status \"443d1d116c58f5d8b2c5fc9051baf914244cb0776b1f912d11fe4316a0ec0567\": rpc error: code = NotFound desc = could not find container \"443d1d116c58f5d8b2c5fc9051baf914244cb0776b1f912d11fe4316a0ec0567\": container with ID starting with 443d1d116c58f5d8b2c5fc9051baf914244cb0776b1f912d11fe4316a0ec0567 not found: ID does not exist" Oct 09 13:38:23 crc kubenswrapper[4762]: I1009 13:38:23.186483 4762 scope.go:117] "RemoveContainer" containerID="4a6b026d474235bbb7b31530b4628a10c35b22baf4ee49759f28a9beb8177989" Oct 09 13:38:23 crc kubenswrapper[4762]: I1009 13:38:23.186724 4762 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4a6b026d474235bbb7b31530b4628a10c35b22baf4ee49759f28a9beb8177989"} err="failed to get container status \"4a6b026d474235bbb7b31530b4628a10c35b22baf4ee49759f28a9beb8177989\": rpc error: code = NotFound desc = could not find container \"4a6b026d474235bbb7b31530b4628a10c35b22baf4ee49759f28a9beb8177989\": container with ID starting with 4a6b026d474235bbb7b31530b4628a10c35b22baf4ee49759f28a9beb8177989 not found: ID does not exist" Oct 09 13:38:23 crc kubenswrapper[4762]: I1009 13:38:23.186748 4762 scope.go:117] "RemoveContainer" containerID="bd03e6b064630a1cd71d2e88a4e99d513b30d0f225516ce8030cba879fe2422e" Oct 09 13:38:23 crc kubenswrapper[4762]: I1009 13:38:23.186954 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bd03e6b064630a1cd71d2e88a4e99d513b30d0f225516ce8030cba879fe2422e"} err="failed to get container status \"bd03e6b064630a1cd71d2e88a4e99d513b30d0f225516ce8030cba879fe2422e\": rpc error: code = NotFound desc = could not find container \"bd03e6b064630a1cd71d2e88a4e99d513b30d0f225516ce8030cba879fe2422e\": container with ID starting with bd03e6b064630a1cd71d2e88a4e99d513b30d0f225516ce8030cba879fe2422e not found: ID does not exist" Oct 09 13:38:23 crc kubenswrapper[4762]: I1009 13:38:23.186977 4762 scope.go:117] "RemoveContainer" containerID="8cf0a81a9603a5e2e4e71e6b43b705cc16135ef584be6e1235de4285f9d14d36" Oct 09 13:38:23 crc kubenswrapper[4762]: I1009 13:38:23.187164 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8cf0a81a9603a5e2e4e71e6b43b705cc16135ef584be6e1235de4285f9d14d36"} err="failed to get container status \"8cf0a81a9603a5e2e4e71e6b43b705cc16135ef584be6e1235de4285f9d14d36\": rpc error: code = NotFound desc = could not find container \"8cf0a81a9603a5e2e4e71e6b43b705cc16135ef584be6e1235de4285f9d14d36\": container with ID starting with 8cf0a81a9603a5e2e4e71e6b43b705cc16135ef584be6e1235de4285f9d14d36 not found: ID does not exist" Oct 09 13:38:23 crc kubenswrapper[4762]: I1009 13:38:23.187186 4762 scope.go:117] "RemoveContainer" containerID="25146c86fc625c912a6b7aab957ed31039aeede7d329cce8f28f0dd01f5ff39c" Oct 09 13:38:23 crc kubenswrapper[4762]: I1009 13:38:23.187365 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"25146c86fc625c912a6b7aab957ed31039aeede7d329cce8f28f0dd01f5ff39c"} err="failed to get container status \"25146c86fc625c912a6b7aab957ed31039aeede7d329cce8f28f0dd01f5ff39c\": rpc error: code = NotFound desc = could not find container \"25146c86fc625c912a6b7aab957ed31039aeede7d329cce8f28f0dd01f5ff39c\": container with ID starting with 25146c86fc625c912a6b7aab957ed31039aeede7d329cce8f28f0dd01f5ff39c not found: ID does not exist" Oct 09 13:38:23 crc kubenswrapper[4762]: I1009 13:38:23.187383 4762 scope.go:117] "RemoveContainer" containerID="8f5be977653547c33b4d2d5184688120b32866045e3b18a08be1c7c406d6b498" Oct 09 13:38:23 crc kubenswrapper[4762]: I1009 13:38:23.187551 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8f5be977653547c33b4d2d5184688120b32866045e3b18a08be1c7c406d6b498"} err="failed to get container status \"8f5be977653547c33b4d2d5184688120b32866045e3b18a08be1c7c406d6b498\": rpc error: code = NotFound desc = could not find container \"8f5be977653547c33b4d2d5184688120b32866045e3b18a08be1c7c406d6b498\": container with ID starting with 
8f5be977653547c33b4d2d5184688120b32866045e3b18a08be1c7c406d6b498 not found: ID does not exist" Oct 09 13:38:23 crc kubenswrapper[4762]: I1009 13:38:23.187572 4762 scope.go:117] "RemoveContainer" containerID="1a6b9ddcf6f9632e0ab1ac7f145c90d4c1e404b44f6e4fdc547fa42a4736448d" Oct 09 13:38:23 crc kubenswrapper[4762]: I1009 13:38:23.187824 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1a6b9ddcf6f9632e0ab1ac7f145c90d4c1e404b44f6e4fdc547fa42a4736448d"} err="failed to get container status \"1a6b9ddcf6f9632e0ab1ac7f145c90d4c1e404b44f6e4fdc547fa42a4736448d\": rpc error: code = NotFound desc = could not find container \"1a6b9ddcf6f9632e0ab1ac7f145c90d4c1e404b44f6e4fdc547fa42a4736448d\": container with ID starting with 1a6b9ddcf6f9632e0ab1ac7f145c90d4c1e404b44f6e4fdc547fa42a4736448d not found: ID does not exist" Oct 09 13:38:23 crc kubenswrapper[4762]: I1009 13:38:23.187845 4762 scope.go:117] "RemoveContainer" containerID="e1fe15b6fa2a4089c0ef0b19180a44b570bf28aeb719e8fb5c960c16f3bc3ee5" Oct 09 13:38:23 crc kubenswrapper[4762]: I1009 13:38:23.188185 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e1fe15b6fa2a4089c0ef0b19180a44b570bf28aeb719e8fb5c960c16f3bc3ee5"} err="failed to get container status \"e1fe15b6fa2a4089c0ef0b19180a44b570bf28aeb719e8fb5c960c16f3bc3ee5\": rpc error: code = NotFound desc = could not find container \"e1fe15b6fa2a4089c0ef0b19180a44b570bf28aeb719e8fb5c960c16f3bc3ee5\": container with ID starting with e1fe15b6fa2a4089c0ef0b19180a44b570bf28aeb719e8fb5c960c16f3bc3ee5 not found: ID does not exist" Oct 09 13:38:23 crc kubenswrapper[4762]: I1009 13:38:23.188208 4762 scope.go:117] "RemoveContainer" containerID="9dbd3d536f2ff0e46947e1516b3b9def208d490f5e62bbde5bebf37690d26ac0" Oct 09 13:38:23 crc kubenswrapper[4762]: I1009 13:38:23.188409 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9dbd3d536f2ff0e46947e1516b3b9def208d490f5e62bbde5bebf37690d26ac0"} err="failed to get container status \"9dbd3d536f2ff0e46947e1516b3b9def208d490f5e62bbde5bebf37690d26ac0\": rpc error: code = NotFound desc = could not find container \"9dbd3d536f2ff0e46947e1516b3b9def208d490f5e62bbde5bebf37690d26ac0\": container with ID starting with 9dbd3d536f2ff0e46947e1516b3b9def208d490f5e62bbde5bebf37690d26ac0 not found: ID does not exist" Oct 09 13:38:23 crc kubenswrapper[4762]: I1009 13:38:23.188431 4762 scope.go:117] "RemoveContainer" containerID="5c409df5880861cef6885822a19dc9bbe481342a849c18ac11c85a60fcee0f15" Oct 09 13:38:23 crc kubenswrapper[4762]: I1009 13:38:23.188660 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5c409df5880861cef6885822a19dc9bbe481342a849c18ac11c85a60fcee0f15"} err="failed to get container status \"5c409df5880861cef6885822a19dc9bbe481342a849c18ac11c85a60fcee0f15\": rpc error: code = NotFound desc = could not find container \"5c409df5880861cef6885822a19dc9bbe481342a849c18ac11c85a60fcee0f15\": container with ID starting with 5c409df5880861cef6885822a19dc9bbe481342a849c18ac11c85a60fcee0f15 not found: ID does not exist" Oct 09 13:38:23 crc kubenswrapper[4762]: I1009 13:38:23.188683 4762 scope.go:117] "RemoveContainer" containerID="443d1d116c58f5d8b2c5fc9051baf914244cb0776b1f912d11fe4316a0ec0567" Oct 09 13:38:23 crc kubenswrapper[4762]: I1009 13:38:23.188886 4762 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"443d1d116c58f5d8b2c5fc9051baf914244cb0776b1f912d11fe4316a0ec0567"} err="failed to get container status \"443d1d116c58f5d8b2c5fc9051baf914244cb0776b1f912d11fe4316a0ec0567\": rpc error: code = NotFound desc = could not find container \"443d1d116c58f5d8b2c5fc9051baf914244cb0776b1f912d11fe4316a0ec0567\": container with ID starting with 443d1d116c58f5d8b2c5fc9051baf914244cb0776b1f912d11fe4316a0ec0567 not found: ID does not exist" Oct 09 13:38:23 crc kubenswrapper[4762]: I1009 13:38:23.188910 4762 scope.go:117] "RemoveContainer" containerID="4a6b026d474235bbb7b31530b4628a10c35b22baf4ee49759f28a9beb8177989" Oct 09 13:38:23 crc kubenswrapper[4762]: I1009 13:38:23.189117 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4a6b026d474235bbb7b31530b4628a10c35b22baf4ee49759f28a9beb8177989"} err="failed to get container status \"4a6b026d474235bbb7b31530b4628a10c35b22baf4ee49759f28a9beb8177989\": rpc error: code = NotFound desc = could not find container \"4a6b026d474235bbb7b31530b4628a10c35b22baf4ee49759f28a9beb8177989\": container with ID starting with 4a6b026d474235bbb7b31530b4628a10c35b22baf4ee49759f28a9beb8177989 not found: ID does not exist" Oct 09 13:38:23 crc kubenswrapper[4762]: I1009 13:38:23.189137 4762 scope.go:117] "RemoveContainer" containerID="bd03e6b064630a1cd71d2e88a4e99d513b30d0f225516ce8030cba879fe2422e" Oct 09 13:38:23 crc kubenswrapper[4762]: I1009 13:38:23.189346 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bd03e6b064630a1cd71d2e88a4e99d513b30d0f225516ce8030cba879fe2422e"} err="failed to get container status \"bd03e6b064630a1cd71d2e88a4e99d513b30d0f225516ce8030cba879fe2422e\": rpc error: code = NotFound desc = could not find container \"bd03e6b064630a1cd71d2e88a4e99d513b30d0f225516ce8030cba879fe2422e\": container with ID starting with bd03e6b064630a1cd71d2e88a4e99d513b30d0f225516ce8030cba879fe2422e not found: ID does not exist" Oct 09 13:38:23 crc kubenswrapper[4762]: I1009 13:38:23.189365 4762 scope.go:117] "RemoveContainer" containerID="8cf0a81a9603a5e2e4e71e6b43b705cc16135ef584be6e1235de4285f9d14d36" Oct 09 13:38:23 crc kubenswrapper[4762]: I1009 13:38:23.189654 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8cf0a81a9603a5e2e4e71e6b43b705cc16135ef584be6e1235de4285f9d14d36"} err="failed to get container status \"8cf0a81a9603a5e2e4e71e6b43b705cc16135ef584be6e1235de4285f9d14d36\": rpc error: code = NotFound desc = could not find container \"8cf0a81a9603a5e2e4e71e6b43b705cc16135ef584be6e1235de4285f9d14d36\": container with ID starting with 8cf0a81a9603a5e2e4e71e6b43b705cc16135ef584be6e1235de4285f9d14d36 not found: ID does not exist" Oct 09 13:38:23 crc kubenswrapper[4762]: I1009 13:38:23.947583 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-j4f8g" event={"ID":"bd53b7b2-b8dd-4775-b9bc-d49856699640","Type":"ContainerStarted","Data":"931047f8eb679934014fec6400303e111519f8723385bc1eb7d56a4fcff43fb4"} Oct 09 13:38:23 crc kubenswrapper[4762]: I1009 13:38:23.947925 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-j4f8g" event={"ID":"bd53b7b2-b8dd-4775-b9bc-d49856699640","Type":"ContainerStarted","Data":"591dc667382ff4a6765289302b64e478861e48e8a03f2f947ac8cb1cbf41be6f"} Oct 09 13:38:23 crc kubenswrapper[4762]: I1009 13:38:23.947944 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-j4f8g" event={"ID":"bd53b7b2-b8dd-4775-b9bc-d49856699640","Type":"ContainerStarted","Data":"9396c1bb017122fee03777940218511a09c1a3fb2b18a087472f6b3dc6884abf"} Oct 09 13:38:23 crc kubenswrapper[4762]: I1009 13:38:23.947955 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-j4f8g" event={"ID":"bd53b7b2-b8dd-4775-b9bc-d49856699640","Type":"ContainerStarted","Data":"c1ddc5a3e0435292394cc12cf0d4549a66bac43d096b8c6829ad6e91f34a73b0"} Oct 09 13:38:23 crc kubenswrapper[4762]: I1009 13:38:23.947967 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-j4f8g" event={"ID":"bd53b7b2-b8dd-4775-b9bc-d49856699640","Type":"ContainerStarted","Data":"16d08ca2b6b97a8b56e48428d397d1fc6f56ba5a6fb2d9da1e0bdd11058f760b"} Oct 09 13:38:23 crc kubenswrapper[4762]: I1009 13:38:23.947978 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-j4f8g" event={"ID":"bd53b7b2-b8dd-4775-b9bc-d49856699640","Type":"ContainerStarted","Data":"ca9c1a36be313c628bcb706be8ad0d395677c28c03df3ebb24c881545d06f786"} Oct 09 13:38:23 crc kubenswrapper[4762]: I1009 13:38:23.950486 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-9wtqb_c847aae6-277a-45dc-86d0-9b175f7e8177/kube-multus/2.log" Oct 09 13:38:23 crc kubenswrapper[4762]: I1009 13:38:23.950916 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-9wtqb_c847aae6-277a-45dc-86d0-9b175f7e8177/kube-multus/1.log" Oct 09 13:38:23 crc kubenswrapper[4762]: I1009 13:38:23.951021 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-9wtqb" event={"ID":"c847aae6-277a-45dc-86d0-9b175f7e8177","Type":"ContainerStarted","Data":"7b667bee7510f2e945baac24438ecc56b5856da8ade54e96c63c0c3a7c46fb6b"} Oct 09 13:38:24 crc kubenswrapper[4762]: I1009 13:38:24.974717 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="92662de9-9784-432a-92d2-a668f815e8fd" path="/var/lib/kubelet/pods/92662de9-9784-432a-92d2-a668f815e8fd/volumes" Oct 09 13:38:25 crc kubenswrapper[4762]: I1009 13:38:25.140276 4762 scope.go:117] "RemoveContainer" containerID="0f95093a278093400b0692e3f8cbe050c5586fbcf1ad29c7b758613ab6ac76af" Oct 09 13:38:25 crc kubenswrapper[4762]: I1009 13:38:25.965117 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-j4f8g" event={"ID":"bd53b7b2-b8dd-4775-b9bc-d49856699640","Type":"ContainerStarted","Data":"2fd7cbe078941647870055903912372bde08400ada6fe2bde789076353bdadda"} Oct 09 13:38:25 crc kubenswrapper[4762]: I1009 13:38:25.966976 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-9wtqb_c847aae6-277a-45dc-86d0-9b175f7e8177/kube-multus/2.log" Oct 09 13:38:26 crc kubenswrapper[4762]: I1009 13:38:26.791288 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-dlp9b"] Oct 09 13:38:26 crc kubenswrapper[4762]: I1009 13:38:26.792912 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-dlp9b" Oct 09 13:38:26 crc kubenswrapper[4762]: I1009 13:38:26.794561 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt" Oct 09 13:38:26 crc kubenswrapper[4762]: I1009 13:38:26.794956 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage" Oct 09 13:38:26 crc kubenswrapper[4762]: I1009 13:38:26.795462 4762 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-nhclq" Oct 09 13:38:26 crc kubenswrapper[4762]: I1009 13:38:26.797428 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt" Oct 09 13:38:26 crc kubenswrapper[4762]: I1009 13:38:26.920661 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2rdm5\" (UniqueName: \"kubernetes.io/projected/2bdef39d-b324-48f1-a0f7-0f52130e848b-kube-api-access-2rdm5\") pod \"crc-storage-crc-dlp9b\" (UID: \"2bdef39d-b324-48f1-a0f7-0f52130e848b\") " pod="crc-storage/crc-storage-crc-dlp9b" Oct 09 13:38:26 crc kubenswrapper[4762]: I1009 13:38:26.920725 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/2bdef39d-b324-48f1-a0f7-0f52130e848b-crc-storage\") pod \"crc-storage-crc-dlp9b\" (UID: \"2bdef39d-b324-48f1-a0f7-0f52130e848b\") " pod="crc-storage/crc-storage-crc-dlp9b" Oct 09 13:38:26 crc kubenswrapper[4762]: I1009 13:38:26.920835 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/2bdef39d-b324-48f1-a0f7-0f52130e848b-node-mnt\") pod \"crc-storage-crc-dlp9b\" (UID: \"2bdef39d-b324-48f1-a0f7-0f52130e848b\") " pod="crc-storage/crc-storage-crc-dlp9b" Oct 09 13:38:27 crc kubenswrapper[4762]: I1009 13:38:27.022061 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2rdm5\" (UniqueName: \"kubernetes.io/projected/2bdef39d-b324-48f1-a0f7-0f52130e848b-kube-api-access-2rdm5\") pod \"crc-storage-crc-dlp9b\" (UID: \"2bdef39d-b324-48f1-a0f7-0f52130e848b\") " pod="crc-storage/crc-storage-crc-dlp9b" Oct 09 13:38:27 crc kubenswrapper[4762]: I1009 13:38:27.022149 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/2bdef39d-b324-48f1-a0f7-0f52130e848b-crc-storage\") pod \"crc-storage-crc-dlp9b\" (UID: \"2bdef39d-b324-48f1-a0f7-0f52130e848b\") " pod="crc-storage/crc-storage-crc-dlp9b" Oct 09 13:38:27 crc kubenswrapper[4762]: I1009 13:38:27.022183 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/2bdef39d-b324-48f1-a0f7-0f52130e848b-node-mnt\") pod \"crc-storage-crc-dlp9b\" (UID: \"2bdef39d-b324-48f1-a0f7-0f52130e848b\") " pod="crc-storage/crc-storage-crc-dlp9b" Oct 09 13:38:27 crc kubenswrapper[4762]: I1009 13:38:27.022546 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/2bdef39d-b324-48f1-a0f7-0f52130e848b-node-mnt\") pod \"crc-storage-crc-dlp9b\" (UID: \"2bdef39d-b324-48f1-a0f7-0f52130e848b\") " pod="crc-storage/crc-storage-crc-dlp9b" Oct 09 13:38:27 crc kubenswrapper[4762]: I1009 13:38:27.023193 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"crc-storage\" (UniqueName: \"kubernetes.io/configmap/2bdef39d-b324-48f1-a0f7-0f52130e848b-crc-storage\") pod \"crc-storage-crc-dlp9b\" (UID: \"2bdef39d-b324-48f1-a0f7-0f52130e848b\") " pod="crc-storage/crc-storage-crc-dlp9b" Oct 09 13:38:27 crc kubenswrapper[4762]: I1009 13:38:27.040359 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2rdm5\" (UniqueName: \"kubernetes.io/projected/2bdef39d-b324-48f1-a0f7-0f52130e848b-kube-api-access-2rdm5\") pod \"crc-storage-crc-dlp9b\" (UID: \"2bdef39d-b324-48f1-a0f7-0f52130e848b\") " pod="crc-storage/crc-storage-crc-dlp9b" Oct 09 13:38:27 crc kubenswrapper[4762]: I1009 13:38:27.109947 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-dlp9b" Oct 09 13:38:27 crc kubenswrapper[4762]: E1009 13:38:27.136302 4762 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-dlp9b_crc-storage_2bdef39d-b324-48f1-a0f7-0f52130e848b_0(2d39201b769338ada9af5f0dbc438946efa096c0028cdc7cfc3cef9dad0c004b): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Oct 09 13:38:27 crc kubenswrapper[4762]: E1009 13:38:27.136374 4762 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-dlp9b_crc-storage_2bdef39d-b324-48f1-a0f7-0f52130e848b_0(2d39201b769338ada9af5f0dbc438946efa096c0028cdc7cfc3cef9dad0c004b): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="crc-storage/crc-storage-crc-dlp9b" Oct 09 13:38:27 crc kubenswrapper[4762]: E1009 13:38:27.136393 4762 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-dlp9b_crc-storage_2bdef39d-b324-48f1-a0f7-0f52130e848b_0(2d39201b769338ada9af5f0dbc438946efa096c0028cdc7cfc3cef9dad0c004b): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="crc-storage/crc-storage-crc-dlp9b" Oct 09 13:38:27 crc kubenswrapper[4762]: E1009 13:38:27.136438 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"crc-storage-crc-dlp9b_crc-storage(2bdef39d-b324-48f1-a0f7-0f52130e848b)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"crc-storage-crc-dlp9b_crc-storage(2bdef39d-b324-48f1-a0f7-0f52130e848b)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-dlp9b_crc-storage_2bdef39d-b324-48f1-a0f7-0f52130e848b_0(2d39201b769338ada9af5f0dbc438946efa096c0028cdc7cfc3cef9dad0c004b): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="crc-storage/crc-storage-crc-dlp9b" podUID="2bdef39d-b324-48f1-a0f7-0f52130e848b" Oct 09 13:38:28 crc kubenswrapper[4762]: I1009 13:38:28.746454 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-dlp9b"] Oct 09 13:38:28 crc kubenswrapper[4762]: I1009 13:38:28.746966 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-dlp9b" Oct 09 13:38:28 crc kubenswrapper[4762]: I1009 13:38:28.747317 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-dlp9b" Oct 09 13:38:28 crc kubenswrapper[4762]: E1009 13:38:28.773312 4762 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-dlp9b_crc-storage_2bdef39d-b324-48f1-a0f7-0f52130e848b_0(7d0a9f4bcf31a2d8eb0b18895283e439bd4981a79d545d2faa5fbd8fe2c71479): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Oct 09 13:38:28 crc kubenswrapper[4762]: E1009 13:38:28.773377 4762 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-dlp9b_crc-storage_2bdef39d-b324-48f1-a0f7-0f52130e848b_0(7d0a9f4bcf31a2d8eb0b18895283e439bd4981a79d545d2faa5fbd8fe2c71479): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="crc-storage/crc-storage-crc-dlp9b" Oct 09 13:38:28 crc kubenswrapper[4762]: E1009 13:38:28.773398 4762 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-dlp9b_crc-storage_2bdef39d-b324-48f1-a0f7-0f52130e848b_0(7d0a9f4bcf31a2d8eb0b18895283e439bd4981a79d545d2faa5fbd8fe2c71479): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="crc-storage/crc-storage-crc-dlp9b" Oct 09 13:38:28 crc kubenswrapper[4762]: E1009 13:38:28.773436 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"crc-storage-crc-dlp9b_crc-storage(2bdef39d-b324-48f1-a0f7-0f52130e848b)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"crc-storage-crc-dlp9b_crc-storage(2bdef39d-b324-48f1-a0f7-0f52130e848b)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-dlp9b_crc-storage_2bdef39d-b324-48f1-a0f7-0f52130e848b_0(7d0a9f4bcf31a2d8eb0b18895283e439bd4981a79d545d2faa5fbd8fe2c71479): no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\"" pod="crc-storage/crc-storage-crc-dlp9b" podUID="2bdef39d-b324-48f1-a0f7-0f52130e848b"
Oct 09 13:38:28 crc kubenswrapper[4762]: I1009 13:38:28.986674 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-j4f8g" event={"ID":"bd53b7b2-b8dd-4775-b9bc-d49856699640","Type":"ContainerStarted","Data":"a865bfb0186be5c6e60f1b2537db42c3a9a48d65129f08db911d7f35f359fa60"}
Oct 09 13:38:28 crc kubenswrapper[4762]: I1009 13:38:28.987139 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-j4f8g"
Oct 09 13:38:28 crc kubenswrapper[4762]: I1009 13:38:28.987187 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-j4f8g"
Oct 09 13:38:28 crc kubenswrapper[4762]: I1009 13:38:28.987203 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-j4f8g"
Oct 09 13:38:29 crc kubenswrapper[4762]: I1009 13:38:29.018031 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-j4f8g" podStartSLOduration=7.018012107 podStartE2EDuration="7.018012107s" podCreationTimestamp="2025-10-09 13:38:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:38:29.017512604 +0000 UTC m=+784.791303663" watchObservedRunningTime="2025-10-09 13:38:29.018012107 +0000 UTC m=+784.791803146"
Oct 09 13:38:29 crc kubenswrapper[4762]: I1009 13:38:29.022487 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-j4f8g"
Oct 09 13:38:29 crc kubenswrapper[4762]: I1009 13:38:29.032029 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-j4f8g"
Oct 09 13:38:40 crc kubenswrapper[4762]: I1009 13:38:40.964913 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-dlp9b"
Oct 09 13:38:40 crc kubenswrapper[4762]: I1009 13:38:40.966234 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-dlp9b"
Oct 09 13:38:41 crc kubenswrapper[4762]: I1009 13:38:41.377837 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-dlp9b"]
Oct 09 13:38:41 crc kubenswrapper[4762]: W1009 13:38:41.389086 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2bdef39d_b324_48f1_a0f7_0f52130e848b.slice/crio-6a0054f631054df5dd2b0ba9815a6bfc6b107c196b008d6b77911666628ce865 WatchSource:0}: Error finding container 6a0054f631054df5dd2b0ba9815a6bfc6b107c196b008d6b77911666628ce865: Status 404 returned error can't find the container with id 6a0054f631054df5dd2b0ba9815a6bfc6b107c196b008d6b77911666628ce865
Oct 09 13:38:41 crc kubenswrapper[4762]: I1009 13:38:41.391552 4762 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Oct 09 13:38:41 crc kubenswrapper[4762]: I1009 13:38:41.969554 4762 patch_prober.go:28] interesting pod/machine-config-daemon-5v6hv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 09 13:38:41 crc kubenswrapper[4762]: I1009 13:38:41.970187 4762 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 09 13:38:42 crc kubenswrapper[4762]: I1009 13:38:42.090080 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-dlp9b" event={"ID":"2bdef39d-b324-48f1-a0f7-0f52130e848b","Type":"ContainerStarted","Data":"6a0054f631054df5dd2b0ba9815a6bfc6b107c196b008d6b77911666628ce865"}
Oct 09 13:38:50 crc kubenswrapper[4762]: I1009 13:38:50.133486 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-dlp9b" event={"ID":"2bdef39d-b324-48f1-a0f7-0f52130e848b","Type":"ContainerStarted","Data":"f2340c19b01df8a1c6798b2c2daa291801c168b1de24bacf25f0e083c9792dcf"}
Oct 09 13:38:50 crc kubenswrapper[4762]: I1009 13:38:50.148425 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="crc-storage/crc-storage-crc-dlp9b" podStartSLOduration=15.880997604000001 podStartE2EDuration="24.148409646s" podCreationTimestamp="2025-10-09 13:38:26 +0000 UTC" firstStartedPulling="2025-10-09 13:38:41.391349295 +0000 UTC m=+797.165140334" lastFinishedPulling="2025-10-09 13:38:49.658761287 +0000 UTC m=+805.432552376" observedRunningTime="2025-10-09 13:38:50.1463503 +0000 UTC m=+805.920141339" watchObservedRunningTime="2025-10-09 13:38:50.148409646 +0000 UTC m=+805.922200685"
Oct 09 13:38:51 crc kubenswrapper[4762]: I1009 13:38:51.141024 4762 generic.go:334] "Generic (PLEG): container finished" podID="2bdef39d-b324-48f1-a0f7-0f52130e848b" containerID="f2340c19b01df8a1c6798b2c2daa291801c168b1de24bacf25f0e083c9792dcf" exitCode=0
Oct 09 13:38:51 crc kubenswrapper[4762]: I1009 13:38:51.141086 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-dlp9b" event={"ID":"2bdef39d-b324-48f1-a0f7-0f52130e848b","Type":"ContainerDied","Data":"f2340c19b01df8a1c6798b2c2daa291801c168b1de24bacf25f0e083c9792dcf"}
Oct 09 13:38:52 crc kubenswrapper[4762]: I1009 13:38:52.374151 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-dlp9b"
Oct 09 13:38:52 crc kubenswrapper[4762]: I1009 13:38:52.442495 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/2bdef39d-b324-48f1-a0f7-0f52130e848b-node-mnt\") pod \"2bdef39d-b324-48f1-a0f7-0f52130e848b\" (UID: \"2bdef39d-b324-48f1-a0f7-0f52130e848b\") "
Oct 09 13:38:52 crc kubenswrapper[4762]: I1009 13:38:52.442554 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/2bdef39d-b324-48f1-a0f7-0f52130e848b-crc-storage\") pod \"2bdef39d-b324-48f1-a0f7-0f52130e848b\" (UID: \"2bdef39d-b324-48f1-a0f7-0f52130e848b\") "
Oct 09 13:38:52 crc kubenswrapper[4762]: I1009 13:38:52.442577 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2bdef39d-b324-48f1-a0f7-0f52130e848b-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "2bdef39d-b324-48f1-a0f7-0f52130e848b" (UID: "2bdef39d-b324-48f1-a0f7-0f52130e848b"). InnerVolumeSpecName "node-mnt". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 09 13:38:52 crc kubenswrapper[4762]: I1009 13:38:52.442615 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2rdm5\" (UniqueName: \"kubernetes.io/projected/2bdef39d-b324-48f1-a0f7-0f52130e848b-kube-api-access-2rdm5\") pod \"2bdef39d-b324-48f1-a0f7-0f52130e848b\" (UID: \"2bdef39d-b324-48f1-a0f7-0f52130e848b\") "
Oct 09 13:38:52 crc kubenswrapper[4762]: I1009 13:38:52.442902 4762 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/2bdef39d-b324-48f1-a0f7-0f52130e848b-node-mnt\") on node \"crc\" DevicePath \"\""
Oct 09 13:38:52 crc kubenswrapper[4762]: I1009 13:38:52.448185 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2bdef39d-b324-48f1-a0f7-0f52130e848b-kube-api-access-2rdm5" (OuterVolumeSpecName: "kube-api-access-2rdm5") pod "2bdef39d-b324-48f1-a0f7-0f52130e848b" (UID: "2bdef39d-b324-48f1-a0f7-0f52130e848b"). InnerVolumeSpecName "kube-api-access-2rdm5". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 09 13:38:52 crc kubenswrapper[4762]: I1009 13:38:52.455334 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2bdef39d-b324-48f1-a0f7-0f52130e848b-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "2bdef39d-b324-48f1-a0f7-0f52130e848b" (UID: "2bdef39d-b324-48f1-a0f7-0f52130e848b"). InnerVolumeSpecName "crc-storage". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 09 13:38:52 crc kubenswrapper[4762]: I1009 13:38:52.543559 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2rdm5\" (UniqueName: \"kubernetes.io/projected/2bdef39d-b324-48f1-a0f7-0f52130e848b-kube-api-access-2rdm5\") on node \"crc\" DevicePath \"\""
Oct 09 13:38:52 crc kubenswrapper[4762]: I1009 13:38:52.543593 4762 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/2bdef39d-b324-48f1-a0f7-0f52130e848b-crc-storage\") on node \"crc\" DevicePath \"\""
Oct 09 13:38:52 crc kubenswrapper[4762]: I1009 13:38:52.786879 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-j4f8g"
Oct 09 13:38:53 crc kubenswrapper[4762]: I1009 13:38:53.154340 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-dlp9b" event={"ID":"2bdef39d-b324-48f1-a0f7-0f52130e848b","Type":"ContainerDied","Data":"6a0054f631054df5dd2b0ba9815a6bfc6b107c196b008d6b77911666628ce865"}
Oct 09 13:38:53 crc kubenswrapper[4762]: I1009 13:38:53.154378 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6a0054f631054df5dd2b0ba9815a6bfc6b107c196b008d6b77911666628ce865"
Oct 09 13:38:53 crc kubenswrapper[4762]: I1009 13:38:53.154420 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-dlp9b"
Oct 09 13:38:58 crc kubenswrapper[4762]: I1009 13:38:58.594665 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cfrq6x"]
Oct 09 13:38:58 crc kubenswrapper[4762]: E1009 13:38:58.596432 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2bdef39d-b324-48f1-a0f7-0f52130e848b" containerName="storage"
Oct 09 13:38:58 crc kubenswrapper[4762]: I1009 13:38:58.596534 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="2bdef39d-b324-48f1-a0f7-0f52130e848b" containerName="storage"
Oct 09 13:38:58 crc kubenswrapper[4762]: I1009 13:38:58.596786 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="2bdef39d-b324-48f1-a0f7-0f52130e848b" containerName="storage"
Oct 09 13:38:58 crc kubenswrapper[4762]: I1009 13:38:58.597794 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cfrq6x"
Oct 09 13:38:58 crc kubenswrapper[4762]: I1009 13:38:58.609876 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc"
Oct 09 13:38:58 crc kubenswrapper[4762]: I1009 13:38:58.614038 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cfrq6x"]
Oct 09 13:38:58 crc kubenswrapper[4762]: I1009 13:38:58.723372 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b49f0a42-424a-4b34-8adf-904ac4164e8f-util\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cfrq6x\" (UID: \"b49f0a42-424a-4b34-8adf-904ac4164e8f\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cfrq6x"
Oct 09 13:38:58 crc kubenswrapper[4762]: I1009 13:38:58.723434 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b49f0a42-424a-4b34-8adf-904ac4164e8f-bundle\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cfrq6x\" (UID: \"b49f0a42-424a-4b34-8adf-904ac4164e8f\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cfrq6x"
Oct 09 13:38:58 crc kubenswrapper[4762]: I1009 13:38:58.723511 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7hqsb\" (UniqueName: \"kubernetes.io/projected/b49f0a42-424a-4b34-8adf-904ac4164e8f-kube-api-access-7hqsb\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cfrq6x\" (UID: \"b49f0a42-424a-4b34-8adf-904ac4164e8f\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cfrq6x"
Oct 09 13:38:58 crc kubenswrapper[4762]: I1009 13:38:58.824567 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7hqsb\" (UniqueName: \"kubernetes.io/projected/b49f0a42-424a-4b34-8adf-904ac4164e8f-kube-api-access-7hqsb\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cfrq6x\" (UID: \"b49f0a42-424a-4b34-8adf-904ac4164e8f\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cfrq6x"
Oct 09 13:38:58 crc kubenswrapper[4762]: I1009 13:38:58.824892 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b49f0a42-424a-4b34-8adf-904ac4164e8f-util\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cfrq6x\" (UID: \"b49f0a42-424a-4b34-8adf-904ac4164e8f\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cfrq6x"
Oct 09 13:38:58 crc kubenswrapper[4762]: I1009 13:38:58.824981 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b49f0a42-424a-4b34-8adf-904ac4164e8f-bundle\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cfrq6x\" (UID: \"b49f0a42-424a-4b34-8adf-904ac4164e8f\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cfrq6x"
Oct 09 13:38:58 crc kubenswrapper[4762]: I1009 13:38:58.825617 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b49f0a42-424a-4b34-8adf-904ac4164e8f-util\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cfrq6x\" (UID: \"b49f0a42-424a-4b34-8adf-904ac4164e8f\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cfrq6x"
Oct 09 13:38:58 crc kubenswrapper[4762]: I1009 13:38:58.825923 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b49f0a42-424a-4b34-8adf-904ac4164e8f-bundle\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cfrq6x\" (UID: \"b49f0a42-424a-4b34-8adf-904ac4164e8f\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cfrq6x"
Oct 09 13:38:58 crc kubenswrapper[4762]: I1009 13:38:58.853386 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7hqsb\" (UniqueName: \"kubernetes.io/projected/b49f0a42-424a-4b34-8adf-904ac4164e8f-kube-api-access-7hqsb\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cfrq6x\" (UID: \"b49f0a42-424a-4b34-8adf-904ac4164e8f\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cfrq6x"
Oct 09 13:38:58 crc kubenswrapper[4762]: I1009 13:38:58.915508 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cfrq6x"
Oct 09 13:38:59 crc kubenswrapper[4762]: I1009 13:38:59.329519 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cfrq6x"]
Oct 09 13:38:59 crc kubenswrapper[4762]: W1009 13:38:59.337253 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb49f0a42_424a_4b34_8adf_904ac4164e8f.slice/crio-ce9f792dc08f5ea9bf480cb26673e3a7a22e913d93692087a55d57e36fcbd0fc WatchSource:0}: Error finding container ce9f792dc08f5ea9bf480cb26673e3a7a22e913d93692087a55d57e36fcbd0fc: Status 404 returned error can't find the container with id ce9f792dc08f5ea9bf480cb26673e3a7a22e913d93692087a55d57e36fcbd0fc
Oct 09 13:39:00 crc kubenswrapper[4762]: I1009 13:39:00.203241 4762 generic.go:334] "Generic (PLEG): container finished" podID="b49f0a42-424a-4b34-8adf-904ac4164e8f" containerID="1174f21fb617717afc6a153f30ae7bf3083b9ccbe1e5099f2dc98d34290f04c0" exitCode=0
Oct 09 13:39:00 crc kubenswrapper[4762]: I1009 13:39:00.203286 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cfrq6x" event={"ID":"b49f0a42-424a-4b34-8adf-904ac4164e8f","Type":"ContainerDied","Data":"1174f21fb617717afc6a153f30ae7bf3083b9ccbe1e5099f2dc98d34290f04c0"}
Oct 09 13:39:00 crc kubenswrapper[4762]: I1009 13:39:00.203310 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cfrq6x" event={"ID":"b49f0a42-424a-4b34-8adf-904ac4164e8f","Type":"ContainerStarted","Data":"ce9f792dc08f5ea9bf480cb26673e3a7a22e913d93692087a55d57e36fcbd0fc"}
Oct 09 13:39:00 crc kubenswrapper[4762]: I1009 13:39:00.951575 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-gqdkd"]
Oct 09 13:39:00 crc kubenswrapper[4762]: I1009 13:39:00.952556 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-gqdkd"
Oct 09 13:39:00 crc kubenswrapper[4762]: I1009 13:39:00.975420 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-gqdkd"]
Oct 09 13:39:01 crc kubenswrapper[4762]: I1009 13:39:01.052110 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a36a5c15-529c-47f8-81cd-67941004add7-catalog-content\") pod \"redhat-operators-gqdkd\" (UID: \"a36a5c15-529c-47f8-81cd-67941004add7\") " pod="openshift-marketplace/redhat-operators-gqdkd"
Oct 09 13:39:01 crc kubenswrapper[4762]: I1009 13:39:01.052351 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a36a5c15-529c-47f8-81cd-67941004add7-utilities\") pod \"redhat-operators-gqdkd\" (UID: \"a36a5c15-529c-47f8-81cd-67941004add7\") " pod="openshift-marketplace/redhat-operators-gqdkd"
Oct 09 13:39:01 crc kubenswrapper[4762]: I1009 13:39:01.052384 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6qc6v\" (UniqueName: \"kubernetes.io/projected/a36a5c15-529c-47f8-81cd-67941004add7-kube-api-access-6qc6v\") pod \"redhat-operators-gqdkd\" (UID: \"a36a5c15-529c-47f8-81cd-67941004add7\") " pod="openshift-marketplace/redhat-operators-gqdkd"
Oct 09 13:39:01 crc kubenswrapper[4762]: I1009 13:39:01.153027 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a36a5c15-529c-47f8-81cd-67941004add7-utilities\") pod \"redhat-operators-gqdkd\" (UID: \"a36a5c15-529c-47f8-81cd-67941004add7\") " pod="openshift-marketplace/redhat-operators-gqdkd"
Oct 09 13:39:01 crc kubenswrapper[4762]: I1009 13:39:01.153073 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6qc6v\" (UniqueName: \"kubernetes.io/projected/a36a5c15-529c-47f8-81cd-67941004add7-kube-api-access-6qc6v\") pod \"redhat-operators-gqdkd\" (UID: \"a36a5c15-529c-47f8-81cd-67941004add7\") " pod="openshift-marketplace/redhat-operators-gqdkd"
Oct 09 13:39:01 crc kubenswrapper[4762]: I1009 13:39:01.153142 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a36a5c15-529c-47f8-81cd-67941004add7-catalog-content\") pod \"redhat-operators-gqdkd\" (UID: \"a36a5c15-529c-47f8-81cd-67941004add7\") " pod="openshift-marketplace/redhat-operators-gqdkd"
Oct 09 13:39:01 crc kubenswrapper[4762]: I1009 13:39:01.153558 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a36a5c15-529c-47f8-81cd-67941004add7-catalog-content\") pod \"redhat-operators-gqdkd\" (UID: \"a36a5c15-529c-47f8-81cd-67941004add7\") " pod="openshift-marketplace/redhat-operators-gqdkd"
Oct 09 13:39:01 crc kubenswrapper[4762]: I1009 13:39:01.154016 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a36a5c15-529c-47f8-81cd-67941004add7-utilities\") pod \"redhat-operators-gqdkd\" (UID: \"a36a5c15-529c-47f8-81cd-67941004add7\") " pod="openshift-marketplace/redhat-operators-gqdkd"
Oct 09 13:39:01 crc kubenswrapper[4762]: I1009 13:39:01.172595 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6qc6v\" (UniqueName: \"kubernetes.io/projected/a36a5c15-529c-47f8-81cd-67941004add7-kube-api-access-6qc6v\") pod \"redhat-operators-gqdkd\" (UID: \"a36a5c15-529c-47f8-81cd-67941004add7\") " pod="openshift-marketplace/redhat-operators-gqdkd"
Oct 09 13:39:01 crc kubenswrapper[4762]: I1009 13:39:01.275927 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-gqdkd"
Oct 09 13:39:01 crc kubenswrapper[4762]: I1009 13:39:01.475494 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-gqdkd"]
Oct 09 13:39:01 crc kubenswrapper[4762]: W1009 13:39:01.480154 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda36a5c15_529c_47f8_81cd_67941004add7.slice/crio-1e601f4bf2837d891b146d0164790690fedb2f7b54f48125a956fccf8367e0cb WatchSource:0}: Error finding container 1e601f4bf2837d891b146d0164790690fedb2f7b54f48125a956fccf8367e0cb: Status 404 returned error can't find the container with id 1e601f4bf2837d891b146d0164790690fedb2f7b54f48125a956fccf8367e0cb
Oct 09 13:39:02 crc kubenswrapper[4762]: I1009 13:39:02.215448 4762 generic.go:334] "Generic (PLEG): container finished" podID="a36a5c15-529c-47f8-81cd-67941004add7" containerID="922c8b9dbd044bb9200873e0a396c82de6da3bf9cd0655e3cebbcd617dd64ac4" exitCode=0
Oct 09 13:39:02 crc kubenswrapper[4762]: I1009 13:39:02.215519 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gqdkd" event={"ID":"a36a5c15-529c-47f8-81cd-67941004add7","Type":"ContainerDied","Data":"922c8b9dbd044bb9200873e0a396c82de6da3bf9cd0655e3cebbcd617dd64ac4"}
Oct 09 13:39:02 crc kubenswrapper[4762]: I1009 13:39:02.215841 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gqdkd" event={"ID":"a36a5c15-529c-47f8-81cd-67941004add7","Type":"ContainerStarted","Data":"1e601f4bf2837d891b146d0164790690fedb2f7b54f48125a956fccf8367e0cb"}
Oct 09 13:39:03 crc kubenswrapper[4762]: I1009 13:39:03.223430 4762 generic.go:334] "Generic (PLEG): container finished" podID="b49f0a42-424a-4b34-8adf-904ac4164e8f" containerID="bcaec18ae60a77efb6751cd879ba54c3a2b7ff675e5e4b167cdbb727fd67655c" exitCode=0
Oct 09 13:39:03 crc kubenswrapper[4762]: I1009 13:39:03.223478 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cfrq6x" event={"ID":"b49f0a42-424a-4b34-8adf-904ac4164e8f","Type":"ContainerDied","Data":"bcaec18ae60a77efb6751cd879ba54c3a2b7ff675e5e4b167cdbb727fd67655c"}
Oct 09 13:39:04 crc kubenswrapper[4762]: I1009 13:39:04.231376 4762 generic.go:334] "Generic (PLEG): container finished" podID="b49f0a42-424a-4b34-8adf-904ac4164e8f" containerID="aff0f327d2955b3a1c1295b357c543fd5925097612883de523f61778123b4f5a" exitCode=0
Oct 09 13:39:04 crc kubenswrapper[4762]: I1009 13:39:04.231441 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cfrq6x" event={"ID":"b49f0a42-424a-4b34-8adf-904ac4164e8f","Type":"ContainerDied","Data":"aff0f327d2955b3a1c1295b357c543fd5925097612883de523f61778123b4f5a"}
Oct 09 13:39:05 crc kubenswrapper[4762]: I1009 13:39:05.238451 4762 generic.go:334] "Generic (PLEG): container finished" podID="a36a5c15-529c-47f8-81cd-67941004add7" containerID="e3093da929684519c463e3a4b72d50bef2ffce11b39f42fe4a16063d818c8145" exitCode=0
Oct 09 13:39:05 crc kubenswrapper[4762]: I1009 13:39:05.238497 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gqdkd" event={"ID":"a36a5c15-529c-47f8-81cd-67941004add7","Type":"ContainerDied","Data":"e3093da929684519c463e3a4b72d50bef2ffce11b39f42fe4a16063d818c8145"}
Oct 09 13:39:05 crc kubenswrapper[4762]: I1009 13:39:05.509877 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cfrq6x"
Oct 09 13:39:05 crc kubenswrapper[4762]: I1009 13:39:05.604518 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7hqsb\" (UniqueName: \"kubernetes.io/projected/b49f0a42-424a-4b34-8adf-904ac4164e8f-kube-api-access-7hqsb\") pod \"b49f0a42-424a-4b34-8adf-904ac4164e8f\" (UID: \"b49f0a42-424a-4b34-8adf-904ac4164e8f\") "
Oct 09 13:39:05 crc kubenswrapper[4762]: I1009 13:39:05.604625 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b49f0a42-424a-4b34-8adf-904ac4164e8f-bundle\") pod \"b49f0a42-424a-4b34-8adf-904ac4164e8f\" (UID: \"b49f0a42-424a-4b34-8adf-904ac4164e8f\") "
Oct 09 13:39:05 crc kubenswrapper[4762]: I1009 13:39:05.604671 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b49f0a42-424a-4b34-8adf-904ac4164e8f-util\") pod \"b49f0a42-424a-4b34-8adf-904ac4164e8f\" (UID: \"b49f0a42-424a-4b34-8adf-904ac4164e8f\") "
Oct 09 13:39:05 crc kubenswrapper[4762]: I1009 13:39:05.605402 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b49f0a42-424a-4b34-8adf-904ac4164e8f-bundle" (OuterVolumeSpecName: "bundle") pod "b49f0a42-424a-4b34-8adf-904ac4164e8f" (UID: "b49f0a42-424a-4b34-8adf-904ac4164e8f"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 09 13:39:05 crc kubenswrapper[4762]: I1009 13:39:05.609961 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b49f0a42-424a-4b34-8adf-904ac4164e8f-kube-api-access-7hqsb" (OuterVolumeSpecName: "kube-api-access-7hqsb") pod "b49f0a42-424a-4b34-8adf-904ac4164e8f" (UID: "b49f0a42-424a-4b34-8adf-904ac4164e8f"). InnerVolumeSpecName "kube-api-access-7hqsb". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 09 13:39:05 crc kubenswrapper[4762]: I1009 13:39:05.614424 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b49f0a42-424a-4b34-8adf-904ac4164e8f-util" (OuterVolumeSpecName: "util") pod "b49f0a42-424a-4b34-8adf-904ac4164e8f" (UID: "b49f0a42-424a-4b34-8adf-904ac4164e8f"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 09 13:39:05 crc kubenswrapper[4762]: I1009 13:39:05.706071 4762 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b49f0a42-424a-4b34-8adf-904ac4164e8f-bundle\") on node \"crc\" DevicePath \"\""
Oct 09 13:39:05 crc kubenswrapper[4762]: I1009 13:39:05.706112 4762 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b49f0a42-424a-4b34-8adf-904ac4164e8f-util\") on node \"crc\" DevicePath \"\""
Oct 09 13:39:05 crc kubenswrapper[4762]: I1009 13:39:05.706126 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7hqsb\" (UniqueName: \"kubernetes.io/projected/b49f0a42-424a-4b34-8adf-904ac4164e8f-kube-api-access-7hqsb\") on node \"crc\" DevicePath \"\""
Oct 09 13:39:06 crc kubenswrapper[4762]: I1009 13:39:06.245800 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gqdkd" event={"ID":"a36a5c15-529c-47f8-81cd-67941004add7","Type":"ContainerStarted","Data":"6c1d78b4c87f4043292ab50e17e84105d1233b4d8c54b39ba8d8484b3e98dc20"}
Oct 09 13:39:06 crc kubenswrapper[4762]: I1009 13:39:06.248185 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cfrq6x" event={"ID":"b49f0a42-424a-4b34-8adf-904ac4164e8f","Type":"ContainerDied","Data":"ce9f792dc08f5ea9bf480cb26673e3a7a22e913d93692087a55d57e36fcbd0fc"}
Oct 09 13:39:06 crc kubenswrapper[4762]: I1009 13:39:06.248215 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ce9f792dc08f5ea9bf480cb26673e3a7a22e913d93692087a55d57e36fcbd0fc"
Oct 09 13:39:06 crc kubenswrapper[4762]: I1009 13:39:06.248259 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cfrq6x"
Oct 09 13:39:06 crc kubenswrapper[4762]: I1009 13:39:06.262180 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-gqdkd" podStartSLOduration=3.005306945 podStartE2EDuration="6.262165095s" podCreationTimestamp="2025-10-09 13:39:00 +0000 UTC" firstStartedPulling="2025-10-09 13:39:02.52702641 +0000 UTC m=+818.300817449" lastFinishedPulling="2025-10-09 13:39:05.78388453 +0000 UTC m=+821.557675599" observedRunningTime="2025-10-09 13:39:06.259766262 +0000 UTC m=+822.033557321" watchObservedRunningTime="2025-10-09 13:39:06.262165095 +0000 UTC m=+822.035956134"
Oct 09 13:39:10 crc kubenswrapper[4762]: I1009 13:39:10.140121 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-858ddd8f98-bskzv"]
Oct 09 13:39:10 crc kubenswrapper[4762]: E1009 13:39:10.140649 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b49f0a42-424a-4b34-8adf-904ac4164e8f" containerName="pull"
Oct 09 13:39:10 crc kubenswrapper[4762]: I1009 13:39:10.140662 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="b49f0a42-424a-4b34-8adf-904ac4164e8f" containerName="pull"
Oct 09 13:39:10 crc kubenswrapper[4762]: E1009 13:39:10.140684 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b49f0a42-424a-4b34-8adf-904ac4164e8f" containerName="util"
Oct 09 13:39:10 crc kubenswrapper[4762]: I1009 13:39:10.140690 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="b49f0a42-424a-4b34-8adf-904ac4164e8f" containerName="util"
Oct 09 13:39:10 crc kubenswrapper[4762]: E1009 13:39:10.140702 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b49f0a42-424a-4b34-8adf-904ac4164e8f" containerName="extract"
Oct 09 13:39:10 crc kubenswrapper[4762]: I1009 13:39:10.140707 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="b49f0a42-424a-4b34-8adf-904ac4164e8f" containerName="extract"
Oct 09 13:39:10 crc kubenswrapper[4762]: I1009 13:39:10.140816 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="b49f0a42-424a-4b34-8adf-904ac4164e8f" containerName="extract"
Oct 09 13:39:10 crc kubenswrapper[4762]: I1009 13:39:10.141211 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-operator-858ddd8f98-bskzv"
Oct 09 13:39:10 crc kubenswrapper[4762]: I1009 13:39:10.143299 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-6ssdk"
Oct 09 13:39:10 crc kubenswrapper[4762]: I1009 13:39:10.143942 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt"
Oct 09 13:39:10 crc kubenswrapper[4762]: I1009 13:39:10.145890 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt"
Oct 09 13:39:10 crc kubenswrapper[4762]: I1009 13:39:10.152920 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-858ddd8f98-bskzv"]
Oct 09 13:39:10 crc kubenswrapper[4762]: I1009 13:39:10.272373 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rwqh8\" (UniqueName: \"kubernetes.io/projected/6abae913-1b37-4721-941f-5aa3b6803c77-kube-api-access-rwqh8\") pod \"nmstate-operator-858ddd8f98-bskzv\" (UID: \"6abae913-1b37-4721-941f-5aa3b6803c77\") " pod="openshift-nmstate/nmstate-operator-858ddd8f98-bskzv"
Oct 09 13:39:10 crc kubenswrapper[4762]: I1009 13:39:10.373672 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rwqh8\" (UniqueName: \"kubernetes.io/projected/6abae913-1b37-4721-941f-5aa3b6803c77-kube-api-access-rwqh8\") pod \"nmstate-operator-858ddd8f98-bskzv\" (UID: \"6abae913-1b37-4721-941f-5aa3b6803c77\") " pod="openshift-nmstate/nmstate-operator-858ddd8f98-bskzv"
Oct 09 13:39:10 crc kubenswrapper[4762]: I1009 13:39:10.392127 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rwqh8\" (UniqueName: \"kubernetes.io/projected/6abae913-1b37-4721-941f-5aa3b6803c77-kube-api-access-rwqh8\") pod \"nmstate-operator-858ddd8f98-bskzv\" (UID: \"6abae913-1b37-4721-941f-5aa3b6803c77\") " pod="openshift-nmstate/nmstate-operator-858ddd8f98-bskzv"
Oct 09 13:39:10 crc kubenswrapper[4762]: I1009 13:39:10.459045 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-operator-858ddd8f98-bskzv"
Oct 09 13:39:10 crc kubenswrapper[4762]: I1009 13:39:10.835495 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-858ddd8f98-bskzv"]
Oct 09 13:39:10 crc kubenswrapper[4762]: W1009 13:39:10.844112 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6abae913_1b37_4721_941f_5aa3b6803c77.slice/crio-9e37a3efa78ea36aa3b6ffff6befcd8dd0ab6e89dde6de82e46ee95ab0154ed1 WatchSource:0}: Error finding container 9e37a3efa78ea36aa3b6ffff6befcd8dd0ab6e89dde6de82e46ee95ab0154ed1: Status 404 returned error can't find the container with id 9e37a3efa78ea36aa3b6ffff6befcd8dd0ab6e89dde6de82e46ee95ab0154ed1
Oct 09 13:39:11 crc kubenswrapper[4762]: I1009 13:39:11.272178 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-858ddd8f98-bskzv" event={"ID":"6abae913-1b37-4721-941f-5aa3b6803c77","Type":"ContainerStarted","Data":"9e37a3efa78ea36aa3b6ffff6befcd8dd0ab6e89dde6de82e46ee95ab0154ed1"}
Oct 09 13:39:11 crc kubenswrapper[4762]: I1009 13:39:11.276394 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-gqdkd"
Oct 09 13:39:11 crc kubenswrapper[4762]: I1009 13:39:11.276428 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-gqdkd"
Oct 09 13:39:11 crc kubenswrapper[4762]: I1009 13:39:11.315373 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-gqdkd"
Oct 09 13:39:11 crc kubenswrapper[4762]: I1009 13:39:11.969232 4762 patch_prober.go:28] interesting pod/machine-config-daemon-5v6hv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 09 13:39:11 crc kubenswrapper[4762]: I1009 13:39:11.969555 4762 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 09 13:39:12 crc kubenswrapper[4762]: I1009 13:39:12.312927 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-gqdkd"
Oct 09 13:39:13 crc kubenswrapper[4762]: I1009 13:39:13.942445 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-gqdkd"]
Oct 09 13:39:14 crc kubenswrapper[4762]: I1009 13:39:14.287265 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-gqdkd" podUID="a36a5c15-529c-47f8-81cd-67941004add7" containerName="registry-server" containerID="cri-o://6c1d78b4c87f4043292ab50e17e84105d1233b4d8c54b39ba8d8484b3e98dc20" gracePeriod=2
Oct 09 13:39:14 crc kubenswrapper[4762]: I1009 13:39:14.971747 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-gqdkd"
Oct 09 13:39:15 crc kubenswrapper[4762]: I1009 13:39:15.136210 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6qc6v\" (UniqueName: \"kubernetes.io/projected/a36a5c15-529c-47f8-81cd-67941004add7-kube-api-access-6qc6v\") pod \"a36a5c15-529c-47f8-81cd-67941004add7\" (UID: \"a36a5c15-529c-47f8-81cd-67941004add7\") "
Oct 09 13:39:15 crc kubenswrapper[4762]: I1009 13:39:15.136319 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a36a5c15-529c-47f8-81cd-67941004add7-utilities\") pod \"a36a5c15-529c-47f8-81cd-67941004add7\" (UID: \"a36a5c15-529c-47f8-81cd-67941004add7\") "
Oct 09 13:39:15 crc kubenswrapper[4762]: I1009 13:39:15.136422 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a36a5c15-529c-47f8-81cd-67941004add7-catalog-content\") pod \"a36a5c15-529c-47f8-81cd-67941004add7\" (UID: \"a36a5c15-529c-47f8-81cd-67941004add7\") "
Oct 09 13:39:15 crc kubenswrapper[4762]: I1009 13:39:15.137651 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a36a5c15-529c-47f8-81cd-67941004add7-utilities" (OuterVolumeSpecName: "utilities") pod "a36a5c15-529c-47f8-81cd-67941004add7" (UID: "a36a5c15-529c-47f8-81cd-67941004add7"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 09 13:39:15 crc kubenswrapper[4762]: I1009 13:39:15.143159 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a36a5c15-529c-47f8-81cd-67941004add7-kube-api-access-6qc6v" (OuterVolumeSpecName: "kube-api-access-6qc6v") pod "a36a5c15-529c-47f8-81cd-67941004add7" (UID: "a36a5c15-529c-47f8-81cd-67941004add7"). InnerVolumeSpecName "kube-api-access-6qc6v". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 09 13:39:15 crc kubenswrapper[4762]: I1009 13:39:15.238149 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6qc6v\" (UniqueName: \"kubernetes.io/projected/a36a5c15-529c-47f8-81cd-67941004add7-kube-api-access-6qc6v\") on node \"crc\" DevicePath \"\""
Oct 09 13:39:15 crc kubenswrapper[4762]: I1009 13:39:15.238190 4762 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a36a5c15-529c-47f8-81cd-67941004add7-utilities\") on node \"crc\" DevicePath \"\""
Oct 09 13:39:15 crc kubenswrapper[4762]: I1009 13:39:15.295018 4762 generic.go:334] "Generic (PLEG): container finished" podID="a36a5c15-529c-47f8-81cd-67941004add7" containerID="6c1d78b4c87f4043292ab50e17e84105d1233b4d8c54b39ba8d8484b3e98dc20" exitCode=0
Oct 09 13:39:15 crc kubenswrapper[4762]: I1009 13:39:15.295087 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gqdkd" event={"ID":"a36a5c15-529c-47f8-81cd-67941004add7","Type":"ContainerDied","Data":"6c1d78b4c87f4043292ab50e17e84105d1233b4d8c54b39ba8d8484b3e98dc20"}
Oct 09 13:39:15 crc kubenswrapper[4762]: I1009 13:39:15.295155 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gqdkd" event={"ID":"a36a5c15-529c-47f8-81cd-67941004add7","Type":"ContainerDied","Data":"1e601f4bf2837d891b146d0164790690fedb2f7b54f48125a956fccf8367e0cb"}
Oct 09 13:39:15 crc kubenswrapper[4762]: I1009 13:39:15.295174 4762 scope.go:117] "RemoveContainer" containerID="6c1d78b4c87f4043292ab50e17e84105d1233b4d8c54b39ba8d8484b3e98dc20"
Oct 09 13:39:15 crc kubenswrapper[4762]: I1009 13:39:15.295113 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-gqdkd"
Oct 09 13:39:15 crc kubenswrapper[4762]: I1009 13:39:15.312825 4762 scope.go:117] "RemoveContainer" containerID="e3093da929684519c463e3a4b72d50bef2ffce11b39f42fe4a16063d818c8145"
Oct 09 13:39:15 crc kubenswrapper[4762]: I1009 13:39:15.334471 4762 scope.go:117] "RemoveContainer" containerID="922c8b9dbd044bb9200873e0a396c82de6da3bf9cd0655e3cebbcd617dd64ac4"
Oct 09 13:39:15 crc kubenswrapper[4762]: I1009 13:39:15.353396 4762 scope.go:117] "RemoveContainer" containerID="6c1d78b4c87f4043292ab50e17e84105d1233b4d8c54b39ba8d8484b3e98dc20"
Oct 09 13:39:15 crc kubenswrapper[4762]: E1009 13:39:15.353732 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6c1d78b4c87f4043292ab50e17e84105d1233b4d8c54b39ba8d8484b3e98dc20\": container with ID starting with 6c1d78b4c87f4043292ab50e17e84105d1233b4d8c54b39ba8d8484b3e98dc20 not found: ID does not exist" containerID="6c1d78b4c87f4043292ab50e17e84105d1233b4d8c54b39ba8d8484b3e98dc20"
Oct 09 13:39:15 crc kubenswrapper[4762]: I1009 13:39:15.353772 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6c1d78b4c87f4043292ab50e17e84105d1233b4d8c54b39ba8d8484b3e98dc20"} err="failed to get container status \"6c1d78b4c87f4043292ab50e17e84105d1233b4d8c54b39ba8d8484b3e98dc20\": rpc error: code = NotFound desc = could not find container \"6c1d78b4c87f4043292ab50e17e84105d1233b4d8c54b39ba8d8484b3e98dc20\": container with ID starting with 6c1d78b4c87f4043292ab50e17e84105d1233b4d8c54b39ba8d8484b3e98dc20 not found: ID does not exist"
Oct 09 13:39:15 crc kubenswrapper[4762]: I1009 13:39:15.353799 4762 scope.go:117] "RemoveContainer" containerID="e3093da929684519c463e3a4b72d50bef2ffce11b39f42fe4a16063d818c8145"
Oct 09 13:39:15 crc kubenswrapper[4762]: E1009 13:39:15.354036 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e3093da929684519c463e3a4b72d50bef2ffce11b39f42fe4a16063d818c8145\": container with ID starting with e3093da929684519c463e3a4b72d50bef2ffce11b39f42fe4a16063d818c8145 not found: ID does not exist" containerID="e3093da929684519c463e3a4b72d50bef2ffce11b39f42fe4a16063d818c8145"
Oct 09 13:39:15 crc kubenswrapper[4762]: I1009 13:39:15.354064 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e3093da929684519c463e3a4b72d50bef2ffce11b39f42fe4a16063d818c8145"} err="failed to get container status \"e3093da929684519c463e3a4b72d50bef2ffce11b39f42fe4a16063d818c8145\": rpc error: code = NotFound desc = could not find container \"e3093da929684519c463e3a4b72d50bef2ffce11b39f42fe4a16063d818c8145\": container with ID starting with e3093da929684519c463e3a4b72d50bef2ffce11b39f42fe4a16063d818c8145 not found: ID does not exist"
Oct 09 13:39:15 crc kubenswrapper[4762]: I1009 13:39:15.354086 4762 scope.go:117] "RemoveContainer" containerID="922c8b9dbd044bb9200873e0a396c82de6da3bf9cd0655e3cebbcd617dd64ac4"
Oct 09 13:39:15 crc kubenswrapper[4762]: E1009 13:39:15.354302 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"922c8b9dbd044bb9200873e0a396c82de6da3bf9cd0655e3cebbcd617dd64ac4\": container with ID starting with 922c8b9dbd044bb9200873e0a396c82de6da3bf9cd0655e3cebbcd617dd64ac4 not found: ID does not exist" containerID="922c8b9dbd044bb9200873e0a396c82de6da3bf9cd0655e3cebbcd617dd64ac4"
Oct 09 13:39:15 crc kubenswrapper[4762]: I1009 13:39:15.354327 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"922c8b9dbd044bb9200873e0a396c82de6da3bf9cd0655e3cebbcd617dd64ac4"} err="failed to get container status \"922c8b9dbd044bb9200873e0a396c82de6da3bf9cd0655e3cebbcd617dd64ac4\": rpc error: code = NotFound desc = could not find container \"922c8b9dbd044bb9200873e0a396c82de6da3bf9cd0655e3cebbcd617dd64ac4\": container with ID starting with 922c8b9dbd044bb9200873e0a396c82de6da3bf9cd0655e3cebbcd617dd64ac4 not found: ID does not exist"
Oct 09 13:39:15 crc kubenswrapper[4762]: I1009 13:39:15.429483 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a36a5c15-529c-47f8-81cd-67941004add7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a36a5c15-529c-47f8-81cd-67941004add7" (UID: "a36a5c15-529c-47f8-81cd-67941004add7"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 09 13:39:15 crc kubenswrapper[4762]: I1009 13:39:15.439784 4762 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a36a5c15-529c-47f8-81cd-67941004add7-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 09 13:39:15 crc kubenswrapper[4762]: I1009 13:39:15.644303 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-gqdkd"]
Oct 09 13:39:15 crc kubenswrapper[4762]: I1009 13:39:15.649204 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-gqdkd"]
Oct 09 13:39:16 crc kubenswrapper[4762]: I1009 13:39:16.976519 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a36a5c15-529c-47f8-81cd-67941004add7" path="/var/lib/kubelet/pods/a36a5c15-529c-47f8-81cd-67941004add7/volumes"
Oct 09 13:39:17 crc kubenswrapper[4762]: I1009 13:39:17.310430 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-858ddd8f98-bskzv" event={"ID":"6abae913-1b37-4721-941f-5aa3b6803c77","Type":"ContainerStarted","Data":"3163c8f41e7553b57173c04334043cd291f16e4bc51850f75b17bf878372d25c"}
Oct 09 13:39:17 crc kubenswrapper[4762]: I1009 13:39:17.328048 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-858ddd8f98-bskzv" podStartSLOduration=2.032767788 podStartE2EDuration="7.328024503s" podCreationTimestamp="2025-10-09 13:39:10 +0000 UTC" firstStartedPulling="2025-10-09 13:39:10.845695088 +0000 UTC m=+826.619486127" lastFinishedPulling="2025-10-09 13:39:16.140951803 +0000 UTC m=+831.914742842" observedRunningTime="2025-10-09 13:39:17.32791412 +0000 UTC m=+833.101705189" watchObservedRunningTime="2025-10-09 13:39:17.328024503 +0000 UTC m=+833.101815552"
Oct 09 13:39:18 crc kubenswrapper[4762]: I1009 13:39:18.909691 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-fdff9cb8d-wgxbz"]
Oct 09 13:39:18 crc kubenswrapper[4762]: E1009 13:39:18.910242 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a36a5c15-529c-47f8-81cd-67941004add7" containerName="extract-utilities"
Oct 09 13:39:18 crc kubenswrapper[4762]: I1009 13:39:18.910259 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="a36a5c15-529c-47f8-81cd-67941004add7" containerName="extract-utilities"
Oct 09 13:39:18 crc kubenswrapper[4762]: E1009 13:39:18.910276 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a36a5c15-529c-47f8-81cd-67941004add7" containerName="registry-server"
Oct 09 13:39:18 crc kubenswrapper[4762]: I1009 13:39:18.910284 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="a36a5c15-529c-47f8-81cd-67941004add7" containerName="registry-server"
Oct 09 13:39:18 crc kubenswrapper[4762]: E1009 13:39:18.910298 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a36a5c15-529c-47f8-81cd-67941004add7" containerName="extract-content"
Oct 09 13:39:18 crc kubenswrapper[4762]: I1009 13:39:18.910306 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="a36a5c15-529c-47f8-81cd-67941004add7" containerName="extract-content"
Oct 09 13:39:18 crc kubenswrapper[4762]: I1009 13:39:18.910427 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="a36a5c15-529c-47f8-81cd-67941004add7" containerName="registry-server"
Oct 09 13:39:18 crc kubenswrapper[4762]: I1009 13:39:18.911093 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-wgxbz"
Oct 09 13:39:18 crc kubenswrapper[4762]: I1009 13:39:18.913867 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-588p8"
Oct 09 13:39:18 crc kubenswrapper[4762]: I1009 13:39:18.920717 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-6cdbc54649-cpm9k"]
Oct 09 13:39:18 crc kubenswrapper[4762]: I1009 13:39:18.921419 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-cpm9k"
Oct 09 13:39:18 crc kubenswrapper[4762]: I1009 13:39:18.923501 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook"
Oct 09 13:39:18 crc kubenswrapper[4762]: I1009 13:39:18.943796 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-6cdbc54649-cpm9k"]
Oct 09 13:39:18 crc kubenswrapper[4762]: I1009 13:39:18.955651 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-hvkc5"]
Oct 09 13:39:18 crc kubenswrapper[4762]: I1009 13:39:18.956534 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-hvkc5"
Oct 09 13:39:18 crc kubenswrapper[4762]: I1009 13:39:18.985509 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-fdff9cb8d-wgxbz"]
Oct 09 13:39:18 crc kubenswrapper[4762]: I1009 13:39:18.986593 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-946ft\" (UniqueName: \"kubernetes.io/projected/334a2699-c1e4-42e0-b2ce-a2f699f1347c-kube-api-access-946ft\") pod \"nmstate-metrics-fdff9cb8d-wgxbz\" (UID: \"334a2699-c1e4-42e0-b2ce-a2f699f1347c\") " pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-wgxbz"
Oct 09 13:39:19 crc kubenswrapper[4762]: I1009 13:39:19.047811 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-6b874cbd85-ntz88"]
Oct 09 13:39:19 crc kubenswrapper[4762]: I1009 13:39:19.048879 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-ntz88"
Oct 09 13:39:19 crc kubenswrapper[4762]: I1009 13:39:19.050676 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-vnsfx"
Oct 09 13:39:19 crc kubenswrapper[4762]: I1009 13:39:19.051213 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert"
Oct 09 13:39:19 crc kubenswrapper[4762]: I1009 13:39:19.054326 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf"
Oct 09 13:39:19 crc kubenswrapper[4762]: I1009 13:39:19.054750 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-6b874cbd85-ntz88"]
Oct 09 13:39:19 crc kubenswrapper[4762]: I1009 13:39:19.087753 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-946ft\" (UniqueName: \"kubernetes.io/projected/334a2699-c1e4-42e0-b2ce-a2f699f1347c-kube-api-access-946ft\") pod \"nmstate-metrics-fdff9cb8d-wgxbz\" (UID: \"334a2699-c1e4-42e0-b2ce-a2f699f1347c\") " pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-wgxbz"
Oct 09 13:39:19 crc kubenswrapper[4762]: I1009 13:39:19.087810 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/cfd67a5a-8008-4191-a683-5f5c19ccc8c9-ovs-socket\") pod \"nmstate-handler-hvkc5\" (UID: \"cfd67a5a-8008-4191-a683-5f5c19ccc8c9\") " pod="openshift-nmstate/nmstate-handler-hvkc5"
Oct 09 13:39:19 crc kubenswrapper[4762]: I1009 13:39:19.087838 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/2e9b466f-eddb-464a-b245-1008e19793b0-tls-key-pair\") pod \"nmstate-webhook-6cdbc54649-cpm9k\" (UID: \"2e9b466f-eddb-464a-b245-1008e19793b0\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-cpm9k"
Oct 09 13:39:19 crc kubenswrapper[4762]: I1009 13:39:19.087858 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n5hdq\" (UniqueName: \"kubernetes.io/projected/cfd67a5a-8008-4191-a683-5f5c19ccc8c9-kube-api-access-n5hdq\") pod \"nmstate-handler-hvkc5\" (UID: \"cfd67a5a-8008-4191-a683-5f5c19ccc8c9\") " pod="openshift-nmstate/nmstate-handler-hvkc5"
Oct 09 13:39:19 crc kubenswrapper[4762]: I1009 13:39:19.087876 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/cfd67a5a-8008-4191-a683-5f5c19ccc8c9-dbus-socket\") pod \"nmstate-handler-hvkc5\" (UID: \"cfd67a5a-8008-4191-a683-5f5c19ccc8c9\") " pod="openshift-nmstate/nmstate-handler-hvkc5"
Oct 09 13:39:19 crc kubenswrapper[4762]: I1009 13:39:19.087905 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/cfd67a5a-8008-4191-a683-5f5c19ccc8c9-nmstate-lock\") pod \"nmstate-handler-hvkc5\" (UID: \"cfd67a5a-8008-4191-a683-5f5c19ccc8c9\") " pod="openshift-nmstate/nmstate-handler-hvkc5"
Oct 09 13:39:19 crc kubenswrapper[4762]: I1009 13:39:19.087927 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dtx6g\" (UniqueName: \"kubernetes.io/projected/2e9b466f-eddb-464a-b245-1008e19793b0-kube-api-access-dtx6g\") pod \"nmstate-webhook-6cdbc54649-cpm9k\" (UID: \"2e9b466f-eddb-464a-b245-1008e19793b0\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-cpm9k"
Oct 09 13:39:19 crc kubenswrapper[4762]: I1009 13:39:19.110096 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-946ft\" (UniqueName: \"kubernetes.io/projected/334a2699-c1e4-42e0-b2ce-a2f699f1347c-kube-api-access-946ft\") pod \"nmstate-metrics-fdff9cb8d-wgxbz\" (UID: \"334a2699-c1e4-42e0-b2ce-a2f699f1347c\") " pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-wgxbz"
Oct 09 13:39:19 crc kubenswrapper[4762]: I1009 13:39:19.188850 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dtx6g\" (UniqueName: \"kubernetes.io/projected/2e9b466f-eddb-464a-b245-1008e19793b0-kube-api-access-dtx6g\") pod \"nmstate-webhook-6cdbc54649-cpm9k\" (UID: \"2e9b466f-eddb-464a-b245-1008e19793b0\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-cpm9k"
Oct 09 13:39:19 crc kubenswrapper[4762]: I1009 13:39:19.189155 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/54b5d227-e033-49ec-b96f-74ec617c74cd-nginx-conf\") pod \"nmstate-console-plugin-6b874cbd85-ntz88\" (UID: \"54b5d227-e033-49ec-b96f-74ec617c74cd\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-ntz88"
Oct 09 13:39:19 crc kubenswrapper[4762]: I1009 13:39:19.189209 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/cfd67a5a-8008-4191-a683-5f5c19ccc8c9-ovs-socket\") pod \"nmstate-handler-hvkc5\" (UID: \"cfd67a5a-8008-4191-a683-5f5c19ccc8c9\") " pod="openshift-nmstate/nmstate-handler-hvkc5"
Oct 09 13:39:19 crc kubenswrapper[4762]: I1009 13:39:19.189259 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/54b5d227-e033-49ec-b96f-74ec617c74cd-plugin-serving-cert\") pod \"nmstate-console-plugin-6b874cbd85-ntz88\" (UID: \"54b5d227-e033-49ec-b96f-74ec617c74cd\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-ntz88"
Oct 09 13:39:19 crc kubenswrapper[4762]: I1009 13:39:19.189286 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/2e9b466f-eddb-464a-b245-1008e19793b0-tls-key-pair\") pod \"nmstate-webhook-6cdbc54649-cpm9k\" (UID: \"2e9b466f-eddb-464a-b245-1008e19793b0\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-cpm9k"
Oct 09 13:39:19 crc kubenswrapper[4762]: I1009 13:39:19.189304 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w62v4\" (UniqueName: \"kubernetes.io/projected/54b5d227-e033-49ec-b96f-74ec617c74cd-kube-api-access-w62v4\") pod \"nmstate-console-plugin-6b874cbd85-ntz88\" (UID: \"54b5d227-e033-49ec-b96f-74ec617c74cd\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-ntz88"
Oct 09 13:39:19 crc kubenswrapper[4762]: I1009 13:39:19.189325 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n5hdq\" (UniqueName: \"kubernetes.io/projected/cfd67a5a-8008-4191-a683-5f5c19ccc8c9-kube-api-access-n5hdq\") pod \"nmstate-handler-hvkc5\" (UID: \"cfd67a5a-8008-4191-a683-5f5c19ccc8c9\") " pod="openshift-nmstate/nmstate-handler-hvkc5"
Oct 09 13:39:19 crc kubenswrapper[4762]: I1009 13:39:19.189345 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/cfd67a5a-8008-4191-a683-5f5c19ccc8c9-dbus-socket\") pod \"nmstate-handler-hvkc5\" (UID: \"cfd67a5a-8008-4191-a683-5f5c19ccc8c9\") " pod="openshift-nmstate/nmstate-handler-hvkc5"
Oct 09 13:39:19 crc kubenswrapper[4762]: I1009 13:39:19.189354 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/cfd67a5a-8008-4191-a683-5f5c19ccc8c9-ovs-socket\") pod \"nmstate-handler-hvkc5\" (UID: \"cfd67a5a-8008-4191-a683-5f5c19ccc8c9\") " pod="openshift-nmstate/nmstate-handler-hvkc5"
Oct 09 13:39:19 crc kubenswrapper[4762]: I1009 13:39:19.189373 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/cfd67a5a-8008-4191-a683-5f5c19ccc8c9-nmstate-lock\") pod \"nmstate-handler-hvkc5\" (UID: \"cfd67a5a-8008-4191-a683-5f5c19ccc8c9\") " pod="openshift-nmstate/nmstate-handler-hvkc5"
Oct 09 13:39:19 crc kubenswrapper[4762]: I1009 13:39:19.189919 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/cfd67a5a-8008-4191-a683-5f5c19ccc8c9-dbus-socket\") pod \"nmstate-handler-hvkc5\" (UID: \"cfd67a5a-8008-4191-a683-5f5c19ccc8c9\") " pod="openshift-nmstate/nmstate-handler-hvkc5"
Oct 09 13:39:19 crc kubenswrapper[4762]: I1009 13:39:19.189941 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/cfd67a5a-8008-4191-a683-5f5c19ccc8c9-nmstate-lock\") pod \"nmstate-handler-hvkc5\" (UID: \"cfd67a5a-8008-4191-a683-5f5c19ccc8c9\") " pod="openshift-nmstate/nmstate-handler-hvkc5"
Oct 09 13:39:19 crc kubenswrapper[4762]: I1009 13:39:19.193479 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/2e9b466f-eddb-464a-b245-1008e19793b0-tls-key-pair\") pod \"nmstate-webhook-6cdbc54649-cpm9k\" (UID: \"2e9b466f-eddb-464a-b245-1008e19793b0\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-cpm9k"
Oct 09 13:39:19 crc kubenswrapper[4762]: I1009 13:39:19.205953 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n5hdq\" (UniqueName: \"kubernetes.io/projected/cfd67a5a-8008-4191-a683-5f5c19ccc8c9-kube-api-access-n5hdq\") pod \"nmstate-handler-hvkc5\" (UID: \"cfd67a5a-8008-4191-a683-5f5c19ccc8c9\") " pod="openshift-nmstate/nmstate-handler-hvkc5"
Oct 09 13:39:19 crc kubenswrapper[4762]: I1009 13:39:19.207208 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dtx6g\" (UniqueName: \"kubernetes.io/projected/2e9b466f-eddb-464a-b245-1008e19793b0-kube-api-access-dtx6g\") pod \"nmstate-webhook-6cdbc54649-cpm9k\" (UID: \"2e9b466f-eddb-464a-b245-1008e19793b0\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-cpm9k"
Oct 09 13:39:19 crc kubenswrapper[4762]: I1009 13:39:19.230487 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-wgxbz"
Oct 09 13:39:19 crc kubenswrapper[4762]: I1009 13:39:19.236983 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-86c7f89d77-7vp5d"]
Oct 09 13:39:19 crc kubenswrapper[4762]: I1009 13:39:19.250198 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-86c7f89d77-7vp5d"]
Oct 09 13:39:19 crc kubenswrapper[4762]: I1009 13:39:19.250334 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-86c7f89d77-7vp5d"
Oct 09 13:39:19 crc kubenswrapper[4762]: I1009 13:39:19.269815 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-cpm9k"
Oct 09 13:39:19 crc kubenswrapper[4762]: I1009 13:39:19.270102 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-hvkc5"
Oct 09 13:39:19 crc kubenswrapper[4762]: I1009 13:39:19.290007 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/54b5d227-e033-49ec-b96f-74ec617c74cd-plugin-serving-cert\") pod \"nmstate-console-plugin-6b874cbd85-ntz88\" (UID: \"54b5d227-e033-49ec-b96f-74ec617c74cd\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-ntz88"
Oct 09 13:39:19 crc kubenswrapper[4762]: I1009 13:39:19.290079 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w62v4\" (UniqueName: \"kubernetes.io/projected/54b5d227-e033-49ec-b96f-74ec617c74cd-kube-api-access-w62v4\") pod \"nmstate-console-plugin-6b874cbd85-ntz88\" (UID: \"54b5d227-e033-49ec-b96f-74ec617c74cd\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-ntz88"
Oct 09 13:39:19 crc kubenswrapper[4762]: I1009 13:39:19.290133 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/54b5d227-e033-49ec-b96f-74ec617c74cd-nginx-conf\") pod \"nmstate-console-plugin-6b874cbd85-ntz88\" (UID: \"54b5d227-e033-49ec-b96f-74ec617c74cd\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-ntz88"
Oct 09 13:39:19 crc kubenswrapper[4762]: I1009 13:39:19.291277 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/54b5d227-e033-49ec-b96f-74ec617c74cd-nginx-conf\") pod \"nmstate-console-plugin-6b874cbd85-ntz88\" (UID: \"54b5d227-e033-49ec-b96f-74ec617c74cd\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-ntz88"
Oct 09 13:39:19 crc kubenswrapper[4762]: I1009 13:39:19.295045 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/54b5d227-e033-49ec-b96f-74ec617c74cd-plugin-serving-cert\") pod \"nmstate-console-plugin-6b874cbd85-ntz88\" (UID: \"54b5d227-e033-49ec-b96f-74ec617c74cd\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-ntz88"
Oct 09 13:39:19 crc kubenswrapper[4762]: W1009 13:39:19.303746 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcfd67a5a_8008_4191_a683_5f5c19ccc8c9.slice/crio-f6114217f2eed3e0282de45a4e101198ab2bdba470d8137f0ddf41783d547a1d WatchSource:0}: Error finding container f6114217f2eed3e0282de45a4e101198ab2bdba470d8137f0ddf41783d547a1d: Status 404 returned error can't find the container with id f6114217f2eed3e0282de45a4e101198ab2bdba470d8137f0ddf41783d547a1d
Oct 09 13:39:19 crc kubenswrapper[4762]: I1009 13:39:19.305821 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w62v4\" (UniqueName: \"kubernetes.io/projected/54b5d227-e033-49ec-b96f-74ec617c74cd-kube-api-access-w62v4\") pod \"nmstate-console-plugin-6b874cbd85-ntz88\" (UID: \"54b5d227-e033-49ec-b96f-74ec617c74cd\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-ntz88"
Oct 09 13:39:19 crc kubenswrapper[4762]: I1009 13:39:19.322344 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-hvkc5" event={"ID":"cfd67a5a-8008-4191-a683-5f5c19ccc8c9","Type":"ContainerStarted","Data":"f6114217f2eed3e0282de45a4e101198ab2bdba470d8137f0ddf41783d547a1d"}
Oct 09 13:39:19 crc kubenswrapper[4762]: I1009 13:39:19.362906 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-ntz88"
Oct 09 13:39:19 crc kubenswrapper[4762]: I1009 13:39:19.391626 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/9f10b793-9f7a-4fbf-b49f-7758b82a69af-service-ca\") pod \"console-86c7f89d77-7vp5d\" (UID: \"9f10b793-9f7a-4fbf-b49f-7758b82a69af\") " pod="openshift-console/console-86c7f89d77-7vp5d"
Oct 09 13:39:19 crc kubenswrapper[4762]: I1009 13:39:19.391775 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/9f10b793-9f7a-4fbf-b49f-7758b82a69af-console-oauth-config\") pod \"console-86c7f89d77-7vp5d\" (UID: \"9f10b793-9f7a-4fbf-b49f-7758b82a69af\") " pod="openshift-console/console-86c7f89d77-7vp5d"
Oct 09 13:39:19 crc kubenswrapper[4762]: I1009 13:39:19.391833 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/9f10b793-9f7a-4fbf-b49f-7758b82a69af-oauth-serving-cert\") pod \"console-86c7f89d77-7vp5d\" (UID: \"9f10b793-9f7a-4fbf-b49f-7758b82a69af\") " pod="openshift-console/console-86c7f89d77-7vp5d"
Oct 09 13:39:19 crc kubenswrapper[4762]: I1009 13:39:19.391868 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/9f10b793-9f7a-4fbf-b49f-7758b82a69af-console-serving-cert\") pod \"console-86c7f89d77-7vp5d\" (UID: \"9f10b793-9f7a-4fbf-b49f-7758b82a69af\") " pod="openshift-console/console-86c7f89d77-7vp5d"
Oct 09 13:39:19 crc kubenswrapper[4762]: I1009 13:39:19.391902 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/9f10b793-9f7a-4fbf-b49f-7758b82a69af-console-config\") pod \"console-86c7f89d77-7vp5d\" (UID: \"9f10b793-9f7a-4fbf-b49f-7758b82a69af\") " pod="openshift-console/console-86c7f89d77-7vp5d"
Oct 09 13:39:19 crc kubenswrapper[4762]: I1009 13:39:19.391925 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6mfch\" (UniqueName: \"kubernetes.io/projected/9f10b793-9f7a-4fbf-b49f-7758b82a69af-kube-api-access-6mfch\") pod \"console-86c7f89d77-7vp5d\" (UID: \"9f10b793-9f7a-4fbf-b49f-7758b82a69af\") " pod="openshift-console/console-86c7f89d77-7vp5d"
Oct 09 13:39:19 crc kubenswrapper[4762]: I1009 13:39:19.391953 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9f10b793-9f7a-4fbf-b49f-7758b82a69af-trusted-ca-bundle\") pod \"console-86c7f89d77-7vp5d\" (UID: \"9f10b793-9f7a-4fbf-b49f-7758b82a69af\") " pod="openshift-console/console-86c7f89d77-7vp5d"
Oct 09 13:39:19 crc kubenswrapper[4762]: I1009 13:39:19.493297 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6mfch\" (UniqueName: \"kubernetes.io/projected/9f10b793-9f7a-4fbf-b49f-7758b82a69af-kube-api-access-6mfch\") pod \"console-86c7f89d77-7vp5d\" (UID: \"9f10b793-9f7a-4fbf-b49f-7758b82a69af\") " pod="openshift-console/console-86c7f89d77-7vp5d"
Oct 09 13:39:19 crc kubenswrapper[4762]: I1009 13:39:19.493348 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9f10b793-9f7a-4fbf-b49f-7758b82a69af-trusted-ca-bundle\") pod \"console-86c7f89d77-7vp5d\" (UID: \"9f10b793-9f7a-4fbf-b49f-7758b82a69af\") " pod="openshift-console/console-86c7f89d77-7vp5d"
Oct 09 13:39:19 crc kubenswrapper[4762]: I1009 13:39:19.493384 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/9f10b793-9f7a-4fbf-b49f-7758b82a69af-service-ca\") pod \"console-86c7f89d77-7vp5d\" (UID: \"9f10b793-9f7a-4fbf-b49f-7758b82a69af\") " pod="openshift-console/console-86c7f89d77-7vp5d"
Oct 09 13:39:19 crc kubenswrapper[4762]: I1009 13:39:19.493420 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/9f10b793-9f7a-4fbf-b49f-7758b82a69af-console-oauth-config\") pod \"console-86c7f89d77-7vp5d\" (UID: \"9f10b793-9f7a-4fbf-b49f-7758b82a69af\") " pod="openshift-console/console-86c7f89d77-7vp5d"
Oct 09 13:39:19 crc kubenswrapper[4762]: I1009 13:39:19.493471 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/9f10b793-9f7a-4fbf-b49f-7758b82a69af-oauth-serving-cert\") pod \"console-86c7f89d77-7vp5d\" (UID: \"9f10b793-9f7a-4fbf-b49f-7758b82a69af\") " pod="openshift-console/console-86c7f89d77-7vp5d"
Oct 09 13:39:19 crc kubenswrapper[4762]: I1009 13:39:19.493503 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/9f10b793-9f7a-4fbf-b49f-7758b82a69af-console-serving-cert\") pod \"console-86c7f89d77-7vp5d\" (UID: \"9f10b793-9f7a-4fbf-b49f-7758b82a69af\") " pod="openshift-console/console-86c7f89d77-7vp5d"
Oct 09 13:39:19 crc kubenswrapper[4762]: I1009 13:39:19.493540 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/9f10b793-9f7a-4fbf-b49f-7758b82a69af-console-config\") pod \"console-86c7f89d77-7vp5d\" (UID: \"9f10b793-9f7a-4fbf-b49f-7758b82a69af\") " pod="openshift-console/console-86c7f89d77-7vp5d"
Oct 09 13:39:19 crc kubenswrapper[4762]: I1009 13:39:19.496073 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/9f10b793-9f7a-4fbf-b49f-7758b82a69af-console-config\") pod \"console-86c7f89d77-7vp5d\" (UID: \"9f10b793-9f7a-4fbf-b49f-7758b82a69af\") " pod="openshift-console/console-86c7f89d77-7vp5d"
Oct 09 13:39:19 crc kubenswrapper[4762]: I1009 
13:39:19.496084 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/9f10b793-9f7a-4fbf-b49f-7758b82a69af-oauth-serving-cert\") pod \"console-86c7f89d77-7vp5d\" (UID: \"9f10b793-9f7a-4fbf-b49f-7758b82a69af\") " pod="openshift-console/console-86c7f89d77-7vp5d" Oct 09 13:39:19 crc kubenswrapper[4762]: I1009 13:39:19.496069 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/9f10b793-9f7a-4fbf-b49f-7758b82a69af-service-ca\") pod \"console-86c7f89d77-7vp5d\" (UID: \"9f10b793-9f7a-4fbf-b49f-7758b82a69af\") " pod="openshift-console/console-86c7f89d77-7vp5d" Oct 09 13:39:19 crc kubenswrapper[4762]: I1009 13:39:19.496322 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9f10b793-9f7a-4fbf-b49f-7758b82a69af-trusted-ca-bundle\") pod \"console-86c7f89d77-7vp5d\" (UID: \"9f10b793-9f7a-4fbf-b49f-7758b82a69af\") " pod="openshift-console/console-86c7f89d77-7vp5d" Oct 09 13:39:19 crc kubenswrapper[4762]: I1009 13:39:19.499694 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/9f10b793-9f7a-4fbf-b49f-7758b82a69af-console-oauth-config\") pod \"console-86c7f89d77-7vp5d\" (UID: \"9f10b793-9f7a-4fbf-b49f-7758b82a69af\") " pod="openshift-console/console-86c7f89d77-7vp5d" Oct 09 13:39:19 crc kubenswrapper[4762]: I1009 13:39:19.499925 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/9f10b793-9f7a-4fbf-b49f-7758b82a69af-console-serving-cert\") pod \"console-86c7f89d77-7vp5d\" (UID: \"9f10b793-9f7a-4fbf-b49f-7758b82a69af\") " pod="openshift-console/console-86c7f89d77-7vp5d" Oct 09 13:39:19 crc kubenswrapper[4762]: I1009 13:39:19.513554 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6mfch\" (UniqueName: \"kubernetes.io/projected/9f10b793-9f7a-4fbf-b49f-7758b82a69af-kube-api-access-6mfch\") pod \"console-86c7f89d77-7vp5d\" (UID: \"9f10b793-9f7a-4fbf-b49f-7758b82a69af\") " pod="openshift-console/console-86c7f89d77-7vp5d" Oct 09 13:39:19 crc kubenswrapper[4762]: I1009 13:39:19.599745 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-86c7f89d77-7vp5d" Oct 09 13:39:19 crc kubenswrapper[4762]: I1009 13:39:19.666329 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-fdff9cb8d-wgxbz"] Oct 09 13:39:19 crc kubenswrapper[4762]: I1009 13:39:19.728161 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-6cdbc54649-cpm9k"] Oct 09 13:39:19 crc kubenswrapper[4762]: W1009 13:39:19.738957 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2e9b466f_eddb_464a_b245_1008e19793b0.slice/crio-ab942528af6c46df21b065080feac7825a2ecf1f5e502765e00261e31b9ed121 WatchSource:0}: Error finding container ab942528af6c46df21b065080feac7825a2ecf1f5e502765e00261e31b9ed121: Status 404 returned error can't find the container with id ab942528af6c46df21b065080feac7825a2ecf1f5e502765e00261e31b9ed121 Oct 09 13:39:19 crc kubenswrapper[4762]: I1009 13:39:19.795380 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-6b874cbd85-ntz88"] Oct 09 13:39:19 crc kubenswrapper[4762]: W1009 13:39:19.798560 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod54b5d227_e033_49ec_b96f_74ec617c74cd.slice/crio-14a798220ef306b6a149597999bd4e618ac73eecf50dd124db9790c140d9b8bd WatchSource:0}: Error finding container 14a798220ef306b6a149597999bd4e618ac73eecf50dd124db9790c140d9b8bd: Status 404 returned error can't find the container with id 14a798220ef306b6a149597999bd4e618ac73eecf50dd124db9790c140d9b8bd Oct 09 13:39:19 crc kubenswrapper[4762]: I1009 13:39:19.994506 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-86c7f89d77-7vp5d"] Oct 09 13:39:19 crc kubenswrapper[4762]: W1009 13:39:19.997876 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9f10b793_9f7a_4fbf_b49f_7758b82a69af.slice/crio-8d15d58a72f501d9c2dbd988b000d473381cea425fee2db1c4f55c9e2a952158 WatchSource:0}: Error finding container 8d15d58a72f501d9c2dbd988b000d473381cea425fee2db1c4f55c9e2a952158: Status 404 returned error can't find the container with id 8d15d58a72f501d9c2dbd988b000d473381cea425fee2db1c4f55c9e2a952158 Oct 09 13:39:20 crc kubenswrapper[4762]: I1009 13:39:20.328730 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-wgxbz" event={"ID":"334a2699-c1e4-42e0-b2ce-a2f699f1347c","Type":"ContainerStarted","Data":"7a8ed2178627f773584c1f6fdc255bbeab8cfe0feb3b42ebec2d7d83e8906c74"} Oct 09 13:39:20 crc kubenswrapper[4762]: I1009 13:39:20.331538 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-ntz88" event={"ID":"54b5d227-e033-49ec-b96f-74ec617c74cd","Type":"ContainerStarted","Data":"14a798220ef306b6a149597999bd4e618ac73eecf50dd124db9790c140d9b8bd"} Oct 09 13:39:20 crc kubenswrapper[4762]: I1009 13:39:20.333422 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-86c7f89d77-7vp5d" event={"ID":"9f10b793-9f7a-4fbf-b49f-7758b82a69af","Type":"ContainerStarted","Data":"6bd80d1e71a0ea52a346efab0dc3948d8d6e51bc6808a4657cf7944e2902db94"} Oct 09 13:39:20 crc kubenswrapper[4762]: I1009 13:39:20.333473 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-86c7f89d77-7vp5d" 
event={"ID":"9f10b793-9f7a-4fbf-b49f-7758b82a69af","Type":"ContainerStarted","Data":"8d15d58a72f501d9c2dbd988b000d473381cea425fee2db1c4f55c9e2a952158"} Oct 09 13:39:20 crc kubenswrapper[4762]: I1009 13:39:20.336786 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-cpm9k" event={"ID":"2e9b466f-eddb-464a-b245-1008e19793b0","Type":"ContainerStarted","Data":"ab942528af6c46df21b065080feac7825a2ecf1f5e502765e00261e31b9ed121"} Oct 09 13:39:20 crc kubenswrapper[4762]: I1009 13:39:20.349563 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-86c7f89d77-7vp5d" podStartSLOduration=1.3495293959999999 podStartE2EDuration="1.349529396s" podCreationTimestamp="2025-10-09 13:39:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:39:20.34895688 +0000 UTC m=+836.122747919" watchObservedRunningTime="2025-10-09 13:39:20.349529396 +0000 UTC m=+836.123320475" Oct 09 13:39:23 crc kubenswrapper[4762]: I1009 13:39:23.351688 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-hvkc5" event={"ID":"cfd67a5a-8008-4191-a683-5f5c19ccc8c9","Type":"ContainerStarted","Data":"2c8c788946d46c77cd45a7af16ef1647f7081ef49c0bf9830356361d6a380fec"} Oct 09 13:39:23 crc kubenswrapper[4762]: I1009 13:39:23.352061 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-hvkc5" Oct 09 13:39:23 crc kubenswrapper[4762]: I1009 13:39:23.353742 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-wgxbz" event={"ID":"334a2699-c1e4-42e0-b2ce-a2f699f1347c","Type":"ContainerStarted","Data":"c97e527e40268b53afb6b4d51466f51f9c8a1d5034b0748e872b8b21761b911d"} Oct 09 13:39:23 crc kubenswrapper[4762]: I1009 13:39:23.355732 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-ntz88" event={"ID":"54b5d227-e033-49ec-b96f-74ec617c74cd","Type":"ContainerStarted","Data":"1b1bafee585afd2fda721c62586b10cb1abb761c20dc7c92722508792d5fbb0b"} Oct 09 13:39:23 crc kubenswrapper[4762]: I1009 13:39:23.357543 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-cpm9k" event={"ID":"2e9b466f-eddb-464a-b245-1008e19793b0","Type":"ContainerStarted","Data":"773e803d1e5bbcdd16ca00c218d93da1b7216f3f16d59443c5e0ec975719eb5f"} Oct 09 13:39:23 crc kubenswrapper[4762]: I1009 13:39:23.357839 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-cpm9k" Oct 09 13:39:23 crc kubenswrapper[4762]: I1009 13:39:23.369963 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-hvkc5" podStartSLOduration=1.934148838 podStartE2EDuration="5.369935331s" podCreationTimestamp="2025-10-09 13:39:18 +0000 UTC" firstStartedPulling="2025-10-09 13:39:19.305521102 +0000 UTC m=+835.079312141" lastFinishedPulling="2025-10-09 13:39:22.741307595 +0000 UTC m=+838.515098634" observedRunningTime="2025-10-09 13:39:23.365915253 +0000 UTC m=+839.139706302" watchObservedRunningTime="2025-10-09 13:39:23.369935331 +0000 UTC m=+839.143726370" Oct 09 13:39:23 crc kubenswrapper[4762]: I1009 13:39:23.396486 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-cpm9k" 
podStartSLOduration=2.390723705 podStartE2EDuration="5.396471909s" podCreationTimestamp="2025-10-09 13:39:18 +0000 UTC" firstStartedPulling="2025-10-09 13:39:19.740955645 +0000 UTC m=+835.514746684" lastFinishedPulling="2025-10-09 13:39:22.746703849 +0000 UTC m=+838.520494888" observedRunningTime="2025-10-09 13:39:23.393422067 +0000 UTC m=+839.167213106" watchObservedRunningTime="2025-10-09 13:39:23.396471909 +0000 UTC m=+839.170262948" Oct 09 13:39:23 crc kubenswrapper[4762]: I1009 13:39:23.414013 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-ntz88" podStartSLOduration=1.477688323 podStartE2EDuration="4.413994185s" podCreationTimestamp="2025-10-09 13:39:19 +0000 UTC" firstStartedPulling="2025-10-09 13:39:19.800276458 +0000 UTC m=+835.574067507" lastFinishedPulling="2025-10-09 13:39:22.73658233 +0000 UTC m=+838.510373369" observedRunningTime="2025-10-09 13:39:23.410744029 +0000 UTC m=+839.184535068" watchObservedRunningTime="2025-10-09 13:39:23.413994185 +0000 UTC m=+839.187785224" Oct 09 13:39:26 crc kubenswrapper[4762]: I1009 13:39:26.372608 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-wgxbz" event={"ID":"334a2699-c1e4-42e0-b2ce-a2f699f1347c","Type":"ContainerStarted","Data":"0d1443104e3cd61ce1148d90e855d01890724e52b34f08c5584b75dd9b7636d9"} Oct 09 13:39:26 crc kubenswrapper[4762]: I1009 13:39:26.387648 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-wgxbz" podStartSLOduration=2.286481384 podStartE2EDuration="8.387616252s" podCreationTimestamp="2025-10-09 13:39:18 +0000 UTC" firstStartedPulling="2025-10-09 13:39:19.679373133 +0000 UTC m=+835.453164172" lastFinishedPulling="2025-10-09 13:39:25.780508001 +0000 UTC m=+841.554299040" observedRunningTime="2025-10-09 13:39:26.387034847 +0000 UTC m=+842.160825886" watchObservedRunningTime="2025-10-09 13:39:26.387616252 +0000 UTC m=+842.161407291" Oct 09 13:39:29 crc kubenswrapper[4762]: I1009 13:39:29.293533 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-hvkc5" Oct 09 13:39:29 crc kubenswrapper[4762]: I1009 13:39:29.600774 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-86c7f89d77-7vp5d" Oct 09 13:39:29 crc kubenswrapper[4762]: I1009 13:39:29.600818 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-86c7f89d77-7vp5d" Oct 09 13:39:29 crc kubenswrapper[4762]: I1009 13:39:29.607733 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-86c7f89d77-7vp5d" Oct 09 13:39:30 crc kubenswrapper[4762]: I1009 13:39:30.395840 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-86c7f89d77-7vp5d" Oct 09 13:39:30 crc kubenswrapper[4762]: I1009 13:39:30.438445 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-r5hfv"] Oct 09 13:39:39 crc kubenswrapper[4762]: I1009 13:39:39.277615 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-cpm9k" Oct 09 13:39:41 crc kubenswrapper[4762]: I1009 13:39:41.969208 4762 patch_prober.go:28] interesting pod/machine-config-daemon-5v6hv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe 
status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 13:39:41 crc kubenswrapper[4762]: I1009 13:39:41.969614 4762 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 13:39:41 crc kubenswrapper[4762]: I1009 13:39:41.969679 4762 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" Oct 09 13:39:41 crc kubenswrapper[4762]: I1009 13:39:41.970198 4762 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"b3e2931f38d6c24f318bca2e81d69458e7536a5e2c15ed2699be45822005a52b"} pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 09 13:39:41 crc kubenswrapper[4762]: I1009 13:39:41.970315 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" containerID="cri-o://b3e2931f38d6c24f318bca2e81d69458e7536a5e2c15ed2699be45822005a52b" gracePeriod=600 Oct 09 13:39:42 crc kubenswrapper[4762]: I1009 13:39:42.461167 4762 generic.go:334] "Generic (PLEG): container finished" podID="366049a3-acf6-488c-9f93-4557528d6d14" containerID="b3e2931f38d6c24f318bca2e81d69458e7536a5e2c15ed2699be45822005a52b" exitCode=0 Oct 09 13:39:42 crc kubenswrapper[4762]: I1009 13:39:42.461270 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" event={"ID":"366049a3-acf6-488c-9f93-4557528d6d14","Type":"ContainerDied","Data":"b3e2931f38d6c24f318bca2e81d69458e7536a5e2c15ed2699be45822005a52b"} Oct 09 13:39:42 crc kubenswrapper[4762]: I1009 13:39:42.461764 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" event={"ID":"366049a3-acf6-488c-9f93-4557528d6d14","Type":"ContainerStarted","Data":"d57a89488c0d1b6d5f453b504f52722aa68ac67c28d2410055ce9ab4d7c5ecc7"} Oct 09 13:39:42 crc kubenswrapper[4762]: I1009 13:39:42.461790 4762 scope.go:117] "RemoveContainer" containerID="a3cfdbddf263d8475109c385067d1bee3767cd398e6e6fb760a08702ea253859" Oct 09 13:39:51 crc kubenswrapper[4762]: I1009 13:39:51.060593 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d27jg4b"] Oct 09 13:39:51 crc kubenswrapper[4762]: I1009 13:39:51.062062 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d27jg4b" Oct 09 13:39:51 crc kubenswrapper[4762]: I1009 13:39:51.066219 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Oct 09 13:39:51 crc kubenswrapper[4762]: I1009 13:39:51.073681 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d27jg4b"] Oct 09 13:39:51 crc kubenswrapper[4762]: I1009 13:39:51.225563 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4b618f60-c8f7-4334-85cc-165b9c972adf-bundle\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d27jg4b\" (UID: \"4b618f60-c8f7-4334-85cc-165b9c972adf\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d27jg4b" Oct 09 13:39:51 crc kubenswrapper[4762]: I1009 13:39:51.226023 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ctxq8\" (UniqueName: \"kubernetes.io/projected/4b618f60-c8f7-4334-85cc-165b9c972adf-kube-api-access-ctxq8\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d27jg4b\" (UID: \"4b618f60-c8f7-4334-85cc-165b9c972adf\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d27jg4b" Oct 09 13:39:51 crc kubenswrapper[4762]: I1009 13:39:51.226062 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4b618f60-c8f7-4334-85cc-165b9c972adf-util\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d27jg4b\" (UID: \"4b618f60-c8f7-4334-85cc-165b9c972adf\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d27jg4b" Oct 09 13:39:51 crc kubenswrapper[4762]: I1009 13:39:51.327705 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4b618f60-c8f7-4334-85cc-165b9c972adf-bundle\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d27jg4b\" (UID: \"4b618f60-c8f7-4334-85cc-165b9c972adf\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d27jg4b" Oct 09 13:39:51 crc kubenswrapper[4762]: I1009 13:39:51.327790 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ctxq8\" (UniqueName: \"kubernetes.io/projected/4b618f60-c8f7-4334-85cc-165b9c972adf-kube-api-access-ctxq8\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d27jg4b\" (UID: \"4b618f60-c8f7-4334-85cc-165b9c972adf\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d27jg4b" Oct 09 13:39:51 crc kubenswrapper[4762]: I1009 13:39:51.327823 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4b618f60-c8f7-4334-85cc-165b9c972adf-util\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d27jg4b\" (UID: \"4b618f60-c8f7-4334-85cc-165b9c972adf\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d27jg4b" Oct 09 13:39:51 crc kubenswrapper[4762]: I1009 13:39:51.328436 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/4b618f60-c8f7-4334-85cc-165b9c972adf-bundle\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d27jg4b\" (UID: \"4b618f60-c8f7-4334-85cc-165b9c972adf\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d27jg4b" Oct 09 13:39:51 crc kubenswrapper[4762]: I1009 13:39:51.328579 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4b618f60-c8f7-4334-85cc-165b9c972adf-util\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d27jg4b\" (UID: \"4b618f60-c8f7-4334-85cc-165b9c972adf\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d27jg4b" Oct 09 13:39:51 crc kubenswrapper[4762]: I1009 13:39:51.348697 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ctxq8\" (UniqueName: \"kubernetes.io/projected/4b618f60-c8f7-4334-85cc-165b9c972adf-kube-api-access-ctxq8\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d27jg4b\" (UID: \"4b618f60-c8f7-4334-85cc-165b9c972adf\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d27jg4b" Oct 09 13:39:51 crc kubenswrapper[4762]: I1009 13:39:51.384171 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d27jg4b" Oct 09 13:39:51 crc kubenswrapper[4762]: I1009 13:39:51.565876 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d27jg4b"] Oct 09 13:39:52 crc kubenswrapper[4762]: I1009 13:39:52.551042 4762 generic.go:334] "Generic (PLEG): container finished" podID="4b618f60-c8f7-4334-85cc-165b9c972adf" containerID="af1ad003c0d4aea95636802cead74c8817ddf81cee3cb8fa7e2dd351c98d7ad2" exitCode=0 Oct 09 13:39:52 crc kubenswrapper[4762]: I1009 13:39:52.551097 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d27jg4b" event={"ID":"4b618f60-c8f7-4334-85cc-165b9c972adf","Type":"ContainerDied","Data":"af1ad003c0d4aea95636802cead74c8817ddf81cee3cb8fa7e2dd351c98d7ad2"} Oct 09 13:39:52 crc kubenswrapper[4762]: I1009 13:39:52.551131 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d27jg4b" event={"ID":"4b618f60-c8f7-4334-85cc-165b9c972adf","Type":"ContainerStarted","Data":"dd143252726513c021703e878de10d768fa42da7d523302ec824c1ce5860fde8"} Oct 09 13:39:55 crc kubenswrapper[4762]: I1009 13:39:55.478888 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-r5hfv" podUID="4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8" containerName="console" containerID="cri-o://fa6528bd8c544742983d1d0126961651d3405d8bb19062002dd0ecdb02b083ad" gracePeriod=15 Oct 09 13:39:55 crc kubenswrapper[4762]: I1009 13:39:55.848798 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-r5hfv_4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8/console/0.log" Oct 09 13:39:55 crc kubenswrapper[4762]: I1009 13:39:55.848931 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-r5hfv" Oct 09 13:39:55 crc kubenswrapper[4762]: I1009 13:39:55.989077 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8-service-ca\") pod \"4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8\" (UID: \"4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8\") " Oct 09 13:39:55 crc kubenswrapper[4762]: I1009 13:39:55.989169 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8-trusted-ca-bundle\") pod \"4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8\" (UID: \"4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8\") " Oct 09 13:39:55 crc kubenswrapper[4762]: I1009 13:39:55.989204 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8-oauth-serving-cert\") pod \"4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8\" (UID: \"4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8\") " Oct 09 13:39:55 crc kubenswrapper[4762]: I1009 13:39:55.989263 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8-console-oauth-config\") pod \"4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8\" (UID: \"4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8\") " Oct 09 13:39:55 crc kubenswrapper[4762]: I1009 13:39:55.989306 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jtznr\" (UniqueName: \"kubernetes.io/projected/4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8-kube-api-access-jtznr\") pod \"4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8\" (UID: \"4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8\") " Oct 09 13:39:55 crc kubenswrapper[4762]: I1009 13:39:55.989360 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8-console-serving-cert\") pod \"4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8\" (UID: \"4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8\") " Oct 09 13:39:55 crc kubenswrapper[4762]: I1009 13:39:55.989408 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8-console-config\") pod \"4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8\" (UID: \"4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8\") " Oct 09 13:39:55 crc kubenswrapper[4762]: I1009 13:39:55.990345 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8" (UID: "4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:39:55 crc kubenswrapper[4762]: I1009 13:39:55.990380 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8-service-ca" (OuterVolumeSpecName: "service-ca") pod "4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8" (UID: "4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8"). InnerVolumeSpecName "service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:39:55 crc kubenswrapper[4762]: I1009 13:39:55.990395 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8" (UID: "4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:39:55 crc kubenswrapper[4762]: I1009 13:39:55.990603 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8-console-config" (OuterVolumeSpecName: "console-config") pod "4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8" (UID: "4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:39:55 crc kubenswrapper[4762]: I1009 13:39:55.995409 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8" (UID: "4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:39:55 crc kubenswrapper[4762]: I1009 13:39:55.995769 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8-kube-api-access-jtznr" (OuterVolumeSpecName: "kube-api-access-jtznr") pod "4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8" (UID: "4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8"). InnerVolumeSpecName "kube-api-access-jtznr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:39:55 crc kubenswrapper[4762]: I1009 13:39:55.996604 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8" (UID: "4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8"). InnerVolumeSpecName "console-oauth-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:39:56 crc kubenswrapper[4762]: I1009 13:39:56.090858 4762 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8-service-ca\") on node \"crc\" DevicePath \"\"" Oct 09 13:39:56 crc kubenswrapper[4762]: I1009 13:39:56.090894 4762 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 13:39:56 crc kubenswrapper[4762]: I1009 13:39:56.090908 4762 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 09 13:39:56 crc kubenswrapper[4762]: I1009 13:39:56.090919 4762 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8-console-oauth-config\") on node \"crc\" DevicePath \"\"" Oct 09 13:39:56 crc kubenswrapper[4762]: I1009 13:39:56.090931 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jtznr\" (UniqueName: \"kubernetes.io/projected/4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8-kube-api-access-jtznr\") on node \"crc\" DevicePath \"\"" Oct 09 13:39:56 crc kubenswrapper[4762]: I1009 13:39:56.090942 4762 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8-console-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 09 13:39:56 crc kubenswrapper[4762]: I1009 13:39:56.090953 4762 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8-console-config\") on node \"crc\" DevicePath \"\"" Oct 09 13:39:56 crc kubenswrapper[4762]: I1009 13:39:56.577830 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-r5hfv_4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8/console/0.log" Oct 09 13:39:56 crc kubenswrapper[4762]: I1009 13:39:56.578299 4762 generic.go:334] "Generic (PLEG): container finished" podID="4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8" containerID="fa6528bd8c544742983d1d0126961651d3405d8bb19062002dd0ecdb02b083ad" exitCode=2 Oct 09 13:39:56 crc kubenswrapper[4762]: I1009 13:39:56.578352 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-r5hfv" event={"ID":"4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8","Type":"ContainerDied","Data":"fa6528bd8c544742983d1d0126961651d3405d8bb19062002dd0ecdb02b083ad"} Oct 09 13:39:56 crc kubenswrapper[4762]: I1009 13:39:56.578394 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-r5hfv" event={"ID":"4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8","Type":"ContainerDied","Data":"39f11010c23822796c7f5733c5dc2d2fc8c3220b7ceae4e24e5da2ccd4316b1a"} Oct 09 13:39:56 crc kubenswrapper[4762]: I1009 13:39:56.578414 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-r5hfv" Oct 09 13:39:56 crc kubenswrapper[4762]: I1009 13:39:56.578419 4762 scope.go:117] "RemoveContainer" containerID="fa6528bd8c544742983d1d0126961651d3405d8bb19062002dd0ecdb02b083ad" Oct 09 13:39:56 crc kubenswrapper[4762]: I1009 13:39:56.611839 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-r5hfv"] Oct 09 13:39:56 crc kubenswrapper[4762]: I1009 13:39:56.617523 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-r5hfv"] Oct 09 13:39:56 crc kubenswrapper[4762]: I1009 13:39:56.640326 4762 scope.go:117] "RemoveContainer" containerID="fa6528bd8c544742983d1d0126961651d3405d8bb19062002dd0ecdb02b083ad" Oct 09 13:39:56 crc kubenswrapper[4762]: E1009 13:39:56.641070 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fa6528bd8c544742983d1d0126961651d3405d8bb19062002dd0ecdb02b083ad\": container with ID starting with fa6528bd8c544742983d1d0126961651d3405d8bb19062002dd0ecdb02b083ad not found: ID does not exist" containerID="fa6528bd8c544742983d1d0126961651d3405d8bb19062002dd0ecdb02b083ad" Oct 09 13:39:56 crc kubenswrapper[4762]: I1009 13:39:56.641126 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fa6528bd8c544742983d1d0126961651d3405d8bb19062002dd0ecdb02b083ad"} err="failed to get container status \"fa6528bd8c544742983d1d0126961651d3405d8bb19062002dd0ecdb02b083ad\": rpc error: code = NotFound desc = could not find container \"fa6528bd8c544742983d1d0126961651d3405d8bb19062002dd0ecdb02b083ad\": container with ID starting with fa6528bd8c544742983d1d0126961651d3405d8bb19062002dd0ecdb02b083ad not found: ID does not exist" Oct 09 13:39:56 crc kubenswrapper[4762]: I1009 13:39:56.977756 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8" path="/var/lib/kubelet/pods/4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8/volumes" Oct 09 13:39:57 crc kubenswrapper[4762]: I1009 13:39:57.586669 4762 generic.go:334] "Generic (PLEG): container finished" podID="4b618f60-c8f7-4334-85cc-165b9c972adf" containerID="dadf5abe75c2e9c5ae2bfad4a9635f3ae9b8323300f7f23bb386c82bc2322abf" exitCode=0 Oct 09 13:39:57 crc kubenswrapper[4762]: I1009 13:39:57.586774 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d27jg4b" event={"ID":"4b618f60-c8f7-4334-85cc-165b9c972adf","Type":"ContainerDied","Data":"dadf5abe75c2e9c5ae2bfad4a9635f3ae9b8323300f7f23bb386c82bc2322abf"} Oct 09 13:39:58 crc kubenswrapper[4762]: I1009 13:39:58.594444 4762 generic.go:334] "Generic (PLEG): container finished" podID="4b618f60-c8f7-4334-85cc-165b9c972adf" containerID="c0a93715739f5591fbf2da91aba105d22059e5444e697ef96d0acd36e45ee5c8" exitCode=0 Oct 09 13:39:58 crc kubenswrapper[4762]: I1009 13:39:58.594498 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d27jg4b" event={"ID":"4b618f60-c8f7-4334-85cc-165b9c972adf","Type":"ContainerDied","Data":"c0a93715739f5591fbf2da91aba105d22059e5444e697ef96d0acd36e45ee5c8"} Oct 09 13:39:59 crc kubenswrapper[4762]: I1009 13:39:59.874685 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d27jg4b" Oct 09 13:40:00 crc kubenswrapper[4762]: I1009 13:40:00.038199 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4b618f60-c8f7-4334-85cc-165b9c972adf-util\") pod \"4b618f60-c8f7-4334-85cc-165b9c972adf\" (UID: \"4b618f60-c8f7-4334-85cc-165b9c972adf\") " Oct 09 13:40:00 crc kubenswrapper[4762]: I1009 13:40:00.038300 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4b618f60-c8f7-4334-85cc-165b9c972adf-bundle\") pod \"4b618f60-c8f7-4334-85cc-165b9c972adf\" (UID: \"4b618f60-c8f7-4334-85cc-165b9c972adf\") " Oct 09 13:40:00 crc kubenswrapper[4762]: I1009 13:40:00.038354 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ctxq8\" (UniqueName: \"kubernetes.io/projected/4b618f60-c8f7-4334-85cc-165b9c972adf-kube-api-access-ctxq8\") pod \"4b618f60-c8f7-4334-85cc-165b9c972adf\" (UID: \"4b618f60-c8f7-4334-85cc-165b9c972adf\") " Oct 09 13:40:00 crc kubenswrapper[4762]: I1009 13:40:00.039898 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4b618f60-c8f7-4334-85cc-165b9c972adf-bundle" (OuterVolumeSpecName: "bundle") pod "4b618f60-c8f7-4334-85cc-165b9c972adf" (UID: "4b618f60-c8f7-4334-85cc-165b9c972adf"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 13:40:00 crc kubenswrapper[4762]: I1009 13:40:00.046547 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4b618f60-c8f7-4334-85cc-165b9c972adf-kube-api-access-ctxq8" (OuterVolumeSpecName: "kube-api-access-ctxq8") pod "4b618f60-c8f7-4334-85cc-165b9c972adf" (UID: "4b618f60-c8f7-4334-85cc-165b9c972adf"). InnerVolumeSpecName "kube-api-access-ctxq8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:40:00 crc kubenswrapper[4762]: I1009 13:40:00.053921 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4b618f60-c8f7-4334-85cc-165b9c972adf-util" (OuterVolumeSpecName: "util") pod "4b618f60-c8f7-4334-85cc-165b9c972adf" (UID: "4b618f60-c8f7-4334-85cc-165b9c972adf"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 13:40:00 crc kubenswrapper[4762]: I1009 13:40:00.139569 4762 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4b618f60-c8f7-4334-85cc-165b9c972adf-util\") on node \"crc\" DevicePath \"\"" Oct 09 13:40:00 crc kubenswrapper[4762]: I1009 13:40:00.139616 4762 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4b618f60-c8f7-4334-85cc-165b9c972adf-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 13:40:00 crc kubenswrapper[4762]: I1009 13:40:00.139651 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ctxq8\" (UniqueName: \"kubernetes.io/projected/4b618f60-c8f7-4334-85cc-165b9c972adf-kube-api-access-ctxq8\") on node \"crc\" DevicePath \"\"" Oct 09 13:40:00 crc kubenswrapper[4762]: I1009 13:40:00.610090 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d27jg4b" event={"ID":"4b618f60-c8f7-4334-85cc-165b9c972adf","Type":"ContainerDied","Data":"dd143252726513c021703e878de10d768fa42da7d523302ec824c1ce5860fde8"} Oct 09 13:40:00 crc kubenswrapper[4762]: I1009 13:40:00.610136 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dd143252726513c021703e878de10d768fa42da7d523302ec824c1ce5860fde8" Oct 09 13:40:00 crc kubenswrapper[4762]: I1009 13:40:00.610164 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d27jg4b" Oct 09 13:40:09 crc kubenswrapper[4762]: I1009 13:40:09.155132 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-664b8674b4-xnl9g"] Oct 09 13:40:09 crc kubenswrapper[4762]: E1009 13:40:09.156908 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4b618f60-c8f7-4334-85cc-165b9c972adf" containerName="util" Oct 09 13:40:09 crc kubenswrapper[4762]: I1009 13:40:09.157013 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="4b618f60-c8f7-4334-85cc-165b9c972adf" containerName="util" Oct 09 13:40:09 crc kubenswrapper[4762]: E1009 13:40:09.157113 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4b618f60-c8f7-4334-85cc-165b9c972adf" containerName="extract" Oct 09 13:40:09 crc kubenswrapper[4762]: I1009 13:40:09.157200 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="4b618f60-c8f7-4334-85cc-165b9c972adf" containerName="extract" Oct 09 13:40:09 crc kubenswrapper[4762]: E1009 13:40:09.157267 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8" containerName="console" Oct 09 13:40:09 crc kubenswrapper[4762]: I1009 13:40:09.157320 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8" containerName="console" Oct 09 13:40:09 crc kubenswrapper[4762]: E1009 13:40:09.157381 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4b618f60-c8f7-4334-85cc-165b9c972adf" containerName="pull" Oct 09 13:40:09 crc kubenswrapper[4762]: I1009 13:40:09.157433 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="4b618f60-c8f7-4334-85cc-165b9c972adf" containerName="pull" Oct 09 13:40:09 crc kubenswrapper[4762]: I1009 13:40:09.157585 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="4bfd163d-b2e5-4f97-80f1-ca65a1a8f3c8" containerName="console" Oct 
09 13:40:09 crc kubenswrapper[4762]: I1009 13:40:09.157682 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="4b618f60-c8f7-4334-85cc-165b9c972adf" containerName="extract" Oct 09 13:40:09 crc kubenswrapper[4762]: I1009 13:40:09.158137 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-664b8674b4-xnl9g" Oct 09 13:40:09 crc kubenswrapper[4762]: I1009 13:40:09.160779 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Oct 09 13:40:09 crc kubenswrapper[4762]: I1009 13:40:09.160794 4762 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Oct 09 13:40:09 crc kubenswrapper[4762]: I1009 13:40:09.160956 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Oct 09 13:40:09 crc kubenswrapper[4762]: I1009 13:40:09.161291 4762 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-2rzkf" Oct 09 13:40:09 crc kubenswrapper[4762]: I1009 13:40:09.162729 4762 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Oct 09 13:40:09 crc kubenswrapper[4762]: I1009 13:40:09.188096 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-664b8674b4-xnl9g"] Oct 09 13:40:09 crc kubenswrapper[4762]: I1009 13:40:09.242916 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/22ec464f-bf5d-4242-afc0-16f41e2c4fca-webhook-cert\") pod \"metallb-operator-controller-manager-664b8674b4-xnl9g\" (UID: \"22ec464f-bf5d-4242-afc0-16f41e2c4fca\") " pod="metallb-system/metallb-operator-controller-manager-664b8674b4-xnl9g" Oct 09 13:40:09 crc kubenswrapper[4762]: I1009 13:40:09.242957 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x6l2t\" (UniqueName: \"kubernetes.io/projected/22ec464f-bf5d-4242-afc0-16f41e2c4fca-kube-api-access-x6l2t\") pod \"metallb-operator-controller-manager-664b8674b4-xnl9g\" (UID: \"22ec464f-bf5d-4242-afc0-16f41e2c4fca\") " pod="metallb-system/metallb-operator-controller-manager-664b8674b4-xnl9g" Oct 09 13:40:09 crc kubenswrapper[4762]: I1009 13:40:09.242985 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/22ec464f-bf5d-4242-afc0-16f41e2c4fca-apiservice-cert\") pod \"metallb-operator-controller-manager-664b8674b4-xnl9g\" (UID: \"22ec464f-bf5d-4242-afc0-16f41e2c4fca\") " pod="metallb-system/metallb-operator-controller-manager-664b8674b4-xnl9g" Oct 09 13:40:09 crc kubenswrapper[4762]: I1009 13:40:09.344338 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/22ec464f-bf5d-4242-afc0-16f41e2c4fca-webhook-cert\") pod \"metallb-operator-controller-manager-664b8674b4-xnl9g\" (UID: \"22ec464f-bf5d-4242-afc0-16f41e2c4fca\") " pod="metallb-system/metallb-operator-controller-manager-664b8674b4-xnl9g" Oct 09 13:40:09 crc kubenswrapper[4762]: I1009 13:40:09.344576 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x6l2t\" (UniqueName: 
\"kubernetes.io/projected/22ec464f-bf5d-4242-afc0-16f41e2c4fca-kube-api-access-x6l2t\") pod \"metallb-operator-controller-manager-664b8674b4-xnl9g\" (UID: \"22ec464f-bf5d-4242-afc0-16f41e2c4fca\") " pod="metallb-system/metallb-operator-controller-manager-664b8674b4-xnl9g" Oct 09 13:40:09 crc kubenswrapper[4762]: I1009 13:40:09.344600 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/22ec464f-bf5d-4242-afc0-16f41e2c4fca-apiservice-cert\") pod \"metallb-operator-controller-manager-664b8674b4-xnl9g\" (UID: \"22ec464f-bf5d-4242-afc0-16f41e2c4fca\") " pod="metallb-system/metallb-operator-controller-manager-664b8674b4-xnl9g" Oct 09 13:40:09 crc kubenswrapper[4762]: I1009 13:40:09.359570 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/22ec464f-bf5d-4242-afc0-16f41e2c4fca-apiservice-cert\") pod \"metallb-operator-controller-manager-664b8674b4-xnl9g\" (UID: \"22ec464f-bf5d-4242-afc0-16f41e2c4fca\") " pod="metallb-system/metallb-operator-controller-manager-664b8674b4-xnl9g" Oct 09 13:40:09 crc kubenswrapper[4762]: I1009 13:40:09.360195 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/22ec464f-bf5d-4242-afc0-16f41e2c4fca-webhook-cert\") pod \"metallb-operator-controller-manager-664b8674b4-xnl9g\" (UID: \"22ec464f-bf5d-4242-afc0-16f41e2c4fca\") " pod="metallb-system/metallb-operator-controller-manager-664b8674b4-xnl9g" Oct 09 13:40:09 crc kubenswrapper[4762]: I1009 13:40:09.374491 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x6l2t\" (UniqueName: \"kubernetes.io/projected/22ec464f-bf5d-4242-afc0-16f41e2c4fca-kube-api-access-x6l2t\") pod \"metallb-operator-controller-manager-664b8674b4-xnl9g\" (UID: \"22ec464f-bf5d-4242-afc0-16f41e2c4fca\") " pod="metallb-system/metallb-operator-controller-manager-664b8674b4-xnl9g" Oct 09 13:40:09 crc kubenswrapper[4762]: I1009 13:40:09.473365 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-664b8674b4-xnl9g" Oct 09 13:40:09 crc kubenswrapper[4762]: I1009 13:40:09.547621 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-5ff9fd58c6-tgcw5"] Oct 09 13:40:09 crc kubenswrapper[4762]: I1009 13:40:09.548504 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-5ff9fd58c6-tgcw5" Oct 09 13:40:09 crc kubenswrapper[4762]: I1009 13:40:09.550171 4762 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Oct 09 13:40:09 crc kubenswrapper[4762]: I1009 13:40:09.550516 4762 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Oct 09 13:40:09 crc kubenswrapper[4762]: I1009 13:40:09.550685 4762 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-rtwl4" Oct 09 13:40:09 crc kubenswrapper[4762]: I1009 13:40:09.594759 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-5ff9fd58c6-tgcw5"] Oct 09 13:40:09 crc kubenswrapper[4762]: I1009 13:40:09.648487 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/72251ab1-5239-4ca0-83c4-d5897e76631d-webhook-cert\") pod \"metallb-operator-webhook-server-5ff9fd58c6-tgcw5\" (UID: \"72251ab1-5239-4ca0-83c4-d5897e76631d\") " pod="metallb-system/metallb-operator-webhook-server-5ff9fd58c6-tgcw5" Oct 09 13:40:09 crc kubenswrapper[4762]: I1009 13:40:09.648547 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/72251ab1-5239-4ca0-83c4-d5897e76631d-apiservice-cert\") pod \"metallb-operator-webhook-server-5ff9fd58c6-tgcw5\" (UID: \"72251ab1-5239-4ca0-83c4-d5897e76631d\") " pod="metallb-system/metallb-operator-webhook-server-5ff9fd58c6-tgcw5" Oct 09 13:40:09 crc kubenswrapper[4762]: I1009 13:40:09.648601 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-779ng\" (UniqueName: \"kubernetes.io/projected/72251ab1-5239-4ca0-83c4-d5897e76631d-kube-api-access-779ng\") pod \"metallb-operator-webhook-server-5ff9fd58c6-tgcw5\" (UID: \"72251ab1-5239-4ca0-83c4-d5897e76631d\") " pod="metallb-system/metallb-operator-webhook-server-5ff9fd58c6-tgcw5" Oct 09 13:40:09 crc kubenswrapper[4762]: I1009 13:40:09.717869 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-664b8674b4-xnl9g"] Oct 09 13:40:09 crc kubenswrapper[4762]: I1009 13:40:09.749354 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/72251ab1-5239-4ca0-83c4-d5897e76631d-apiservice-cert\") pod \"metallb-operator-webhook-server-5ff9fd58c6-tgcw5\" (UID: \"72251ab1-5239-4ca0-83c4-d5897e76631d\") " pod="metallb-system/metallb-operator-webhook-server-5ff9fd58c6-tgcw5" Oct 09 13:40:09 crc kubenswrapper[4762]: I1009 13:40:09.749425 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-779ng\" (UniqueName: \"kubernetes.io/projected/72251ab1-5239-4ca0-83c4-d5897e76631d-kube-api-access-779ng\") pod \"metallb-operator-webhook-server-5ff9fd58c6-tgcw5\" (UID: \"72251ab1-5239-4ca0-83c4-d5897e76631d\") " pod="metallb-system/metallb-operator-webhook-server-5ff9fd58c6-tgcw5" Oct 09 13:40:09 crc kubenswrapper[4762]: I1009 13:40:09.749463 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/72251ab1-5239-4ca0-83c4-d5897e76631d-webhook-cert\") pod 
\"metallb-operator-webhook-server-5ff9fd58c6-tgcw5\" (UID: \"72251ab1-5239-4ca0-83c4-d5897e76631d\") " pod="metallb-system/metallb-operator-webhook-server-5ff9fd58c6-tgcw5" Oct 09 13:40:09 crc kubenswrapper[4762]: I1009 13:40:09.754896 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/72251ab1-5239-4ca0-83c4-d5897e76631d-webhook-cert\") pod \"metallb-operator-webhook-server-5ff9fd58c6-tgcw5\" (UID: \"72251ab1-5239-4ca0-83c4-d5897e76631d\") " pod="metallb-system/metallb-operator-webhook-server-5ff9fd58c6-tgcw5" Oct 09 13:40:09 crc kubenswrapper[4762]: I1009 13:40:09.755207 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/72251ab1-5239-4ca0-83c4-d5897e76631d-apiservice-cert\") pod \"metallb-operator-webhook-server-5ff9fd58c6-tgcw5\" (UID: \"72251ab1-5239-4ca0-83c4-d5897e76631d\") " pod="metallb-system/metallb-operator-webhook-server-5ff9fd58c6-tgcw5" Oct 09 13:40:09 crc kubenswrapper[4762]: I1009 13:40:09.768373 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-779ng\" (UniqueName: \"kubernetes.io/projected/72251ab1-5239-4ca0-83c4-d5897e76631d-kube-api-access-779ng\") pod \"metallb-operator-webhook-server-5ff9fd58c6-tgcw5\" (UID: \"72251ab1-5239-4ca0-83c4-d5897e76631d\") " pod="metallb-system/metallb-operator-webhook-server-5ff9fd58c6-tgcw5" Oct 09 13:40:09 crc kubenswrapper[4762]: I1009 13:40:09.866148 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-5ff9fd58c6-tgcw5" Oct 09 13:40:10 crc kubenswrapper[4762]: I1009 13:40:10.090690 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-5ff9fd58c6-tgcw5"] Oct 09 13:40:10 crc kubenswrapper[4762]: W1009 13:40:10.098423 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod72251ab1_5239_4ca0_83c4_d5897e76631d.slice/crio-07cb133ee39435471c847b5746f09f04a90fc9ea10990cab829cb3a4f3bc1bf7 WatchSource:0}: Error finding container 07cb133ee39435471c847b5746f09f04a90fc9ea10990cab829cb3a4f3bc1bf7: Status 404 returned error can't find the container with id 07cb133ee39435471c847b5746f09f04a90fc9ea10990cab829cb3a4f3bc1bf7 Oct 09 13:40:10 crc kubenswrapper[4762]: I1009 13:40:10.661562 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-664b8674b4-xnl9g" event={"ID":"22ec464f-bf5d-4242-afc0-16f41e2c4fca","Type":"ContainerStarted","Data":"8aa604828cf082e2d6d9426e586a4b94a92f39ff957fe9e8a0611d929e66b840"} Oct 09 13:40:10 crc kubenswrapper[4762]: I1009 13:40:10.663148 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-5ff9fd58c6-tgcw5" event={"ID":"72251ab1-5239-4ca0-83c4-d5897e76631d","Type":"ContainerStarted","Data":"07cb133ee39435471c847b5746f09f04a90fc9ea10990cab829cb3a4f3bc1bf7"} Oct 09 13:40:13 crc kubenswrapper[4762]: I1009 13:40:13.680942 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-664b8674b4-xnl9g" event={"ID":"22ec464f-bf5d-4242-afc0-16f41e2c4fca","Type":"ContainerStarted","Data":"e2cdcc26430225c841c873bc7f73a264e1bac5f1153925a5d1368242995fa102"} Oct 09 13:40:13 crc kubenswrapper[4762]: I1009 13:40:13.681521 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="" pod="metallb-system/metallb-operator-controller-manager-664b8674b4-xnl9g" Oct 09 13:40:13 crc kubenswrapper[4762]: I1009 13:40:13.699651 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-664b8674b4-xnl9g" podStartSLOduration=1.851341604 podStartE2EDuration="4.699612251s" podCreationTimestamp="2025-10-09 13:40:09 +0000 UTC" firstStartedPulling="2025-10-09 13:40:09.738983678 +0000 UTC m=+885.512774717" lastFinishedPulling="2025-10-09 13:40:12.587254325 +0000 UTC m=+888.361045364" observedRunningTime="2025-10-09 13:40:13.697256788 +0000 UTC m=+889.471047827" watchObservedRunningTime="2025-10-09 13:40:13.699612251 +0000 UTC m=+889.473403300" Oct 09 13:40:15 crc kubenswrapper[4762]: I1009 13:40:15.692389 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-5ff9fd58c6-tgcw5" event={"ID":"72251ab1-5239-4ca0-83c4-d5897e76631d","Type":"ContainerStarted","Data":"bfe0e7e0db31cbf0d32429501ca218a5c68ddcff0aa28b38d2bb54c8729af081"} Oct 09 13:40:15 crc kubenswrapper[4762]: I1009 13:40:15.692750 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-5ff9fd58c6-tgcw5" Oct 09 13:40:15 crc kubenswrapper[4762]: I1009 13:40:15.714420 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-5ff9fd58c6-tgcw5" podStartSLOduration=1.5400146399999999 podStartE2EDuration="6.714399184s" podCreationTimestamp="2025-10-09 13:40:09 +0000 UTC" firstStartedPulling="2025-10-09 13:40:10.101490264 +0000 UTC m=+885.875281303" lastFinishedPulling="2025-10-09 13:40:15.275874808 +0000 UTC m=+891.049665847" observedRunningTime="2025-10-09 13:40:15.709942936 +0000 UTC m=+891.483733985" watchObservedRunningTime="2025-10-09 13:40:15.714399184 +0000 UTC m=+891.488190223" Oct 09 13:40:18 crc kubenswrapper[4762]: I1009 13:40:18.600185 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-cq45s"] Oct 09 13:40:18 crc kubenswrapper[4762]: I1009 13:40:18.601588 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-cq45s" Oct 09 13:40:18 crc kubenswrapper[4762]: I1009 13:40:18.615323 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-cq45s"] Oct 09 13:40:18 crc kubenswrapper[4762]: I1009 13:40:18.667765 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2c3a0ee0-cb43-48ab-8912-64a12e9f8006-catalog-content\") pod \"certified-operators-cq45s\" (UID: \"2c3a0ee0-cb43-48ab-8912-64a12e9f8006\") " pod="openshift-marketplace/certified-operators-cq45s" Oct 09 13:40:18 crc kubenswrapper[4762]: I1009 13:40:18.667879 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wfqgl\" (UniqueName: \"kubernetes.io/projected/2c3a0ee0-cb43-48ab-8912-64a12e9f8006-kube-api-access-wfqgl\") pod \"certified-operators-cq45s\" (UID: \"2c3a0ee0-cb43-48ab-8912-64a12e9f8006\") " pod="openshift-marketplace/certified-operators-cq45s" Oct 09 13:40:18 crc kubenswrapper[4762]: I1009 13:40:18.667917 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2c3a0ee0-cb43-48ab-8912-64a12e9f8006-utilities\") pod \"certified-operators-cq45s\" (UID: \"2c3a0ee0-cb43-48ab-8912-64a12e9f8006\") " pod="openshift-marketplace/certified-operators-cq45s" Oct 09 13:40:18 crc kubenswrapper[4762]: I1009 13:40:18.769730 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2c3a0ee0-cb43-48ab-8912-64a12e9f8006-catalog-content\") pod \"certified-operators-cq45s\" (UID: \"2c3a0ee0-cb43-48ab-8912-64a12e9f8006\") " pod="openshift-marketplace/certified-operators-cq45s" Oct 09 13:40:18 crc kubenswrapper[4762]: I1009 13:40:18.769806 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wfqgl\" (UniqueName: \"kubernetes.io/projected/2c3a0ee0-cb43-48ab-8912-64a12e9f8006-kube-api-access-wfqgl\") pod \"certified-operators-cq45s\" (UID: \"2c3a0ee0-cb43-48ab-8912-64a12e9f8006\") " pod="openshift-marketplace/certified-operators-cq45s" Oct 09 13:40:18 crc kubenswrapper[4762]: I1009 13:40:18.769845 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2c3a0ee0-cb43-48ab-8912-64a12e9f8006-utilities\") pod \"certified-operators-cq45s\" (UID: \"2c3a0ee0-cb43-48ab-8912-64a12e9f8006\") " pod="openshift-marketplace/certified-operators-cq45s" Oct 09 13:40:18 crc kubenswrapper[4762]: I1009 13:40:18.770371 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2c3a0ee0-cb43-48ab-8912-64a12e9f8006-catalog-content\") pod \"certified-operators-cq45s\" (UID: \"2c3a0ee0-cb43-48ab-8912-64a12e9f8006\") " pod="openshift-marketplace/certified-operators-cq45s" Oct 09 13:40:18 crc kubenswrapper[4762]: I1009 13:40:18.770382 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2c3a0ee0-cb43-48ab-8912-64a12e9f8006-utilities\") pod \"certified-operators-cq45s\" (UID: \"2c3a0ee0-cb43-48ab-8912-64a12e9f8006\") " pod="openshift-marketplace/certified-operators-cq45s" Oct 09 13:40:18 crc kubenswrapper[4762]: I1009 13:40:18.788716 4762 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-wfqgl\" (UniqueName: \"kubernetes.io/projected/2c3a0ee0-cb43-48ab-8912-64a12e9f8006-kube-api-access-wfqgl\") pod \"certified-operators-cq45s\" (UID: \"2c3a0ee0-cb43-48ab-8912-64a12e9f8006\") " pod="openshift-marketplace/certified-operators-cq45s" Oct 09 13:40:18 crc kubenswrapper[4762]: I1009 13:40:18.921143 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-cq45s" Oct 09 13:40:19 crc kubenswrapper[4762]: I1009 13:40:19.199312 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-cq45s"] Oct 09 13:40:19 crc kubenswrapper[4762]: I1009 13:40:19.715883 4762 generic.go:334] "Generic (PLEG): container finished" podID="2c3a0ee0-cb43-48ab-8912-64a12e9f8006" containerID="ab3aadf6e4a1c69960a0b1b8b3c8152d35341f258c63479b184c862fd824b283" exitCode=0 Oct 09 13:40:19 crc kubenswrapper[4762]: I1009 13:40:19.715943 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cq45s" event={"ID":"2c3a0ee0-cb43-48ab-8912-64a12e9f8006","Type":"ContainerDied","Data":"ab3aadf6e4a1c69960a0b1b8b3c8152d35341f258c63479b184c862fd824b283"} Oct 09 13:40:19 crc kubenswrapper[4762]: I1009 13:40:19.715982 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cq45s" event={"ID":"2c3a0ee0-cb43-48ab-8912-64a12e9f8006","Type":"ContainerStarted","Data":"b7e00b11592f8b81238e937ad9e2d65df45b2f11d939d5e5ad932b46e5d53e1d"} Oct 09 13:40:21 crc kubenswrapper[4762]: I1009 13:40:21.727863 4762 generic.go:334] "Generic (PLEG): container finished" podID="2c3a0ee0-cb43-48ab-8912-64a12e9f8006" containerID="d4c7bbd77a7d89c5faf866c6d2a3d5eb5e0d9a20d94ce4fe93f210e658bf61ca" exitCode=0 Oct 09 13:40:21 crc kubenswrapper[4762]: I1009 13:40:21.727911 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cq45s" event={"ID":"2c3a0ee0-cb43-48ab-8912-64a12e9f8006","Type":"ContainerDied","Data":"d4c7bbd77a7d89c5faf866c6d2a3d5eb5e0d9a20d94ce4fe93f210e658bf61ca"} Oct 09 13:40:22 crc kubenswrapper[4762]: I1009 13:40:22.735844 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cq45s" event={"ID":"2c3a0ee0-cb43-48ab-8912-64a12e9f8006","Type":"ContainerStarted","Data":"d01c29029993e770bc9af55cdeb1ec4483cbec124c8ee2412d6ca2f5cedf5bcb"} Oct 09 13:40:22 crc kubenswrapper[4762]: I1009 13:40:22.752941 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-cq45s" podStartSLOduration=2.045509743 podStartE2EDuration="4.752921636s" podCreationTimestamp="2025-10-09 13:40:18 +0000 UTC" firstStartedPulling="2025-10-09 13:40:19.718010308 +0000 UTC m=+895.491801357" lastFinishedPulling="2025-10-09 13:40:22.425422211 +0000 UTC m=+898.199213250" observedRunningTime="2025-10-09 13:40:22.752718231 +0000 UTC m=+898.526509280" watchObservedRunningTime="2025-10-09 13:40:22.752921636 +0000 UTC m=+898.526712685" Oct 09 13:40:28 crc kubenswrapper[4762]: I1009 13:40:28.921385 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-cq45s" Oct 09 13:40:28 crc kubenswrapper[4762]: I1009 13:40:28.922027 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-cq45s" Oct 09 13:40:28 crc kubenswrapper[4762]: I1009 13:40:28.972812 4762 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-cq45s" Oct 09 13:40:29 crc kubenswrapper[4762]: I1009 13:40:29.808239 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-cq45s" Oct 09 13:40:29 crc kubenswrapper[4762]: I1009 13:40:29.879397 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-5ff9fd58c6-tgcw5" Oct 09 13:40:31 crc kubenswrapper[4762]: I1009 13:40:31.781685 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-cq45s"] Oct 09 13:40:31 crc kubenswrapper[4762]: I1009 13:40:31.783235 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-cq45s" podUID="2c3a0ee0-cb43-48ab-8912-64a12e9f8006" containerName="registry-server" containerID="cri-o://d01c29029993e770bc9af55cdeb1ec4483cbec124c8ee2412d6ca2f5cedf5bcb" gracePeriod=2 Oct 09 13:40:32 crc kubenswrapper[4762]: I1009 13:40:32.790433 4762 generic.go:334] "Generic (PLEG): container finished" podID="2c3a0ee0-cb43-48ab-8912-64a12e9f8006" containerID="d01c29029993e770bc9af55cdeb1ec4483cbec124c8ee2412d6ca2f5cedf5bcb" exitCode=0 Oct 09 13:40:32 crc kubenswrapper[4762]: I1009 13:40:32.790494 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cq45s" event={"ID":"2c3a0ee0-cb43-48ab-8912-64a12e9f8006","Type":"ContainerDied","Data":"d01c29029993e770bc9af55cdeb1ec4483cbec124c8ee2412d6ca2f5cedf5bcb"} Oct 09 13:40:33 crc kubenswrapper[4762]: I1009 13:40:33.269668 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-cq45s" Oct 09 13:40:33 crc kubenswrapper[4762]: I1009 13:40:33.411840 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wfqgl\" (UniqueName: \"kubernetes.io/projected/2c3a0ee0-cb43-48ab-8912-64a12e9f8006-kube-api-access-wfqgl\") pod \"2c3a0ee0-cb43-48ab-8912-64a12e9f8006\" (UID: \"2c3a0ee0-cb43-48ab-8912-64a12e9f8006\") " Oct 09 13:40:33 crc kubenswrapper[4762]: I1009 13:40:33.411951 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2c3a0ee0-cb43-48ab-8912-64a12e9f8006-catalog-content\") pod \"2c3a0ee0-cb43-48ab-8912-64a12e9f8006\" (UID: \"2c3a0ee0-cb43-48ab-8912-64a12e9f8006\") " Oct 09 13:40:33 crc kubenswrapper[4762]: I1009 13:40:33.412020 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2c3a0ee0-cb43-48ab-8912-64a12e9f8006-utilities\") pod \"2c3a0ee0-cb43-48ab-8912-64a12e9f8006\" (UID: \"2c3a0ee0-cb43-48ab-8912-64a12e9f8006\") " Oct 09 13:40:33 crc kubenswrapper[4762]: I1009 13:40:33.413167 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2c3a0ee0-cb43-48ab-8912-64a12e9f8006-utilities" (OuterVolumeSpecName: "utilities") pod "2c3a0ee0-cb43-48ab-8912-64a12e9f8006" (UID: "2c3a0ee0-cb43-48ab-8912-64a12e9f8006"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 13:40:33 crc kubenswrapper[4762]: I1009 13:40:33.418525 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2c3a0ee0-cb43-48ab-8912-64a12e9f8006-kube-api-access-wfqgl" (OuterVolumeSpecName: "kube-api-access-wfqgl") pod "2c3a0ee0-cb43-48ab-8912-64a12e9f8006" (UID: "2c3a0ee0-cb43-48ab-8912-64a12e9f8006"). InnerVolumeSpecName "kube-api-access-wfqgl". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:40:33 crc kubenswrapper[4762]: I1009 13:40:33.452283 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2c3a0ee0-cb43-48ab-8912-64a12e9f8006-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2c3a0ee0-cb43-48ab-8912-64a12e9f8006" (UID: "2c3a0ee0-cb43-48ab-8912-64a12e9f8006"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 13:40:33 crc kubenswrapper[4762]: I1009 13:40:33.513768 4762 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2c3a0ee0-cb43-48ab-8912-64a12e9f8006-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 13:40:33 crc kubenswrapper[4762]: I1009 13:40:33.513819 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wfqgl\" (UniqueName: \"kubernetes.io/projected/2c3a0ee0-cb43-48ab-8912-64a12e9f8006-kube-api-access-wfqgl\") on node \"crc\" DevicePath \"\"" Oct 09 13:40:33 crc kubenswrapper[4762]: I1009 13:40:33.513834 4762 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2c3a0ee0-cb43-48ab-8912-64a12e9f8006-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 13:40:33 crc kubenswrapper[4762]: I1009 13:40:33.788467 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-cwgdt"] Oct 09 13:40:33 crc kubenswrapper[4762]: E1009 13:40:33.789128 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c3a0ee0-cb43-48ab-8912-64a12e9f8006" containerName="registry-server" Oct 09 13:40:33 crc kubenswrapper[4762]: I1009 13:40:33.789227 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c3a0ee0-cb43-48ab-8912-64a12e9f8006" containerName="registry-server" Oct 09 13:40:33 crc kubenswrapper[4762]: E1009 13:40:33.789320 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c3a0ee0-cb43-48ab-8912-64a12e9f8006" containerName="extract-utilities" Oct 09 13:40:33 crc kubenswrapper[4762]: I1009 13:40:33.789398 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c3a0ee0-cb43-48ab-8912-64a12e9f8006" containerName="extract-utilities" Oct 09 13:40:33 crc kubenswrapper[4762]: E1009 13:40:33.789480 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c3a0ee0-cb43-48ab-8912-64a12e9f8006" containerName="extract-content" Oct 09 13:40:33 crc kubenswrapper[4762]: I1009 13:40:33.789568 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c3a0ee0-cb43-48ab-8912-64a12e9f8006" containerName="extract-content" Oct 09 13:40:33 crc kubenswrapper[4762]: I1009 13:40:33.789787 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="2c3a0ee0-cb43-48ab-8912-64a12e9f8006" containerName="registry-server" Oct 09 13:40:33 crc kubenswrapper[4762]: I1009 13:40:33.790760 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-cwgdt" Oct 09 13:40:33 crc kubenswrapper[4762]: I1009 13:40:33.798468 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cq45s" event={"ID":"2c3a0ee0-cb43-48ab-8912-64a12e9f8006","Type":"ContainerDied","Data":"b7e00b11592f8b81238e937ad9e2d65df45b2f11d939d5e5ad932b46e5d53e1d"} Oct 09 13:40:33 crc kubenswrapper[4762]: I1009 13:40:33.798520 4762 scope.go:117] "RemoveContainer" containerID="d01c29029993e770bc9af55cdeb1ec4483cbec124c8ee2412d6ca2f5cedf5bcb" Oct 09 13:40:33 crc kubenswrapper[4762]: I1009 13:40:33.798662 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-cq45s" Oct 09 13:40:33 crc kubenswrapper[4762]: I1009 13:40:33.804498 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-cwgdt"] Oct 09 13:40:33 crc kubenswrapper[4762]: I1009 13:40:33.822177 4762 scope.go:117] "RemoveContainer" containerID="d4c7bbd77a7d89c5faf866c6d2a3d5eb5e0d9a20d94ce4fe93f210e658bf61ca" Oct 09 13:40:33 crc kubenswrapper[4762]: I1009 13:40:33.844567 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-cq45s"] Oct 09 13:40:33 crc kubenswrapper[4762]: I1009 13:40:33.851706 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-cq45s"] Oct 09 13:40:33 crc kubenswrapper[4762]: I1009 13:40:33.861913 4762 scope.go:117] "RemoveContainer" containerID="ab3aadf6e4a1c69960a0b1b8b3c8152d35341f258c63479b184c862fd824b283" Oct 09 13:40:33 crc kubenswrapper[4762]: I1009 13:40:33.919505 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/047ec742-a59d-48a6-89e5-3dff1c3df714-utilities\") pod \"community-operators-cwgdt\" (UID: \"047ec742-a59d-48a6-89e5-3dff1c3df714\") " pod="openshift-marketplace/community-operators-cwgdt" Oct 09 13:40:33 crc kubenswrapper[4762]: I1009 13:40:33.919579 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rlh7d\" (UniqueName: \"kubernetes.io/projected/047ec742-a59d-48a6-89e5-3dff1c3df714-kube-api-access-rlh7d\") pod \"community-operators-cwgdt\" (UID: \"047ec742-a59d-48a6-89e5-3dff1c3df714\") " pod="openshift-marketplace/community-operators-cwgdt" Oct 09 13:40:33 crc kubenswrapper[4762]: I1009 13:40:33.919604 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/047ec742-a59d-48a6-89e5-3dff1c3df714-catalog-content\") pod \"community-operators-cwgdt\" (UID: \"047ec742-a59d-48a6-89e5-3dff1c3df714\") " pod="openshift-marketplace/community-operators-cwgdt" Oct 09 13:40:34 crc kubenswrapper[4762]: I1009 13:40:34.020556 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rlh7d\" (UniqueName: \"kubernetes.io/projected/047ec742-a59d-48a6-89e5-3dff1c3df714-kube-api-access-rlh7d\") pod \"community-operators-cwgdt\" (UID: \"047ec742-a59d-48a6-89e5-3dff1c3df714\") " pod="openshift-marketplace/community-operators-cwgdt" Oct 09 13:40:34 crc kubenswrapper[4762]: I1009 13:40:34.020607 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/047ec742-a59d-48a6-89e5-3dff1c3df714-catalog-content\") pod \"community-operators-cwgdt\" (UID: \"047ec742-a59d-48a6-89e5-3dff1c3df714\") " pod="openshift-marketplace/community-operators-cwgdt" Oct 09 13:40:34 crc kubenswrapper[4762]: I1009 13:40:34.020707 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/047ec742-a59d-48a6-89e5-3dff1c3df714-utilities\") pod \"community-operators-cwgdt\" (UID: \"047ec742-a59d-48a6-89e5-3dff1c3df714\") " pod="openshift-marketplace/community-operators-cwgdt" Oct 09 13:40:34 crc kubenswrapper[4762]: I1009 13:40:34.021229 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/047ec742-a59d-48a6-89e5-3dff1c3df714-utilities\") pod \"community-operators-cwgdt\" (UID: \"047ec742-a59d-48a6-89e5-3dff1c3df714\") " pod="openshift-marketplace/community-operators-cwgdt" Oct 09 13:40:34 crc kubenswrapper[4762]: I1009 13:40:34.021282 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/047ec742-a59d-48a6-89e5-3dff1c3df714-catalog-content\") pod \"community-operators-cwgdt\" (UID: \"047ec742-a59d-48a6-89e5-3dff1c3df714\") " pod="openshift-marketplace/community-operators-cwgdt" Oct 09 13:40:34 crc kubenswrapper[4762]: I1009 13:40:34.046266 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rlh7d\" (UniqueName: \"kubernetes.io/projected/047ec742-a59d-48a6-89e5-3dff1c3df714-kube-api-access-rlh7d\") pod \"community-operators-cwgdt\" (UID: \"047ec742-a59d-48a6-89e5-3dff1c3df714\") " pod="openshift-marketplace/community-operators-cwgdt" Oct 09 13:40:34 crc kubenswrapper[4762]: I1009 13:40:34.114427 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-cwgdt" Oct 09 13:40:34 crc kubenswrapper[4762]: I1009 13:40:34.592004 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-cwgdt"] Oct 09 13:40:34 crc kubenswrapper[4762]: I1009 13:40:34.804669 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cwgdt" event={"ID":"047ec742-a59d-48a6-89e5-3dff1c3df714","Type":"ContainerStarted","Data":"6ae61de7846c95594b3b3c0923defcaaca3d960e1cdbb4f84d67ab60d22a5245"} Oct 09 13:40:34 crc kubenswrapper[4762]: I1009 13:40:34.971561 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2c3a0ee0-cb43-48ab-8912-64a12e9f8006" path="/var/lib/kubelet/pods/2c3a0ee0-cb43-48ab-8912-64a12e9f8006/volumes" Oct 09 13:40:35 crc kubenswrapper[4762]: I1009 13:40:35.812336 4762 generic.go:334] "Generic (PLEG): container finished" podID="047ec742-a59d-48a6-89e5-3dff1c3df714" containerID="3852612f40e840980c8459a216578eb640eedfc511d9744d9ea1cc491588edab" exitCode=0 Oct 09 13:40:35 crc kubenswrapper[4762]: I1009 13:40:35.812507 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cwgdt" event={"ID":"047ec742-a59d-48a6-89e5-3dff1c3df714","Type":"ContainerDied","Data":"3852612f40e840980c8459a216578eb640eedfc511d9744d9ea1cc491588edab"} Oct 09 13:40:37 crc kubenswrapper[4762]: I1009 13:40:37.827769 4762 generic.go:334] "Generic (PLEG): container finished" podID="047ec742-a59d-48a6-89e5-3dff1c3df714" containerID="cd8fbedf164ef5c53010e752c2272dc7ecb5c43277adf73a78aad68b50176eb2" exitCode=0 Oct 09 13:40:37 crc kubenswrapper[4762]: I1009 13:40:37.827841 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cwgdt" event={"ID":"047ec742-a59d-48a6-89e5-3dff1c3df714","Type":"ContainerDied","Data":"cd8fbedf164ef5c53010e752c2272dc7ecb5c43277adf73a78aad68b50176eb2"} Oct 09 13:40:38 crc kubenswrapper[4762]: I1009 13:40:38.835130 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cwgdt" event={"ID":"047ec742-a59d-48a6-89e5-3dff1c3df714","Type":"ContainerStarted","Data":"1da79f26359aee26752c54acdbfcef87849705856e9295f190304364682367f7"} Oct 09 13:40:38 crc kubenswrapper[4762]: I1009 13:40:38.861611 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-cwgdt" podStartSLOduration=3.225315629 podStartE2EDuration="5.86156618s" podCreationTimestamp="2025-10-09 13:40:33 +0000 UTC" firstStartedPulling="2025-10-09 13:40:35.815287541 +0000 UTC m=+911.589078580" lastFinishedPulling="2025-10-09 13:40:38.451538092 +0000 UTC m=+914.225329131" observedRunningTime="2025-10-09 13:40:38.856933028 +0000 UTC m=+914.630724077" watchObservedRunningTime="2025-10-09 13:40:38.86156618 +0000 UTC m=+914.635357229" Oct 09 13:40:44 crc kubenswrapper[4762]: I1009 13:40:44.114954 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-cwgdt" Oct 09 13:40:44 crc kubenswrapper[4762]: I1009 13:40:44.116155 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-cwgdt" Oct 09 13:40:44 crc kubenswrapper[4762]: I1009 13:40:44.154716 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-cwgdt" Oct 09 13:40:44 crc kubenswrapper[4762]: I1009 
13:40:44.930004 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-cwgdt" Oct 09 13:40:46 crc kubenswrapper[4762]: I1009 13:40:46.585341 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-p6qjv"] Oct 09 13:40:46 crc kubenswrapper[4762]: I1009 13:40:46.586622 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-p6qjv" Oct 09 13:40:46 crc kubenswrapper[4762]: I1009 13:40:46.595928 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-p6qjv"] Oct 09 13:40:46 crc kubenswrapper[4762]: I1009 13:40:46.674373 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cf985dfe-e72b-4970-90a7-bf2b31e67fb4-utilities\") pod \"redhat-marketplace-p6qjv\" (UID: \"cf985dfe-e72b-4970-90a7-bf2b31e67fb4\") " pod="openshift-marketplace/redhat-marketplace-p6qjv" Oct 09 13:40:46 crc kubenswrapper[4762]: I1009 13:40:46.674459 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zjqvr\" (UniqueName: \"kubernetes.io/projected/cf985dfe-e72b-4970-90a7-bf2b31e67fb4-kube-api-access-zjqvr\") pod \"redhat-marketplace-p6qjv\" (UID: \"cf985dfe-e72b-4970-90a7-bf2b31e67fb4\") " pod="openshift-marketplace/redhat-marketplace-p6qjv" Oct 09 13:40:46 crc kubenswrapper[4762]: I1009 13:40:46.674532 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cf985dfe-e72b-4970-90a7-bf2b31e67fb4-catalog-content\") pod \"redhat-marketplace-p6qjv\" (UID: \"cf985dfe-e72b-4970-90a7-bf2b31e67fb4\") " pod="openshift-marketplace/redhat-marketplace-p6qjv" Oct 09 13:40:46 crc kubenswrapper[4762]: I1009 13:40:46.775625 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cf985dfe-e72b-4970-90a7-bf2b31e67fb4-utilities\") pod \"redhat-marketplace-p6qjv\" (UID: \"cf985dfe-e72b-4970-90a7-bf2b31e67fb4\") " pod="openshift-marketplace/redhat-marketplace-p6qjv" Oct 09 13:40:46 crc kubenswrapper[4762]: I1009 13:40:46.775731 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zjqvr\" (UniqueName: \"kubernetes.io/projected/cf985dfe-e72b-4970-90a7-bf2b31e67fb4-kube-api-access-zjqvr\") pod \"redhat-marketplace-p6qjv\" (UID: \"cf985dfe-e72b-4970-90a7-bf2b31e67fb4\") " pod="openshift-marketplace/redhat-marketplace-p6qjv" Oct 09 13:40:46 crc kubenswrapper[4762]: I1009 13:40:46.775810 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cf985dfe-e72b-4970-90a7-bf2b31e67fb4-catalog-content\") pod \"redhat-marketplace-p6qjv\" (UID: \"cf985dfe-e72b-4970-90a7-bf2b31e67fb4\") " pod="openshift-marketplace/redhat-marketplace-p6qjv" Oct 09 13:40:46 crc kubenswrapper[4762]: I1009 13:40:46.776130 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cf985dfe-e72b-4970-90a7-bf2b31e67fb4-utilities\") pod \"redhat-marketplace-p6qjv\" (UID: \"cf985dfe-e72b-4970-90a7-bf2b31e67fb4\") " pod="openshift-marketplace/redhat-marketplace-p6qjv" Oct 09 13:40:46 crc kubenswrapper[4762]: I1009 13:40:46.776222 4762 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cf985dfe-e72b-4970-90a7-bf2b31e67fb4-catalog-content\") pod \"redhat-marketplace-p6qjv\" (UID: \"cf985dfe-e72b-4970-90a7-bf2b31e67fb4\") " pod="openshift-marketplace/redhat-marketplace-p6qjv" Oct 09 13:40:46 crc kubenswrapper[4762]: I1009 13:40:46.795834 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zjqvr\" (UniqueName: \"kubernetes.io/projected/cf985dfe-e72b-4970-90a7-bf2b31e67fb4-kube-api-access-zjqvr\") pod \"redhat-marketplace-p6qjv\" (UID: \"cf985dfe-e72b-4970-90a7-bf2b31e67fb4\") " pod="openshift-marketplace/redhat-marketplace-p6qjv" Oct 09 13:40:46 crc kubenswrapper[4762]: I1009 13:40:46.928067 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-p6qjv" Oct 09 13:40:47 crc kubenswrapper[4762]: I1009 13:40:47.374155 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-p6qjv"] Oct 09 13:40:47 crc kubenswrapper[4762]: I1009 13:40:47.580514 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-cwgdt"] Oct 09 13:40:47 crc kubenswrapper[4762]: I1009 13:40:47.581089 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-cwgdt" podUID="047ec742-a59d-48a6-89e5-3dff1c3df714" containerName="registry-server" containerID="cri-o://1da79f26359aee26752c54acdbfcef87849705856e9295f190304364682367f7" gracePeriod=2 Oct 09 13:40:47 crc kubenswrapper[4762]: I1009 13:40:47.885684 4762 generic.go:334] "Generic (PLEG): container finished" podID="047ec742-a59d-48a6-89e5-3dff1c3df714" containerID="1da79f26359aee26752c54acdbfcef87849705856e9295f190304364682367f7" exitCode=0 Oct 09 13:40:47 crc kubenswrapper[4762]: I1009 13:40:47.885766 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cwgdt" event={"ID":"047ec742-a59d-48a6-89e5-3dff1c3df714","Type":"ContainerDied","Data":"1da79f26359aee26752c54acdbfcef87849705856e9295f190304364682367f7"} Oct 09 13:40:47 crc kubenswrapper[4762]: I1009 13:40:47.885793 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cwgdt" event={"ID":"047ec742-a59d-48a6-89e5-3dff1c3df714","Type":"ContainerDied","Data":"6ae61de7846c95594b3b3c0923defcaaca3d960e1cdbb4f84d67ab60d22a5245"} Oct 09 13:40:47 crc kubenswrapper[4762]: I1009 13:40:47.885804 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6ae61de7846c95594b3b3c0923defcaaca3d960e1cdbb4f84d67ab60d22a5245" Oct 09 13:40:47 crc kubenswrapper[4762]: I1009 13:40:47.887145 4762 generic.go:334] "Generic (PLEG): container finished" podID="cf985dfe-e72b-4970-90a7-bf2b31e67fb4" containerID="c2ca596ad04bd550a90495206695f1fea8c5834a390e91a5b39f9864757021f0" exitCode=0 Oct 09 13:40:47 crc kubenswrapper[4762]: I1009 13:40:47.887166 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-p6qjv" event={"ID":"cf985dfe-e72b-4970-90a7-bf2b31e67fb4","Type":"ContainerDied","Data":"c2ca596ad04bd550a90495206695f1fea8c5834a390e91a5b39f9864757021f0"} Oct 09 13:40:47 crc kubenswrapper[4762]: I1009 13:40:47.887180 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-p6qjv" 
event={"ID":"cf985dfe-e72b-4970-90a7-bf2b31e67fb4","Type":"ContainerStarted","Data":"5cd9489d0c761311c580b264858baadb34ccb29c58cfe3af1733b61ff31524fe"} Oct 09 13:40:47 crc kubenswrapper[4762]: I1009 13:40:47.924769 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-cwgdt" Oct 09 13:40:47 crc kubenswrapper[4762]: I1009 13:40:47.992097 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/047ec742-a59d-48a6-89e5-3dff1c3df714-utilities\") pod \"047ec742-a59d-48a6-89e5-3dff1c3df714\" (UID: \"047ec742-a59d-48a6-89e5-3dff1c3df714\") " Oct 09 13:40:47 crc kubenswrapper[4762]: I1009 13:40:47.992143 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rlh7d\" (UniqueName: \"kubernetes.io/projected/047ec742-a59d-48a6-89e5-3dff1c3df714-kube-api-access-rlh7d\") pod \"047ec742-a59d-48a6-89e5-3dff1c3df714\" (UID: \"047ec742-a59d-48a6-89e5-3dff1c3df714\") " Oct 09 13:40:47 crc kubenswrapper[4762]: I1009 13:40:47.992197 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/047ec742-a59d-48a6-89e5-3dff1c3df714-catalog-content\") pod \"047ec742-a59d-48a6-89e5-3dff1c3df714\" (UID: \"047ec742-a59d-48a6-89e5-3dff1c3df714\") " Oct 09 13:40:47 crc kubenswrapper[4762]: I1009 13:40:47.993266 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/047ec742-a59d-48a6-89e5-3dff1c3df714-utilities" (OuterVolumeSpecName: "utilities") pod "047ec742-a59d-48a6-89e5-3dff1c3df714" (UID: "047ec742-a59d-48a6-89e5-3dff1c3df714"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 13:40:47 crc kubenswrapper[4762]: I1009 13:40:47.999345 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/047ec742-a59d-48a6-89e5-3dff1c3df714-kube-api-access-rlh7d" (OuterVolumeSpecName: "kube-api-access-rlh7d") pod "047ec742-a59d-48a6-89e5-3dff1c3df714" (UID: "047ec742-a59d-48a6-89e5-3dff1c3df714"). InnerVolumeSpecName "kube-api-access-rlh7d". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:40:48 crc kubenswrapper[4762]: I1009 13:40:48.093218 4762 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/047ec742-a59d-48a6-89e5-3dff1c3df714-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 13:40:48 crc kubenswrapper[4762]: I1009 13:40:48.093246 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rlh7d\" (UniqueName: \"kubernetes.io/projected/047ec742-a59d-48a6-89e5-3dff1c3df714-kube-api-access-rlh7d\") on node \"crc\" DevicePath \"\"" Oct 09 13:40:48 crc kubenswrapper[4762]: I1009 13:40:48.831992 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/047ec742-a59d-48a6-89e5-3dff1c3df714-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "047ec742-a59d-48a6-89e5-3dff1c3df714" (UID: "047ec742-a59d-48a6-89e5-3dff1c3df714"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 13:40:48 crc kubenswrapper[4762]: I1009 13:40:48.893037 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-cwgdt" Oct 09 13:40:48 crc kubenswrapper[4762]: I1009 13:40:48.904346 4762 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/047ec742-a59d-48a6-89e5-3dff1c3df714-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 13:40:48 crc kubenswrapper[4762]: I1009 13:40:48.928001 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-cwgdt"] Oct 09 13:40:48 crc kubenswrapper[4762]: I1009 13:40:48.932643 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-cwgdt"] Oct 09 13:40:48 crc kubenswrapper[4762]: I1009 13:40:48.976021 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="047ec742-a59d-48a6-89e5-3dff1c3df714" path="/var/lib/kubelet/pods/047ec742-a59d-48a6-89e5-3dff1c3df714/volumes" Oct 09 13:40:49 crc kubenswrapper[4762]: I1009 13:40:49.476593 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-664b8674b4-xnl9g" Oct 09 13:40:50 crc kubenswrapper[4762]: I1009 13:40:50.116752 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-64bf5d555-kt8dm"] Oct 09 13:40:50 crc kubenswrapper[4762]: E1009 13:40:50.117361 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="047ec742-a59d-48a6-89e5-3dff1c3df714" containerName="registry-server" Oct 09 13:40:50 crc kubenswrapper[4762]: I1009 13:40:50.117379 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="047ec742-a59d-48a6-89e5-3dff1c3df714" containerName="registry-server" Oct 09 13:40:50 crc kubenswrapper[4762]: E1009 13:40:50.117393 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="047ec742-a59d-48a6-89e5-3dff1c3df714" containerName="extract-utilities" Oct 09 13:40:50 crc kubenswrapper[4762]: I1009 13:40:50.117400 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="047ec742-a59d-48a6-89e5-3dff1c3df714" containerName="extract-utilities" Oct 09 13:40:50 crc kubenswrapper[4762]: E1009 13:40:50.117410 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="047ec742-a59d-48a6-89e5-3dff1c3df714" containerName="extract-content" Oct 09 13:40:50 crc kubenswrapper[4762]: I1009 13:40:50.117418 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="047ec742-a59d-48a6-89e5-3dff1c3df714" containerName="extract-content" Oct 09 13:40:50 crc kubenswrapper[4762]: I1009 13:40:50.117545 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="047ec742-a59d-48a6-89e5-3dff1c3df714" containerName="registry-server" Oct 09 13:40:50 crc kubenswrapper[4762]: I1009 13:40:50.118051 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-kt8dm" Oct 09 13:40:50 crc kubenswrapper[4762]: I1009 13:40:50.122981 4762 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Oct 09 13:40:50 crc kubenswrapper[4762]: I1009 13:40:50.124043 4762 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-mgnz6" Oct 09 13:40:50 crc kubenswrapper[4762]: I1009 13:40:50.129997 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-72zw7"] Oct 09 13:40:50 crc kubenswrapper[4762]: I1009 13:40:50.132704 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-72zw7" Oct 09 13:40:50 crc kubenswrapper[4762]: I1009 13:40:50.134725 4762 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Oct 09 13:40:50 crc kubenswrapper[4762]: I1009 13:40:50.134751 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-64bf5d555-kt8dm"] Oct 09 13:40:50 crc kubenswrapper[4762]: I1009 13:40:50.135044 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Oct 09 13:40:50 crc kubenswrapper[4762]: I1009 13:40:50.208132 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-p7rwn"] Oct 09 13:40:50 crc kubenswrapper[4762]: I1009 13:40:50.209248 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-p7rwn" Oct 09 13:40:50 crc kubenswrapper[4762]: I1009 13:40:50.211559 4762 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Oct 09 13:40:50 crc kubenswrapper[4762]: I1009 13:40:50.211734 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Oct 09 13:40:50 crc kubenswrapper[4762]: I1009 13:40:50.211784 4762 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Oct 09 13:40:50 crc kubenswrapper[4762]: I1009 13:40:50.215188 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-68d546b9d8-5jcj5"] Oct 09 13:40:50 crc kubenswrapper[4762]: I1009 13:40:50.216041 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-68d546b9d8-5jcj5" Oct 09 13:40:50 crc kubenswrapper[4762]: I1009 13:40:50.221174 4762 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-xhnht" Oct 09 13:40:50 crc kubenswrapper[4762]: I1009 13:40:50.221398 4762 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Oct 09 13:40:50 crc kubenswrapper[4762]: I1009 13:40:50.222230 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0831f7e0-0729-47bc-b78d-cd6594ac3102-metrics-certs\") pod \"frr-k8s-72zw7\" (UID: \"0831f7e0-0729-47bc-b78d-cd6594ac3102\") " pod="metallb-system/frr-k8s-72zw7" Oct 09 13:40:50 crc kubenswrapper[4762]: I1009 13:40:50.222283 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6stlw\" (UniqueName: \"kubernetes.io/projected/0831f7e0-0729-47bc-b78d-cd6594ac3102-kube-api-access-6stlw\") pod \"frr-k8s-72zw7\" (UID: \"0831f7e0-0729-47bc-b78d-cd6594ac3102\") " pod="metallb-system/frr-k8s-72zw7" Oct 09 13:40:50 crc kubenswrapper[4762]: I1009 13:40:50.222308 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/0831f7e0-0729-47bc-b78d-cd6594ac3102-metrics\") pod \"frr-k8s-72zw7\" (UID: \"0831f7e0-0729-47bc-b78d-cd6594ac3102\") " pod="metallb-system/frr-k8s-72zw7" Oct 09 13:40:50 crc kubenswrapper[4762]: I1009 13:40:50.222324 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/04ba99a6-67e1-4aba-a037-2c47a60a992e-cert\") pod \"frr-k8s-webhook-server-64bf5d555-kt8dm\" (UID: 
\"04ba99a6-67e1-4aba-a037-2c47a60a992e\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-kt8dm" Oct 09 13:40:50 crc kubenswrapper[4762]: I1009 13:40:50.222458 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/0831f7e0-0729-47bc-b78d-cd6594ac3102-reloader\") pod \"frr-k8s-72zw7\" (UID: \"0831f7e0-0729-47bc-b78d-cd6594ac3102\") " pod="metallb-system/frr-k8s-72zw7" Oct 09 13:40:50 crc kubenswrapper[4762]: I1009 13:40:50.222521 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bk65s\" (UniqueName: \"kubernetes.io/projected/04ba99a6-67e1-4aba-a037-2c47a60a992e-kube-api-access-bk65s\") pod \"frr-k8s-webhook-server-64bf5d555-kt8dm\" (UID: \"04ba99a6-67e1-4aba-a037-2c47a60a992e\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-kt8dm" Oct 09 13:40:50 crc kubenswrapper[4762]: I1009 13:40:50.222612 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/0831f7e0-0729-47bc-b78d-cd6594ac3102-frr-startup\") pod \"frr-k8s-72zw7\" (UID: \"0831f7e0-0729-47bc-b78d-cd6594ac3102\") " pod="metallb-system/frr-k8s-72zw7" Oct 09 13:40:50 crc kubenswrapper[4762]: I1009 13:40:50.222687 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/0831f7e0-0729-47bc-b78d-cd6594ac3102-frr-sockets\") pod \"frr-k8s-72zw7\" (UID: \"0831f7e0-0729-47bc-b78d-cd6594ac3102\") " pod="metallb-system/frr-k8s-72zw7" Oct 09 13:40:50 crc kubenswrapper[4762]: I1009 13:40:50.222759 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/0831f7e0-0729-47bc-b78d-cd6594ac3102-frr-conf\") pod \"frr-k8s-72zw7\" (UID: \"0831f7e0-0729-47bc-b78d-cd6594ac3102\") " pod="metallb-system/frr-k8s-72zw7" Oct 09 13:40:50 crc kubenswrapper[4762]: I1009 13:40:50.231904 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-68d546b9d8-5jcj5"] Oct 09 13:40:50 crc kubenswrapper[4762]: I1009 13:40:50.323512 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c19ba7bc-b0a1-4e8b-98e5-56bd395aacc8-metrics-certs\") pod \"speaker-p7rwn\" (UID: \"c19ba7bc-b0a1-4e8b-98e5-56bd395aacc8\") " pod="metallb-system/speaker-p7rwn" Oct 09 13:40:50 crc kubenswrapper[4762]: I1009 13:40:50.323576 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/c19ba7bc-b0a1-4e8b-98e5-56bd395aacc8-memberlist\") pod \"speaker-p7rwn\" (UID: \"c19ba7bc-b0a1-4e8b-98e5-56bd395aacc8\") " pod="metallb-system/speaker-p7rwn" Oct 09 13:40:50 crc kubenswrapper[4762]: I1009 13:40:50.323609 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/0831f7e0-0729-47bc-b78d-cd6594ac3102-reloader\") pod \"frr-k8s-72zw7\" (UID: \"0831f7e0-0729-47bc-b78d-cd6594ac3102\") " pod="metallb-system/frr-k8s-72zw7" Oct 09 13:40:50 crc kubenswrapper[4762]: I1009 13:40:50.323652 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bk65s\" (UniqueName: 
\"kubernetes.io/projected/04ba99a6-67e1-4aba-a037-2c47a60a992e-kube-api-access-bk65s\") pod \"frr-k8s-webhook-server-64bf5d555-kt8dm\" (UID: \"04ba99a6-67e1-4aba-a037-2c47a60a992e\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-kt8dm" Oct 09 13:40:50 crc kubenswrapper[4762]: I1009 13:40:50.323671 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/0831f7e0-0729-47bc-b78d-cd6594ac3102-frr-startup\") pod \"frr-k8s-72zw7\" (UID: \"0831f7e0-0729-47bc-b78d-cd6594ac3102\") " pod="metallb-system/frr-k8s-72zw7" Oct 09 13:40:50 crc kubenswrapper[4762]: I1009 13:40:50.323693 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/0831f7e0-0729-47bc-b78d-cd6594ac3102-frr-sockets\") pod \"frr-k8s-72zw7\" (UID: \"0831f7e0-0729-47bc-b78d-cd6594ac3102\") " pod="metallb-system/frr-k8s-72zw7" Oct 09 13:40:50 crc kubenswrapper[4762]: I1009 13:40:50.323720 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/0831f7e0-0729-47bc-b78d-cd6594ac3102-frr-conf\") pod \"frr-k8s-72zw7\" (UID: \"0831f7e0-0729-47bc-b78d-cd6594ac3102\") " pod="metallb-system/frr-k8s-72zw7" Oct 09 13:40:50 crc kubenswrapper[4762]: I1009 13:40:50.323737 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h8ffh\" (UniqueName: \"kubernetes.io/projected/c19ba7bc-b0a1-4e8b-98e5-56bd395aacc8-kube-api-access-h8ffh\") pod \"speaker-p7rwn\" (UID: \"c19ba7bc-b0a1-4e8b-98e5-56bd395aacc8\") " pod="metallb-system/speaker-p7rwn" Oct 09 13:40:50 crc kubenswrapper[4762]: I1009 13:40:50.323762 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/c19ba7bc-b0a1-4e8b-98e5-56bd395aacc8-metallb-excludel2\") pod \"speaker-p7rwn\" (UID: \"c19ba7bc-b0a1-4e8b-98e5-56bd395aacc8\") " pod="metallb-system/speaker-p7rwn" Oct 09 13:40:50 crc kubenswrapper[4762]: I1009 13:40:50.323778 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0831f7e0-0729-47bc-b78d-cd6594ac3102-metrics-certs\") pod \"frr-k8s-72zw7\" (UID: \"0831f7e0-0729-47bc-b78d-cd6594ac3102\") " pod="metallb-system/frr-k8s-72zw7" Oct 09 13:40:50 crc kubenswrapper[4762]: I1009 13:40:50.323804 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/7ba03cf9-3a22-4979-8c03-3f533b7e556a-cert\") pod \"controller-68d546b9d8-5jcj5\" (UID: \"7ba03cf9-3a22-4979-8c03-3f533b7e556a\") " pod="metallb-system/controller-68d546b9d8-5jcj5" Oct 09 13:40:50 crc kubenswrapper[4762]: I1009 13:40:50.323825 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6stlw\" (UniqueName: \"kubernetes.io/projected/0831f7e0-0729-47bc-b78d-cd6594ac3102-kube-api-access-6stlw\") pod \"frr-k8s-72zw7\" (UID: \"0831f7e0-0729-47bc-b78d-cd6594ac3102\") " pod="metallb-system/frr-k8s-72zw7" Oct 09 13:40:50 crc kubenswrapper[4762]: I1009 13:40:50.323841 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7ba03cf9-3a22-4979-8c03-3f533b7e556a-metrics-certs\") pod \"controller-68d546b9d8-5jcj5\" (UID: 
\"7ba03cf9-3a22-4979-8c03-3f533b7e556a\") " pod="metallb-system/controller-68d546b9d8-5jcj5" Oct 09 13:40:50 crc kubenswrapper[4762]: I1009 13:40:50.323858 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ldbcr\" (UniqueName: \"kubernetes.io/projected/7ba03cf9-3a22-4979-8c03-3f533b7e556a-kube-api-access-ldbcr\") pod \"controller-68d546b9d8-5jcj5\" (UID: \"7ba03cf9-3a22-4979-8c03-3f533b7e556a\") " pod="metallb-system/controller-68d546b9d8-5jcj5" Oct 09 13:40:50 crc kubenswrapper[4762]: I1009 13:40:50.323872 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/0831f7e0-0729-47bc-b78d-cd6594ac3102-metrics\") pod \"frr-k8s-72zw7\" (UID: \"0831f7e0-0729-47bc-b78d-cd6594ac3102\") " pod="metallb-system/frr-k8s-72zw7" Oct 09 13:40:50 crc kubenswrapper[4762]: I1009 13:40:50.323886 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/04ba99a6-67e1-4aba-a037-2c47a60a992e-cert\") pod \"frr-k8s-webhook-server-64bf5d555-kt8dm\" (UID: \"04ba99a6-67e1-4aba-a037-2c47a60a992e\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-kt8dm" Oct 09 13:40:50 crc kubenswrapper[4762]: I1009 13:40:50.324086 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/0831f7e0-0729-47bc-b78d-cd6594ac3102-reloader\") pod \"frr-k8s-72zw7\" (UID: \"0831f7e0-0729-47bc-b78d-cd6594ac3102\") " pod="metallb-system/frr-k8s-72zw7" Oct 09 13:40:50 crc kubenswrapper[4762]: I1009 13:40:50.324739 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/0831f7e0-0729-47bc-b78d-cd6594ac3102-frr-conf\") pod \"frr-k8s-72zw7\" (UID: \"0831f7e0-0729-47bc-b78d-cd6594ac3102\") " pod="metallb-system/frr-k8s-72zw7" Oct 09 13:40:50 crc kubenswrapper[4762]: I1009 13:40:50.324765 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/0831f7e0-0729-47bc-b78d-cd6594ac3102-metrics\") pod \"frr-k8s-72zw7\" (UID: \"0831f7e0-0729-47bc-b78d-cd6594ac3102\") " pod="metallb-system/frr-k8s-72zw7" Oct 09 13:40:50 crc kubenswrapper[4762]: I1009 13:40:50.325025 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/0831f7e0-0729-47bc-b78d-cd6594ac3102-frr-sockets\") pod \"frr-k8s-72zw7\" (UID: \"0831f7e0-0729-47bc-b78d-cd6594ac3102\") " pod="metallb-system/frr-k8s-72zw7" Oct 09 13:40:50 crc kubenswrapper[4762]: I1009 13:40:50.325618 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/0831f7e0-0729-47bc-b78d-cd6594ac3102-frr-startup\") pod \"frr-k8s-72zw7\" (UID: \"0831f7e0-0729-47bc-b78d-cd6594ac3102\") " pod="metallb-system/frr-k8s-72zw7" Oct 09 13:40:50 crc kubenswrapper[4762]: I1009 13:40:50.330765 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/04ba99a6-67e1-4aba-a037-2c47a60a992e-cert\") pod \"frr-k8s-webhook-server-64bf5d555-kt8dm\" (UID: \"04ba99a6-67e1-4aba-a037-2c47a60a992e\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-kt8dm" Oct 09 13:40:50 crc kubenswrapper[4762]: I1009 13:40:50.338000 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: 
\"kubernetes.io/secret/0831f7e0-0729-47bc-b78d-cd6594ac3102-metrics-certs\") pod \"frr-k8s-72zw7\" (UID: \"0831f7e0-0729-47bc-b78d-cd6594ac3102\") " pod="metallb-system/frr-k8s-72zw7" Oct 09 13:40:50 crc kubenswrapper[4762]: I1009 13:40:50.341882 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bk65s\" (UniqueName: \"kubernetes.io/projected/04ba99a6-67e1-4aba-a037-2c47a60a992e-kube-api-access-bk65s\") pod \"frr-k8s-webhook-server-64bf5d555-kt8dm\" (UID: \"04ba99a6-67e1-4aba-a037-2c47a60a992e\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-kt8dm" Oct 09 13:40:50 crc kubenswrapper[4762]: I1009 13:40:50.343999 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6stlw\" (UniqueName: \"kubernetes.io/projected/0831f7e0-0729-47bc-b78d-cd6594ac3102-kube-api-access-6stlw\") pod \"frr-k8s-72zw7\" (UID: \"0831f7e0-0729-47bc-b78d-cd6594ac3102\") " pod="metallb-system/frr-k8s-72zw7" Oct 09 13:40:50 crc kubenswrapper[4762]: I1009 13:40:50.425508 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c19ba7bc-b0a1-4e8b-98e5-56bd395aacc8-metrics-certs\") pod \"speaker-p7rwn\" (UID: \"c19ba7bc-b0a1-4e8b-98e5-56bd395aacc8\") " pod="metallb-system/speaker-p7rwn" Oct 09 13:40:50 crc kubenswrapper[4762]: I1009 13:40:50.425573 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/c19ba7bc-b0a1-4e8b-98e5-56bd395aacc8-memberlist\") pod \"speaker-p7rwn\" (UID: \"c19ba7bc-b0a1-4e8b-98e5-56bd395aacc8\") " pod="metallb-system/speaker-p7rwn" Oct 09 13:40:50 crc kubenswrapper[4762]: I1009 13:40:50.425646 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h8ffh\" (UniqueName: \"kubernetes.io/projected/c19ba7bc-b0a1-4e8b-98e5-56bd395aacc8-kube-api-access-h8ffh\") pod \"speaker-p7rwn\" (UID: \"c19ba7bc-b0a1-4e8b-98e5-56bd395aacc8\") " pod="metallb-system/speaker-p7rwn" Oct 09 13:40:50 crc kubenswrapper[4762]: I1009 13:40:50.425683 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/c19ba7bc-b0a1-4e8b-98e5-56bd395aacc8-metallb-excludel2\") pod \"speaker-p7rwn\" (UID: \"c19ba7bc-b0a1-4e8b-98e5-56bd395aacc8\") " pod="metallb-system/speaker-p7rwn" Oct 09 13:40:50 crc kubenswrapper[4762]: E1009 13:40:50.425817 4762 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Oct 09 13:40:50 crc kubenswrapper[4762]: E1009 13:40:50.425897 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c19ba7bc-b0a1-4e8b-98e5-56bd395aacc8-memberlist podName:c19ba7bc-b0a1-4e8b-98e5-56bd395aacc8 nodeName:}" failed. No retries permitted until 2025-10-09 13:40:50.925876177 +0000 UTC m=+926.699667216 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/c19ba7bc-b0a1-4e8b-98e5-56bd395aacc8-memberlist") pod "speaker-p7rwn" (UID: "c19ba7bc-b0a1-4e8b-98e5-56bd395aacc8") : secret "metallb-memberlist" not found Oct 09 13:40:50 crc kubenswrapper[4762]: I1009 13:40:50.426742 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/c19ba7bc-b0a1-4e8b-98e5-56bd395aacc8-metallb-excludel2\") pod \"speaker-p7rwn\" (UID: \"c19ba7bc-b0a1-4e8b-98e5-56bd395aacc8\") " pod="metallb-system/speaker-p7rwn" Oct 09 13:40:50 crc kubenswrapper[4762]: I1009 13:40:50.425723 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/7ba03cf9-3a22-4979-8c03-3f533b7e556a-cert\") pod \"controller-68d546b9d8-5jcj5\" (UID: \"7ba03cf9-3a22-4979-8c03-3f533b7e556a\") " pod="metallb-system/controller-68d546b9d8-5jcj5" Oct 09 13:40:50 crc kubenswrapper[4762]: I1009 13:40:50.426840 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7ba03cf9-3a22-4979-8c03-3f533b7e556a-metrics-certs\") pod \"controller-68d546b9d8-5jcj5\" (UID: \"7ba03cf9-3a22-4979-8c03-3f533b7e556a\") " pod="metallb-system/controller-68d546b9d8-5jcj5" Oct 09 13:40:50 crc kubenswrapper[4762]: I1009 13:40:50.426865 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ldbcr\" (UniqueName: \"kubernetes.io/projected/7ba03cf9-3a22-4979-8c03-3f533b7e556a-kube-api-access-ldbcr\") pod \"controller-68d546b9d8-5jcj5\" (UID: \"7ba03cf9-3a22-4979-8c03-3f533b7e556a\") " pod="metallb-system/controller-68d546b9d8-5jcj5" Oct 09 13:40:50 crc kubenswrapper[4762]: E1009 13:40:50.426937 4762 secret.go:188] Couldn't get secret metallb-system/controller-certs-secret: secret "controller-certs-secret" not found Oct 09 13:40:50 crc kubenswrapper[4762]: E1009 13:40:50.426969 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7ba03cf9-3a22-4979-8c03-3f533b7e556a-metrics-certs podName:7ba03cf9-3a22-4979-8c03-3f533b7e556a nodeName:}" failed. No retries permitted until 2025-10-09 13:40:50.926957256 +0000 UTC m=+926.700748295 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/7ba03cf9-3a22-4979-8c03-3f533b7e556a-metrics-certs") pod "controller-68d546b9d8-5jcj5" (UID: "7ba03cf9-3a22-4979-8c03-3f533b7e556a") : secret "controller-certs-secret" not found Oct 09 13:40:50 crc kubenswrapper[4762]: I1009 13:40:50.428974 4762 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Oct 09 13:40:50 crc kubenswrapper[4762]: I1009 13:40:50.429331 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c19ba7bc-b0a1-4e8b-98e5-56bd395aacc8-metrics-certs\") pod \"speaker-p7rwn\" (UID: \"c19ba7bc-b0a1-4e8b-98e5-56bd395aacc8\") " pod="metallb-system/speaker-p7rwn" Oct 09 13:40:50 crc kubenswrapper[4762]: I1009 13:40:50.430339 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-kt8dm" Oct 09 13:40:50 crc kubenswrapper[4762]: I1009 13:40:50.440204 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/7ba03cf9-3a22-4979-8c03-3f533b7e556a-cert\") pod \"controller-68d546b9d8-5jcj5\" (UID: \"7ba03cf9-3a22-4979-8c03-3f533b7e556a\") " pod="metallb-system/controller-68d546b9d8-5jcj5" Oct 09 13:40:50 crc kubenswrapper[4762]: I1009 13:40:50.445112 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-72zw7" Oct 09 13:40:50 crc kubenswrapper[4762]: I1009 13:40:50.445932 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h8ffh\" (UniqueName: \"kubernetes.io/projected/c19ba7bc-b0a1-4e8b-98e5-56bd395aacc8-kube-api-access-h8ffh\") pod \"speaker-p7rwn\" (UID: \"c19ba7bc-b0a1-4e8b-98e5-56bd395aacc8\") " pod="metallb-system/speaker-p7rwn" Oct 09 13:40:50 crc kubenswrapper[4762]: I1009 13:40:50.453368 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ldbcr\" (UniqueName: \"kubernetes.io/projected/7ba03cf9-3a22-4979-8c03-3f533b7e556a-kube-api-access-ldbcr\") pod \"controller-68d546b9d8-5jcj5\" (UID: \"7ba03cf9-3a22-4979-8c03-3f533b7e556a\") " pod="metallb-system/controller-68d546b9d8-5jcj5" Oct 09 13:40:50 crc kubenswrapper[4762]: I1009 13:40:50.644495 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-64bf5d555-kt8dm"] Oct 09 13:40:50 crc kubenswrapper[4762]: W1009 13:40:50.652912 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod04ba99a6_67e1_4aba_a037_2c47a60a992e.slice/crio-8da75e66c8b103f2c65557719440a0035a268a39561d5eb6f74ee3d094201f28 WatchSource:0}: Error finding container 8da75e66c8b103f2c65557719440a0035a268a39561d5eb6f74ee3d094201f28: Status 404 returned error can't find the container with id 8da75e66c8b103f2c65557719440a0035a268a39561d5eb6f74ee3d094201f28 Oct 09 13:40:50 crc kubenswrapper[4762]: I1009 13:40:50.904568 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-72zw7" event={"ID":"0831f7e0-0729-47bc-b78d-cd6594ac3102","Type":"ContainerStarted","Data":"0dbb92a409aab12cb67d675eb24642803b637f9592f350659f2a9fed5a665b01"} Oct 09 13:40:50 crc kubenswrapper[4762]: I1009 13:40:50.907260 4762 generic.go:334] "Generic (PLEG): container finished" podID="cf985dfe-e72b-4970-90a7-bf2b31e67fb4" containerID="a7ea67e48a769a49f29978989bee7fc1195478e0d8fba2469bfe04d80cdea4d7" exitCode=0 Oct 09 13:40:50 crc kubenswrapper[4762]: I1009 13:40:50.907308 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-p6qjv" event={"ID":"cf985dfe-e72b-4970-90a7-bf2b31e67fb4","Type":"ContainerDied","Data":"a7ea67e48a769a49f29978989bee7fc1195478e0d8fba2469bfe04d80cdea4d7"} Oct 09 13:40:50 crc kubenswrapper[4762]: I1009 13:40:50.908492 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-kt8dm" event={"ID":"04ba99a6-67e1-4aba-a037-2c47a60a992e","Type":"ContainerStarted","Data":"8da75e66c8b103f2c65557719440a0035a268a39561d5eb6f74ee3d094201f28"} Oct 09 13:40:50 crc kubenswrapper[4762]: I1009 13:40:50.940300 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: 
\"kubernetes.io/secret/7ba03cf9-3a22-4979-8c03-3f533b7e556a-metrics-certs\") pod \"controller-68d546b9d8-5jcj5\" (UID: \"7ba03cf9-3a22-4979-8c03-3f533b7e556a\") " pod="metallb-system/controller-68d546b9d8-5jcj5" Oct 09 13:40:50 crc kubenswrapper[4762]: I1009 13:40:50.940409 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/c19ba7bc-b0a1-4e8b-98e5-56bd395aacc8-memberlist\") pod \"speaker-p7rwn\" (UID: \"c19ba7bc-b0a1-4e8b-98e5-56bd395aacc8\") " pod="metallb-system/speaker-p7rwn" Oct 09 13:40:50 crc kubenswrapper[4762]: E1009 13:40:50.940589 4762 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Oct 09 13:40:50 crc kubenswrapper[4762]: E1009 13:40:50.940902 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c19ba7bc-b0a1-4e8b-98e5-56bd395aacc8-memberlist podName:c19ba7bc-b0a1-4e8b-98e5-56bd395aacc8 nodeName:}" failed. No retries permitted until 2025-10-09 13:40:51.940876696 +0000 UTC m=+927.714667735 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/c19ba7bc-b0a1-4e8b-98e5-56bd395aacc8-memberlist") pod "speaker-p7rwn" (UID: "c19ba7bc-b0a1-4e8b-98e5-56bd395aacc8") : secret "metallb-memberlist" not found Oct 09 13:40:50 crc kubenswrapper[4762]: I1009 13:40:50.945737 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7ba03cf9-3a22-4979-8c03-3f533b7e556a-metrics-certs\") pod \"controller-68d546b9d8-5jcj5\" (UID: \"7ba03cf9-3a22-4979-8c03-3f533b7e556a\") " pod="metallb-system/controller-68d546b9d8-5jcj5" Oct 09 13:40:51 crc kubenswrapper[4762]: I1009 13:40:51.131229 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-68d546b9d8-5jcj5" Oct 09 13:40:51 crc kubenswrapper[4762]: I1009 13:40:51.308785 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-68d546b9d8-5jcj5"] Oct 09 13:40:51 crc kubenswrapper[4762]: W1009 13:40:51.318906 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7ba03cf9_3a22_4979_8c03_3f533b7e556a.slice/crio-9c89765c27e0dd3d9441cbc19f55b3bdfbc04df983e45655a10713e04c835d92 WatchSource:0}: Error finding container 9c89765c27e0dd3d9441cbc19f55b3bdfbc04df983e45655a10713e04c835d92: Status 404 returned error can't find the container with id 9c89765c27e0dd3d9441cbc19f55b3bdfbc04df983e45655a10713e04c835d92 Oct 09 13:40:51 crc kubenswrapper[4762]: I1009 13:40:51.916992 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-68d546b9d8-5jcj5" event={"ID":"7ba03cf9-3a22-4979-8c03-3f533b7e556a","Type":"ContainerStarted","Data":"d312ca90de3ab4f2a873538ae0dbab50994df84f41cf9638327fb38ec35a123c"} Oct 09 13:40:51 crc kubenswrapper[4762]: I1009 13:40:51.917476 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-68d546b9d8-5jcj5" event={"ID":"7ba03cf9-3a22-4979-8c03-3f533b7e556a","Type":"ContainerStarted","Data":"9f13a7aa5c90e3afec8a4a24fc256ea478a4509e57cbb981db3f63f9cc562c32"} Oct 09 13:40:51 crc kubenswrapper[4762]: I1009 13:40:51.917543 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-68d546b9d8-5jcj5" event={"ID":"7ba03cf9-3a22-4979-8c03-3f533b7e556a","Type":"ContainerStarted","Data":"9c89765c27e0dd3d9441cbc19f55b3bdfbc04df983e45655a10713e04c835d92"} Oct 09 13:40:51 crc kubenswrapper[4762]: I1009 13:40:51.918051 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-68d546b9d8-5jcj5" Oct 09 13:40:51 crc kubenswrapper[4762]: I1009 13:40:51.918952 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-p6qjv" event={"ID":"cf985dfe-e72b-4970-90a7-bf2b31e67fb4","Type":"ContainerStarted","Data":"46d5a50b2ebc6433d00bacef24949bdd544235a554b3b01d30c59736caa7543c"} Oct 09 13:40:51 crc kubenswrapper[4762]: I1009 13:40:51.936493 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-68d546b9d8-5jcj5" podStartSLOduration=1.936475659 podStartE2EDuration="1.936475659s" podCreationTimestamp="2025-10-09 13:40:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:40:51.932143704 +0000 UTC m=+927.705934743" watchObservedRunningTime="2025-10-09 13:40:51.936475659 +0000 UTC m=+927.710266688" Oct 09 13:40:51 crc kubenswrapper[4762]: I1009 13:40:51.954557 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/c19ba7bc-b0a1-4e8b-98e5-56bd395aacc8-memberlist\") pod \"speaker-p7rwn\" (UID: \"c19ba7bc-b0a1-4e8b-98e5-56bd395aacc8\") " pod="metallb-system/speaker-p7rwn" Oct 09 13:40:51 crc kubenswrapper[4762]: I1009 13:40:51.954906 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-p6qjv" podStartSLOduration=2.40789335 podStartE2EDuration="5.954890378s" podCreationTimestamp="2025-10-09 13:40:46 +0000 UTC" firstStartedPulling="2025-10-09 13:40:47.888409392 +0000 UTC m=+923.662200431" 
lastFinishedPulling="2025-10-09 13:40:51.43540642 +0000 UTC m=+927.209197459" observedRunningTime="2025-10-09 13:40:51.9519659 +0000 UTC m=+927.725756949" watchObservedRunningTime="2025-10-09 13:40:51.954890378 +0000 UTC m=+927.728681417" Oct 09 13:40:51 crc kubenswrapper[4762]: I1009 13:40:51.959943 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/c19ba7bc-b0a1-4e8b-98e5-56bd395aacc8-memberlist\") pod \"speaker-p7rwn\" (UID: \"c19ba7bc-b0a1-4e8b-98e5-56bd395aacc8\") " pod="metallb-system/speaker-p7rwn" Oct 09 13:40:52 crc kubenswrapper[4762]: I1009 13:40:52.023947 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-p7rwn" Oct 09 13:40:52 crc kubenswrapper[4762]: W1009 13:40:52.048482 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc19ba7bc_b0a1_4e8b_98e5_56bd395aacc8.slice/crio-a73fe61eb423653401e84ad405d7fe52f82d79b0d791f1862f55e8f1b5a015aa WatchSource:0}: Error finding container a73fe61eb423653401e84ad405d7fe52f82d79b0d791f1862f55e8f1b5a015aa: Status 404 returned error can't find the container with id a73fe61eb423653401e84ad405d7fe52f82d79b0d791f1862f55e8f1b5a015aa Oct 09 13:40:52 crc kubenswrapper[4762]: I1009 13:40:52.931894 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-p7rwn" event={"ID":"c19ba7bc-b0a1-4e8b-98e5-56bd395aacc8","Type":"ContainerStarted","Data":"7eec3087b93de490386ce0e249325eb61c2c227fa8840d5cf1fda7581d46bddb"} Oct 09 13:40:52 crc kubenswrapper[4762]: I1009 13:40:52.932228 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-p7rwn" event={"ID":"c19ba7bc-b0a1-4e8b-98e5-56bd395aacc8","Type":"ContainerStarted","Data":"85c26656005b493083e6694478f369369956611c41331a6fe948190077eb674f"} Oct 09 13:40:52 crc kubenswrapper[4762]: I1009 13:40:52.932247 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-p7rwn" event={"ID":"c19ba7bc-b0a1-4e8b-98e5-56bd395aacc8","Type":"ContainerStarted","Data":"a73fe61eb423653401e84ad405d7fe52f82d79b0d791f1862f55e8f1b5a015aa"} Oct 09 13:40:52 crc kubenswrapper[4762]: I1009 13:40:52.933337 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-p7rwn" Oct 09 13:40:52 crc kubenswrapper[4762]: I1009 13:40:52.952008 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-p7rwn" podStartSLOduration=2.951988811 podStartE2EDuration="2.951988811s" podCreationTimestamp="2025-10-09 13:40:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:40:52.949800893 +0000 UTC m=+928.723591942" watchObservedRunningTime="2025-10-09 13:40:52.951988811 +0000 UTC m=+928.725779840" Oct 09 13:40:56 crc kubenswrapper[4762]: I1009 13:40:56.928213 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-p6qjv" Oct 09 13:40:56 crc kubenswrapper[4762]: I1009 13:40:56.928812 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-p6qjv" Oct 09 13:40:56 crc kubenswrapper[4762]: I1009 13:40:56.973349 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-p6qjv" Oct 09 13:40:57 crc kubenswrapper[4762]: I1009 
13:40:57.018317 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-p6qjv" Oct 09 13:40:59 crc kubenswrapper[4762]: I1009 13:40:59.379527 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-p6qjv"] Oct 09 13:40:59 crc kubenswrapper[4762]: I1009 13:40:59.380038 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-p6qjv" podUID="cf985dfe-e72b-4970-90a7-bf2b31e67fb4" containerName="registry-server" containerID="cri-o://46d5a50b2ebc6433d00bacef24949bdd544235a554b3b01d30c59736caa7543c" gracePeriod=2 Oct 09 13:40:59 crc kubenswrapper[4762]: I1009 13:40:59.977884 4762 generic.go:334] "Generic (PLEG): container finished" podID="cf985dfe-e72b-4970-90a7-bf2b31e67fb4" containerID="46d5a50b2ebc6433d00bacef24949bdd544235a554b3b01d30c59736caa7543c" exitCode=0 Oct 09 13:40:59 crc kubenswrapper[4762]: I1009 13:40:59.978278 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-p6qjv" event={"ID":"cf985dfe-e72b-4970-90a7-bf2b31e67fb4","Type":"ContainerDied","Data":"46d5a50b2ebc6433d00bacef24949bdd544235a554b3b01d30c59736caa7543c"} Oct 09 13:41:01 crc kubenswrapper[4762]: I1009 13:41:01.135484 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-68d546b9d8-5jcj5" Oct 09 13:41:02 crc kubenswrapper[4762]: I1009 13:41:02.028001 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-p7rwn" Oct 09 13:41:03 crc kubenswrapper[4762]: I1009 13:41:03.827432 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb692jxgx"] Oct 09 13:41:03 crc kubenswrapper[4762]: I1009 13:41:03.829207 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb692jxgx" Oct 09 13:41:03 crc kubenswrapper[4762]: I1009 13:41:03.832169 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Oct 09 13:41:03 crc kubenswrapper[4762]: I1009 13:41:03.840896 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb692jxgx"] Oct 09 13:41:03 crc kubenswrapper[4762]: I1009 13:41:03.918045 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/24e5b893-0118-47dc-a409-8809d8bc7eb2-bundle\") pod \"695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb692jxgx\" (UID: \"24e5b893-0118-47dc-a409-8809d8bc7eb2\") " pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb692jxgx" Oct 09 13:41:03 crc kubenswrapper[4762]: I1009 13:41:03.918107 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gcwsm\" (UniqueName: \"kubernetes.io/projected/24e5b893-0118-47dc-a409-8809d8bc7eb2-kube-api-access-gcwsm\") pod \"695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb692jxgx\" (UID: \"24e5b893-0118-47dc-a409-8809d8bc7eb2\") " pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb692jxgx" Oct 09 13:41:03 crc kubenswrapper[4762]: I1009 13:41:03.918248 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/24e5b893-0118-47dc-a409-8809d8bc7eb2-util\") pod \"695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb692jxgx\" (UID: \"24e5b893-0118-47dc-a409-8809d8bc7eb2\") " pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb692jxgx" Oct 09 13:41:04 crc kubenswrapper[4762]: I1009 13:41:04.020127 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/24e5b893-0118-47dc-a409-8809d8bc7eb2-bundle\") pod \"695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb692jxgx\" (UID: \"24e5b893-0118-47dc-a409-8809d8bc7eb2\") " pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb692jxgx" Oct 09 13:41:04 crc kubenswrapper[4762]: I1009 13:41:04.020185 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gcwsm\" (UniqueName: \"kubernetes.io/projected/24e5b893-0118-47dc-a409-8809d8bc7eb2-kube-api-access-gcwsm\") pod \"695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb692jxgx\" (UID: \"24e5b893-0118-47dc-a409-8809d8bc7eb2\") " pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb692jxgx" Oct 09 13:41:04 crc kubenswrapper[4762]: I1009 13:41:04.020229 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/24e5b893-0118-47dc-a409-8809d8bc7eb2-util\") pod \"695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb692jxgx\" (UID: \"24e5b893-0118-47dc-a409-8809d8bc7eb2\") " pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb692jxgx" Oct 09 13:41:04 crc kubenswrapper[4762]: I1009 13:41:04.020749 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/24e5b893-0118-47dc-a409-8809d8bc7eb2-util\") pod \"695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb692jxgx\" (UID: \"24e5b893-0118-47dc-a409-8809d8bc7eb2\") " pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb692jxgx" Oct 09 13:41:04 crc kubenswrapper[4762]: I1009 13:41:04.020772 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/24e5b893-0118-47dc-a409-8809d8bc7eb2-bundle\") pod \"695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb692jxgx\" (UID: \"24e5b893-0118-47dc-a409-8809d8bc7eb2\") " pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb692jxgx" Oct 09 13:41:04 crc kubenswrapper[4762]: I1009 13:41:04.081897 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gcwsm\" (UniqueName: \"kubernetes.io/projected/24e5b893-0118-47dc-a409-8809d8bc7eb2-kube-api-access-gcwsm\") pod \"695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb692jxgx\" (UID: \"24e5b893-0118-47dc-a409-8809d8bc7eb2\") " pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb692jxgx" Oct 09 13:41:04 crc kubenswrapper[4762]: I1009 13:41:04.145912 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb692jxgx" Oct 09 13:41:04 crc kubenswrapper[4762]: I1009 13:41:04.496159 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-p6qjv" Oct 09 13:41:04 crc kubenswrapper[4762]: I1009 13:41:04.627847 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cf985dfe-e72b-4970-90a7-bf2b31e67fb4-utilities\") pod \"cf985dfe-e72b-4970-90a7-bf2b31e67fb4\" (UID: \"cf985dfe-e72b-4970-90a7-bf2b31e67fb4\") " Oct 09 13:41:04 crc kubenswrapper[4762]: I1009 13:41:04.627917 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zjqvr\" (UniqueName: \"kubernetes.io/projected/cf985dfe-e72b-4970-90a7-bf2b31e67fb4-kube-api-access-zjqvr\") pod \"cf985dfe-e72b-4970-90a7-bf2b31e67fb4\" (UID: \"cf985dfe-e72b-4970-90a7-bf2b31e67fb4\") " Oct 09 13:41:04 crc kubenswrapper[4762]: I1009 13:41:04.628027 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cf985dfe-e72b-4970-90a7-bf2b31e67fb4-catalog-content\") pod \"cf985dfe-e72b-4970-90a7-bf2b31e67fb4\" (UID: \"cf985dfe-e72b-4970-90a7-bf2b31e67fb4\") " Oct 09 13:41:04 crc kubenswrapper[4762]: I1009 13:41:04.629136 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cf985dfe-e72b-4970-90a7-bf2b31e67fb4-utilities" (OuterVolumeSpecName: "utilities") pod "cf985dfe-e72b-4970-90a7-bf2b31e67fb4" (UID: "cf985dfe-e72b-4970-90a7-bf2b31e67fb4"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 13:41:04 crc kubenswrapper[4762]: I1009 13:41:04.633843 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cf985dfe-e72b-4970-90a7-bf2b31e67fb4-kube-api-access-zjqvr" (OuterVolumeSpecName: "kube-api-access-zjqvr") pod "cf985dfe-e72b-4970-90a7-bf2b31e67fb4" (UID: "cf985dfe-e72b-4970-90a7-bf2b31e67fb4"). InnerVolumeSpecName "kube-api-access-zjqvr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:41:04 crc kubenswrapper[4762]: I1009 13:41:04.664370 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cf985dfe-e72b-4970-90a7-bf2b31e67fb4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "cf985dfe-e72b-4970-90a7-bf2b31e67fb4" (UID: "cf985dfe-e72b-4970-90a7-bf2b31e67fb4"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 13:41:04 crc kubenswrapper[4762]: I1009 13:41:04.729131 4762 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cf985dfe-e72b-4970-90a7-bf2b31e67fb4-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 13:41:04 crc kubenswrapper[4762]: I1009 13:41:04.729162 4762 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cf985dfe-e72b-4970-90a7-bf2b31e67fb4-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 13:41:04 crc kubenswrapper[4762]: I1009 13:41:04.729174 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zjqvr\" (UniqueName: \"kubernetes.io/projected/cf985dfe-e72b-4970-90a7-bf2b31e67fb4-kube-api-access-zjqvr\") on node \"crc\" DevicePath \"\"" Oct 09 13:41:04 crc kubenswrapper[4762]: W1009 13:41:04.942401 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod24e5b893_0118_47dc_a409_8809d8bc7eb2.slice/crio-4216faa9390da95e003b64e85384618456ebeb5dd1a09538960a696f3e5d74a6 WatchSource:0}: Error finding container 4216faa9390da95e003b64e85384618456ebeb5dd1a09538960a696f3e5d74a6: Status 404 returned error can't find the container with id 4216faa9390da95e003b64e85384618456ebeb5dd1a09538960a696f3e5d74a6 Oct 09 13:41:04 crc kubenswrapper[4762]: I1009 13:41:04.943741 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb692jxgx"] Oct 09 13:41:05 crc kubenswrapper[4762]: I1009 13:41:05.006098 4762 generic.go:334] "Generic (PLEG): container finished" podID="0831f7e0-0729-47bc-b78d-cd6594ac3102" containerID="5e5362b4f89fcf30bd1ac4076ec42acdc2d521f05a3402a9bb0a69229b1706a2" exitCode=0 Oct 09 13:41:05 crc kubenswrapper[4762]: I1009 13:41:05.006277 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-72zw7" event={"ID":"0831f7e0-0729-47bc-b78d-cd6594ac3102","Type":"ContainerDied","Data":"5e5362b4f89fcf30bd1ac4076ec42acdc2d521f05a3402a9bb0a69229b1706a2"} Oct 09 13:41:05 crc kubenswrapper[4762]: I1009 13:41:05.007817 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb692jxgx" event={"ID":"24e5b893-0118-47dc-a409-8809d8bc7eb2","Type":"ContainerStarted","Data":"4216faa9390da95e003b64e85384618456ebeb5dd1a09538960a696f3e5d74a6"} Oct 09 13:41:05 crc kubenswrapper[4762]: I1009 13:41:05.013035 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-p6qjv" event={"ID":"cf985dfe-e72b-4970-90a7-bf2b31e67fb4","Type":"ContainerDied","Data":"5cd9489d0c761311c580b264858baadb34ccb29c58cfe3af1733b61ff31524fe"} Oct 09 13:41:05 crc kubenswrapper[4762]: I1009 13:41:05.013080 4762 scope.go:117] "RemoveContainer" containerID="46d5a50b2ebc6433d00bacef24949bdd544235a554b3b01d30c59736caa7543c" Oct 09 13:41:05 crc kubenswrapper[4762]: I1009 13:41:05.013183 4762 
util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-p6qjv" Oct 09 13:41:05 crc kubenswrapper[4762]: I1009 13:41:05.015011 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-kt8dm" event={"ID":"04ba99a6-67e1-4aba-a037-2c47a60a992e","Type":"ContainerStarted","Data":"855133ebd2bed9501826d17173ac789ec0d913a9345398c85e5fa93bfdb41aaa"} Oct 09 13:41:05 crc kubenswrapper[4762]: I1009 13:41:05.015139 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-kt8dm" Oct 09 13:41:05 crc kubenswrapper[4762]: I1009 13:41:05.054081 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-kt8dm" podStartSLOduration=1.044641823 podStartE2EDuration="15.054061391s" podCreationTimestamp="2025-10-09 13:40:50 +0000 UTC" firstStartedPulling="2025-10-09 13:40:50.655416088 +0000 UTC m=+926.429207127" lastFinishedPulling="2025-10-09 13:41:04.664835656 +0000 UTC m=+940.438626695" observedRunningTime="2025-10-09 13:41:05.048889673 +0000 UTC m=+940.822680712" watchObservedRunningTime="2025-10-09 13:41:05.054061391 +0000 UTC m=+940.827852430" Oct 09 13:41:05 crc kubenswrapper[4762]: I1009 13:41:05.056833 4762 scope.go:117] "RemoveContainer" containerID="a7ea67e48a769a49f29978989bee7fc1195478e0d8fba2469bfe04d80cdea4d7" Oct 09 13:41:05 crc kubenswrapper[4762]: I1009 13:41:05.073655 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-p6qjv"] Oct 09 13:41:05 crc kubenswrapper[4762]: I1009 13:41:05.077740 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-p6qjv"] Oct 09 13:41:05 crc kubenswrapper[4762]: I1009 13:41:05.126074 4762 scope.go:117] "RemoveContainer" containerID="c2ca596ad04bd550a90495206695f1fea8c5834a390e91a5b39f9864757021f0" Oct 09 13:41:06 crc kubenswrapper[4762]: I1009 13:41:06.024234 4762 generic.go:334] "Generic (PLEG): container finished" podID="0831f7e0-0729-47bc-b78d-cd6594ac3102" containerID="fe516dac1e1dd0887a40624718b9517b417524c19c1e15c000b552960300c0f1" exitCode=0 Oct 09 13:41:06 crc kubenswrapper[4762]: I1009 13:41:06.024299 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-72zw7" event={"ID":"0831f7e0-0729-47bc-b78d-cd6594ac3102","Type":"ContainerDied","Data":"fe516dac1e1dd0887a40624718b9517b417524c19c1e15c000b552960300c0f1"} Oct 09 13:41:06 crc kubenswrapper[4762]: I1009 13:41:06.026119 4762 generic.go:334] "Generic (PLEG): container finished" podID="24e5b893-0118-47dc-a409-8809d8bc7eb2" containerID="85e5d46f86c15d7f8416e2a4acf9dbfc1553ac4a121880c7cd5a665853876114" exitCode=0 Oct 09 13:41:06 crc kubenswrapper[4762]: I1009 13:41:06.026302 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb692jxgx" event={"ID":"24e5b893-0118-47dc-a409-8809d8bc7eb2","Type":"ContainerDied","Data":"85e5d46f86c15d7f8416e2a4acf9dbfc1553ac4a121880c7cd5a665853876114"} Oct 09 13:41:06 crc kubenswrapper[4762]: I1009 13:41:06.975068 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cf985dfe-e72b-4970-90a7-bf2b31e67fb4" path="/var/lib/kubelet/pods/cf985dfe-e72b-4970-90a7-bf2b31e67fb4/volumes" Oct 09 13:41:08 crc kubenswrapper[4762]: I1009 13:41:08.042391 4762 generic.go:334] "Generic (PLEG): container finished" 
podID="0831f7e0-0729-47bc-b78d-cd6594ac3102" containerID="818aa6093bbb2c886b1fe61fea58d1d4bc18de1ae990a46576edb9e5b33f7fcf" exitCode=0 Oct 09 13:41:08 crc kubenswrapper[4762]: I1009 13:41:08.042434 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-72zw7" event={"ID":"0831f7e0-0729-47bc-b78d-cd6594ac3102","Type":"ContainerDied","Data":"818aa6093bbb2c886b1fe61fea58d1d4bc18de1ae990a46576edb9e5b33f7fcf"} Oct 09 13:41:10 crc kubenswrapper[4762]: I1009 13:41:10.055854 4762 generic.go:334] "Generic (PLEG): container finished" podID="24e5b893-0118-47dc-a409-8809d8bc7eb2" containerID="11636aa0160aad52c670f87b95398c7f6dc8a522bdc817798499eddccd29a952" exitCode=0 Oct 09 13:41:10 crc kubenswrapper[4762]: I1009 13:41:10.055918 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb692jxgx" event={"ID":"24e5b893-0118-47dc-a409-8809d8bc7eb2","Type":"ContainerDied","Data":"11636aa0160aad52c670f87b95398c7f6dc8a522bdc817798499eddccd29a952"} Oct 09 13:41:10 crc kubenswrapper[4762]: I1009 13:41:10.062115 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-72zw7" event={"ID":"0831f7e0-0729-47bc-b78d-cd6594ac3102","Type":"ContainerStarted","Data":"5d7907056a8272e62edd576aefde66264c0c0f4bd41298b8b7364e0cd3a285ec"} Oct 09 13:41:10 crc kubenswrapper[4762]: I1009 13:41:10.062146 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-72zw7" event={"ID":"0831f7e0-0729-47bc-b78d-cd6594ac3102","Type":"ContainerStarted","Data":"2b8420d7dc9f1c7c9177f4fb0c7b25f8e76e8504ad880b729253e89b017a0491"} Oct 09 13:41:10 crc kubenswrapper[4762]: I1009 13:41:10.062155 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-72zw7" event={"ID":"0831f7e0-0729-47bc-b78d-cd6594ac3102","Type":"ContainerStarted","Data":"ecd4e214aca2401a0707641a33e6a13b487a66e52370b0f7fff2b7263dc1ce1e"} Oct 09 13:41:10 crc kubenswrapper[4762]: I1009 13:41:10.062164 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-72zw7" event={"ID":"0831f7e0-0729-47bc-b78d-cd6594ac3102","Type":"ContainerStarted","Data":"edc383b66a2e79200ae2c15e81baa412dd4d2c79a8a055fc6edd77072b6434a8"} Oct 09 13:41:10 crc kubenswrapper[4762]: I1009 13:41:10.062172 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-72zw7" event={"ID":"0831f7e0-0729-47bc-b78d-cd6594ac3102","Type":"ContainerStarted","Data":"566459db07bded5ab57175d005a75591911a2ea17d096481e0a3a9635f01ea0e"} Oct 09 13:41:11 crc kubenswrapper[4762]: I1009 13:41:11.070891 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-72zw7" event={"ID":"0831f7e0-0729-47bc-b78d-cd6594ac3102","Type":"ContainerStarted","Data":"3cd14926028857e7a4b00af3f580e5495795aac9bd628e5d573a356eeb4347f9"} Oct 09 13:41:11 crc kubenswrapper[4762]: I1009 13:41:11.072629 4762 generic.go:334] "Generic (PLEG): container finished" podID="24e5b893-0118-47dc-a409-8809d8bc7eb2" containerID="a3a7d7faabd7ad7efb7b305fdc8b4b7052eb25ca2eaa9e152095b080862efc91" exitCode=0 Oct 09 13:41:11 crc kubenswrapper[4762]: I1009 13:41:11.072790 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb692jxgx" event={"ID":"24e5b893-0118-47dc-a409-8809d8bc7eb2","Type":"ContainerDied","Data":"a3a7d7faabd7ad7efb7b305fdc8b4b7052eb25ca2eaa9e152095b080862efc91"} Oct 09 13:41:11 crc kubenswrapper[4762]: I1009 
13:41:11.098267 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-72zw7" podStartSLOduration=7.055183323 podStartE2EDuration="21.098242224s" podCreationTimestamp="2025-10-09 13:40:50 +0000 UTC" firstStartedPulling="2025-10-09 13:40:50.584872274 +0000 UTC m=+926.358663313" lastFinishedPulling="2025-10-09 13:41:04.627931175 +0000 UTC m=+940.401722214" observedRunningTime="2025-10-09 13:41:11.093124448 +0000 UTC m=+946.866915487" watchObservedRunningTime="2025-10-09 13:41:11.098242224 +0000 UTC m=+946.872033273" Oct 09 13:41:12 crc kubenswrapper[4762]: I1009 13:41:12.078866 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-72zw7" Oct 09 13:41:12 crc kubenswrapper[4762]: I1009 13:41:12.344603 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb692jxgx" Oct 09 13:41:12 crc kubenswrapper[4762]: I1009 13:41:12.427970 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/24e5b893-0118-47dc-a409-8809d8bc7eb2-bundle\") pod \"24e5b893-0118-47dc-a409-8809d8bc7eb2\" (UID: \"24e5b893-0118-47dc-a409-8809d8bc7eb2\") " Oct 09 13:41:12 crc kubenswrapper[4762]: I1009 13:41:12.428063 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gcwsm\" (UniqueName: \"kubernetes.io/projected/24e5b893-0118-47dc-a409-8809d8bc7eb2-kube-api-access-gcwsm\") pod \"24e5b893-0118-47dc-a409-8809d8bc7eb2\" (UID: \"24e5b893-0118-47dc-a409-8809d8bc7eb2\") " Oct 09 13:41:12 crc kubenswrapper[4762]: I1009 13:41:12.428983 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/24e5b893-0118-47dc-a409-8809d8bc7eb2-bundle" (OuterVolumeSpecName: "bundle") pod "24e5b893-0118-47dc-a409-8809d8bc7eb2" (UID: "24e5b893-0118-47dc-a409-8809d8bc7eb2"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 13:41:12 crc kubenswrapper[4762]: I1009 13:41:12.429203 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/24e5b893-0118-47dc-a409-8809d8bc7eb2-util\") pod \"24e5b893-0118-47dc-a409-8809d8bc7eb2\" (UID: \"24e5b893-0118-47dc-a409-8809d8bc7eb2\") " Oct 09 13:41:12 crc kubenswrapper[4762]: I1009 13:41:12.429481 4762 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/24e5b893-0118-47dc-a409-8809d8bc7eb2-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 13:41:12 crc kubenswrapper[4762]: I1009 13:41:12.444783 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/24e5b893-0118-47dc-a409-8809d8bc7eb2-kube-api-access-gcwsm" (OuterVolumeSpecName: "kube-api-access-gcwsm") pod "24e5b893-0118-47dc-a409-8809d8bc7eb2" (UID: "24e5b893-0118-47dc-a409-8809d8bc7eb2"). InnerVolumeSpecName "kube-api-access-gcwsm". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:41:12 crc kubenswrapper[4762]: I1009 13:41:12.475628 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/24e5b893-0118-47dc-a409-8809d8bc7eb2-util" (OuterVolumeSpecName: "util") pod "24e5b893-0118-47dc-a409-8809d8bc7eb2" (UID: "24e5b893-0118-47dc-a409-8809d8bc7eb2"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 13:41:12 crc kubenswrapper[4762]: I1009 13:41:12.533799 4762 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/24e5b893-0118-47dc-a409-8809d8bc7eb2-util\") on node \"crc\" DevicePath \"\"" Oct 09 13:41:12 crc kubenswrapper[4762]: I1009 13:41:12.533843 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gcwsm\" (UniqueName: \"kubernetes.io/projected/24e5b893-0118-47dc-a409-8809d8bc7eb2-kube-api-access-gcwsm\") on node \"crc\" DevicePath \"\"" Oct 09 13:41:13 crc kubenswrapper[4762]: I1009 13:41:13.086964 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb692jxgx" event={"ID":"24e5b893-0118-47dc-a409-8809d8bc7eb2","Type":"ContainerDied","Data":"4216faa9390da95e003b64e85384618456ebeb5dd1a09538960a696f3e5d74a6"} Oct 09 13:41:13 crc kubenswrapper[4762]: I1009 13:41:13.087008 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4216faa9390da95e003b64e85384618456ebeb5dd1a09538960a696f3e5d74a6" Oct 09 13:41:13 crc kubenswrapper[4762]: I1009 13:41:13.087007 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb692jxgx" Oct 09 13:41:15 crc kubenswrapper[4762]: I1009 13:41:15.445890 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-72zw7" Oct 09 13:41:15 crc kubenswrapper[4762]: I1009 13:41:15.489676 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-72zw7" Oct 09 13:41:16 crc kubenswrapper[4762]: I1009 13:41:16.622940 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-57cd46d6d-7h47d"] Oct 09 13:41:16 crc kubenswrapper[4762]: E1009 13:41:16.623397 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="24e5b893-0118-47dc-a409-8809d8bc7eb2" containerName="pull" Oct 09 13:41:16 crc kubenswrapper[4762]: I1009 13:41:16.623412 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="24e5b893-0118-47dc-a409-8809d8bc7eb2" containerName="pull" Oct 09 13:41:16 crc kubenswrapper[4762]: E1009 13:41:16.623432 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf985dfe-e72b-4970-90a7-bf2b31e67fb4" containerName="registry-server" Oct 09 13:41:16 crc kubenswrapper[4762]: I1009 13:41:16.623441 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf985dfe-e72b-4970-90a7-bf2b31e67fb4" containerName="registry-server" Oct 09 13:41:16 crc kubenswrapper[4762]: E1009 13:41:16.623454 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf985dfe-e72b-4970-90a7-bf2b31e67fb4" containerName="extract-content" Oct 09 13:41:16 crc kubenswrapper[4762]: I1009 13:41:16.623468 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf985dfe-e72b-4970-90a7-bf2b31e67fb4" containerName="extract-content" Oct 09 13:41:16 crc kubenswrapper[4762]: E1009 13:41:16.623482 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="24e5b893-0118-47dc-a409-8809d8bc7eb2" containerName="util" Oct 09 13:41:16 crc kubenswrapper[4762]: I1009 13:41:16.623489 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="24e5b893-0118-47dc-a409-8809d8bc7eb2" containerName="util" Oct 09 13:41:16 crc kubenswrapper[4762]: E1009 13:41:16.623513 
4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf985dfe-e72b-4970-90a7-bf2b31e67fb4" containerName="extract-utilities" Oct 09 13:41:16 crc kubenswrapper[4762]: I1009 13:41:16.623520 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf985dfe-e72b-4970-90a7-bf2b31e67fb4" containerName="extract-utilities" Oct 09 13:41:16 crc kubenswrapper[4762]: E1009 13:41:16.623535 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="24e5b893-0118-47dc-a409-8809d8bc7eb2" containerName="extract" Oct 09 13:41:16 crc kubenswrapper[4762]: I1009 13:41:16.623542 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="24e5b893-0118-47dc-a409-8809d8bc7eb2" containerName="extract" Oct 09 13:41:16 crc kubenswrapper[4762]: I1009 13:41:16.623815 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="24e5b893-0118-47dc-a409-8809d8bc7eb2" containerName="extract" Oct 09 13:41:16 crc kubenswrapper[4762]: I1009 13:41:16.623835 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="cf985dfe-e72b-4970-90a7-bf2b31e67fb4" containerName="registry-server" Oct 09 13:41:16 crc kubenswrapper[4762]: I1009 13:41:16.624496 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager-operator/cert-manager-operator-controller-manager-57cd46d6d-7h47d" Oct 09 13:41:16 crc kubenswrapper[4762]: I1009 13:41:16.629947 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager-operator"/"openshift-service-ca.crt" Oct 09 13:41:16 crc kubenswrapper[4762]: I1009 13:41:16.631952 4762 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager-operator"/"cert-manager-operator-controller-manager-dockercfg-dlbgd" Oct 09 13:41:16 crc kubenswrapper[4762]: I1009 13:41:16.632079 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager-operator"/"kube-root-ca.crt" Oct 09 13:41:16 crc kubenswrapper[4762]: I1009 13:41:16.665836 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-57cd46d6d-7h47d"] Oct 09 13:41:16 crc kubenswrapper[4762]: I1009 13:41:16.683983 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f7vn4\" (UniqueName: \"kubernetes.io/projected/d9ff4a32-4f96-46bc-83e6-090a43f7901f-kube-api-access-f7vn4\") pod \"cert-manager-operator-controller-manager-57cd46d6d-7h47d\" (UID: \"d9ff4a32-4f96-46bc-83e6-090a43f7901f\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-57cd46d6d-7h47d" Oct 09 13:41:16 crc kubenswrapper[4762]: I1009 13:41:16.785142 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f7vn4\" (UniqueName: \"kubernetes.io/projected/d9ff4a32-4f96-46bc-83e6-090a43f7901f-kube-api-access-f7vn4\") pod \"cert-manager-operator-controller-manager-57cd46d6d-7h47d\" (UID: \"d9ff4a32-4f96-46bc-83e6-090a43f7901f\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-57cd46d6d-7h47d" Oct 09 13:41:16 crc kubenswrapper[4762]: I1009 13:41:16.821414 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f7vn4\" (UniqueName: \"kubernetes.io/projected/d9ff4a32-4f96-46bc-83e6-090a43f7901f-kube-api-access-f7vn4\") pod \"cert-manager-operator-controller-manager-57cd46d6d-7h47d\" (UID: \"d9ff4a32-4f96-46bc-83e6-090a43f7901f\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-57cd46d6d-7h47d" Oct 09 
13:41:16 crc kubenswrapper[4762]: I1009 13:41:16.950241 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager-operator/cert-manager-operator-controller-manager-57cd46d6d-7h47d" Oct 09 13:41:17 crc kubenswrapper[4762]: I1009 13:41:17.412819 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-57cd46d6d-7h47d"] Oct 09 13:41:17 crc kubenswrapper[4762]: W1009 13:41:17.422937 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd9ff4a32_4f96_46bc_83e6_090a43f7901f.slice/crio-545c042f828683b23757f0f08bc0edc3c9943ea2896ee440ae71e72bdbe6c1f6 WatchSource:0}: Error finding container 545c042f828683b23757f0f08bc0edc3c9943ea2896ee440ae71e72bdbe6c1f6: Status 404 returned error can't find the container with id 545c042f828683b23757f0f08bc0edc3c9943ea2896ee440ae71e72bdbe6c1f6 Oct 09 13:41:18 crc kubenswrapper[4762]: I1009 13:41:18.123380 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager-operator/cert-manager-operator-controller-manager-57cd46d6d-7h47d" event={"ID":"d9ff4a32-4f96-46bc-83e6-090a43f7901f","Type":"ContainerStarted","Data":"545c042f828683b23757f0f08bc0edc3c9943ea2896ee440ae71e72bdbe6c1f6"} Oct 09 13:41:20 crc kubenswrapper[4762]: I1009 13:41:20.435434 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-kt8dm" Oct 09 13:41:20 crc kubenswrapper[4762]: I1009 13:41:20.450178 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-72zw7" Oct 09 13:41:28 crc kubenswrapper[4762]: I1009 13:41:28.206692 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager-operator/cert-manager-operator-controller-manager-57cd46d6d-7h47d" event={"ID":"d9ff4a32-4f96-46bc-83e6-090a43f7901f","Type":"ContainerStarted","Data":"ece51e733f37f955a79c00602ef2008ac63cbadb58a9a96a65902482b45a52d5"} Oct 09 13:41:28 crc kubenswrapper[4762]: I1009 13:41:28.225006 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager-operator/cert-manager-operator-controller-manager-57cd46d6d-7h47d" podStartSLOduration=2.639395727 podStartE2EDuration="12.224988629s" podCreationTimestamp="2025-10-09 13:41:16 +0000 UTC" firstStartedPulling="2025-10-09 13:41:17.424606537 +0000 UTC m=+953.198397576" lastFinishedPulling="2025-10-09 13:41:27.010199429 +0000 UTC m=+962.783990478" observedRunningTime="2025-10-09 13:41:28.224465285 +0000 UTC m=+963.998256354" watchObservedRunningTime="2025-10-09 13:41:28.224988629 +0000 UTC m=+963.998779668" Oct 09 13:41:31 crc kubenswrapper[4762]: I1009 13:41:31.659057 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-d969966f-6zmq2"] Oct 09 13:41:31 crc kubenswrapper[4762]: I1009 13:41:31.660267 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-webhook-d969966f-6zmq2" Oct 09 13:41:31 crc kubenswrapper[4762]: I1009 13:41:31.662188 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Oct 09 13:41:31 crc kubenswrapper[4762]: I1009 13:41:31.663066 4762 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-fjpts" Oct 09 13:41:31 crc kubenswrapper[4762]: I1009 13:41:31.663530 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Oct 09 13:41:31 crc kubenswrapper[4762]: I1009 13:41:31.685390 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-d969966f-6zmq2"] Oct 09 13:41:31 crc kubenswrapper[4762]: I1009 13:41:31.691296 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/ecb933a6-29c1-4e1e-b774-7a3e28eedcfd-bound-sa-token\") pod \"cert-manager-webhook-d969966f-6zmq2\" (UID: \"ecb933a6-29c1-4e1e-b774-7a3e28eedcfd\") " pod="cert-manager/cert-manager-webhook-d969966f-6zmq2" Oct 09 13:41:31 crc kubenswrapper[4762]: I1009 13:41:31.691341 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7l5hv\" (UniqueName: \"kubernetes.io/projected/ecb933a6-29c1-4e1e-b774-7a3e28eedcfd-kube-api-access-7l5hv\") pod \"cert-manager-webhook-d969966f-6zmq2\" (UID: \"ecb933a6-29c1-4e1e-b774-7a3e28eedcfd\") " pod="cert-manager/cert-manager-webhook-d969966f-6zmq2" Oct 09 13:41:31 crc kubenswrapper[4762]: I1009 13:41:31.792976 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/ecb933a6-29c1-4e1e-b774-7a3e28eedcfd-bound-sa-token\") pod \"cert-manager-webhook-d969966f-6zmq2\" (UID: \"ecb933a6-29c1-4e1e-b774-7a3e28eedcfd\") " pod="cert-manager/cert-manager-webhook-d969966f-6zmq2" Oct 09 13:41:31 crc kubenswrapper[4762]: I1009 13:41:31.793029 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7l5hv\" (UniqueName: \"kubernetes.io/projected/ecb933a6-29c1-4e1e-b774-7a3e28eedcfd-kube-api-access-7l5hv\") pod \"cert-manager-webhook-d969966f-6zmq2\" (UID: \"ecb933a6-29c1-4e1e-b774-7a3e28eedcfd\") " pod="cert-manager/cert-manager-webhook-d969966f-6zmq2" Oct 09 13:41:31 crc kubenswrapper[4762]: I1009 13:41:31.812942 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/ecb933a6-29c1-4e1e-b774-7a3e28eedcfd-bound-sa-token\") pod \"cert-manager-webhook-d969966f-6zmq2\" (UID: \"ecb933a6-29c1-4e1e-b774-7a3e28eedcfd\") " pod="cert-manager/cert-manager-webhook-d969966f-6zmq2" Oct 09 13:41:31 crc kubenswrapper[4762]: I1009 13:41:31.819027 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7l5hv\" (UniqueName: \"kubernetes.io/projected/ecb933a6-29c1-4e1e-b774-7a3e28eedcfd-kube-api-access-7l5hv\") pod \"cert-manager-webhook-d969966f-6zmq2\" (UID: \"ecb933a6-29c1-4e1e-b774-7a3e28eedcfd\") " pod="cert-manager/cert-manager-webhook-d969966f-6zmq2" Oct 09 13:41:31 crc kubenswrapper[4762]: I1009 13:41:31.977382 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-webhook-d969966f-6zmq2" Oct 09 13:41:32 crc kubenswrapper[4762]: I1009 13:41:32.227371 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-d969966f-6zmq2"] Oct 09 13:41:32 crc kubenswrapper[4762]: I1009 13:41:32.442748 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-7d9f95dbf-xphxw"] Oct 09 13:41:32 crc kubenswrapper[4762]: I1009 13:41:32.443859 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7d9f95dbf-xphxw" Oct 09 13:41:32 crc kubenswrapper[4762]: I1009 13:41:32.445885 4762 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-ksd55" Oct 09 13:41:32 crc kubenswrapper[4762]: I1009 13:41:32.453821 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7d9f95dbf-xphxw"] Oct 09 13:41:32 crc kubenswrapper[4762]: I1009 13:41:32.502076 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qkt6q\" (UniqueName: \"kubernetes.io/projected/f5e1b445-a576-4876-816a-2ee258514618-kube-api-access-qkt6q\") pod \"cert-manager-cainjector-7d9f95dbf-xphxw\" (UID: \"f5e1b445-a576-4876-816a-2ee258514618\") " pod="cert-manager/cert-manager-cainjector-7d9f95dbf-xphxw" Oct 09 13:41:32 crc kubenswrapper[4762]: I1009 13:41:32.502131 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/f5e1b445-a576-4876-816a-2ee258514618-bound-sa-token\") pod \"cert-manager-cainjector-7d9f95dbf-xphxw\" (UID: \"f5e1b445-a576-4876-816a-2ee258514618\") " pod="cert-manager/cert-manager-cainjector-7d9f95dbf-xphxw" Oct 09 13:41:32 crc kubenswrapper[4762]: I1009 13:41:32.604300 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qkt6q\" (UniqueName: \"kubernetes.io/projected/f5e1b445-a576-4876-816a-2ee258514618-kube-api-access-qkt6q\") pod \"cert-manager-cainjector-7d9f95dbf-xphxw\" (UID: \"f5e1b445-a576-4876-816a-2ee258514618\") " pod="cert-manager/cert-manager-cainjector-7d9f95dbf-xphxw" Oct 09 13:41:32 crc kubenswrapper[4762]: I1009 13:41:32.604372 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/f5e1b445-a576-4876-816a-2ee258514618-bound-sa-token\") pod \"cert-manager-cainjector-7d9f95dbf-xphxw\" (UID: \"f5e1b445-a576-4876-816a-2ee258514618\") " pod="cert-manager/cert-manager-cainjector-7d9f95dbf-xphxw" Oct 09 13:41:32 crc kubenswrapper[4762]: I1009 13:41:32.628364 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qkt6q\" (UniqueName: \"kubernetes.io/projected/f5e1b445-a576-4876-816a-2ee258514618-kube-api-access-qkt6q\") pod \"cert-manager-cainjector-7d9f95dbf-xphxw\" (UID: \"f5e1b445-a576-4876-816a-2ee258514618\") " pod="cert-manager/cert-manager-cainjector-7d9f95dbf-xphxw" Oct 09 13:41:32 crc kubenswrapper[4762]: I1009 13:41:32.632577 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/f5e1b445-a576-4876-816a-2ee258514618-bound-sa-token\") pod \"cert-manager-cainjector-7d9f95dbf-xphxw\" (UID: \"f5e1b445-a576-4876-816a-2ee258514618\") " pod="cert-manager/cert-manager-cainjector-7d9f95dbf-xphxw" Oct 09 
13:41:32 crc kubenswrapper[4762]: I1009 13:41:32.761246 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7d9f95dbf-xphxw"
Oct 09 13:41:33 crc kubenswrapper[4762]: I1009 13:41:33.184359 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7d9f95dbf-xphxw"]
Oct 09 13:41:33 crc kubenswrapper[4762]: I1009 13:41:33.242018 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7d9f95dbf-xphxw" event={"ID":"f5e1b445-a576-4876-816a-2ee258514618","Type":"ContainerStarted","Data":"7db2c6e979f032967a471419675db4e67890929e9e6e83f9e3eae9325749f275"}
Oct 09 13:41:33 crc kubenswrapper[4762]: I1009 13:41:33.243069 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-d969966f-6zmq2" event={"ID":"ecb933a6-29c1-4e1e-b774-7a3e28eedcfd","Type":"ContainerStarted","Data":"bce0e3eef771bc3323234fd119c1dffb4966d82e1ca66ac3e5074cbc8da7194a"}
Oct 09 13:41:40 crc kubenswrapper[4762]: I1009 13:41:40.287760 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-d969966f-6zmq2" event={"ID":"ecb933a6-29c1-4e1e-b774-7a3e28eedcfd","Type":"ContainerStarted","Data":"c2917a00856de53110e8aaddb0cc356bbe23ca6923f310371c731c723ddf3f24"}
Oct 09 13:41:40 crc kubenswrapper[4762]: I1009 13:41:40.290013 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-d969966f-6zmq2"
Oct 09 13:41:40 crc kubenswrapper[4762]: I1009 13:41:40.290111 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7d9f95dbf-xphxw" event={"ID":"f5e1b445-a576-4876-816a-2ee258514618","Type":"ContainerStarted","Data":"bf838d8c0c1a1b69f68aff3308070525ae2a48fa314573bfe786bb9b27892ab5"}
Oct 09 13:41:40 crc kubenswrapper[4762]: I1009 13:41:40.303176 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-d969966f-6zmq2" podStartSLOduration=2.272630212 podStartE2EDuration="9.30315497s" podCreationTimestamp="2025-10-09 13:41:31 +0000 UTC" firstStartedPulling="2025-10-09 13:41:32.23938137 +0000 UTC m=+968.013172409" lastFinishedPulling="2025-10-09 13:41:39.269906128 +0000 UTC m=+975.043697167" observedRunningTime="2025-10-09 13:41:40.301332372 +0000 UTC m=+976.075123411" watchObservedRunningTime="2025-10-09 13:41:40.30315497 +0000 UTC m=+976.076946009"
Oct 09 13:41:40 crc kubenswrapper[4762]: I1009 13:41:40.317996 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-7d9f95dbf-xphxw" podStartSLOduration=2.227607055 podStartE2EDuration="8.317980885s" podCreationTimestamp="2025-10-09 13:41:32 +0000 UTC" firstStartedPulling="2025-10-09 13:41:33.196029047 +0000 UTC m=+968.969820076" lastFinishedPulling="2025-10-09 13:41:39.286402857 +0000 UTC m=+975.060193906" observedRunningTime="2025-10-09 13:41:40.317589324 +0000 UTC m=+976.091380363" watchObservedRunningTime="2025-10-09 13:41:40.317980885 +0000 UTC m=+976.091771914"
Oct 09 13:41:46 crc kubenswrapper[4762]: I1009 13:41:46.980292 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-d969966f-6zmq2"
Oct 09 13:41:49 crc kubenswrapper[4762]: I1009 13:41:49.593917 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-7d4cc89fcb-lsnbv"]
Oct 09 13:41:49 crc kubenswrapper[4762]: I1009 13:41:49.594773 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-7d4cc89fcb-lsnbv"
Oct 09 13:41:49 crc kubenswrapper[4762]: I1009 13:41:49.596678 4762 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-6zjth"
Oct 09 13:41:49 crc kubenswrapper[4762]: I1009 13:41:49.604652 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-7d4cc89fcb-lsnbv"]
Oct 09 13:41:49 crc kubenswrapper[4762]: I1009 13:41:49.669178 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/3d68cf04-0159-4fe6-9346-750541b4acb2-bound-sa-token\") pod \"cert-manager-7d4cc89fcb-lsnbv\" (UID: \"3d68cf04-0159-4fe6-9346-750541b4acb2\") " pod="cert-manager/cert-manager-7d4cc89fcb-lsnbv"
Oct 09 13:41:49 crc kubenswrapper[4762]: I1009 13:41:49.669354 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qnt5b\" (UniqueName: \"kubernetes.io/projected/3d68cf04-0159-4fe6-9346-750541b4acb2-kube-api-access-qnt5b\") pod \"cert-manager-7d4cc89fcb-lsnbv\" (UID: \"3d68cf04-0159-4fe6-9346-750541b4acb2\") " pod="cert-manager/cert-manager-7d4cc89fcb-lsnbv"
Oct 09 13:41:49 crc kubenswrapper[4762]: I1009 13:41:49.770256 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/3d68cf04-0159-4fe6-9346-750541b4acb2-bound-sa-token\") pod \"cert-manager-7d4cc89fcb-lsnbv\" (UID: \"3d68cf04-0159-4fe6-9346-750541b4acb2\") " pod="cert-manager/cert-manager-7d4cc89fcb-lsnbv"
Oct 09 13:41:49 crc kubenswrapper[4762]: I1009 13:41:49.770349 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qnt5b\" (UniqueName: \"kubernetes.io/projected/3d68cf04-0159-4fe6-9346-750541b4acb2-kube-api-access-qnt5b\") pod \"cert-manager-7d4cc89fcb-lsnbv\" (UID: \"3d68cf04-0159-4fe6-9346-750541b4acb2\") " pod="cert-manager/cert-manager-7d4cc89fcb-lsnbv"
Oct 09 13:41:49 crc kubenswrapper[4762]: I1009 13:41:49.789774 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/3d68cf04-0159-4fe6-9346-750541b4acb2-bound-sa-token\") pod \"cert-manager-7d4cc89fcb-lsnbv\" (UID: \"3d68cf04-0159-4fe6-9346-750541b4acb2\") " pod="cert-manager/cert-manager-7d4cc89fcb-lsnbv"
Oct 09 13:41:49 crc kubenswrapper[4762]: I1009 13:41:49.789941 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qnt5b\" (UniqueName: \"kubernetes.io/projected/3d68cf04-0159-4fe6-9346-750541b4acb2-kube-api-access-qnt5b\") pod \"cert-manager-7d4cc89fcb-lsnbv\" (UID: \"3d68cf04-0159-4fe6-9346-750541b4acb2\") " pod="cert-manager/cert-manager-7d4cc89fcb-lsnbv"
Oct 09 13:41:49 crc kubenswrapper[4762]: I1009 13:41:49.917537 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-7d4cc89fcb-lsnbv"
Oct 09 13:41:50 crc kubenswrapper[4762]: I1009 13:41:50.368383 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-7d4cc89fcb-lsnbv"]
Oct 09 13:41:51 crc kubenswrapper[4762]: I1009 13:41:51.363024 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-7d4cc89fcb-lsnbv" event={"ID":"3d68cf04-0159-4fe6-9346-750541b4acb2","Type":"ContainerStarted","Data":"98c14c925f1c3de3f4cf51d5f619862040c7caa4f08989d70984d4e191af4883"}
Oct 09 13:41:51 crc kubenswrapper[4762]: I1009 13:41:51.363353 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-7d4cc89fcb-lsnbv" event={"ID":"3d68cf04-0159-4fe6-9346-750541b4acb2","Type":"ContainerStarted","Data":"956b3b7e177005fd79ab9bf1d50ee5a3fdd03642a813565162b75b8530e639d7"}
Oct 09 13:41:51 crc kubenswrapper[4762]: I1009 13:41:51.380350 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-7d4cc89fcb-lsnbv" podStartSLOduration=2.3803238159999998 podStartE2EDuration="2.380323816s" podCreationTimestamp="2025-10-09 13:41:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:41:51.375490027 +0000 UTC m=+987.149281066" watchObservedRunningTime="2025-10-09 13:41:51.380323816 +0000 UTC m=+987.154114855"
Oct 09 13:42:00 crc kubenswrapper[4762]: I1009 13:42:00.341614 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-fqlsx"]
Oct 09 13:42:00 crc kubenswrapper[4762]: I1009 13:42:00.343114 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-fqlsx"
Oct 09 13:42:00 crc kubenswrapper[4762]: I1009 13:42:00.344945 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-dkgsb"
Oct 09 13:42:00 crc kubenswrapper[4762]: I1009 13:42:00.345241 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt"
Oct 09 13:42:00 crc kubenswrapper[4762]: I1009 13:42:00.362870 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt"
Oct 09 13:42:00 crc kubenswrapper[4762]: I1009 13:42:00.365123 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-fqlsx"]
Oct 09 13:42:00 crc kubenswrapper[4762]: I1009 13:42:00.428260 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nlmzz\" (UniqueName: \"kubernetes.io/projected/e32f37bf-2487-4fad-8b61-742bfc437e92-kube-api-access-nlmzz\") pod \"openstack-operator-index-fqlsx\" (UID: \"e32f37bf-2487-4fad-8b61-742bfc437e92\") " pod="openstack-operators/openstack-operator-index-fqlsx"
Oct 09 13:42:00 crc kubenswrapper[4762]: I1009 13:42:00.529474 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nlmzz\" (UniqueName: \"kubernetes.io/projected/e32f37bf-2487-4fad-8b61-742bfc437e92-kube-api-access-nlmzz\") pod \"openstack-operator-index-fqlsx\" (UID: \"e32f37bf-2487-4fad-8b61-742bfc437e92\") " pod="openstack-operators/openstack-operator-index-fqlsx"
Oct 09 13:42:00 crc kubenswrapper[4762]: I1009 13:42:00.554447 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nlmzz\" (UniqueName: \"kubernetes.io/projected/e32f37bf-2487-4fad-8b61-742bfc437e92-kube-api-access-nlmzz\") pod \"openstack-operator-index-fqlsx\" (UID: \"e32f37bf-2487-4fad-8b61-742bfc437e92\") " pod="openstack-operators/openstack-operator-index-fqlsx"
Oct 09 13:42:00 crc kubenswrapper[4762]: I1009 13:42:00.661500 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-fqlsx"
Oct 09 13:42:01 crc kubenswrapper[4762]: I1009 13:42:01.065011 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-fqlsx"]
Oct 09 13:42:01 crc kubenswrapper[4762]: W1009 13:42:01.067212 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode32f37bf_2487_4fad_8b61_742bfc437e92.slice/crio-002c2fcef46e1c0124ca852238cabe9165b6177cca396e9caae8d1d4035964e8 WatchSource:0}: Error finding container 002c2fcef46e1c0124ca852238cabe9165b6177cca396e9caae8d1d4035964e8: Status 404 returned error can't find the container with id 002c2fcef46e1c0124ca852238cabe9165b6177cca396e9caae8d1d4035964e8
Oct 09 13:42:01 crc kubenswrapper[4762]: I1009 13:42:01.425878 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-fqlsx" event={"ID":"e32f37bf-2487-4fad-8b61-742bfc437e92","Type":"ContainerStarted","Data":"002c2fcef46e1c0124ca852238cabe9165b6177cca396e9caae8d1d4035964e8"}
Oct 09 13:42:02 crc kubenswrapper[4762]: I1009 13:42:02.721903 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-fqlsx"]
Oct 09 13:42:03 crc kubenswrapper[4762]: I1009 13:42:03.126963 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-g9lz8"]
Oct 09 13:42:03 crc kubenswrapper[4762]: I1009 13:42:03.131423 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-g9lz8"
Oct 09 13:42:03 crc kubenswrapper[4762]: I1009 13:42:03.150144 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-g9lz8"]
Oct 09 13:42:03 crc kubenswrapper[4762]: I1009 13:42:03.169332 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jnwf5\" (UniqueName: \"kubernetes.io/projected/de824a72-d0e7-4aff-802e-c181778233b2-kube-api-access-jnwf5\") pod \"openstack-operator-index-g9lz8\" (UID: \"de824a72-d0e7-4aff-802e-c181778233b2\") " pod="openstack-operators/openstack-operator-index-g9lz8"
Oct 09 13:42:03 crc kubenswrapper[4762]: I1009 13:42:03.271259 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jnwf5\" (UniqueName: \"kubernetes.io/projected/de824a72-d0e7-4aff-802e-c181778233b2-kube-api-access-jnwf5\") pod \"openstack-operator-index-g9lz8\" (UID: \"de824a72-d0e7-4aff-802e-c181778233b2\") " pod="openstack-operators/openstack-operator-index-g9lz8"
Oct 09 13:42:03 crc kubenswrapper[4762]: I1009 13:42:03.307048 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jnwf5\" (UniqueName: \"kubernetes.io/projected/de824a72-d0e7-4aff-802e-c181778233b2-kube-api-access-jnwf5\") pod \"openstack-operator-index-g9lz8\" (UID: \"de824a72-d0e7-4aff-802e-c181778233b2\") " pod="openstack-operators/openstack-operator-index-g9lz8"
Oct 09 13:42:03 crc kubenswrapper[4762]: I1009 13:42:03.461618 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-g9lz8"
Oct 09 13:42:04 crc kubenswrapper[4762]: I1009 13:42:04.220785 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-g9lz8"]
Oct 09 13:42:04 crc kubenswrapper[4762]: W1009 13:42:04.775893 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podde824a72_d0e7_4aff_802e_c181778233b2.slice/crio-d0d52ed7c4f38e9182368991cced88ab587265997d6ef42a14a94413d2e7a20d WatchSource:0}: Error finding container d0d52ed7c4f38e9182368991cced88ab587265997d6ef42a14a94413d2e7a20d: Status 404 returned error can't find the container with id d0d52ed7c4f38e9182368991cced88ab587265997d6ef42a14a94413d2e7a20d
Oct 09 13:42:05 crc kubenswrapper[4762]: I1009 13:42:05.453105 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-fqlsx" event={"ID":"e32f37bf-2487-4fad-8b61-742bfc437e92","Type":"ContainerStarted","Data":"10c5c542f9a8e1fca8fd58e372b31341eee2e220628b28f7398dbfe7e62f99a0"}
Oct 09 13:42:05 crc kubenswrapper[4762]: I1009 13:42:05.453211 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/openstack-operator-index-fqlsx" podUID="e32f37bf-2487-4fad-8b61-742bfc437e92" containerName="registry-server" containerID="cri-o://10c5c542f9a8e1fca8fd58e372b31341eee2e220628b28f7398dbfe7e62f99a0" gracePeriod=2
Oct 09 13:42:05 crc kubenswrapper[4762]: I1009 13:42:05.454893 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-g9lz8" event={"ID":"de824a72-d0e7-4aff-802e-c181778233b2","Type":"ContainerStarted","Data":"5bd8f056d3d061967d3d2647740e41f748eb0307cd8383911a1cd2169001b7b8"}
Oct 09 13:42:05 crc kubenswrapper[4762]: I1009 13:42:05.455735 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-g9lz8" event={"ID":"de824a72-d0e7-4aff-802e-c181778233b2","Type":"ContainerStarted","Data":"d0d52ed7c4f38e9182368991cced88ab587265997d6ef42a14a94413d2e7a20d"}
Oct 09 13:42:05 crc kubenswrapper[4762]: I1009 13:42:05.478660 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-fqlsx" podStartSLOduration=1.7194405430000002 podStartE2EDuration="5.478616728s" podCreationTimestamp="2025-10-09 13:42:00 +0000 UTC" firstStartedPulling="2025-10-09 13:42:01.070225124 +0000 UTC m=+996.844016153" lastFinishedPulling="2025-10-09 13:42:04.829401299 +0000 UTC m=+1000.603192338" observedRunningTime="2025-10-09 13:42:05.472880726 +0000 UTC m=+1001.246671765" watchObservedRunningTime="2025-10-09 13:42:05.478616728 +0000 UTC m=+1001.252407767"
Oct 09 13:42:05 crc kubenswrapper[4762]: I1009 13:42:05.486987 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-g9lz8" podStartSLOduration=2.435374879 podStartE2EDuration="2.4869677s" podCreationTimestamp="2025-10-09 13:42:03 +0000 UTC" firstStartedPulling="2025-10-09 13:42:04.778455466 +0000 UTC m=+1000.552246505" lastFinishedPulling="2025-10-09 13:42:04.830048287 +0000 UTC m=+1000.603839326" observedRunningTime="2025-10-09 13:42:05.485978453 +0000 UTC m=+1001.259769502" watchObservedRunningTime="2025-10-09 13:42:05.4869677 +0000 UTC m=+1001.260758739"
Oct 09 13:42:05 crc kubenswrapper[4762]: I1009 13:42:05.791815 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-fqlsx"
Oct 09 13:42:05 crc kubenswrapper[4762]: I1009 13:42:05.907956 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nlmzz\" (UniqueName: \"kubernetes.io/projected/e32f37bf-2487-4fad-8b61-742bfc437e92-kube-api-access-nlmzz\") pod \"e32f37bf-2487-4fad-8b61-742bfc437e92\" (UID: \"e32f37bf-2487-4fad-8b61-742bfc437e92\") "
Oct 09 13:42:05 crc kubenswrapper[4762]: I1009 13:42:05.913693 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e32f37bf-2487-4fad-8b61-742bfc437e92-kube-api-access-nlmzz" (OuterVolumeSpecName: "kube-api-access-nlmzz") pod "e32f37bf-2487-4fad-8b61-742bfc437e92" (UID: "e32f37bf-2487-4fad-8b61-742bfc437e92"). InnerVolumeSpecName "kube-api-access-nlmzz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 09 13:42:06 crc kubenswrapper[4762]: I1009 13:42:06.009348 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nlmzz\" (UniqueName: \"kubernetes.io/projected/e32f37bf-2487-4fad-8b61-742bfc437e92-kube-api-access-nlmzz\") on node \"crc\" DevicePath \"\""
Oct 09 13:42:06 crc kubenswrapper[4762]: I1009 13:42:06.468916 4762 generic.go:334] "Generic (PLEG): container finished" podID="e32f37bf-2487-4fad-8b61-742bfc437e92" containerID="10c5c542f9a8e1fca8fd58e372b31341eee2e220628b28f7398dbfe7e62f99a0" exitCode=0
Oct 09 13:42:06 crc kubenswrapper[4762]: I1009 13:42:06.468960 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-fqlsx" event={"ID":"e32f37bf-2487-4fad-8b61-742bfc437e92","Type":"ContainerDied","Data":"10c5c542f9a8e1fca8fd58e372b31341eee2e220628b28f7398dbfe7e62f99a0"}
Oct 09 13:42:06 crc kubenswrapper[4762]: I1009 13:42:06.470297 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-fqlsx" event={"ID":"e32f37bf-2487-4fad-8b61-742bfc437e92","Type":"ContainerDied","Data":"002c2fcef46e1c0124ca852238cabe9165b6177cca396e9caae8d1d4035964e8"}
Oct 09 13:42:06 crc kubenswrapper[4762]: I1009 13:42:06.470392 4762 scope.go:117] "RemoveContainer" containerID="10c5c542f9a8e1fca8fd58e372b31341eee2e220628b28f7398dbfe7e62f99a0"
Oct 09 13:42:06 crc kubenswrapper[4762]: I1009 13:42:06.469009 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-fqlsx"
Oct 09 13:42:06 crc kubenswrapper[4762]: I1009 13:42:06.490408 4762 scope.go:117] "RemoveContainer" containerID="10c5c542f9a8e1fca8fd58e372b31341eee2e220628b28f7398dbfe7e62f99a0"
Oct 09 13:42:06 crc kubenswrapper[4762]: E1009 13:42:06.490887 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"10c5c542f9a8e1fca8fd58e372b31341eee2e220628b28f7398dbfe7e62f99a0\": container with ID starting with 10c5c542f9a8e1fca8fd58e372b31341eee2e220628b28f7398dbfe7e62f99a0 not found: ID does not exist" containerID="10c5c542f9a8e1fca8fd58e372b31341eee2e220628b28f7398dbfe7e62f99a0"
Oct 09 13:42:06 crc kubenswrapper[4762]: I1009 13:42:06.490953 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"10c5c542f9a8e1fca8fd58e372b31341eee2e220628b28f7398dbfe7e62f99a0"} err="failed to get container status \"10c5c542f9a8e1fca8fd58e372b31341eee2e220628b28f7398dbfe7e62f99a0\": rpc error: code = NotFound desc = could not find container \"10c5c542f9a8e1fca8fd58e372b31341eee2e220628b28f7398dbfe7e62f99a0\": container with ID starting with 10c5c542f9a8e1fca8fd58e372b31341eee2e220628b28f7398dbfe7e62f99a0 not found: ID does not exist"
Oct 09 13:42:06 crc kubenswrapper[4762]: I1009 13:42:06.498049 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-fqlsx"]
Oct 09 13:42:06 crc kubenswrapper[4762]: I1009 13:42:06.502732 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-index-fqlsx"]
Oct 09 13:42:06 crc kubenswrapper[4762]: I1009 13:42:06.972483 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e32f37bf-2487-4fad-8b61-742bfc437e92" path="/var/lib/kubelet/pods/e32f37bf-2487-4fad-8b61-742bfc437e92/volumes"
Oct 09 13:42:11 crc kubenswrapper[4762]: I1009 13:42:11.969736 4762 patch_prober.go:28] interesting pod/machine-config-daemon-5v6hv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 09 13:42:11 crc kubenswrapper[4762]: I1009 13:42:11.970254 4762 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 09 13:42:13 crc kubenswrapper[4762]: I1009 13:42:13.462165 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/openstack-operator-index-g9lz8"
Oct 09 13:42:13 crc kubenswrapper[4762]: I1009 13:42:13.462268 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-g9lz8"
Oct 09 13:42:13 crc kubenswrapper[4762]: I1009 13:42:13.491513 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-g9lz8"
Oct 09 13:42:13 crc kubenswrapper[4762]: I1009 13:42:13.542917 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-g9lz8"
Oct 09 13:42:33 crc kubenswrapper[4762]: I1009 13:42:33.588719 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/3efa7a3f52dfd25f555693463b15423e20f27c428253ed7107fc4edca96z7mg"]
Oct 09 13:42:33 crc kubenswrapper[4762]: E1009 13:42:33.589679 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e32f37bf-2487-4fad-8b61-742bfc437e92" containerName="registry-server"
Oct 09 13:42:33 crc kubenswrapper[4762]: I1009 13:42:33.589696 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="e32f37bf-2487-4fad-8b61-742bfc437e92" containerName="registry-server"
Oct 09 13:42:33 crc kubenswrapper[4762]: I1009 13:42:33.589820 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="e32f37bf-2487-4fad-8b61-742bfc437e92" containerName="registry-server"
Oct 09 13:42:33 crc kubenswrapper[4762]: I1009 13:42:33.590696 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/3efa7a3f52dfd25f555693463b15423e20f27c428253ed7107fc4edca96z7mg"
Oct 09 13:42:33 crc kubenswrapper[4762]: I1009 13:42:33.592865 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-pl2cz"
Oct 09 13:42:33 crc kubenswrapper[4762]: I1009 13:42:33.601804 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/3efa7a3f52dfd25f555693463b15423e20f27c428253ed7107fc4edca96z7mg"]
Oct 09 13:42:33 crc kubenswrapper[4762]: I1009 13:42:33.672476 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/13e464a6-1da6-4201-bd1f-0a2af8539e90-util\") pod \"3efa7a3f52dfd25f555693463b15423e20f27c428253ed7107fc4edca96z7mg\" (UID: \"13e464a6-1da6-4201-bd1f-0a2af8539e90\") " pod="openstack-operators/3efa7a3f52dfd25f555693463b15423e20f27c428253ed7107fc4edca96z7mg"
Oct 09 13:42:33 crc kubenswrapper[4762]: I1009 13:42:33.672908 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/13e464a6-1da6-4201-bd1f-0a2af8539e90-bundle\") pod \"3efa7a3f52dfd25f555693463b15423e20f27c428253ed7107fc4edca96z7mg\" (UID: \"13e464a6-1da6-4201-bd1f-0a2af8539e90\") " pod="openstack-operators/3efa7a3f52dfd25f555693463b15423e20f27c428253ed7107fc4edca96z7mg"
Oct 09 13:42:33 crc kubenswrapper[4762]: I1009 13:42:33.673026 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ck5j4\" (UniqueName: \"kubernetes.io/projected/13e464a6-1da6-4201-bd1f-0a2af8539e90-kube-api-access-ck5j4\") pod \"3efa7a3f52dfd25f555693463b15423e20f27c428253ed7107fc4edca96z7mg\" (UID: \"13e464a6-1da6-4201-bd1f-0a2af8539e90\") " pod="openstack-operators/3efa7a3f52dfd25f555693463b15423e20f27c428253ed7107fc4edca96z7mg"
Oct 09 13:42:33 crc kubenswrapper[4762]: I1009 13:42:33.774317 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/13e464a6-1da6-4201-bd1f-0a2af8539e90-bundle\") pod \"3efa7a3f52dfd25f555693463b15423e20f27c428253ed7107fc4edca96z7mg\" (UID: \"13e464a6-1da6-4201-bd1f-0a2af8539e90\") " pod="openstack-operators/3efa7a3f52dfd25f555693463b15423e20f27c428253ed7107fc4edca96z7mg"
Oct 09 13:42:33 crc kubenswrapper[4762]: I1009 13:42:33.774411 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ck5j4\" (UniqueName: \"kubernetes.io/projected/13e464a6-1da6-4201-bd1f-0a2af8539e90-kube-api-access-ck5j4\") pod \"3efa7a3f52dfd25f555693463b15423e20f27c428253ed7107fc4edca96z7mg\" (UID: \"13e464a6-1da6-4201-bd1f-0a2af8539e90\") " pod="openstack-operators/3efa7a3f52dfd25f555693463b15423e20f27c428253ed7107fc4edca96z7mg"
Oct 09 13:42:33 crc kubenswrapper[4762]: I1009 13:42:33.774451 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/13e464a6-1da6-4201-bd1f-0a2af8539e90-util\") pod \"3efa7a3f52dfd25f555693463b15423e20f27c428253ed7107fc4edca96z7mg\" (UID: \"13e464a6-1da6-4201-bd1f-0a2af8539e90\") " pod="openstack-operators/3efa7a3f52dfd25f555693463b15423e20f27c428253ed7107fc4edca96z7mg"
Oct 09 13:42:33 crc kubenswrapper[4762]: I1009 13:42:33.775105 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/13e464a6-1da6-4201-bd1f-0a2af8539e90-bundle\") pod \"3efa7a3f52dfd25f555693463b15423e20f27c428253ed7107fc4edca96z7mg\" (UID: \"13e464a6-1da6-4201-bd1f-0a2af8539e90\") " pod="openstack-operators/3efa7a3f52dfd25f555693463b15423e20f27c428253ed7107fc4edca96z7mg"
Oct 09 13:42:33 crc kubenswrapper[4762]: I1009 13:42:33.775145 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/13e464a6-1da6-4201-bd1f-0a2af8539e90-util\") pod \"3efa7a3f52dfd25f555693463b15423e20f27c428253ed7107fc4edca96z7mg\" (UID: \"13e464a6-1da6-4201-bd1f-0a2af8539e90\") " pod="openstack-operators/3efa7a3f52dfd25f555693463b15423e20f27c428253ed7107fc4edca96z7mg"
Oct 09 13:42:33 crc kubenswrapper[4762]: I1009 13:42:33.798799 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ck5j4\" (UniqueName: \"kubernetes.io/projected/13e464a6-1da6-4201-bd1f-0a2af8539e90-kube-api-access-ck5j4\") pod \"3efa7a3f52dfd25f555693463b15423e20f27c428253ed7107fc4edca96z7mg\" (UID: \"13e464a6-1da6-4201-bd1f-0a2af8539e90\") " pod="openstack-operators/3efa7a3f52dfd25f555693463b15423e20f27c428253ed7107fc4edca96z7mg"
Oct 09 13:42:33 crc kubenswrapper[4762]: I1009 13:42:33.915112 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/3efa7a3f52dfd25f555693463b15423e20f27c428253ed7107fc4edca96z7mg"
Oct 09 13:42:34 crc kubenswrapper[4762]: I1009 13:42:34.116073 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/3efa7a3f52dfd25f555693463b15423e20f27c428253ed7107fc4edca96z7mg"]
Oct 09 13:42:34 crc kubenswrapper[4762]: I1009 13:42:34.653367 4762 generic.go:334] "Generic (PLEG): container finished" podID="13e464a6-1da6-4201-bd1f-0a2af8539e90" containerID="f0e2437df79603ca8c0678f6c173b005731fee1da7d28879f24794908687761a" exitCode=0
Oct 09 13:42:34 crc kubenswrapper[4762]: I1009 13:42:34.653423 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/3efa7a3f52dfd25f555693463b15423e20f27c428253ed7107fc4edca96z7mg" event={"ID":"13e464a6-1da6-4201-bd1f-0a2af8539e90","Type":"ContainerDied","Data":"f0e2437df79603ca8c0678f6c173b005731fee1da7d28879f24794908687761a"}
Oct 09 13:42:34 crc kubenswrapper[4762]: I1009 13:42:34.655456 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/3efa7a3f52dfd25f555693463b15423e20f27c428253ed7107fc4edca96z7mg" event={"ID":"13e464a6-1da6-4201-bd1f-0a2af8539e90","Type":"ContainerStarted","Data":"1b2fe4f306358ca29e0d0fdd996108764eb84382c72b20e65a9dbde9f3c7b09e"}
Oct 09 13:42:35 crc kubenswrapper[4762]: I1009 13:42:35.664761 4762 generic.go:334] "Generic (PLEG): container finished" podID="13e464a6-1da6-4201-bd1f-0a2af8539e90" containerID="0c551228ecaa442128b050f7e2e8224584a8a4d4304b2337e8d8cfa2acad3d26" exitCode=0
Oct 09 13:42:35 crc kubenswrapper[4762]: I1009 13:42:35.664902 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/3efa7a3f52dfd25f555693463b15423e20f27c428253ed7107fc4edca96z7mg" event={"ID":"13e464a6-1da6-4201-bd1f-0a2af8539e90","Type":"ContainerDied","Data":"0c551228ecaa442128b050f7e2e8224584a8a4d4304b2337e8d8cfa2acad3d26"}
Oct 09 13:42:36 crc kubenswrapper[4762]: I1009 13:42:36.673925 4762 generic.go:334] "Generic (PLEG): container finished" podID="13e464a6-1da6-4201-bd1f-0a2af8539e90" containerID="7677bf2d1f21fa1047159a2447ffa83d161ab186b3f6f789fb0bf420c1ae3c5e" exitCode=0
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/3efa7a3f52dfd25f555693463b15423e20f27c428253ed7107fc4edca96z7mg" event={"ID":"13e464a6-1da6-4201-bd1f-0a2af8539e90","Type":"ContainerDied","Data":"7677bf2d1f21fa1047159a2447ffa83d161ab186b3f6f789fb0bf420c1ae3c5e"} Oct 09 13:42:37 crc kubenswrapper[4762]: I1009 13:42:37.908798 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/3efa7a3f52dfd25f555693463b15423e20f27c428253ed7107fc4edca96z7mg" Oct 09 13:42:37 crc kubenswrapper[4762]: I1009 13:42:37.928191 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/13e464a6-1da6-4201-bd1f-0a2af8539e90-util\") pod \"13e464a6-1da6-4201-bd1f-0a2af8539e90\" (UID: \"13e464a6-1da6-4201-bd1f-0a2af8539e90\") " Oct 09 13:42:37 crc kubenswrapper[4762]: I1009 13:42:37.929824 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/13e464a6-1da6-4201-bd1f-0a2af8539e90-bundle\") pod \"13e464a6-1da6-4201-bd1f-0a2af8539e90\" (UID: \"13e464a6-1da6-4201-bd1f-0a2af8539e90\") " Oct 09 13:42:37 crc kubenswrapper[4762]: I1009 13:42:37.930356 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ck5j4\" (UniqueName: \"kubernetes.io/projected/13e464a6-1da6-4201-bd1f-0a2af8539e90-kube-api-access-ck5j4\") pod \"13e464a6-1da6-4201-bd1f-0a2af8539e90\" (UID: \"13e464a6-1da6-4201-bd1f-0a2af8539e90\") " Oct 09 13:42:37 crc kubenswrapper[4762]: I1009 13:42:37.931260 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/13e464a6-1da6-4201-bd1f-0a2af8539e90-bundle" (OuterVolumeSpecName: "bundle") pod "13e464a6-1da6-4201-bd1f-0a2af8539e90" (UID: "13e464a6-1da6-4201-bd1f-0a2af8539e90"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 13:42:37 crc kubenswrapper[4762]: I1009 13:42:37.938551 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/13e464a6-1da6-4201-bd1f-0a2af8539e90-kube-api-access-ck5j4" (OuterVolumeSpecName: "kube-api-access-ck5j4") pod "13e464a6-1da6-4201-bd1f-0a2af8539e90" (UID: "13e464a6-1da6-4201-bd1f-0a2af8539e90"). InnerVolumeSpecName "kube-api-access-ck5j4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:42:37 crc kubenswrapper[4762]: I1009 13:42:37.946263 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/13e464a6-1da6-4201-bd1f-0a2af8539e90-util" (OuterVolumeSpecName: "util") pod "13e464a6-1da6-4201-bd1f-0a2af8539e90" (UID: "13e464a6-1da6-4201-bd1f-0a2af8539e90"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 13:42:38 crc kubenswrapper[4762]: I1009 13:42:38.032257 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ck5j4\" (UniqueName: \"kubernetes.io/projected/13e464a6-1da6-4201-bd1f-0a2af8539e90-kube-api-access-ck5j4\") on node \"crc\" DevicePath \"\"" Oct 09 13:42:38 crc kubenswrapper[4762]: I1009 13:42:38.032309 4762 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/13e464a6-1da6-4201-bd1f-0a2af8539e90-util\") on node \"crc\" DevicePath \"\"" Oct 09 13:42:38 crc kubenswrapper[4762]: I1009 13:42:38.032324 4762 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/13e464a6-1da6-4201-bd1f-0a2af8539e90-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 13:42:38 crc kubenswrapper[4762]: I1009 13:42:38.687729 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/3efa7a3f52dfd25f555693463b15423e20f27c428253ed7107fc4edca96z7mg" event={"ID":"13e464a6-1da6-4201-bd1f-0a2af8539e90","Type":"ContainerDied","Data":"1b2fe4f306358ca29e0d0fdd996108764eb84382c72b20e65a9dbde9f3c7b09e"} Oct 09 13:42:38 crc kubenswrapper[4762]: I1009 13:42:38.688066 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1b2fe4f306358ca29e0d0fdd996108764eb84382c72b20e65a9dbde9f3c7b09e" Oct 09 13:42:38 crc kubenswrapper[4762]: I1009 13:42:38.687820 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/3efa7a3f52dfd25f555693463b15423e20f27c428253ed7107fc4edca96z7mg" Oct 09 13:42:41 crc kubenswrapper[4762]: I1009 13:42:41.321667 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-operator-6747c5c7c8-jjnz6"] Oct 09 13:42:41 crc kubenswrapper[4762]: E1009 13:42:41.322293 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="13e464a6-1da6-4201-bd1f-0a2af8539e90" containerName="extract" Oct 09 13:42:41 crc kubenswrapper[4762]: I1009 13:42:41.322311 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="13e464a6-1da6-4201-bd1f-0a2af8539e90" containerName="extract" Oct 09 13:42:41 crc kubenswrapper[4762]: E1009 13:42:41.322326 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="13e464a6-1da6-4201-bd1f-0a2af8539e90" containerName="pull" Oct 09 13:42:41 crc kubenswrapper[4762]: I1009 13:42:41.322334 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="13e464a6-1da6-4201-bd1f-0a2af8539e90" containerName="pull" Oct 09 13:42:41 crc kubenswrapper[4762]: E1009 13:42:41.322363 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="13e464a6-1da6-4201-bd1f-0a2af8539e90" containerName="util" Oct 09 13:42:41 crc kubenswrapper[4762]: I1009 13:42:41.322373 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="13e464a6-1da6-4201-bd1f-0a2af8539e90" containerName="util" Oct 09 13:42:41 crc kubenswrapper[4762]: I1009 13:42:41.322522 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="13e464a6-1da6-4201-bd1f-0a2af8539e90" containerName="extract" Oct 09 13:42:41 crc kubenswrapper[4762]: I1009 13:42:41.323291 4762 util.go:30] "No sandbox for pod can be found. 
Oct 09 13:42:41 crc kubenswrapper[4762]: I1009 13:42:41.323291 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-6747c5c7c8-jjnz6"
Oct 09 13:42:41 crc kubenswrapper[4762]: I1009 13:42:41.325804 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-operator-dockercfg-c7vg8"
Oct 09 13:42:41 crc kubenswrapper[4762]: I1009 13:42:41.348677 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-6747c5c7c8-jjnz6"]
Oct 09 13:42:41 crc kubenswrapper[4762]: I1009 13:42:41.376326 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bljzr\" (UniqueName: \"kubernetes.io/projected/d3668269-2baf-43a1-9444-dddfb7a169b7-kube-api-access-bljzr\") pod \"openstack-operator-controller-operator-6747c5c7c8-jjnz6\" (UID: \"d3668269-2baf-43a1-9444-dddfb7a169b7\") " pod="openstack-operators/openstack-operator-controller-operator-6747c5c7c8-jjnz6"
Oct 09 13:42:41 crc kubenswrapper[4762]: I1009 13:42:41.477900 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bljzr\" (UniqueName: \"kubernetes.io/projected/d3668269-2baf-43a1-9444-dddfb7a169b7-kube-api-access-bljzr\") pod \"openstack-operator-controller-operator-6747c5c7c8-jjnz6\" (UID: \"d3668269-2baf-43a1-9444-dddfb7a169b7\") " pod="openstack-operators/openstack-operator-controller-operator-6747c5c7c8-jjnz6"
Oct 09 13:42:41 crc kubenswrapper[4762]: I1009 13:42:41.501402 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bljzr\" (UniqueName: \"kubernetes.io/projected/d3668269-2baf-43a1-9444-dddfb7a169b7-kube-api-access-bljzr\") pod \"openstack-operator-controller-operator-6747c5c7c8-jjnz6\" (UID: \"d3668269-2baf-43a1-9444-dddfb7a169b7\") " pod="openstack-operators/openstack-operator-controller-operator-6747c5c7c8-jjnz6"
Oct 09 13:42:41 crc kubenswrapper[4762]: I1009 13:42:41.640983 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-6747c5c7c8-jjnz6"
Oct 09 13:42:41 crc kubenswrapper[4762]: I1009 13:42:41.862332 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-6747c5c7c8-jjnz6"]
Oct 09 13:42:41 crc kubenswrapper[4762]: I1009 13:42:41.969825 4762 patch_prober.go:28] interesting pod/machine-config-daemon-5v6hv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 09 13:42:41 crc kubenswrapper[4762]: I1009 13:42:41.969916 4762 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 09 13:42:42 crc kubenswrapper[4762]: I1009 13:42:42.718519 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-6747c5c7c8-jjnz6" event={"ID":"d3668269-2baf-43a1-9444-dddfb7a169b7","Type":"ContainerStarted","Data":"fbaa312d8e98b4a515f65d6661552657c90f97ec4594d1f3641a7cb0ca1ec9d2"}
Oct 09 13:42:46 crc kubenswrapper[4762]: I1009 13:42:46.761121 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-6747c5c7c8-jjnz6" event={"ID":"d3668269-2baf-43a1-9444-dddfb7a169b7","Type":"ContainerStarted","Data":"b5a35e6e99cdfa00b6160f6778e04ea93d76055aafeb9b9cda1f8fda1671f888"}
Oct 09 13:42:50 crc kubenswrapper[4762]: I1009 13:42:50.792895 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-6747c5c7c8-jjnz6" event={"ID":"d3668269-2baf-43a1-9444-dddfb7a169b7","Type":"ContainerStarted","Data":"049db4f9a4a70c83d9b0a550e82635c9c416c7e9931f3d26858b196a7f4f623e"}
Oct 09 13:42:50 crc kubenswrapper[4762]: I1009 13:42:50.793533 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-operator-6747c5c7c8-jjnz6"
Oct 09 13:42:50 crc kubenswrapper[4762]: I1009 13:42:50.795769 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-operator-6747c5c7c8-jjnz6"
Oct 09 13:42:50 crc kubenswrapper[4762]: I1009 13:42:50.837228 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-operator-6747c5c7c8-jjnz6" podStartSLOduration=1.936380388 podStartE2EDuration="9.837194792s" podCreationTimestamp="2025-10-09 13:42:41 +0000 UTC" firstStartedPulling="2025-10-09 13:42:41.875671246 +0000 UTC m=+1037.649462285" lastFinishedPulling="2025-10-09 13:42:49.77648564 +0000 UTC m=+1045.550276689" observedRunningTime="2025-10-09 13:42:50.82921924 +0000 UTC m=+1046.603010289" watchObservedRunningTime="2025-10-09 13:42:50.837194792 +0000 UTC m=+1046.610985871"
Oct 09 13:43:11 crc kubenswrapper[4762]: I1009 13:43:11.969068 4762 patch_prober.go:28] interesting pod/machine-config-daemon-5v6hv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 09 13:43:11 crc kubenswrapper[4762]: I1009 13:43:11.969617 4762 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 09 13:43:11 crc kubenswrapper[4762]: I1009 13:43:11.969671 4762 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv"
Oct 09 13:43:12 crc kubenswrapper[4762]: I1009 13:43:12.932319 4762 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"d57a89488c0d1b6d5f453b504f52722aa68ac67c28d2410055ce9ab4d7c5ecc7"} pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Oct 09 13:43:12 crc kubenswrapper[4762]: I1009 13:43:12.932408 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" containerID="cri-o://d57a89488c0d1b6d5f453b504f52722aa68ac67c28d2410055ce9ab4d7c5ecc7" gracePeriod=600
Oct 09 13:43:15 crc kubenswrapper[4762]: I1009 13:43:15.969619 4762 generic.go:334] "Generic (PLEG): container finished" podID="366049a3-acf6-488c-9f93-4557528d6d14" containerID="d57a89488c0d1b6d5f453b504f52722aa68ac67c28d2410055ce9ab4d7c5ecc7" exitCode=0
Oct 09 13:43:15 crc kubenswrapper[4762]: I1009 13:43:15.969807 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" event={"ID":"366049a3-acf6-488c-9f93-4557528d6d14","Type":"ContainerDied","Data":"d57a89488c0d1b6d5f453b504f52722aa68ac67c28d2410055ce9ab4d7c5ecc7"}
Oct 09 13:43:15 crc kubenswrapper[4762]: I1009 13:43:15.970222 4762 scope.go:117] "RemoveContainer" containerID="b3e2931f38d6c24f318bca2e81d69458e7536a5e2c15ed2699be45822005a52b"
Oct 09 13:43:16 crc kubenswrapper[4762]: I1009 13:43:16.977181 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" event={"ID":"366049a3-acf6-488c-9f93-4557528d6d14","Type":"ContainerStarted","Data":"b2aad5d4c295d0a00a0ffcfb5183a47f48def84cfba6c0072cb314e437157ce7"}
Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.025039 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-64f84fcdbb-8qmk9"]
Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.026740 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-64f84fcdbb-8qmk9"
Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.029007 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-vdbfn"
Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.029550 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-59cdc64769-z8s2p"]
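[annotation] Worth noting in the span above: the machine-config-daemon liveness probe fails at 13:42:11, 13:42:41 and 13:43:11, exactly 30 s apart, and only after the third failure does the kubelet flip the probe to "unhealthy" and kill the container with gracePeriod=600. That cadence is consistent with periodSeconds=30 and failureThreshold=3, though the probe spec itself does not appear in the log. The threshold logic is simply a consecutive-failure counter:

    def should_restart(probe_results, failure_threshold=3):
        """True once `failure_threshold` consecutive probe failures occur."""
        streak = 0
        for ok in probe_results:
            streak = 0 if ok else streak + 1
            if streak >= failure_threshold:
                return True
        return False

    # 13:42:11, 13:42:41, 13:43:11: the third consecutive failure triggers the kill.
    assert should_restart([True, False, False, False])
    # A success in between resets the streak, so no restart here.
    assert not should_restart([False, False, True, False, False])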
Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.030584 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-59cdc64769-z8s2p"
Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.033843 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-kkd9c"
Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.050485 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-687df44cdb-sn56k"]
Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.051870 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-687df44cdb-sn56k"
Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.057119 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-bpgb9"
Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.066692 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-59cdc64769-z8s2p"]
Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.075966 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-687df44cdb-sn56k"]
Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.092575 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-7bb46cd7d-z4xhb"]
Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.093502 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-7bb46cd7d-z4xhb"
Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.098180 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-p5nwm"
Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.113605 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-64f84fcdbb-8qmk9"]
Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.117439 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-7bb46cd7d-z4xhb"]
Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.127505 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-6d9967f8dd-r6dpz"]
Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.128723 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kvblc\" (UniqueName: \"kubernetes.io/projected/b22d7fd7-7386-4c10-9133-7703f8f2e0b4-kube-api-access-kvblc\") pod \"cinder-operator-controller-manager-59cdc64769-z8s2p\" (UID: \"b22d7fd7-7386-4c10-9133-7703f8f2e0b4\") " pod="openstack-operators/cinder-operator-controller-manager-59cdc64769-z8s2p"
Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.128768 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fq5v9\" (UniqueName: \"kubernetes.io/projected/0302ab2b-3fbe-4d08-8364-872d1c1be2b7-kube-api-access-fq5v9\") pod \"designate-operator-controller-manager-687df44cdb-sn56k\" (UID: \"0302ab2b-3fbe-4d08-8364-872d1c1be2b7\") " pod="openstack-operators/designate-operator-controller-manager-687df44cdb-sn56k"
Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.128827 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pst7j\" (UniqueName: \"kubernetes.io/projected/f19642d0-b3f1-4de9-811a-8bd523f204c1-kube-api-access-pst7j\") pod \"barbican-operator-controller-manager-64f84fcdbb-8qmk9\" (UID: \"f19642d0-b3f1-4de9-811a-8bd523f204c1\") " pod="openstack-operators/barbican-operator-controller-manager-64f84fcdbb-8qmk9"
Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.128935 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-6d9967f8dd-r6dpz"
Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.137116 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-dx5sm"
Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.138247 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-6d74794d9b-4n6tj"]
Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.139288 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-6d74794d9b-4n6tj"
Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.140623 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-65j9b"
Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.144008 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-6d9967f8dd-r6dpz"]
Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.149303 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-6d74794d9b-4n6tj"]
Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.157672 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-585fc5b659-qv9zg"]
Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.159975 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-585fc5b659-qv9zg"
Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.164201 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-77bm9"
Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.166103 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert"
Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.180728 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-585fc5b659-qv9zg"]
Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.222008 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-74cb5cbc49-7g4mb"]
Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.230485 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kvblc\" (UniqueName: \"kubernetes.io/projected/b22d7fd7-7386-4c10-9133-7703f8f2e0b4-kube-api-access-kvblc\") pod \"cinder-operator-controller-manager-59cdc64769-z8s2p\" (UID: \"b22d7fd7-7386-4c10-9133-7703f8f2e0b4\") " pod="openstack-operators/cinder-operator-controller-manager-59cdc64769-z8s2p"
Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.230642 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xppdd\" (UniqueName: \"kubernetes.io/projected/a226d4b7-ceef-4cfd-aeb8-727fb0c8786d-kube-api-access-xppdd\") pod \"heat-operator-controller-manager-6d9967f8dd-r6dpz\" (UID: \"a226d4b7-ceef-4cfd-aeb8-727fb0c8786d\") " pod="openstack-operators/heat-operator-controller-manager-6d9967f8dd-r6dpz"
Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.230706 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fq5v9\" (UniqueName: \"kubernetes.io/projected/0302ab2b-3fbe-4d08-8364-872d1c1be2b7-kube-api-access-fq5v9\") pod \"designate-operator-controller-manager-687df44cdb-sn56k\" (UID: \"0302ab2b-3fbe-4d08-8364-872d1c1be2b7\") " pod="openstack-operators/designate-operator-controller-manager-687df44cdb-sn56k"
Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.230739 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-65phc\" (UniqueName: \"kubernetes.io/projected/6484b20c-9ee2-4134-bbc6-5c57c175f1db-kube-api-access-65phc\") pod \"glance-operator-controller-manager-7bb46cd7d-z4xhb\" (UID: \"6484b20c-9ee2-4134-bbc6-5c57c175f1db\") " pod="openstack-operators/glance-operator-controller-manager-7bb46cd7d-z4xhb"
Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.231094 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nsccx\" (UniqueName: \"kubernetes.io/projected/1b9fae99-ccfb-4f2e-9225-7eb67624ee5a-kube-api-access-nsccx\") pod \"horizon-operator-controller-manager-6d74794d9b-4n6tj\" (UID: \"1b9fae99-ccfb-4f2e-9225-7eb67624ee5a\") " pod="openstack-operators/horizon-operator-controller-manager-6d74794d9b-4n6tj"
Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.231152 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z2ddk\" (UniqueName: \"kubernetes.io/projected/a2c39eb4-d8e3-4bd5-9e66-7f7a5bce2eb9-kube-api-access-z2ddk\") pod \"infra-operator-controller-manager-585fc5b659-qv9zg\" (UID: \"a2c39eb4-d8e3-4bd5-9e66-7f7a5bce2eb9\") " pod="openstack-operators/infra-operator-controller-manager-585fc5b659-qv9zg"
Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.238213 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a2c39eb4-d8e3-4bd5-9e66-7f7a5bce2eb9-cert\") pod \"infra-operator-controller-manager-585fc5b659-qv9zg\" (UID: \"a2c39eb4-d8e3-4bd5-9e66-7f7a5bce2eb9\") " pod="openstack-operators/infra-operator-controller-manager-585fc5b659-qv9zg"
Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.238653 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pst7j\" (UniqueName: \"kubernetes.io/projected/f19642d0-b3f1-4de9-811a-8bd523f204c1-kube-api-access-pst7j\") pod \"barbican-operator-controller-manager-64f84fcdbb-8qmk9\" (UID: \"f19642d0-b3f1-4de9-811a-8bd523f204c1\") " pod="openstack-operators/barbican-operator-controller-manager-64f84fcdbb-8qmk9"
Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.238766 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-74cb5cbc49-7g4mb"
Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.245181 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-2qjzn"
Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.266216 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-74cb5cbc49-7g4mb"]
Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.266881 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kvblc\" (UniqueName: \"kubernetes.io/projected/b22d7fd7-7386-4c10-9133-7703f8f2e0b4-kube-api-access-kvblc\") pod \"cinder-operator-controller-manager-59cdc64769-z8s2p\" (UID: \"b22d7fd7-7386-4c10-9133-7703f8f2e0b4\") " pod="openstack-operators/cinder-operator-controller-manager-59cdc64769-z8s2p"
Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.268319 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fq5v9\" (UniqueName: \"kubernetes.io/projected/0302ab2b-3fbe-4d08-8364-872d1c1be2b7-kube-api-access-fq5v9\") pod \"designate-operator-controller-manager-687df44cdb-sn56k\" (UID: \"0302ab2b-3fbe-4d08-8364-872d1c1be2b7\") " pod="openstack-operators/designate-operator-controller-manager-687df44cdb-sn56k"
Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.277067 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pst7j\" (UniqueName: \"kubernetes.io/projected/f19642d0-b3f1-4de9-811a-8bd523f204c1-kube-api-access-pst7j\") pod \"barbican-operator-controller-manager-64f84fcdbb-8qmk9\" (UID: \"f19642d0-b3f1-4de9-811a-8bd523f204c1\") " pod="openstack-operators/barbican-operator-controller-manager-64f84fcdbb-8qmk9"
Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.299736 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-ddb98f99b-pnlww"]
Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.302358 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-ddb98f99b-pnlww"
Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.308135 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-bjzz4"
Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.322118 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-59578bc799-tdnrr"]
Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.323419 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-59578bc799-tdnrr"
Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.325354 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-5m5w4"
Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.338415 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-ddb98f99b-pnlww"]
Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.339405 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z2ddk\" (UniqueName: \"kubernetes.io/projected/a2c39eb4-d8e3-4bd5-9e66-7f7a5bce2eb9-kube-api-access-z2ddk\") pod \"infra-operator-controller-manager-585fc5b659-qv9zg\" (UID: \"a2c39eb4-d8e3-4bd5-9e66-7f7a5bce2eb9\") " pod="openstack-operators/infra-operator-controller-manager-585fc5b659-qv9zg"
Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.339456 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a2c39eb4-d8e3-4bd5-9e66-7f7a5bce2eb9-cert\") pod \"infra-operator-controller-manager-585fc5b659-qv9zg\" (UID: \"a2c39eb4-d8e3-4bd5-9e66-7f7a5bce2eb9\") " pod="openstack-operators/infra-operator-controller-manager-585fc5b659-qv9zg"
Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.339520 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xppdd\" (UniqueName: \"kubernetes.io/projected/a226d4b7-ceef-4cfd-aeb8-727fb0c8786d-kube-api-access-xppdd\") pod \"heat-operator-controller-manager-6d9967f8dd-r6dpz\" (UID: \"a226d4b7-ceef-4cfd-aeb8-727fb0c8786d\") " pod="openstack-operators/heat-operator-controller-manager-6d9967f8dd-r6dpz"
Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.339563 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-65phc\" (UniqueName: \"kubernetes.io/projected/6484b20c-9ee2-4134-bbc6-5c57c175f1db-kube-api-access-65phc\") pod \"glance-operator-controller-manager-7bb46cd7d-z4xhb\" (UID: \"6484b20c-9ee2-4134-bbc6-5c57c175f1db\") " pod="openstack-operators/glance-operator-controller-manager-7bb46cd7d-z4xhb"
Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.339600 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lqkhp\" (UniqueName: \"kubernetes.io/projected/d94fa52d-aee2-46eb-a4bb-1dd5ee5fa19c-kube-api-access-lqkhp\") pod \"ironic-operator-controller-manager-74cb5cbc49-7g4mb\" (UID: \"d94fa52d-aee2-46eb-a4bb-1dd5ee5fa19c\") " pod="openstack-operators/ironic-operator-controller-manager-74cb5cbc49-7g4mb"
Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.339658 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nsccx\" (UniqueName: \"kubernetes.io/projected/1b9fae99-ccfb-4f2e-9225-7eb67624ee5a-kube-api-access-nsccx\") pod \"horizon-operator-controller-manager-6d74794d9b-4n6tj\" (UID: \"1b9fae99-ccfb-4f2e-9225-7eb67624ee5a\") " pod="openstack-operators/horizon-operator-controller-manager-6d74794d9b-4n6tj"
Oct 09 13:43:18 crc kubenswrapper[4762]: E1009 13:43:18.340199 4762 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found
Oct 09 13:43:18 crc kubenswrapper[4762]: E1009 13:43:18.340264 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a2c39eb4-d8e3-4bd5-9e66-7f7a5bce2eb9-cert podName:a2c39eb4-d8e3-4bd5-9e66-7f7a5bce2eb9 nodeName:}" failed. No retries permitted until 2025-10-09 13:43:18.84024393 +0000 UTC m=+1074.614034969 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/a2c39eb4-d8e3-4bd5-9e66-7f7a5bce2eb9-cert") pod "infra-operator-controller-manager-585fc5b659-qv9zg" (UID: "a2c39eb4-d8e3-4bd5-9e66-7f7a5bce2eb9") : secret "infra-operator-webhook-server-cert" not found
Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.353057 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-64f84fcdbb-8qmk9"
Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.357612 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-59578bc799-tdnrr"]
Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.372979 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-59cdc64769-z8s2p"
Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.375384 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xppdd\" (UniqueName: \"kubernetes.io/projected/a226d4b7-ceef-4cfd-aeb8-727fb0c8786d-kube-api-access-xppdd\") pod \"heat-operator-controller-manager-6d9967f8dd-r6dpz\" (UID: \"a226d4b7-ceef-4cfd-aeb8-727fb0c8786d\") " pod="openstack-operators/heat-operator-controller-manager-6d9967f8dd-r6dpz"
Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.375464 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-5777b4f897-6s6tx"]
Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-5777b4f897-6s6tx" Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.378425 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z2ddk\" (UniqueName: \"kubernetes.io/projected/a2c39eb4-d8e3-4bd5-9e66-7f7a5bce2eb9-kube-api-access-z2ddk\") pod \"infra-operator-controller-manager-585fc5b659-qv9zg\" (UID: \"a2c39eb4-d8e3-4bd5-9e66-7f7a5bce2eb9\") " pod="openstack-operators/infra-operator-controller-manager-585fc5b659-qv9zg" Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.382065 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-v2n8t" Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.382272 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-65phc\" (UniqueName: \"kubernetes.io/projected/6484b20c-9ee2-4134-bbc6-5c57c175f1db-kube-api-access-65phc\") pod \"glance-operator-controller-manager-7bb46cd7d-z4xhb\" (UID: \"6484b20c-9ee2-4134-bbc6-5c57c175f1db\") " pod="openstack-operators/glance-operator-controller-manager-7bb46cd7d-z4xhb" Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.389959 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-797d478b46-b2j7z"] Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.391084 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-687df44cdb-sn56k" Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.391363 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-797d478b46-b2j7z" Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.400439 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-57bb74c7bf-qz69s"] Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.401768 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-57bb74c7bf-qz69s" Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.404963 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nsccx\" (UniqueName: \"kubernetes.io/projected/1b9fae99-ccfb-4f2e-9225-7eb67624ee5a-kube-api-access-nsccx\") pod \"horizon-operator-controller-manager-6d74794d9b-4n6tj\" (UID: \"1b9fae99-ccfb-4f2e-9225-7eb67624ee5a\") " pod="openstack-operators/horizon-operator-controller-manager-6d74794d9b-4n6tj" Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.425118 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-x8dc7" Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.425517 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-k7fmb" Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.428933 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-7bb46cd7d-z4xhb" Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.430051 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-5777b4f897-6s6tx"] Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.441541 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4tf49\" (UniqueName: \"kubernetes.io/projected/0024a375-268b-4c89-ad32-2b3876e271af-kube-api-access-4tf49\") pod \"manila-operator-controller-manager-59578bc799-tdnrr\" (UID: \"0024a375-268b-4c89-ad32-2b3876e271af\") " pod="openstack-operators/manila-operator-controller-manager-59578bc799-tdnrr" Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.441586 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2mtp6\" (UniqueName: \"kubernetes.io/projected/4141c889-724a-4a6f-886c-d1b6fa852d0f-kube-api-access-2mtp6\") pod \"mariadb-operator-controller-manager-5777b4f897-6s6tx\" (UID: \"4141c889-724a-4a6f-886c-d1b6fa852d0f\") " pod="openstack-operators/mariadb-operator-controller-manager-5777b4f897-6s6tx" Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.441671 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lqkhp\" (UniqueName: \"kubernetes.io/projected/d94fa52d-aee2-46eb-a4bb-1dd5ee5fa19c-kube-api-access-lqkhp\") pod \"ironic-operator-controller-manager-74cb5cbc49-7g4mb\" (UID: \"d94fa52d-aee2-46eb-a4bb-1dd5ee5fa19c\") " pod="openstack-operators/ironic-operator-controller-manager-74cb5cbc49-7g4mb" Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.441695 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9bqzp\" (UniqueName: \"kubernetes.io/projected/117e3f88-b1fd-4738-bd66-8c8e0e25a488-kube-api-access-9bqzp\") pod \"keystone-operator-controller-manager-ddb98f99b-pnlww\" (UID: \"117e3f88-b1fd-4738-bd66-8c8e0e25a488\") " pod="openstack-operators/keystone-operator-controller-manager-ddb98f99b-pnlww" Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.441730 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9ls5s\" (UniqueName: \"kubernetes.io/projected/3a72da9f-8fa2-4aa5-aab7-2175f034ede8-kube-api-access-9ls5s\") pod \"neutron-operator-controller-manager-797d478b46-b2j7z\" (UID: \"3a72da9f-8fa2-4aa5-aab7-2175f034ede8\") " pod="openstack-operators/neutron-operator-controller-manager-797d478b46-b2j7z" Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.451879 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-57bb74c7bf-qz69s"] Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.463766 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-797d478b46-b2j7z"] Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.464266 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-6d9967f8dd-r6dpz" Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.481041 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lqkhp\" (UniqueName: \"kubernetes.io/projected/d94fa52d-aee2-46eb-a4bb-1dd5ee5fa19c-kube-api-access-lqkhp\") pod \"ironic-operator-controller-manager-74cb5cbc49-7g4mb\" (UID: \"d94fa52d-aee2-46eb-a4bb-1dd5ee5fa19c\") " pod="openstack-operators/ironic-operator-controller-manager-74cb5cbc49-7g4mb" Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.484479 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-6d7c7ddf95-t8zhf"] Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.486754 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-6d7c7ddf95-t8zhf" Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.492779 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-6d7c7ddf95-t8zhf"] Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.502063 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-rrfsd" Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.522396 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-869cc7797f-pj45b"] Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.523722 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-869cc7797f-pj45b" Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.525696 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-bk7dh" Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.539421 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-5458df59d8mmjs7"] Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.540420 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-5458df59d8mmjs7" Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.541170 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-6d74794d9b-4n6tj" Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.542750 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h8pb4\" (UniqueName: \"kubernetes.io/projected/812a090b-267a-4899-a41c-e51592e6ca5b-kube-api-access-h8pb4\") pod \"octavia-operator-controller-manager-6d7c7ddf95-t8zhf\" (UID: \"812a090b-267a-4899-a41c-e51592e6ca5b\") " pod="openstack-operators/octavia-operator-controller-manager-6d7c7ddf95-t8zhf" Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.542851 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9ls5s\" (UniqueName: \"kubernetes.io/projected/3a72da9f-8fa2-4aa5-aab7-2175f034ede8-kube-api-access-9ls5s\") pod \"neutron-operator-controller-manager-797d478b46-b2j7z\" (UID: \"3a72da9f-8fa2-4aa5-aab7-2175f034ede8\") " pod="openstack-operators/neutron-operator-controller-manager-797d478b46-b2j7z" Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.542915 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4tf49\" (UniqueName: \"kubernetes.io/projected/0024a375-268b-4c89-ad32-2b3876e271af-kube-api-access-4tf49\") pod \"manila-operator-controller-manager-59578bc799-tdnrr\" (UID: \"0024a375-268b-4c89-ad32-2b3876e271af\") " pod="openstack-operators/manila-operator-controller-manager-59578bc799-tdnrr" Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.542944 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2mtp6\" (UniqueName: \"kubernetes.io/projected/4141c889-724a-4a6f-886c-d1b6fa852d0f-kube-api-access-2mtp6\") pod \"mariadb-operator-controller-manager-5777b4f897-6s6tx\" (UID: \"4141c889-724a-4a6f-886c-d1b6fa852d0f\") " pod="openstack-operators/mariadb-operator-controller-manager-5777b4f897-6s6tx" Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.543001 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dxnz5\" (UniqueName: \"kubernetes.io/projected/32fef3bc-b4cb-460e-8d36-0ba75c16d394-kube-api-access-dxnz5\") pod \"nova-operator-controller-manager-57bb74c7bf-qz69s\" (UID: \"32fef3bc-b4cb-460e-8d36-0ba75c16d394\") " pod="openstack-operators/nova-operator-controller-manager-57bb74c7bf-qz69s" Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.543124 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9bqzp\" (UniqueName: \"kubernetes.io/projected/117e3f88-b1fd-4738-bd66-8c8e0e25a488-kube-api-access-9bqzp\") pod \"keystone-operator-controller-manager-ddb98f99b-pnlww\" (UID: \"117e3f88-b1fd-4738-bd66-8c8e0e25a488\") " pod="openstack-operators/keystone-operator-controller-manager-ddb98f99b-pnlww" Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.543302 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-wtr44" Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.543879 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert" Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.567916 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9ls5s\" (UniqueName: 
\"kubernetes.io/projected/3a72da9f-8fa2-4aa5-aab7-2175f034ede8-kube-api-access-9ls5s\") pod \"neutron-operator-controller-manager-797d478b46-b2j7z\" (UID: \"3a72da9f-8fa2-4aa5-aab7-2175f034ede8\") " pod="openstack-operators/neutron-operator-controller-manager-797d478b46-b2j7z" Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.573735 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9bqzp\" (UniqueName: \"kubernetes.io/projected/117e3f88-b1fd-4738-bd66-8c8e0e25a488-kube-api-access-9bqzp\") pod \"keystone-operator-controller-manager-ddb98f99b-pnlww\" (UID: \"117e3f88-b1fd-4738-bd66-8c8e0e25a488\") " pod="openstack-operators/keystone-operator-controller-manager-ddb98f99b-pnlww" Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.597099 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2mtp6\" (UniqueName: \"kubernetes.io/projected/4141c889-724a-4a6f-886c-d1b6fa852d0f-kube-api-access-2mtp6\") pod \"mariadb-operator-controller-manager-5777b4f897-6s6tx\" (UID: \"4141c889-724a-4a6f-886c-d1b6fa852d0f\") " pod="openstack-operators/mariadb-operator-controller-manager-5777b4f897-6s6tx" Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.601607 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-664664cb68-mk2fh"] Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.620117 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-664664cb68-mk2fh" Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.626465 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-797d478b46-b2j7z" Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.629356 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-74cb5cbc49-7g4mb" Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.629755 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-869cc7797f-pj45b"] Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.629972 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-s45fl" Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.634181 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4tf49\" (UniqueName: \"kubernetes.io/projected/0024a375-268b-4c89-ad32-2b3876e271af-kube-api-access-4tf49\") pod \"manila-operator-controller-manager-59578bc799-tdnrr\" (UID: \"0024a375-268b-4c89-ad32-2b3876e271af\") " pod="openstack-operators/manila-operator-controller-manager-59578bc799-tdnrr" Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.648272 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-ddb98f99b-pnlww" Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.648875 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dxnz5\" (UniqueName: \"kubernetes.io/projected/32fef3bc-b4cb-460e-8d36-0ba75c16d394-kube-api-access-dxnz5\") pod \"nova-operator-controller-manager-57bb74c7bf-qz69s\" (UID: \"32fef3bc-b4cb-460e-8d36-0ba75c16d394\") " pod="openstack-operators/nova-operator-controller-manager-57bb74c7bf-qz69s" Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.648940 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/98aca1bd-63ee-4285-a903-64cd82c6226c-cert\") pod \"openstack-baremetal-operator-controller-manager-5458df59d8mmjs7\" (UID: \"98aca1bd-63ee-4285-a903-64cd82c6226c\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5458df59d8mmjs7" Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.648999 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d5t2g\" (UniqueName: \"kubernetes.io/projected/8b396a39-6575-426e-b333-da637fbe5616-kube-api-access-d5t2g\") pod \"ovn-operator-controller-manager-869cc7797f-pj45b\" (UID: \"8b396a39-6575-426e-b333-da637fbe5616\") " pod="openstack-operators/ovn-operator-controller-manager-869cc7797f-pj45b" Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.649048 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h8pb4\" (UniqueName: \"kubernetes.io/projected/812a090b-267a-4899-a41c-e51592e6ca5b-kube-api-access-h8pb4\") pod \"octavia-operator-controller-manager-6d7c7ddf95-t8zhf\" (UID: \"812a090b-267a-4899-a41c-e51592e6ca5b\") " pod="openstack-operators/octavia-operator-controller-manager-6d7c7ddf95-t8zhf" Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.649107 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8z2n9\" (UniqueName: \"kubernetes.io/projected/98aca1bd-63ee-4285-a903-64cd82c6226c-kube-api-access-8z2n9\") pod \"openstack-baremetal-operator-controller-manager-5458df59d8mmjs7\" (UID: \"98aca1bd-63ee-4285-a903-64cd82c6226c\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5458df59d8mmjs7" Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.652297 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-5458df59d8mmjs7"] Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.663946 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-59578bc799-tdnrr" Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.698592 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h8pb4\" (UniqueName: \"kubernetes.io/projected/812a090b-267a-4899-a41c-e51592e6ca5b-kube-api-access-h8pb4\") pod \"octavia-operator-controller-manager-6d7c7ddf95-t8zhf\" (UID: \"812a090b-267a-4899-a41c-e51592e6ca5b\") " pod="openstack-operators/octavia-operator-controller-manager-6d7c7ddf95-t8zhf" Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.738704 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dxnz5\" (UniqueName: \"kubernetes.io/projected/32fef3bc-b4cb-460e-8d36-0ba75c16d394-kube-api-access-dxnz5\") pod \"nova-operator-controller-manager-57bb74c7bf-qz69s\" (UID: \"32fef3bc-b4cb-460e-8d36-0ba75c16d394\") " pod="openstack-operators/nova-operator-controller-manager-57bb74c7bf-qz69s" Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.756243 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-664664cb68-mk2fh"] Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.758395 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8z2n9\" (UniqueName: \"kubernetes.io/projected/98aca1bd-63ee-4285-a903-64cd82c6226c-kube-api-access-8z2n9\") pod \"openstack-baremetal-operator-controller-manager-5458df59d8mmjs7\" (UID: \"98aca1bd-63ee-4285-a903-64cd82c6226c\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5458df59d8mmjs7" Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.758496 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/98aca1bd-63ee-4285-a903-64cd82c6226c-cert\") pod \"openstack-baremetal-operator-controller-manager-5458df59d8mmjs7\" (UID: \"98aca1bd-63ee-4285-a903-64cd82c6226c\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5458df59d8mmjs7" Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.758529 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d5t2g\" (UniqueName: \"kubernetes.io/projected/8b396a39-6575-426e-b333-da637fbe5616-kube-api-access-d5t2g\") pod \"ovn-operator-controller-manager-869cc7797f-pj45b\" (UID: \"8b396a39-6575-426e-b333-da637fbe5616\") " pod="openstack-operators/ovn-operator-controller-manager-869cc7797f-pj45b" Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.758558 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6mpfq\" (UniqueName: \"kubernetes.io/projected/cf87e411-c213-4287-bd23-381ea5be1a1b-kube-api-access-6mpfq\") pod \"placement-operator-controller-manager-664664cb68-mk2fh\" (UID: \"cf87e411-c213-4287-bd23-381ea5be1a1b\") " pod="openstack-operators/placement-operator-controller-manager-664664cb68-mk2fh" Oct 09 13:43:18 crc kubenswrapper[4762]: E1009 13:43:18.759200 4762 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Oct 09 13:43:18 crc kubenswrapper[4762]: E1009 13:43:18.759236 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/98aca1bd-63ee-4285-a903-64cd82c6226c-cert podName:98aca1bd-63ee-4285-a903-64cd82c6226c 
nodeName:}" failed. No retries permitted until 2025-10-09 13:43:19.259223771 +0000 UTC m=+1075.033014810 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/98aca1bd-63ee-4285-a903-64cd82c6226c-cert") pod "openstack-baremetal-operator-controller-manager-5458df59d8mmjs7" (UID: "98aca1bd-63ee-4285-a903-64cd82c6226c") : secret "openstack-baremetal-operator-webhook-server-cert" not found Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.778470 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-6d7c7ddf95-t8zhf" Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.779561 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8z2n9\" (UniqueName: \"kubernetes.io/projected/98aca1bd-63ee-4285-a903-64cd82c6226c-kube-api-access-8z2n9\") pod \"openstack-baremetal-operator-controller-manager-5458df59d8mmjs7\" (UID: \"98aca1bd-63ee-4285-a903-64cd82c6226c\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5458df59d8mmjs7" Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.782503 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-5f4d5dfdc6-lwj44"] Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.782977 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d5t2g\" (UniqueName: \"kubernetes.io/projected/8b396a39-6575-426e-b333-da637fbe5616-kube-api-access-d5t2g\") pod \"ovn-operator-controller-manager-869cc7797f-pj45b\" (UID: \"8b396a39-6575-426e-b333-da637fbe5616\") " pod="openstack-operators/ovn-operator-controller-manager-869cc7797f-pj45b" Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.784993 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-5f4d5dfdc6-lwj44" Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.793666 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-5f4d5dfdc6-lwj44"] Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.812191 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-f4bh8" Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.822832 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-85fd6d6f45-h6cz8"] Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.827448 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-85fd6d6f45-h6cz8" Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.841465 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-gdq2z" Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.841836 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-869cc7797f-pj45b" Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.856605 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-85fd6d6f45-h6cz8"] Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.856974 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-5777b4f897-6s6tx" Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.861206 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6mpfq\" (UniqueName: \"kubernetes.io/projected/cf87e411-c213-4287-bd23-381ea5be1a1b-kube-api-access-6mpfq\") pod \"placement-operator-controller-manager-664664cb68-mk2fh\" (UID: \"cf87e411-c213-4287-bd23-381ea5be1a1b\") " pod="openstack-operators/placement-operator-controller-manager-664664cb68-mk2fh" Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.861301 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a2c39eb4-d8e3-4bd5-9e66-7f7a5bce2eb9-cert\") pod \"infra-operator-controller-manager-585fc5b659-qv9zg\" (UID: \"a2c39eb4-d8e3-4bd5-9e66-7f7a5bce2eb9\") " pod="openstack-operators/infra-operator-controller-manager-585fc5b659-qv9zg" Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.861354 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7pc4s\" (UniqueName: \"kubernetes.io/projected/3660bde9-a2d9-43ee-8052-823fdc1f5db9-kube-api-access-7pc4s\") pod \"swift-operator-controller-manager-5f4d5dfdc6-lwj44\" (UID: \"3660bde9-a2d9-43ee-8052-823fdc1f5db9\") " pod="openstack-operators/swift-operator-controller-manager-5f4d5dfdc6-lwj44" Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.875257 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a2c39eb4-d8e3-4bd5-9e66-7f7a5bce2eb9-cert\") pod \"infra-operator-controller-manager-585fc5b659-qv9zg\" (UID: \"a2c39eb4-d8e3-4bd5-9e66-7f7a5bce2eb9\") " pod="openstack-operators/infra-operator-controller-manager-585fc5b659-qv9zg" Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.881231 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-ffcdd6c94-xgnqp"] Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.883007 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/test-operator-controller-manager-ffcdd6c94-xgnqp" Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.901826 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-df29q" Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.944860 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6mpfq\" (UniqueName: \"kubernetes.io/projected/cf87e411-c213-4287-bd23-381ea5be1a1b-kube-api-access-6mpfq\") pod \"placement-operator-controller-manager-664664cb68-mk2fh\" (UID: \"cf87e411-c213-4287-bd23-381ea5be1a1b\") " pod="openstack-operators/placement-operator-controller-manager-664664cb68-mk2fh" Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.968064 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-ffcdd6c94-xgnqp"] Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.985453 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-798v5\" (UniqueName: \"kubernetes.io/projected/6ea12cd7-1cd9-4cbd-a881-0bbb334b23e2-kube-api-access-798v5\") pod \"test-operator-controller-manager-ffcdd6c94-xgnqp\" (UID: \"6ea12cd7-1cd9-4cbd-a881-0bbb334b23e2\") " pod="openstack-operators/test-operator-controller-manager-ffcdd6c94-xgnqp" Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.985700 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7pc4s\" (UniqueName: \"kubernetes.io/projected/3660bde9-a2d9-43ee-8052-823fdc1f5db9-kube-api-access-7pc4s\") pod \"swift-operator-controller-manager-5f4d5dfdc6-lwj44\" (UID: \"3660bde9-a2d9-43ee-8052-823fdc1f5db9\") " pod="openstack-operators/swift-operator-controller-manager-5f4d5dfdc6-lwj44" Oct 09 13:43:18 crc kubenswrapper[4762]: I1009 13:43:18.986495 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hv6zs\" (UniqueName: \"kubernetes.io/projected/2bc7bc27-a390-4830-88cc-2a94e1326a09-kube-api-access-hv6zs\") pod \"telemetry-operator-controller-manager-85fd6d6f45-h6cz8\" (UID: \"2bc7bc27-a390-4830-88cc-2a94e1326a09\") " pod="openstack-operators/telemetry-operator-controller-manager-85fd6d6f45-h6cz8" Oct 09 13:43:19 crc kubenswrapper[4762]: I1009 13:43:19.001054 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-57bb74c7bf-qz69s" Oct 09 13:43:19 crc kubenswrapper[4762]: I1009 13:43:19.045511 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7pc4s\" (UniqueName: \"kubernetes.io/projected/3660bde9-a2d9-43ee-8052-823fdc1f5db9-kube-api-access-7pc4s\") pod \"swift-operator-controller-manager-5f4d5dfdc6-lwj44\" (UID: \"3660bde9-a2d9-43ee-8052-823fdc1f5db9\") " pod="openstack-operators/swift-operator-controller-manager-5f4d5dfdc6-lwj44" Oct 09 13:43:19 crc kubenswrapper[4762]: I1009 13:43:19.088570 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hv6zs\" (UniqueName: \"kubernetes.io/projected/2bc7bc27-a390-4830-88cc-2a94e1326a09-kube-api-access-hv6zs\") pod \"telemetry-operator-controller-manager-85fd6d6f45-h6cz8\" (UID: \"2bc7bc27-a390-4830-88cc-2a94e1326a09\") " pod="openstack-operators/telemetry-operator-controller-manager-85fd6d6f45-h6cz8" Oct 09 13:43:19 crc kubenswrapper[4762]: I1009 13:43:19.088666 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-798v5\" (UniqueName: \"kubernetes.io/projected/6ea12cd7-1cd9-4cbd-a881-0bbb334b23e2-kube-api-access-798v5\") pod \"test-operator-controller-manager-ffcdd6c94-xgnqp\" (UID: \"6ea12cd7-1cd9-4cbd-a881-0bbb334b23e2\") " pod="openstack-operators/test-operator-controller-manager-ffcdd6c94-xgnqp" Oct 09 13:43:19 crc kubenswrapper[4762]: I1009 13:43:19.115878 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-798v5\" (UniqueName: \"kubernetes.io/projected/6ea12cd7-1cd9-4cbd-a881-0bbb334b23e2-kube-api-access-798v5\") pod \"test-operator-controller-manager-ffcdd6c94-xgnqp\" (UID: \"6ea12cd7-1cd9-4cbd-a881-0bbb334b23e2\") " pod="openstack-operators/test-operator-controller-manager-ffcdd6c94-xgnqp" Oct 09 13:43:19 crc kubenswrapper[4762]: I1009 13:43:19.123383 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hv6zs\" (UniqueName: \"kubernetes.io/projected/2bc7bc27-a390-4830-88cc-2a94e1326a09-kube-api-access-hv6zs\") pod \"telemetry-operator-controller-manager-85fd6d6f45-h6cz8\" (UID: \"2bc7bc27-a390-4830-88cc-2a94e1326a09\") " pod="openstack-operators/telemetry-operator-controller-manager-85fd6d6f45-h6cz8" Oct 09 13:43:19 crc kubenswrapper[4762]: I1009 13:43:19.126557 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-664664cb68-mk2fh" Oct 09 13:43:19 crc kubenswrapper[4762]: I1009 13:43:19.170404 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-585fc5b659-qv9zg" Oct 09 13:43:19 crc kubenswrapper[4762]: I1009 13:43:19.199832 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-5f4d5dfdc6-lwj44" Oct 09 13:43:19 crc kubenswrapper[4762]: I1009 13:43:19.202884 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-646675d848-rx4k9"] Oct 09 13:43:19 crc kubenswrapper[4762]: I1009 13:43:19.204076 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-646675d848-rx4k9"] Oct 09 13:43:19 crc kubenswrapper[4762]: I1009 13:43:19.204142 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-694c8c648f-5xrql"] Oct 09 13:43:19 crc kubenswrapper[4762]: I1009 13:43:19.206695 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-646675d848-rx4k9" Oct 09 13:43:19 crc kubenswrapper[4762]: I1009 13:43:19.207017 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-694c8c648f-5xrql"] Oct 09 13:43:19 crc kubenswrapper[4762]: I1009 13:43:19.207048 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-l647r"] Oct 09 13:43:19 crc kubenswrapper[4762]: I1009 13:43:19.207758 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-l647r"] Oct 09 13:43:19 crc kubenswrapper[4762]: I1009 13:43:19.207781 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-59cdc64769-z8s2p"] Oct 09 13:43:19 crc kubenswrapper[4762]: I1009 13:43:19.207834 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-l647r" Oct 09 13:43:19 crc kubenswrapper[4762]: I1009 13:43:19.208548 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-694c8c648f-5xrql" Oct 09 13:43:19 crc kubenswrapper[4762]: I1009 13:43:19.209387 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-lqxlb" Oct 09 13:43:19 crc kubenswrapper[4762]: I1009 13:43:19.209596 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-x7df5" Oct 09 13:43:19 crc kubenswrapper[4762]: I1009 13:43:19.210705 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-mfp54" Oct 09 13:43:19 crc kubenswrapper[4762]: I1009 13:43:19.211827 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Oct 09 13:43:19 crc kubenswrapper[4762]: I1009 13:43:19.272708 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-85fd6d6f45-h6cz8" Oct 09 13:43:19 crc kubenswrapper[4762]: I1009 13:43:19.296468 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dm2p8\" (UniqueName: \"kubernetes.io/projected/18635a37-db18-44f8-94a2-1245456d943a-kube-api-access-dm2p8\") pod \"rabbitmq-cluster-operator-manager-5f97d8c699-l647r\" (UID: \"18635a37-db18-44f8-94a2-1245456d943a\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-l647r" Oct 09 13:43:19 crc kubenswrapper[4762]: I1009 13:43:19.296529 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kmkkg\" (UniqueName: \"kubernetes.io/projected/2acaeb4c-8968-4dc3-9d61-0ffe8389067d-kube-api-access-kmkkg\") pod \"watcher-operator-controller-manager-646675d848-rx4k9\" (UID: \"2acaeb4c-8968-4dc3-9d61-0ffe8389067d\") " pod="openstack-operators/watcher-operator-controller-manager-646675d848-rx4k9" Oct 09 13:43:19 crc kubenswrapper[4762]: I1009 13:43:19.296558 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/cdc9b29d-6c7e-4e07-82a7-3b2fe39d45ec-cert\") pod \"openstack-operator-controller-manager-694c8c648f-5xrql\" (UID: \"cdc9b29d-6c7e-4e07-82a7-3b2fe39d45ec\") " pod="openstack-operators/openstack-operator-controller-manager-694c8c648f-5xrql" Oct 09 13:43:19 crc kubenswrapper[4762]: I1009 13:43:19.296598 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jjzsk\" (UniqueName: \"kubernetes.io/projected/cdc9b29d-6c7e-4e07-82a7-3b2fe39d45ec-kube-api-access-jjzsk\") pod \"openstack-operator-controller-manager-694c8c648f-5xrql\" (UID: \"cdc9b29d-6c7e-4e07-82a7-3b2fe39d45ec\") " pod="openstack-operators/openstack-operator-controller-manager-694c8c648f-5xrql" Oct 09 13:43:19 crc kubenswrapper[4762]: I1009 13:43:19.297487 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/98aca1bd-63ee-4285-a903-64cd82c6226c-cert\") pod \"openstack-baremetal-operator-controller-manager-5458df59d8mmjs7\" (UID: \"98aca1bd-63ee-4285-a903-64cd82c6226c\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5458df59d8mmjs7" Oct 09 13:43:19 crc kubenswrapper[4762]: E1009 13:43:19.298783 4762 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Oct 09 13:43:19 crc kubenswrapper[4762]: E1009 13:43:19.298930 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/98aca1bd-63ee-4285-a903-64cd82c6226c-cert podName:98aca1bd-63ee-4285-a903-64cd82c6226c nodeName:}" failed. No retries permitted until 2025-10-09 13:43:20.2989092 +0000 UTC m=+1076.072700239 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/98aca1bd-63ee-4285-a903-64cd82c6226c-cert") pod "openstack-baremetal-operator-controller-manager-5458df59d8mmjs7" (UID: "98aca1bd-63ee-4285-a903-64cd82c6226c") : secret "openstack-baremetal-operator-webhook-server-cert" not found Oct 09 13:43:19 crc kubenswrapper[4762]: I1009 13:43:19.384906 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/test-operator-controller-manager-ffcdd6c94-xgnqp" Oct 09 13:43:19 crc kubenswrapper[4762]: I1009 13:43:19.405742 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dm2p8\" (UniqueName: \"kubernetes.io/projected/18635a37-db18-44f8-94a2-1245456d943a-kube-api-access-dm2p8\") pod \"rabbitmq-cluster-operator-manager-5f97d8c699-l647r\" (UID: \"18635a37-db18-44f8-94a2-1245456d943a\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-l647r" Oct 09 13:43:19 crc kubenswrapper[4762]: I1009 13:43:19.406103 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kmkkg\" (UniqueName: \"kubernetes.io/projected/2acaeb4c-8968-4dc3-9d61-0ffe8389067d-kube-api-access-kmkkg\") pod \"watcher-operator-controller-manager-646675d848-rx4k9\" (UID: \"2acaeb4c-8968-4dc3-9d61-0ffe8389067d\") " pod="openstack-operators/watcher-operator-controller-manager-646675d848-rx4k9" Oct 09 13:43:19 crc kubenswrapper[4762]: I1009 13:43:19.406137 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/cdc9b29d-6c7e-4e07-82a7-3b2fe39d45ec-cert\") pod \"openstack-operator-controller-manager-694c8c648f-5xrql\" (UID: \"cdc9b29d-6c7e-4e07-82a7-3b2fe39d45ec\") " pod="openstack-operators/openstack-operator-controller-manager-694c8c648f-5xrql" Oct 09 13:43:19 crc kubenswrapper[4762]: I1009 13:43:19.406181 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jjzsk\" (UniqueName: \"kubernetes.io/projected/cdc9b29d-6c7e-4e07-82a7-3b2fe39d45ec-kube-api-access-jjzsk\") pod \"openstack-operator-controller-manager-694c8c648f-5xrql\" (UID: \"cdc9b29d-6c7e-4e07-82a7-3b2fe39d45ec\") " pod="openstack-operators/openstack-operator-controller-manager-694c8c648f-5xrql" Oct 09 13:43:19 crc kubenswrapper[4762]: I1009 13:43:19.426095 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/cdc9b29d-6c7e-4e07-82a7-3b2fe39d45ec-cert\") pod \"openstack-operator-controller-manager-694c8c648f-5xrql\" (UID: \"cdc9b29d-6c7e-4e07-82a7-3b2fe39d45ec\") " pod="openstack-operators/openstack-operator-controller-manager-694c8c648f-5xrql" Oct 09 13:43:19 crc kubenswrapper[4762]: I1009 13:43:19.436492 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dm2p8\" (UniqueName: \"kubernetes.io/projected/18635a37-db18-44f8-94a2-1245456d943a-kube-api-access-dm2p8\") pod \"rabbitmq-cluster-operator-manager-5f97d8c699-l647r\" (UID: \"18635a37-db18-44f8-94a2-1245456d943a\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-l647r" Oct 09 13:43:19 crc kubenswrapper[4762]: I1009 13:43:19.438178 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jjzsk\" (UniqueName: \"kubernetes.io/projected/cdc9b29d-6c7e-4e07-82a7-3b2fe39d45ec-kube-api-access-jjzsk\") pod \"openstack-operator-controller-manager-694c8c648f-5xrql\" (UID: \"cdc9b29d-6c7e-4e07-82a7-3b2fe39d45ec\") " pod="openstack-operators/openstack-operator-controller-manager-694c8c648f-5xrql" Oct 09 13:43:19 crc kubenswrapper[4762]: I1009 13:43:19.439688 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kmkkg\" (UniqueName: \"kubernetes.io/projected/2acaeb4c-8968-4dc3-9d61-0ffe8389067d-kube-api-access-kmkkg\") pod 
\"watcher-operator-controller-manager-646675d848-rx4k9\" (UID: \"2acaeb4c-8968-4dc3-9d61-0ffe8389067d\") " pod="openstack-operators/watcher-operator-controller-manager-646675d848-rx4k9" Oct 09 13:43:19 crc kubenswrapper[4762]: I1009 13:43:19.553259 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-l647r" Oct 09 13:43:19 crc kubenswrapper[4762]: I1009 13:43:19.572317 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-694c8c648f-5xrql" Oct 09 13:43:19 crc kubenswrapper[4762]: I1009 13:43:19.713959 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-646675d848-rx4k9" Oct 09 13:43:19 crc kubenswrapper[4762]: I1009 13:43:19.735383 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-7bb46cd7d-z4xhb"] Oct 09 13:43:19 crc kubenswrapper[4762]: I1009 13:43:19.746189 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-64f84fcdbb-8qmk9"] Oct 09 13:43:19 crc kubenswrapper[4762]: I1009 13:43:19.863784 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-687df44cdb-sn56k"] Oct 09 13:43:19 crc kubenswrapper[4762]: I1009 13:43:19.888515 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-6d74794d9b-4n6tj"] Oct 09 13:43:19 crc kubenswrapper[4762]: W1009 13:43:19.891310 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1b9fae99_ccfb_4f2e_9225_7eb67624ee5a.slice/crio-892a5c73ba70bcc9be595ffbc6e5997bc7aa283894f2ba9094ff28f4239c62c4 WatchSource:0}: Error finding container 892a5c73ba70bcc9be595ffbc6e5997bc7aa283894f2ba9094ff28f4239c62c4: Status 404 returned error can't find the container with id 892a5c73ba70bcc9be595ffbc6e5997bc7aa283894f2ba9094ff28f4239c62c4 Oct 09 13:43:19 crc kubenswrapper[4762]: I1009 13:43:19.898571 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-6d9967f8dd-r6dpz"] Oct 09 13:43:19 crc kubenswrapper[4762]: W1009 13:43:19.907547 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda226d4b7_ceef_4cfd_aeb8_727fb0c8786d.slice/crio-cd21a5247e8655cdbdbc4f3e3fc6077be131eb299562210ce8bbe552d004f4f7 WatchSource:0}: Error finding container cd21a5247e8655cdbdbc4f3e3fc6077be131eb299562210ce8bbe552d004f4f7: Status 404 returned error can't find the container with id cd21a5247e8655cdbdbc4f3e3fc6077be131eb299562210ce8bbe552d004f4f7 Oct 09 13:43:20 crc kubenswrapper[4762]: I1009 13:43:20.035443 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-74cb5cbc49-7g4mb"] Oct 09 13:43:20 crc kubenswrapper[4762]: I1009 13:43:20.039724 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-6d7c7ddf95-t8zhf"] Oct 09 13:43:20 crc kubenswrapper[4762]: I1009 13:43:20.044061 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-6d9967f8dd-r6dpz" 
event={"ID":"a226d4b7-ceef-4cfd-aeb8-727fb0c8786d","Type":"ContainerStarted","Data":"cd21a5247e8655cdbdbc4f3e3fc6077be131eb299562210ce8bbe552d004f4f7"} Oct 09 13:43:20 crc kubenswrapper[4762]: I1009 13:43:20.045903 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-64f84fcdbb-8qmk9" event={"ID":"f19642d0-b3f1-4de9-811a-8bd523f204c1","Type":"ContainerStarted","Data":"1036c4e1e073226b1382457a8286c269030c3abfc7d74f17603b526295f4ce20"} Oct 09 13:43:20 crc kubenswrapper[4762]: I1009 13:43:20.046593 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-869cc7797f-pj45b"] Oct 09 13:43:20 crc kubenswrapper[4762]: I1009 13:43:20.048230 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-59cdc64769-z8s2p" event={"ID":"b22d7fd7-7386-4c10-9133-7703f8f2e0b4","Type":"ContainerStarted","Data":"d76a4d4732e3583ed8cce41cf3502e8ca054a58ee4b5642ca4f45647fa3e22e0"} Oct 09 13:43:20 crc kubenswrapper[4762]: W1009 13:43:20.050252 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod812a090b_267a_4899_a41c_e51592e6ca5b.slice/crio-3b35053e25966cb5bb80c6b609de9c8ee1ec5b639454dad7b4c7a492da0d9d7c WatchSource:0}: Error finding container 3b35053e25966cb5bb80c6b609de9c8ee1ec5b639454dad7b4c7a492da0d9d7c: Status 404 returned error can't find the container with id 3b35053e25966cb5bb80c6b609de9c8ee1ec5b639454dad7b4c7a492da0d9d7c Oct 09 13:43:20 crc kubenswrapper[4762]: I1009 13:43:20.060453 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-687df44cdb-sn56k" event={"ID":"0302ab2b-3fbe-4d08-8364-872d1c1be2b7","Type":"ContainerStarted","Data":"767fa1d1e999bd96af9d25d481dcfff87bf6265719060af1946bf3fbab9b6690"} Oct 09 13:43:20 crc kubenswrapper[4762]: I1009 13:43:20.060768 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-59578bc799-tdnrr"] Oct 09 13:43:20 crc kubenswrapper[4762]: W1009 13:43:20.065552 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8b396a39_6575_426e_b333_da637fbe5616.slice/crio-e9218f246bb8ee241dc6ef2d5ca829213cf8f35ae32fbd518797fb87ea275f7f WatchSource:0}: Error finding container e9218f246bb8ee241dc6ef2d5ca829213cf8f35ae32fbd518797fb87ea275f7f: Status 404 returned error can't find the container with id e9218f246bb8ee241dc6ef2d5ca829213cf8f35ae32fbd518797fb87ea275f7f Oct 09 13:43:20 crc kubenswrapper[4762]: I1009 13:43:20.074505 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-7bb46cd7d-z4xhb" event={"ID":"6484b20c-9ee2-4134-bbc6-5c57c175f1db","Type":"ContainerStarted","Data":"dff22922b22a2e499cd50c55805444dee5c3b5dcbab8076d0ad7453de422b8e9"} Oct 09 13:43:20 crc kubenswrapper[4762]: W1009 13:43:20.083188 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod117e3f88_b1fd_4738_bd66_8c8e0e25a488.slice/crio-a754d9cc40ac80d75e0d693942cabdee9d6a642eaae130e7f315f67f03dec817 WatchSource:0}: Error finding container a754d9cc40ac80d75e0d693942cabdee9d6a642eaae130e7f315f67f03dec817: Status 404 returned error can't find the container with id 
a754d9cc40ac80d75e0d693942cabdee9d6a642eaae130e7f315f67f03dec817 Oct 09 13:43:20 crc kubenswrapper[4762]: I1009 13:43:20.084822 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-6d74794d9b-4n6tj" event={"ID":"1b9fae99-ccfb-4f2e-9225-7eb67624ee5a","Type":"ContainerStarted","Data":"892a5c73ba70bcc9be595ffbc6e5997bc7aa283894f2ba9094ff28f4239c62c4"} Oct 09 13:43:20 crc kubenswrapper[4762]: I1009 13:43:20.096323 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-797d478b46-b2j7z"] Oct 09 13:43:20 crc kubenswrapper[4762]: I1009 13:43:20.109283 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-ddb98f99b-pnlww"] Oct 09 13:43:20 crc kubenswrapper[4762]: I1009 13:43:20.264149 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-5f4d5dfdc6-lwj44"] Oct 09 13:43:20 crc kubenswrapper[4762]: I1009 13:43:20.281563 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-57bb74c7bf-qz69s"] Oct 09 13:43:20 crc kubenswrapper[4762]: I1009 13:43:20.303752 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-5777b4f897-6s6tx"] Oct 09 13:43:20 crc kubenswrapper[4762]: W1009 13:43:20.310297 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcf87e411_c213_4287_bd23_381ea5be1a1b.slice/crio-67ab7129539fb23c3228bcacf069d0dd696477b9a101f9c2d5b6f55daa447c8a WatchSource:0}: Error finding container 67ab7129539fb23c3228bcacf069d0dd696477b9a101f9c2d5b6f55daa447c8a: Status 404 returned error can't find the container with id 67ab7129539fb23c3228bcacf069d0dd696477b9a101f9c2d5b6f55daa447c8a Oct 09 13:43:20 crc kubenswrapper[4762]: W1009 13:43:20.314502 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod32fef3bc_b4cb_460e_8d36_0ba75c16d394.slice/crio-4050671a651651425525baad0e9a6b32666e07707b76d52cf47441373ffe1e52 WatchSource:0}: Error finding container 4050671a651651425525baad0e9a6b32666e07707b76d52cf47441373ffe1e52: Status 404 returned error can't find the container with id 4050671a651651425525baad0e9a6b32666e07707b76d52cf47441373ffe1e52 Oct 09 13:43:20 crc kubenswrapper[4762]: E1009 13:43:20.315055 4762 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/placement-operator@sha256:d33c1f507e1f5b9a4bf226ad98917e92101ac66b36e19d35cbe04ae7014f6bff,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-6mpfq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-operator-controller-manager-664664cb68-mk2fh_openstack-operators(cf87e411-c213-4287-bd23-381ea5be1a1b): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Oct 09 13:43:20 crc kubenswrapper[4762]: E1009 13:43:20.318475 4762 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/nova-operator@sha256:b2e9acf568a48c28cf2aed6012e432eeeb7d5f0eb11878fc91b62bc34cba10cd,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-dxnz5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod nova-operator-controller-manager-57bb74c7bf-qz69s_openstack-operators(32fef3bc-b4cb-460e-8d36-0ba75c16d394): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Oct 09 13:43:20 crc kubenswrapper[4762]: E1009 13:43:20.320595 4762 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/mariadb-operator@sha256:47278ed28e02df00892f941763aa0d69547327318e8a983e07f4577acd288167,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-2mtp6,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod mariadb-operator-controller-manager-5777b4f897-6s6tx_openstack-operators(4141c889-724a-4a6f-886c-d1b6fa852d0f): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Oct 09 13:43:20 crc kubenswrapper[4762]: I1009 13:43:20.326583 4762 kubelet.go:2428] 
"SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-664664cb68-mk2fh"] Oct 09 13:43:20 crc kubenswrapper[4762]: I1009 13:43:20.334103 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/98aca1bd-63ee-4285-a903-64cd82c6226c-cert\") pod \"openstack-baremetal-operator-controller-manager-5458df59d8mmjs7\" (UID: \"98aca1bd-63ee-4285-a903-64cd82c6226c\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5458df59d8mmjs7" Oct 09 13:43:20 crc kubenswrapper[4762]: I1009 13:43:20.358940 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/98aca1bd-63ee-4285-a903-64cd82c6226c-cert\") pod \"openstack-baremetal-operator-controller-manager-5458df59d8mmjs7\" (UID: \"98aca1bd-63ee-4285-a903-64cd82c6226c\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5458df59d8mmjs7" Oct 09 13:43:20 crc kubenswrapper[4762]: I1009 13:43:20.365495 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-5458df59d8mmjs7" Oct 09 13:43:20 crc kubenswrapper[4762]: I1009 13:43:20.369748 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-l647r"] Oct 09 13:43:20 crc kubenswrapper[4762]: I1009 13:43:20.418406 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-585fc5b659-qv9zg"] Oct 09 13:43:20 crc kubenswrapper[4762]: E1009 13:43:20.424470 4762 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/test-operator@sha256:7e584b1c430441c8b6591dadeff32e065de8a185ad37ef90d2e08d37e59aab4a,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-798v5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-ffcdd6c94-xgnqp_openstack-operators(6ea12cd7-1cd9-4cbd-a881-0bbb334b23e2): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Oct 09 13:43:20 crc kubenswrapper[4762]: E1009 13:43:20.440483 4762 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/infra-operator@sha256:5cfb2ae1092445950b39dd59caa9a8c9367f42fb8353a8c3848d3bc729f24492,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{600 -3} {} 600m DecimalSI},memory: {{2147483648 0} {} 2Gi BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{536870912 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:cert,ReadOnly:true,MountPath:/tmp/k8s-webhook-server/serving-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-z2ddk,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod 
infra-operator-controller-manager-585fc5b659-qv9zg_openstack-operators(a2c39eb4-d8e3-4bd5-9e66-7f7a5bce2eb9): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Oct 09 13:43:20 crc kubenswrapper[4762]: I1009 13:43:20.443123 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-ffcdd6c94-xgnqp"] Oct 09 13:43:20 crc kubenswrapper[4762]: E1009 13:43:20.443792 4762 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/watcher-operator@sha256:98a5233f0596591acdf2c6a5838b08be108787cdb6ad1995b2b7886bac0fe6ca,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-kmkkg,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-646675d848-rx4k9_openstack-operators(2acaeb4c-8968-4dc3-9d61-0ffe8389067d): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Oct 09 13:43:20 crc kubenswrapper[4762]: W1009 13:43:20.445849 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcdc9b29d_6c7e_4e07_82a7_3b2fe39d45ec.slice/crio-c04a03708302b3382dc7338a26a15f8781afaedae0a082731a5025b09213f080 WatchSource:0}: Error finding container c04a03708302b3382dc7338a26a15f8781afaedae0a082731a5025b09213f080: Status 404 returned error can't find the container with id c04a03708302b3382dc7338a26a15f8781afaedae0a082731a5025b09213f080 Oct 09 13:43:20 crc kubenswrapper[4762]: I1009 13:43:20.455767 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack-operators/watcher-operator-controller-manager-646675d848-rx4k9"] Oct 09 13:43:20 crc kubenswrapper[4762]: I1009 13:43:20.462962 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-85fd6d6f45-h6cz8"] Oct 09 13:43:20 crc kubenswrapper[4762]: I1009 13:43:20.476235 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-694c8c648f-5xrql"] Oct 09 13:43:20 crc kubenswrapper[4762]: W1009 13:43:20.499906 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2bc7bc27_a390_4830_88cc_2a94e1326a09.slice/crio-5172d3ee35358e020f4cccc7f46e83b5877f9973e2d5478f3c5e7dca89425b1a WatchSource:0}: Error finding container 5172d3ee35358e020f4cccc7f46e83b5877f9973e2d5478f3c5e7dca89425b1a: Status 404 returned error can't find the container with id 5172d3ee35358e020f4cccc7f46e83b5877f9973e2d5478f3c5e7dca89425b1a Oct 09 13:43:20 crc kubenswrapper[4762]: E1009 13:43:20.506374 4762 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:38.102.83.199:5001/openstack-k8s-operators/telemetry-operator:ee3c4a2dbfdf3a3f2892f73a44dba8a3c2e71a26,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-hv6zs,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-85fd6d6f45-h6cz8_openstack-operators(2bc7bc27-a390-4830-88cc-2a94e1326a09): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Oct 09 13:43:20 crc kubenswrapper[4762]: E1009 
13:43:20.569739 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/nova-operator-controller-manager-57bb74c7bf-qz69s" podUID="32fef3bc-b4cb-460e-8d36-0ba75c16d394" Oct 09 13:43:20 crc kubenswrapper[4762]: E1009 13:43:20.571036 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/placement-operator-controller-manager-664664cb68-mk2fh" podUID="cf87e411-c213-4287-bd23-381ea5be1a1b" Oct 09 13:43:20 crc kubenswrapper[4762]: E1009 13:43:20.713105 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/infra-operator-controller-manager-585fc5b659-qv9zg" podUID="a2c39eb4-d8e3-4bd5-9e66-7f7a5bce2eb9" Oct 09 13:43:20 crc kubenswrapper[4762]: E1009 13:43:20.723289 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/mariadb-operator-controller-manager-5777b4f897-6s6tx" podUID="4141c889-724a-4a6f-886c-d1b6fa852d0f" Oct 09 13:43:20 crc kubenswrapper[4762]: E1009 13:43:20.800753 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/test-operator-controller-manager-ffcdd6c94-xgnqp" podUID="6ea12cd7-1cd9-4cbd-a881-0bbb334b23e2" Oct 09 13:43:20 crc kubenswrapper[4762]: E1009 13:43:20.875716 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/watcher-operator-controller-manager-646675d848-rx4k9" podUID="2acaeb4c-8968-4dc3-9d61-0ffe8389067d" Oct 09 13:43:20 crc kubenswrapper[4762]: E1009 13:43:20.915071 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/telemetry-operator-controller-manager-85fd6d6f45-h6cz8" podUID="2bc7bc27-a390-4830-88cc-2a94e1326a09" Oct 09 13:43:21 crc kubenswrapper[4762]: I1009 13:43:21.000728 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-5458df59d8mmjs7"] Oct 09 13:43:21 crc kubenswrapper[4762]: I1009 13:43:21.108168 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-74cb5cbc49-7g4mb" event={"ID":"d94fa52d-aee2-46eb-a4bb-1dd5ee5fa19c","Type":"ContainerStarted","Data":"33c7f2645795915fefcb2336e91ffbf9d839769c7bb0353d7777b3d8587890a2"} Oct 09 13:43:21 crc kubenswrapper[4762]: I1009 13:43:21.120832 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-585fc5b659-qv9zg" event={"ID":"a2c39eb4-d8e3-4bd5-9e66-7f7a5bce2eb9","Type":"ContainerStarted","Data":"2af0f126f2f0cb1ed1d3f4157ed3356362163569a54c150f941cd267e2a349a6"} Oct 09 13:43:21 crc kubenswrapper[4762]: I1009 13:43:21.120885 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-585fc5b659-qv9zg" event={"ID":"a2c39eb4-d8e3-4bd5-9e66-7f7a5bce2eb9","Type":"ContainerStarted","Data":"c9ce56d9259c77a7539dc98c7341fd99388084f180c88525ed4317248c76fe3b"} Oct 
09 13:43:21 crc kubenswrapper[4762]: E1009 13:43:21.129055 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/infra-operator@sha256:5cfb2ae1092445950b39dd59caa9a8c9367f42fb8353a8c3848d3bc729f24492\\\"\"" pod="openstack-operators/infra-operator-controller-manager-585fc5b659-qv9zg" podUID="a2c39eb4-d8e3-4bd5-9e66-7f7a5bce2eb9" Oct 09 13:43:21 crc kubenswrapper[4762]: I1009 13:43:21.137204 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-5458df59d8mmjs7" event={"ID":"98aca1bd-63ee-4285-a903-64cd82c6226c","Type":"ContainerStarted","Data":"8a384881c29d114e9201249abe28db8837cc3e5d0919296f4e72e63bb8e9b7f8"} Oct 09 13:43:21 crc kubenswrapper[4762]: I1009 13:43:21.159208 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-85fd6d6f45-h6cz8" event={"ID":"2bc7bc27-a390-4830-88cc-2a94e1326a09","Type":"ContainerStarted","Data":"f1cbec4bcff5c599984dcdedc37202d545466cf93848b6122b6e48ef24bae5f5"} Oct 09 13:43:21 crc kubenswrapper[4762]: I1009 13:43:21.159253 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-85fd6d6f45-h6cz8" event={"ID":"2bc7bc27-a390-4830-88cc-2a94e1326a09","Type":"ContainerStarted","Data":"5172d3ee35358e020f4cccc7f46e83b5877f9973e2d5478f3c5e7dca89425b1a"} Oct 09 13:43:21 crc kubenswrapper[4762]: E1009 13:43:21.161065 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.199:5001/openstack-k8s-operators/telemetry-operator:ee3c4a2dbfdf3a3f2892f73a44dba8a3c2e71a26\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-85fd6d6f45-h6cz8" podUID="2bc7bc27-a390-4830-88cc-2a94e1326a09" Oct 09 13:43:21 crc kubenswrapper[4762]: I1009 13:43:21.172503 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-646675d848-rx4k9" event={"ID":"2acaeb4c-8968-4dc3-9d61-0ffe8389067d","Type":"ContainerStarted","Data":"e0ab7c691e5ce8aa19428382688b25fd602e475ab347e9dcaa219129e9752fa0"} Oct 09 13:43:21 crc kubenswrapper[4762]: I1009 13:43:21.172553 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-646675d848-rx4k9" event={"ID":"2acaeb4c-8968-4dc3-9d61-0ffe8389067d","Type":"ContainerStarted","Data":"457b8d60ac50b205db8cedba9af63ad7ecfb7ef4605275e803daa734a7afd8f6"} Oct 09 13:43:21 crc kubenswrapper[4762]: E1009 13:43:21.174151 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:98a5233f0596591acdf2c6a5838b08be108787cdb6ad1995b2b7886bac0fe6ca\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-646675d848-rx4k9" podUID="2acaeb4c-8968-4dc3-9d61-0ffe8389067d" Oct 09 13:43:21 crc kubenswrapper[4762]: I1009 13:43:21.175119 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-6d7c7ddf95-t8zhf" event={"ID":"812a090b-267a-4899-a41c-e51592e6ca5b","Type":"ContainerStarted","Data":"3b35053e25966cb5bb80c6b609de9c8ee1ec5b639454dad7b4c7a492da0d9d7c"} Oct 09 13:43:21 crc kubenswrapper[4762]: I1009 
13:43:21.184970 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-57bb74c7bf-qz69s" event={"ID":"32fef3bc-b4cb-460e-8d36-0ba75c16d394","Type":"ContainerStarted","Data":"2479aba4b774ae23f714728376fe21827a5320e09c24dfab6af8551772ceb70d"} Oct 09 13:43:21 crc kubenswrapper[4762]: I1009 13:43:21.185010 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-57bb74c7bf-qz69s" event={"ID":"32fef3bc-b4cb-460e-8d36-0ba75c16d394","Type":"ContainerStarted","Data":"4050671a651651425525baad0e9a6b32666e07707b76d52cf47441373ffe1e52"} Oct 09 13:43:21 crc kubenswrapper[4762]: E1009 13:43:21.186705 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/nova-operator@sha256:b2e9acf568a48c28cf2aed6012e432eeeb7d5f0eb11878fc91b62bc34cba10cd\\\"\"" pod="openstack-operators/nova-operator-controller-manager-57bb74c7bf-qz69s" podUID="32fef3bc-b4cb-460e-8d36-0ba75c16d394" Oct 09 13:43:21 crc kubenswrapper[4762]: I1009 13:43:21.197138 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-869cc7797f-pj45b" event={"ID":"8b396a39-6575-426e-b333-da637fbe5616","Type":"ContainerStarted","Data":"e9218f246bb8ee241dc6ef2d5ca829213cf8f35ae32fbd518797fb87ea275f7f"} Oct 09 13:43:21 crc kubenswrapper[4762]: I1009 13:43:21.198521 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-59578bc799-tdnrr" event={"ID":"0024a375-268b-4c89-ad32-2b3876e271af","Type":"ContainerStarted","Data":"2831dac3bd7edf6ae284bf689e8d7bab9947c1ed4ba9043b73ccb44c9601360a"} Oct 09 13:43:21 crc kubenswrapper[4762]: I1009 13:43:21.207705 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-694c8c648f-5xrql" event={"ID":"cdc9b29d-6c7e-4e07-82a7-3b2fe39d45ec","Type":"ContainerStarted","Data":"bef05ac72b931562a2c863ac13e6c5b306734e940005ec757d0c0de9562a68ff"} Oct 09 13:43:21 crc kubenswrapper[4762]: I1009 13:43:21.207752 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-694c8c648f-5xrql" event={"ID":"cdc9b29d-6c7e-4e07-82a7-3b2fe39d45ec","Type":"ContainerStarted","Data":"c6996600fb3de780957dc2d4795e9d22765cb54756cf7a093e030eb7bad5a70c"} Oct 09 13:43:21 crc kubenswrapper[4762]: I1009 13:43:21.207761 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-694c8c648f-5xrql" event={"ID":"cdc9b29d-6c7e-4e07-82a7-3b2fe39d45ec","Type":"ContainerStarted","Data":"c04a03708302b3382dc7338a26a15f8781afaedae0a082731a5025b09213f080"} Oct 09 13:43:21 crc kubenswrapper[4762]: I1009 13:43:21.210947 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-694c8c648f-5xrql" Oct 09 13:43:21 crc kubenswrapper[4762]: I1009 13:43:21.222806 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-664664cb68-mk2fh" event={"ID":"cf87e411-c213-4287-bd23-381ea5be1a1b","Type":"ContainerStarted","Data":"6f87cc4594c6beec6bbbc8a1dea0229556195d5a7a9c8c0b52824fe43b313110"} Oct 09 13:43:21 crc kubenswrapper[4762]: I1009 13:43:21.222851 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/placement-operator-controller-manager-664664cb68-mk2fh" event={"ID":"cf87e411-c213-4287-bd23-381ea5be1a1b","Type":"ContainerStarted","Data":"67ab7129539fb23c3228bcacf069d0dd696477b9a101f9c2d5b6f55daa447c8a"} Oct 09 13:43:21 crc kubenswrapper[4762]: E1009 13:43:21.230423 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/placement-operator@sha256:d33c1f507e1f5b9a4bf226ad98917e92101ac66b36e19d35cbe04ae7014f6bff\\\"\"" pod="openstack-operators/placement-operator-controller-manager-664664cb68-mk2fh" podUID="cf87e411-c213-4287-bd23-381ea5be1a1b" Oct 09 13:43:21 crc kubenswrapper[4762]: I1009 13:43:21.230741 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-797d478b46-b2j7z" event={"ID":"3a72da9f-8fa2-4aa5-aab7-2175f034ede8","Type":"ContainerStarted","Data":"fd51a2e8b3408dad8217d72c8528f44f2f34f131a87593850dccc4e5fdda0cea"} Oct 09 13:43:21 crc kubenswrapper[4762]: I1009 13:43:21.233201 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-ddb98f99b-pnlww" event={"ID":"117e3f88-b1fd-4738-bd66-8c8e0e25a488","Type":"ContainerStarted","Data":"a754d9cc40ac80d75e0d693942cabdee9d6a642eaae130e7f315f67f03dec817"} Oct 09 13:43:21 crc kubenswrapper[4762]: I1009 13:43:21.237569 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5f4d5dfdc6-lwj44" event={"ID":"3660bde9-a2d9-43ee-8052-823fdc1f5db9","Type":"ContainerStarted","Data":"c193a78bbf59b397220f4ef7f7c1278df4cb99c441720c1ea8a861d43ffc2b27"} Oct 09 13:43:21 crc kubenswrapper[4762]: I1009 13:43:21.260011 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-ffcdd6c94-xgnqp" event={"ID":"6ea12cd7-1cd9-4cbd-a881-0bbb334b23e2","Type":"ContainerStarted","Data":"1b3d1175d059d0868cbb283cc39966af5f97f77c980ed2068ff1074945dade74"} Oct 09 13:43:21 crc kubenswrapper[4762]: I1009 13:43:21.260066 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-ffcdd6c94-xgnqp" event={"ID":"6ea12cd7-1cd9-4cbd-a881-0bbb334b23e2","Type":"ContainerStarted","Data":"b2ecf32c3c37eadc79237443665609426a7c89a88b9168dc84bdfbbb0725844e"} Oct 09 13:43:21 crc kubenswrapper[4762]: I1009 13:43:21.261572 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-5777b4f897-6s6tx" event={"ID":"4141c889-724a-4a6f-886c-d1b6fa852d0f","Type":"ContainerStarted","Data":"2d33568e156cacfeadfddbd8a748d3b61f310defa8c6ff16e36795530e80f139"} Oct 09 13:43:21 crc kubenswrapper[4762]: I1009 13:43:21.261618 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-5777b4f897-6s6tx" event={"ID":"4141c889-724a-4a6f-886c-d1b6fa852d0f","Type":"ContainerStarted","Data":"ec713b1be6853f0d0d37dc19d54567e942d72ced35131df183a2168a796ba487"} Oct 09 13:43:21 crc kubenswrapper[4762]: E1009 13:43:21.272702 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:7e584b1c430441c8b6591dadeff32e065de8a185ad37ef90d2e08d37e59aab4a\\\"\"" 
pod="openstack-operators/test-operator-controller-manager-ffcdd6c94-xgnqp" podUID="6ea12cd7-1cd9-4cbd-a881-0bbb334b23e2" Oct 09 13:43:21 crc kubenswrapper[4762]: E1009 13:43:21.272828 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/mariadb-operator@sha256:47278ed28e02df00892f941763aa0d69547327318e8a983e07f4577acd288167\\\"\"" pod="openstack-operators/mariadb-operator-controller-manager-5777b4f897-6s6tx" podUID="4141c889-724a-4a6f-886c-d1b6fa852d0f" Oct 09 13:43:21 crc kubenswrapper[4762]: I1009 13:43:21.294104 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-l647r" event={"ID":"18635a37-db18-44f8-94a2-1245456d943a","Type":"ContainerStarted","Data":"23a25ef2a4878c3fd5d7f16de0aed577130a389ff9cf7dc0762695aa95949e56"} Oct 09 13:43:21 crc kubenswrapper[4762]: I1009 13:43:21.310082 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-manager-694c8c648f-5xrql" podStartSLOduration=3.310057484 podStartE2EDuration="3.310057484s" podCreationTimestamp="2025-10-09 13:43:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:43:21.301908287 +0000 UTC m=+1077.075699326" watchObservedRunningTime="2025-10-09 13:43:21.310057484 +0000 UTC m=+1077.083848523" Oct 09 13:43:22 crc kubenswrapper[4762]: E1009 13:43:22.306906 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/mariadb-operator@sha256:47278ed28e02df00892f941763aa0d69547327318e8a983e07f4577acd288167\\\"\"" pod="openstack-operators/mariadb-operator-controller-manager-5777b4f897-6s6tx" podUID="4141c889-724a-4a6f-886c-d1b6fa852d0f" Oct 09 13:43:22 crc kubenswrapper[4762]: E1009 13:43:22.307598 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/infra-operator@sha256:5cfb2ae1092445950b39dd59caa9a8c9367f42fb8353a8c3848d3bc729f24492\\\"\"" pod="openstack-operators/infra-operator-controller-manager-585fc5b659-qv9zg" podUID="a2c39eb4-d8e3-4bd5-9e66-7f7a5bce2eb9" Oct 09 13:43:22 crc kubenswrapper[4762]: E1009 13:43:22.307677 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:98a5233f0596591acdf2c6a5838b08be108787cdb6ad1995b2b7886bac0fe6ca\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-646675d848-rx4k9" podUID="2acaeb4c-8968-4dc3-9d61-0ffe8389067d" Oct 09 13:43:22 crc kubenswrapper[4762]: E1009 13:43:22.307739 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.199:5001/openstack-k8s-operators/telemetry-operator:ee3c4a2dbfdf3a3f2892f73a44dba8a3c2e71a26\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-85fd6d6f45-h6cz8" podUID="2bc7bc27-a390-4830-88cc-2a94e1326a09" Oct 09 13:43:22 crc kubenswrapper[4762]: E1009 13:43:22.307776 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed 
to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:7e584b1c430441c8b6591dadeff32e065de8a185ad37ef90d2e08d37e59aab4a\\\"\"" pod="openstack-operators/test-operator-controller-manager-ffcdd6c94-xgnqp" podUID="6ea12cd7-1cd9-4cbd-a881-0bbb334b23e2" Oct 09 13:43:22 crc kubenswrapper[4762]: E1009 13:43:22.307909 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/placement-operator@sha256:d33c1f507e1f5b9a4bf226ad98917e92101ac66b36e19d35cbe04ae7014f6bff\\\"\"" pod="openstack-operators/placement-operator-controller-manager-664664cb68-mk2fh" podUID="cf87e411-c213-4287-bd23-381ea5be1a1b" Oct 09 13:43:22 crc kubenswrapper[4762]: E1009 13:43:22.309529 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/nova-operator@sha256:b2e9acf568a48c28cf2aed6012e432eeeb7d5f0eb11878fc91b62bc34cba10cd\\\"\"" pod="openstack-operators/nova-operator-controller-manager-57bb74c7bf-qz69s" podUID="32fef3bc-b4cb-460e-8d36-0ba75c16d394" Oct 09 13:43:29 crc kubenswrapper[4762]: I1009 13:43:29.591099 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-694c8c648f-5xrql" Oct 09 13:43:34 crc kubenswrapper[4762]: E1009 13:43:34.925591 4762 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/manila-operator@sha256:582f7b1e411961b69f2e3c6b346aa25759b89f7720ed3fade1d363bf5d2dffc8" Oct 09 13:43:34 crc kubenswrapper[4762]: E1009 13:43:34.926151 4762 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/manila-operator@sha256:582f7b1e411961b69f2e3c6b346aa25759b89f7720ed3fade1d363bf5d2dffc8,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-4tf49,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod manila-operator-controller-manager-59578bc799-tdnrr_openstack-operators(0024a375-268b-4c89-ad32-2b3876e271af): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 09 13:43:35 crc kubenswrapper[4762]: E1009 13:43:35.776221 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/manila-operator-controller-manager-59578bc799-tdnrr" podUID="0024a375-268b-4c89-ad32-2b3876e271af" Oct 09 13:43:36 crc kubenswrapper[4762]: I1009 13:43:36.524304 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-59578bc799-tdnrr" event={"ID":"0024a375-268b-4c89-ad32-2b3876e271af","Type":"ContainerStarted","Data":"bb0c925b2ad0ba284a80a478c7d1ea57562f37934a8b0514e4672b889c2969cc"} Oct 09 13:43:36 crc kubenswrapper[4762]: E1009 13:43:36.526270 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/manila-operator@sha256:582f7b1e411961b69f2e3c6b346aa25759b89f7720ed3fade1d363bf5d2dffc8\\\"\"" pod="openstack-operators/manila-operator-controller-manager-59578bc799-tdnrr" podUID="0024a375-268b-4c89-ad32-2b3876e271af" Oct 09 13:43:37 crc kubenswrapper[4762]: I1009 13:43:37.533598 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5f4d5dfdc6-lwj44" event={"ID":"3660bde9-a2d9-43ee-8052-823fdc1f5db9","Type":"ContainerStarted","Data":"40d66c2baec81a7565263ac408fa0d6b74c5b461048946b59cbcd97dfbf0b71d"} Oct 09 13:43:37 crc kubenswrapper[4762]: I1009 13:43:37.540480 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-6d74794d9b-4n6tj" event={"ID":"1b9fae99-ccfb-4f2e-9225-7eb67624ee5a","Type":"ContainerStarted","Data":"c6050b9cc1f3094edbb390a82c7523a2cdc69a78fb6bedd24e5dbcbf2f38bff4"} Oct 09 13:43:37 crc kubenswrapper[4762]: I1009 13:43:37.544618 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-l647r" event={"ID":"18635a37-db18-44f8-94a2-1245456d943a","Type":"ContainerStarted","Data":"f3835e775ea15053cd1639bb1588c82761beabb2391d739a6ce9149d4f87f138"} Oct 09 13:43:37 crc kubenswrapper[4762]: I1009 13:43:37.548173 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-797d478b46-b2j7z" 
event={"ID":"3a72da9f-8fa2-4aa5-aab7-2175f034ede8","Type":"ContainerStarted","Data":"da1834adb35653e5945f48f633080d8c0a6993d78066b9ef5abb2c39134541a5"} Oct 09 13:43:37 crc kubenswrapper[4762]: I1009 13:43:37.549943 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-687df44cdb-sn56k" event={"ID":"0302ab2b-3fbe-4d08-8364-872d1c1be2b7","Type":"ContainerStarted","Data":"3720b499de2180fabb912a4938b2784861fb4759b31ea84e53dd9fd6d3b33540"} Oct 09 13:43:37 crc kubenswrapper[4762]: I1009 13:43:37.551494 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-ddb98f99b-pnlww" event={"ID":"117e3f88-b1fd-4738-bd66-8c8e0e25a488","Type":"ContainerStarted","Data":"561880cddaea6fe8e2a15cc491d1a0385e9780ebb96fa32c2d25a0dc6e68fc5e"} Oct 09 13:43:37 crc kubenswrapper[4762]: I1009 13:43:37.552911 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-869cc7797f-pj45b" event={"ID":"8b396a39-6575-426e-b333-da637fbe5616","Type":"ContainerStarted","Data":"9b064ee71a1782bbae14ea54d9e0b9eed5d92ff50811a8a543ac2b05a57b1877"} Oct 09 13:43:37 crc kubenswrapper[4762]: I1009 13:43:37.554228 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-6d7c7ddf95-t8zhf" event={"ID":"812a090b-267a-4899-a41c-e51592e6ca5b","Type":"ContainerStarted","Data":"5eed2a36bc8fc9af709797d4b8ba37e2522a041bddbed5786d0c281ac33470a3"} Oct 09 13:43:37 crc kubenswrapper[4762]: I1009 13:43:37.555359 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-6d9967f8dd-r6dpz" event={"ID":"a226d4b7-ceef-4cfd-aeb8-727fb0c8786d","Type":"ContainerStarted","Data":"cda69c363c58ad5e5d9d04aaf2d51e1df85970669f36179276fee96d02a9c625"} Oct 09 13:43:37 crc kubenswrapper[4762]: I1009 13:43:37.556427 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-59cdc64769-z8s2p" event={"ID":"b22d7fd7-7386-4c10-9133-7703f8f2e0b4","Type":"ContainerStarted","Data":"bdb8411ca14a2eb9f96b1bc1b56fce2f2d28034f07957aa1e02df32d8cef7d56"} Oct 09 13:43:37 crc kubenswrapper[4762]: I1009 13:43:37.557619 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-64f84fcdbb-8qmk9" event={"ID":"f19642d0-b3f1-4de9-811a-8bd523f204c1","Type":"ContainerStarted","Data":"f9e540bb4ea713890aae89e8ea311cc407303c340397efbcdd428a637e307724"} Oct 09 13:43:37 crc kubenswrapper[4762]: E1009 13:43:37.559046 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/manila-operator@sha256:582f7b1e411961b69f2e3c6b346aa25759b89f7720ed3fade1d363bf5d2dffc8\\\"\"" pod="openstack-operators/manila-operator-controller-manager-59578bc799-tdnrr" podUID="0024a375-268b-4c89-ad32-2b3876e271af" Oct 09 13:43:37 crc kubenswrapper[4762]: I1009 13:43:37.583462 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-l647r" podStartSLOduration=4.138388361 podStartE2EDuration="19.583441595s" podCreationTimestamp="2025-10-09 13:43:18 +0000 UTC" firstStartedPulling="2025-10-09 13:43:20.42432006 +0000 UTC m=+1076.198111099" lastFinishedPulling="2025-10-09 13:43:35.869373294 +0000 UTC m=+1091.643164333" 
observedRunningTime="2025-10-09 13:43:37.568154369 +0000 UTC m=+1093.341945428" watchObservedRunningTime="2025-10-09 13:43:37.583441595 +0000 UTC m=+1093.357232634" Oct 09 13:43:39 crc kubenswrapper[4762]: I1009 13:43:39.577933 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-74cb5cbc49-7g4mb" event={"ID":"d94fa52d-aee2-46eb-a4bb-1dd5ee5fa19c","Type":"ContainerStarted","Data":"159b797bcf2a59dddcb5672b713de79cb433ba7b65c622f58c8aaa9a658869f3"} Oct 09 13:43:39 crc kubenswrapper[4762]: I1009 13:43:39.581146 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-7bb46cd7d-z4xhb" event={"ID":"6484b20c-9ee2-4134-bbc6-5c57c175f1db","Type":"ContainerStarted","Data":"62460d988f9d95a08dba41df215226d8ce591f258ac6ee28ed0f0b9c6dda3953"} Oct 09 13:43:41 crc kubenswrapper[4762]: I1009 13:43:41.595309 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-5458df59d8mmjs7" event={"ID":"98aca1bd-63ee-4285-a903-64cd82c6226c","Type":"ContainerStarted","Data":"534f46803aa1b7213ec689848fd827076319b5b5760520387994c51ba0fcb1fe"} Oct 09 13:43:42 crc kubenswrapper[4762]: I1009 13:43:42.608589 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-59cdc64769-z8s2p" event={"ID":"b22d7fd7-7386-4c10-9133-7703f8f2e0b4","Type":"ContainerStarted","Data":"34de65967698eb8df2e8c5af669f79890787b04642313b96c93133b0d571a56c"} Oct 09 13:43:42 crc kubenswrapper[4762]: I1009 13:43:42.609021 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-59cdc64769-z8s2p" Oct 09 13:43:42 crc kubenswrapper[4762]: I1009 13:43:42.612468 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-59cdc64769-z8s2p" Oct 09 13:43:42 crc kubenswrapper[4762]: I1009 13:43:42.649986 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/cinder-operator-controller-manager-59cdc64769-z8s2p" podStartSLOduration=8.241710711 podStartE2EDuration="24.649963156s" podCreationTimestamp="2025-10-09 13:43:18 +0000 UTC" firstStartedPulling="2025-10-09 13:43:19.219958742 +0000 UTC m=+1074.993749781" lastFinishedPulling="2025-10-09 13:43:35.628211187 +0000 UTC m=+1091.402002226" observedRunningTime="2025-10-09 13:43:42.631521166 +0000 UTC m=+1098.405312205" watchObservedRunningTime="2025-10-09 13:43:42.649963156 +0000 UTC m=+1098.423754215" Oct 09 13:43:44 crc kubenswrapper[4762]: I1009 13:43:44.627889 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-5458df59d8mmjs7" event={"ID":"98aca1bd-63ee-4285-a903-64cd82c6226c","Type":"ContainerStarted","Data":"77953e6a3fa92768c1ea9f2d31d2bd8e74440cd099dfa1fd735556513eec08e1"} Oct 09 13:43:44 crc kubenswrapper[4762]: I1009 13:43:44.630139 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-797d478b46-b2j7z" event={"ID":"3a72da9f-8fa2-4aa5-aab7-2175f034ede8","Type":"ContainerStarted","Data":"0f45f0a0c264f34bfb1904782dba164aa185aa3af64ee94f6eb8f5f3eed82251"} Oct 09 13:43:45 crc kubenswrapper[4762]: I1009 13:43:45.640161 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/octavia-operator-controller-manager-6d7c7ddf95-t8zhf" event={"ID":"812a090b-267a-4899-a41c-e51592e6ca5b","Type":"ContainerStarted","Data":"81f4ac82a41f8786d9ea4b8946f53e7a9719d0092cab591507f773e0d2b5714d"} Oct 09 13:43:45 crc kubenswrapper[4762]: I1009 13:43:45.640423 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-6d7c7ddf95-t8zhf" Oct 09 13:43:45 crc kubenswrapper[4762]: I1009 13:43:45.642945 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-ffcdd6c94-xgnqp" event={"ID":"6ea12cd7-1cd9-4cbd-a881-0bbb334b23e2","Type":"ContainerStarted","Data":"6aac2581358c7ddeb4812f6bb871a434c0464f11a16e2fc019460185996a1e92"} Oct 09 13:43:45 crc kubenswrapper[4762]: I1009 13:43:45.643151 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-ffcdd6c94-xgnqp" Oct 09 13:43:45 crc kubenswrapper[4762]: I1009 13:43:45.643292 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-6d7c7ddf95-t8zhf" Oct 09 13:43:45 crc kubenswrapper[4762]: I1009 13:43:45.645031 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-646675d848-rx4k9" event={"ID":"2acaeb4c-8968-4dc3-9d61-0ffe8389067d","Type":"ContainerStarted","Data":"dac9af117187fb5acf06474b828b3a6e45f95062b7c9132f32f7ebb9f5b2977b"} Oct 09 13:43:45 crc kubenswrapper[4762]: I1009 13:43:45.645265 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-646675d848-rx4k9" Oct 09 13:43:45 crc kubenswrapper[4762]: I1009 13:43:45.646897 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-57bb74c7bf-qz69s" event={"ID":"32fef3bc-b4cb-460e-8d36-0ba75c16d394","Type":"ContainerStarted","Data":"b1bcf608cc758dc12fa724e4b0c14f76ec2a3a409d3f7d18a381f08b916dc971"} Oct 09 13:43:45 crc kubenswrapper[4762]: I1009 13:43:45.647098 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-57bb74c7bf-qz69s" Oct 09 13:43:45 crc kubenswrapper[4762]: I1009 13:43:45.650086 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-6d74794d9b-4n6tj" event={"ID":"1b9fae99-ccfb-4f2e-9225-7eb67624ee5a","Type":"ContainerStarted","Data":"f16b7f2088b0d25561692674b8cb69094dc795f0e5774ca0ce0f3e82a65887b0"} Oct 09 13:43:45 crc kubenswrapper[4762]: I1009 13:43:45.650292 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-6d74794d9b-4n6tj" Oct 09 13:43:45 crc kubenswrapper[4762]: I1009 13:43:45.655889 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-6d74794d9b-4n6tj" Oct 09 13:43:45 crc kubenswrapper[4762]: I1009 13:43:45.656742 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-6d9967f8dd-r6dpz" event={"ID":"a226d4b7-ceef-4cfd-aeb8-727fb0c8786d","Type":"ContainerStarted","Data":"1516d061b896e858fb8ff0f3900e391abe4ed75d509d80b82682136c1ad0ec27"} Oct 09 13:43:45 crc kubenswrapper[4762]: I1009 13:43:45.656952 4762 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-6d9967f8dd-r6dpz" Oct 09 13:43:45 crc kubenswrapper[4762]: I1009 13:43:45.658484 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-687df44cdb-sn56k" event={"ID":"0302ab2b-3fbe-4d08-8364-872d1c1be2b7","Type":"ContainerStarted","Data":"9b6ea89fa42133da1629227790c3c31c20afe759245bfc4759b4fe1519599265"} Oct 09 13:43:45 crc kubenswrapper[4762]: I1009 13:43:45.659292 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-687df44cdb-sn56k" Oct 09 13:43:45 crc kubenswrapper[4762]: I1009 13:43:45.660546 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/heat-operator-controller-manager-6d9967f8dd-r6dpz" Oct 09 13:43:45 crc kubenswrapper[4762]: I1009 13:43:45.660894 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-687df44cdb-sn56k" Oct 09 13:43:45 crc kubenswrapper[4762]: I1009 13:43:45.661480 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5f4d5dfdc6-lwj44" event={"ID":"3660bde9-a2d9-43ee-8052-823fdc1f5db9","Type":"ContainerStarted","Data":"687d984e307c9dbdaab089b605c0b1c502dcffaf1514572150b27ce11172b8a8"} Oct 09 13:43:45 crc kubenswrapper[4762]: I1009 13:43:45.661666 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-5f4d5dfdc6-lwj44" Oct 09 13:43:45 crc kubenswrapper[4762]: I1009 13:43:45.663362 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-5f4d5dfdc6-lwj44" Oct 09 13:43:45 crc kubenswrapper[4762]: I1009 13:43:45.665925 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-585fc5b659-qv9zg" event={"ID":"a2c39eb4-d8e3-4bd5-9e66-7f7a5bce2eb9","Type":"ContainerStarted","Data":"1dfe4a6f34d1ccae71f6742c0851c1c593e4bad73c2b44528c0e5a4b350565ba"} Oct 09 13:43:45 crc kubenswrapper[4762]: I1009 13:43:45.666198 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-585fc5b659-qv9zg" Oct 09 13:43:45 crc kubenswrapper[4762]: I1009 13:43:45.668908 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-664664cb68-mk2fh" event={"ID":"cf87e411-c213-4287-bd23-381ea5be1a1b","Type":"ContainerStarted","Data":"59bb95596e700965c1ff9df426adbee5fb708c27f0d4f2d52e57aeebd07eef27"} Oct 09 13:43:45 crc kubenswrapper[4762]: I1009 13:43:45.669198 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/octavia-operator-controller-manager-6d7c7ddf95-t8zhf" podStartSLOduration=12.044846025 podStartE2EDuration="27.669179322s" podCreationTimestamp="2025-10-09 13:43:18 +0000 UTC" firstStartedPulling="2025-10-09 13:43:20.060402472 +0000 UTC m=+1075.834193511" lastFinishedPulling="2025-10-09 13:43:35.684735769 +0000 UTC m=+1091.458526808" observedRunningTime="2025-10-09 13:43:45.665530896 +0000 UTC m=+1101.439321935" watchObservedRunningTime="2025-10-09 13:43:45.669179322 +0000 UTC m=+1101.442970361" Oct 09 13:43:45 crc kubenswrapper[4762]: I1009 13:43:45.669581 4762 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-664664cb68-mk2fh" Oct 09 13:43:45 crc kubenswrapper[4762]: I1009 13:43:45.672157 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-64f84fcdbb-8qmk9" event={"ID":"f19642d0-b3f1-4de9-811a-8bd523f204c1","Type":"ContainerStarted","Data":"e47840ebce62f245789c1e0bfe18d7437fa0188761708e54b79a4fbf791cd689"} Oct 09 13:43:45 crc kubenswrapper[4762]: I1009 13:43:45.673139 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-64f84fcdbb-8qmk9" Oct 09 13:43:45 crc kubenswrapper[4762]: I1009 13:43:45.675567 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-64f84fcdbb-8qmk9" Oct 09 13:43:45 crc kubenswrapper[4762]: I1009 13:43:45.679767 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-7bb46cd7d-z4xhb" event={"ID":"6484b20c-9ee2-4134-bbc6-5c57c175f1db","Type":"ContainerStarted","Data":"eb9fbc941f8cdddd4944f5572412107891ed9e5d70655c47b4250166c07b5141"} Oct 09 13:43:45 crc kubenswrapper[4762]: I1009 13:43:45.681259 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-7bb46cd7d-z4xhb" Oct 09 13:43:45 crc kubenswrapper[4762]: I1009 13:43:45.681766 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-7bb46cd7d-z4xhb" Oct 09 13:43:45 crc kubenswrapper[4762]: I1009 13:43:45.683283 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-869cc7797f-pj45b" event={"ID":"8b396a39-6575-426e-b333-da637fbe5616","Type":"ContainerStarted","Data":"bab654b5cb5b8fb860ef14fdfdc061080d58c083e8537096367bc280752eab68"} Oct 09 13:43:45 crc kubenswrapper[4762]: I1009 13:43:45.683795 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-869cc7797f-pj45b" Oct 09 13:43:45 crc kubenswrapper[4762]: I1009 13:43:45.685465 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-869cc7797f-pj45b" Oct 09 13:43:45 crc kubenswrapper[4762]: I1009 13:43:45.686231 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-74cb5cbc49-7g4mb" event={"ID":"d94fa52d-aee2-46eb-a4bb-1dd5ee5fa19c","Type":"ContainerStarted","Data":"28bd103dfffe454140149cd058dfd13aa139b05c0807354b68bbc19c65cd1f49"} Oct 09 13:43:45 crc kubenswrapper[4762]: I1009 13:43:45.686563 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-74cb5cbc49-7g4mb" Oct 09 13:43:45 crc kubenswrapper[4762]: I1009 13:43:45.688298 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-ddb98f99b-pnlww" event={"ID":"117e3f88-b1fd-4738-bd66-8c8e0e25a488","Type":"ContainerStarted","Data":"c8d539e5640ce1a82f8a77d01f584339b71517bfe7d32b237e4134005131d783"} Oct 09 13:43:45 crc kubenswrapper[4762]: I1009 13:43:45.688372 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ironic-operator-controller-manager-74cb5cbc49-7g4mb" Oct 09 
13:43:45 crc kubenswrapper[4762]: I1009 13:43:45.688424 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-797d478b46-b2j7z" Oct 09 13:43:45 crc kubenswrapper[4762]: I1009 13:43:45.689570 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-5458df59d8mmjs7" Oct 09 13:43:45 crc kubenswrapper[4762]: I1009 13:43:45.694118 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-baremetal-operator-controller-manager-5458df59d8mmjs7" Oct 09 13:43:45 crc kubenswrapper[4762]: I1009 13:43:45.700031 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-797d478b46-b2j7z" Oct 09 13:43:45 crc kubenswrapper[4762]: I1009 13:43:45.702745 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/designate-operator-controller-manager-687df44cdb-sn56k" podStartSLOduration=11.714347064 podStartE2EDuration="27.702723993s" podCreationTimestamp="2025-10-09 13:43:18 +0000 UTC" firstStartedPulling="2025-10-09 13:43:19.882332051 +0000 UTC m=+1075.656123090" lastFinishedPulling="2025-10-09 13:43:35.87070898 +0000 UTC m=+1091.644500019" observedRunningTime="2025-10-09 13:43:45.68939335 +0000 UTC m=+1101.463184389" watchObservedRunningTime="2025-10-09 13:43:45.702723993 +0000 UTC m=+1101.476515032" Oct 09 13:43:45 crc kubenswrapper[4762]: I1009 13:43:45.734798 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-ffcdd6c94-xgnqp" podStartSLOduration=6.054733356 podStartE2EDuration="27.734779725s" podCreationTimestamp="2025-10-09 13:43:18 +0000 UTC" firstStartedPulling="2025-10-09 13:43:20.424349691 +0000 UTC m=+1076.198140740" lastFinishedPulling="2025-10-09 13:43:42.10439607 +0000 UTC m=+1097.878187109" observedRunningTime="2025-10-09 13:43:45.73345032 +0000 UTC m=+1101.507241379" watchObservedRunningTime="2025-10-09 13:43:45.734779725 +0000 UTC m=+1101.508570764" Oct 09 13:43:45 crc kubenswrapper[4762]: I1009 13:43:45.773987 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-57bb74c7bf-qz69s" podStartSLOduration=6.669429306 podStartE2EDuration="27.773972346s" podCreationTimestamp="2025-10-09 13:43:18 +0000 UTC" firstStartedPulling="2025-10-09 13:43:20.318248992 +0000 UTC m=+1076.092040031" lastFinishedPulling="2025-10-09 13:43:41.422792032 +0000 UTC m=+1097.196583071" observedRunningTime="2025-10-09 13:43:45.758219608 +0000 UTC m=+1101.532010647" watchObservedRunningTime="2025-10-09 13:43:45.773972346 +0000 UTC m=+1101.547763375" Oct 09 13:43:45 crc kubenswrapper[4762]: I1009 13:43:45.775435 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-6d74794d9b-4n6tj" podStartSLOduration=11.80607127 podStartE2EDuration="27.775430515s" podCreationTimestamp="2025-10-09 13:43:18 +0000 UTC" firstStartedPulling="2025-10-09 13:43:19.900617776 +0000 UTC m=+1075.674408815" lastFinishedPulling="2025-10-09 13:43:35.869977021 +0000 UTC m=+1091.643768060" observedRunningTime="2025-10-09 13:43:45.772515428 +0000 UTC m=+1101.546306467" watchObservedRunningTime="2025-10-09 13:43:45.775430515 +0000 UTC m=+1101.549221554" Oct 09 13:43:45 crc kubenswrapper[4762]: I1009 
Oct 09 13:43:45 crc kubenswrapper[4762]: I1009 13:43:45.792099 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-6d9967f8dd-r6dpz" podStartSLOduration=11.832429911 podStartE2EDuration="27.792081117s" podCreationTimestamp="2025-10-09 13:43:18 +0000 UTC" firstStartedPulling="2025-10-09 13:43:19.910568591 +0000 UTC m=+1075.684359630" lastFinishedPulling="2025-10-09 13:43:35.870219797 +0000 UTC m=+1091.644010836" observedRunningTime="2025-10-09 13:43:45.790077885 +0000 UTC m=+1101.563868934" watchObservedRunningTime="2025-10-09 13:43:45.792081117 +0000 UTC m=+1101.565872156"
Oct 09 13:43:45 crc kubenswrapper[4762]: I1009 13:43:45.838727 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-646675d848-rx4k9" podStartSLOduration=4.183895651 podStartE2EDuration="27.838703506s" podCreationTimestamp="2025-10-09 13:43:18 +0000 UTC" firstStartedPulling="2025-10-09 13:43:20.443562472 +0000 UTC m=+1076.217353511" lastFinishedPulling="2025-10-09 13:43:44.098370337 +0000 UTC m=+1099.872161366" observedRunningTime="2025-10-09 13:43:45.830961741 +0000 UTC m=+1101.604752790" watchObservedRunningTime="2025-10-09 13:43:45.838703506 +0000 UTC m=+1101.612494565"
Oct 09 13:43:45 crc kubenswrapper[4762]: I1009 13:43:45.861146 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-5f4d5dfdc6-lwj44" podStartSLOduration=12.273602263 podStartE2EDuration="27.861124162s" podCreationTimestamp="2025-10-09 13:43:18 +0000 UTC" firstStartedPulling="2025-10-09 13:43:20.284387303 +0000 UTC m=+1076.058178342" lastFinishedPulling="2025-10-09 13:43:35.871909202 +0000 UTC m=+1091.645700241" observedRunningTime="2025-10-09 13:43:45.85652435 +0000 UTC m=+1101.630315399" watchObservedRunningTime="2025-10-09 13:43:45.861124162 +0000 UTC m=+1101.634915201"
Oct 09 13:43:45 crc kubenswrapper[4762]: I1009 13:43:45.873818 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-7bb46cd7d-z4xhb" podStartSLOduration=11.76393021 podStartE2EDuration="27.873801958s" podCreationTimestamp="2025-10-09 13:43:18 +0000 UTC" firstStartedPulling="2025-10-09 13:43:19.762374883 +0000 UTC m=+1075.536165922" lastFinishedPulling="2025-10-09 13:43:35.872246631 +0000 UTC m=+1091.646037670" observedRunningTime="2025-10-09 13:43:45.873161102 +0000 UTC m=+1101.646952141" watchObservedRunningTime="2025-10-09 13:43:45.873801958 +0000 UTC m=+1101.647592997"
Oct 09 13:43:45 crc kubenswrapper[4762]: I1009 13:43:45.900666 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-585fc5b659-qv9zg" podStartSLOduration=6.920828597 podStartE2EDuration="27.900650412s" podCreationTimestamp="2025-10-09 13:43:18 +0000 UTC" firstStartedPulling="2025-10-09 13:43:20.440300246 +0000 UTC m=+1076.214091285" lastFinishedPulling="2025-10-09 13:43:41.420122061 +0000 UTC m=+1097.193913100" observedRunningTime="2025-10-09 13:43:45.896667077 +0000 UTC m=+1101.670458116" watchObservedRunningTime="2025-10-09 13:43:45.900650412 +0000 UTC m=+1101.674441451"
Oct 09 13:43:45 crc kubenswrapper[4762]: I1009 13:43:45.933808 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-869cc7797f-pj45b" podStartSLOduration=12.13348335 podStartE2EDuration="27.933786022s" podCreationTimestamp="2025-10-09 13:43:18 +0000 UTC" firstStartedPulling="2025-10-09 13:43:20.073828689 +0000 UTC m=+1075.847619728" lastFinishedPulling="2025-10-09 13:43:35.874131361 +0000 UTC m=+1091.647922400" observedRunningTime="2025-10-09 13:43:45.929763175 +0000 UTC m=+1101.703554224" watchObservedRunningTime="2025-10-09 13:43:45.933786022 +0000 UTC m=+1101.707577061"
Oct 09 13:43:45 crc kubenswrapper[4762]: I1009 13:43:45.963548 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-5458df59d8mmjs7" podStartSLOduration=13.046714203 podStartE2EDuration="27.963524173s" podCreationTimestamp="2025-10-09 13:43:18 +0000 UTC" firstStartedPulling="2025-10-09 13:43:21.021578309 +0000 UTC m=+1076.795369348" lastFinishedPulling="2025-10-09 13:43:35.938388279 +0000 UTC m=+1091.712179318" observedRunningTime="2025-10-09 13:43:45.961120669 +0000 UTC m=+1101.734911708" watchObservedRunningTime="2025-10-09 13:43:45.963524173 +0000 UTC m=+1101.737315212"
Oct 09 13:43:45 crc kubenswrapper[4762]: I1009 13:43:45.983518 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-ddb98f99b-pnlww" podStartSLOduration=12.196785862 podStartE2EDuration="27.983494843s" podCreationTimestamp="2025-10-09 13:43:18 +0000 UTC" firstStartedPulling="2025-10-09 13:43:20.085407206 +0000 UTC m=+1075.859198245" lastFinishedPulling="2025-10-09 13:43:35.872116187 +0000 UTC m=+1091.645907226" observedRunningTime="2025-10-09 13:43:45.97811311 +0000 UTC m=+1101.751904149" watchObservedRunningTime="2025-10-09 13:43:45.983494843 +0000 UTC m=+1101.757285882"
Oct 09 13:43:45 crc kubenswrapper[4762]: I1009 13:43:45.999500 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-64f84fcdbb-8qmk9" podStartSLOduration=11.89299632 podStartE2EDuration="27.999484388s" podCreationTimestamp="2025-10-09 13:43:18 +0000 UTC" firstStartedPulling="2025-10-09 13:43:19.762395524 +0000 UTC m=+1075.536186563" lastFinishedPulling="2025-10-09 13:43:35.868883592 +0000 UTC m=+1091.642674631" observedRunningTime="2025-10-09 13:43:45.995587964 +0000 UTC m=+1101.769379023" watchObservedRunningTime="2025-10-09 13:43:45.999484388 +0000 UTC m=+1101.773275427"
Oct 09 13:43:46 crc kubenswrapper[4762]: I1009 13:43:46.021946 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/neutron-operator-controller-manager-797d478b46-b2j7z" podStartSLOduration=12.236428564 podStartE2EDuration="28.021926974s" podCreationTimestamp="2025-10-09 13:43:18 +0000 UTC" firstStartedPulling="2025-10-09 13:43:20.085785186 +0000 UTC m=+1075.859576225" lastFinishedPulling="2025-10-09 13:43:35.871283596 +0000 UTC m=+1091.645074635" observedRunningTime="2025-10-09 13:43:46.021281877 +0000 UTC m=+1101.795072936" watchObservedRunningTime="2025-10-09 13:43:46.021926974 +0000 UTC m=+1101.795718013"
Oct 09 13:43:46 crc kubenswrapper[4762]: I1009 13:43:46.040347 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-664664cb68-mk2fh" podStartSLOduration=6.934669034 podStartE2EDuration="28.040332553s" podCreationTimestamp="2025-10-09 13:43:18 +0000 UTC" firstStartedPulling="2025-10-09 13:43:20.314540734 +0000 UTC m=+1076.088331773" lastFinishedPulling="2025-10-09 13:43:41.420204253 +0000 UTC m=+1097.193995292" observedRunningTime="2025-10-09 13:43:46.03907619 +0000 UTC m=+1101.812867229" watchObservedRunningTime="2025-10-09 13:43:46.040332553 +0000 UTC m=+1101.814123592"
Oct 09 13:43:46 crc kubenswrapper[4762]: I1009 13:43:46.066323 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-74cb5cbc49-7g4mb" podStartSLOduration=12.251121306 podStartE2EDuration="28.066303013s" podCreationTimestamp="2025-10-09 13:43:18 +0000 UTC" firstStartedPulling="2025-10-09 13:43:20.060956897 +0000 UTC m=+1075.834747936" lastFinishedPulling="2025-10-09 13:43:35.876138604 +0000 UTC m=+1091.649929643" observedRunningTime="2025-10-09 13:43:46.0639462 +0000 UTC m=+1101.837737249" watchObservedRunningTime="2025-10-09 13:43:46.066303013 +0000 UTC m=+1101.840094052"
Oct 09 13:43:46 crc kubenswrapper[4762]: I1009 13:43:46.697006 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-85fd6d6f45-h6cz8" event={"ID":"2bc7bc27-a390-4830-88cc-2a94e1326a09","Type":"ContainerStarted","Data":"f8a849698d8a2779281f173d17d00f3989213dd9d96c3087aa99ef1f45b1b4fc"}
Oct 09 13:43:46 crc kubenswrapper[4762]: I1009 13:43:46.697410 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-85fd6d6f45-h6cz8"
Oct 09 13:43:46 crc kubenswrapper[4762]: I1009 13:43:46.698876 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-5777b4f897-6s6tx" event={"ID":"4141c889-724a-4a6f-886c-d1b6fa852d0f","Type":"ContainerStarted","Data":"583f935aad48d1d39a1010021e3af6ef25dfa5c344f3757fc3fa70c345a5cded"}
Oct 09 13:43:46 crc kubenswrapper[4762]: I1009 13:43:46.700740 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-ddb98f99b-pnlww"
Oct 09 13:43:46 crc kubenswrapper[4762]: I1009 13:43:46.702321 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-ddb98f99b-pnlww"
Oct 09 13:43:46 crc kubenswrapper[4762]: I1009 13:43:46.716627 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-85fd6d6f45-h6cz8" podStartSLOduration=2.931818245 podStartE2EDuration="28.716609111s" podCreationTimestamp="2025-10-09 13:43:18 +0000 UTC" firstStartedPulling="2025-10-09 13:43:20.506221817 +0000 UTC m=+1076.280012856" lastFinishedPulling="2025-10-09 13:43:46.291012683 +0000 UTC m=+1102.064803722" observedRunningTime="2025-10-09 13:43:46.712105831 +0000 UTC m=+1102.485896870" watchObservedRunningTime="2025-10-09 13:43:46.716609111 +0000 UTC m=+1102.490400150"
Oct 09 13:43:46 crc kubenswrapper[4762]: I1009 13:43:46.746333 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-5777b4f897-6s6tx" podStartSLOduration=3.526617837 podStartE2EDuration="28.746312691s" podCreationTimestamp="2025-10-09 13:43:18 +0000 UTC" firstStartedPulling="2025-10-09 13:43:20.31969342 +0000 UTC m=+1076.093484459" lastFinishedPulling="2025-10-09 13:43:45.539388264 +0000 UTC m=+1101.313179313" observedRunningTime="2025-10-09 13:43:46.744478181 +0000 UTC m=+1102.518269220" watchObservedRunningTime="2025-10-09 13:43:46.746312691 +0000 UTC m=+1102.520103730"
Oct 09 13:43:48 crc kubenswrapper[4762]: I1009 13:43:48.857512 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-5777b4f897-6s6tx"
Oct 09 13:43:49 crc kubenswrapper[4762]: I1009 13:43:49.004115 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-57bb74c7bf-qz69s"
Oct 09 13:43:49 crc kubenswrapper[4762]: I1009 13:43:49.129027 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-664664cb68-mk2fh"
Oct 09 13:43:49 crc kubenswrapper[4762]: I1009 13:43:49.176457 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-585fc5b659-qv9zg"
Oct 09 13:43:49 crc kubenswrapper[4762]: I1009 13:43:49.388301 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/test-operator-controller-manager-ffcdd6c94-xgnqp"
Oct 09 13:43:49 crc kubenswrapper[4762]: I1009 13:43:49.718745 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-646675d848-rx4k9"
Oct 09 13:43:51 crc kubenswrapper[4762]: I1009 13:43:51.967202 4762 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Oct 09 13:43:53 crc kubenswrapper[4762]: I1009 13:43:53.754369 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-59578bc799-tdnrr" event={"ID":"0024a375-268b-4c89-ad32-2b3876e271af","Type":"ContainerStarted","Data":"fbc9c7d729511e9a04dbdc6c2657c6827cad2ef5e53c9933ff1ad794d308b46b"}
Oct 09 13:43:53 crc kubenswrapper[4762]: I1009 13:43:53.755144 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-59578bc799-tdnrr"
Oct 09 13:43:53 crc kubenswrapper[4762]: I1009 13:43:53.769284 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-59578bc799-tdnrr" podStartSLOduration=2.80636038 podStartE2EDuration="35.769263421s" podCreationTimestamp="2025-10-09 13:43:18 +0000 UTC" firstStartedPulling="2025-10-09 13:43:20.088350214 +0000 UTC m=+1075.862141253" lastFinishedPulling="2025-10-09 13:43:53.051253255 +0000 UTC m=+1108.825044294" observedRunningTime="2025-10-09 13:43:53.767791761 +0000 UTC m=+1109.541582810" watchObservedRunningTime="2025-10-09 13:43:53.769263421 +0000 UTC m=+1109.543054480"
Oct 09 13:43:58 crc kubenswrapper[4762]: I1009 13:43:58.669816 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-59578bc799-tdnrr"
Oct 09 13:43:58 crc kubenswrapper[4762]: I1009 13:43:58.860466 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-5777b4f897-6s6tx"
Oct 09 13:43:59 crc kubenswrapper[4762]: I1009 13:43:59.275677 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-85fd6d6f45-h6cz8"
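
[editor's note] The "SyncLoop (probe)" entries record readiness transitions: status="" when a container has started but no probe result exists yet, then status="ready" once the first readiness probe succeeds (mariadb above goes status="" at 13:43:48.86 and "ready" at 13:43:58.86, a gap consistent with a ~10 s probe interval). A small, hypothetical Python filter for pulling the first ready time per pod out of a log shaped like this one:

    import re

    # Hypothetical helper: report when each pod first goes readiness-"ready".
    PROBE = re.compile(r'^(?P<ts>\w{3} \d{2} [\d:]{8}) .*"SyncLoop \(probe\)" '
                       r'probe="readiness" status="(?P<st>[^"]*)" pod="(?P<pod>[^"]+)"')

    def first_ready(lines):
        ready = {}
        for line in lines:
            m = PROBE.match(line)
            if m and m.group("st") == "ready":
                ready.setdefault(m.group("pod"), m.group("ts"))
        return ready

[end note]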
Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-ndjxc" Oct 09 13:44:15 crc kubenswrapper[4762]: I1009 13:44:15.036883 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns" Oct 09 13:44:15 crc kubenswrapper[4762]: I1009 13:44:15.037417 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-kpr5t" Oct 09 13:44:15 crc kubenswrapper[4762]: I1009 13:44:15.037996 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt" Oct 09 13:44:15 crc kubenswrapper[4762]: I1009 13:44:15.041845 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt" Oct 09 13:44:15 crc kubenswrapper[4762]: I1009 13:44:15.088532 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-ndjxc"] Oct 09 13:44:15 crc kubenswrapper[4762]: I1009 13:44:15.129507 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f335f513-ab5c-436e-9390-84315674ff94-config\") pod \"dnsmasq-dns-675f4bcbfc-ndjxc\" (UID: \"f335f513-ab5c-436e-9390-84315674ff94\") " pod="openstack/dnsmasq-dns-675f4bcbfc-ndjxc" Oct 09 13:44:15 crc kubenswrapper[4762]: I1009 13:44:15.129575 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8f5nr\" (UniqueName: \"kubernetes.io/projected/f335f513-ab5c-436e-9390-84315674ff94-kube-api-access-8f5nr\") pod \"dnsmasq-dns-675f4bcbfc-ndjxc\" (UID: \"f335f513-ab5c-436e-9390-84315674ff94\") " pod="openstack/dnsmasq-dns-675f4bcbfc-ndjxc" Oct 09 13:44:15 crc kubenswrapper[4762]: I1009 13:44:15.155495 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-wmdhb"] Oct 09 13:44:15 crc kubenswrapper[4762]: I1009 13:44:15.157002 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-wmdhb" Oct 09 13:44:15 crc kubenswrapper[4762]: I1009 13:44:15.169494 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc" Oct 09 13:44:15 crc kubenswrapper[4762]: I1009 13:44:15.185765 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-wmdhb"] Oct 09 13:44:15 crc kubenswrapper[4762]: I1009 13:44:15.230712 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f335f513-ab5c-436e-9390-84315674ff94-config\") pod \"dnsmasq-dns-675f4bcbfc-ndjxc\" (UID: \"f335f513-ab5c-436e-9390-84315674ff94\") " pod="openstack/dnsmasq-dns-675f4bcbfc-ndjxc" Oct 09 13:44:15 crc kubenswrapper[4762]: I1009 13:44:15.230786 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/42bb36e2-06db-4094-9888-fb1e75596d17-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-wmdhb\" (UID: \"42bb36e2-06db-4094-9888-fb1e75596d17\") " pod="openstack/dnsmasq-dns-78dd6ddcc-wmdhb" Oct 09 13:44:15 crc kubenswrapper[4762]: I1009 13:44:15.230823 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/42bb36e2-06db-4094-9888-fb1e75596d17-config\") pod \"dnsmasq-dns-78dd6ddcc-wmdhb\" (UID: \"42bb36e2-06db-4094-9888-fb1e75596d17\") " pod="openstack/dnsmasq-dns-78dd6ddcc-wmdhb" Oct 09 13:44:15 crc kubenswrapper[4762]: I1009 13:44:15.230852 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8f5nr\" (UniqueName: \"kubernetes.io/projected/f335f513-ab5c-436e-9390-84315674ff94-kube-api-access-8f5nr\") pod \"dnsmasq-dns-675f4bcbfc-ndjxc\" (UID: \"f335f513-ab5c-436e-9390-84315674ff94\") " pod="openstack/dnsmasq-dns-675f4bcbfc-ndjxc" Oct 09 13:44:15 crc kubenswrapper[4762]: I1009 13:44:15.230923 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g7kmg\" (UniqueName: \"kubernetes.io/projected/42bb36e2-06db-4094-9888-fb1e75596d17-kube-api-access-g7kmg\") pod \"dnsmasq-dns-78dd6ddcc-wmdhb\" (UID: \"42bb36e2-06db-4094-9888-fb1e75596d17\") " pod="openstack/dnsmasq-dns-78dd6ddcc-wmdhb" Oct 09 13:44:15 crc kubenswrapper[4762]: I1009 13:44:15.232026 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f335f513-ab5c-436e-9390-84315674ff94-config\") pod \"dnsmasq-dns-675f4bcbfc-ndjxc\" (UID: \"f335f513-ab5c-436e-9390-84315674ff94\") " pod="openstack/dnsmasq-dns-675f4bcbfc-ndjxc" Oct 09 13:44:15 crc kubenswrapper[4762]: I1009 13:44:15.268875 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8f5nr\" (UniqueName: \"kubernetes.io/projected/f335f513-ab5c-436e-9390-84315674ff94-kube-api-access-8f5nr\") pod \"dnsmasq-dns-675f4bcbfc-ndjxc\" (UID: \"f335f513-ab5c-436e-9390-84315674ff94\") " pod="openstack/dnsmasq-dns-675f4bcbfc-ndjxc" Oct 09 13:44:15 crc kubenswrapper[4762]: I1009 13:44:15.331741 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/42bb36e2-06db-4094-9888-fb1e75596d17-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-wmdhb\" (UID: \"42bb36e2-06db-4094-9888-fb1e75596d17\") " pod="openstack/dnsmasq-dns-78dd6ddcc-wmdhb" Oct 09 13:44:15 crc kubenswrapper[4762]: I1009 
13:44:15.331797 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/42bb36e2-06db-4094-9888-fb1e75596d17-config\") pod \"dnsmasq-dns-78dd6ddcc-wmdhb\" (UID: \"42bb36e2-06db-4094-9888-fb1e75596d17\") " pod="openstack/dnsmasq-dns-78dd6ddcc-wmdhb" Oct 09 13:44:15 crc kubenswrapper[4762]: I1009 13:44:15.331863 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g7kmg\" (UniqueName: \"kubernetes.io/projected/42bb36e2-06db-4094-9888-fb1e75596d17-kube-api-access-g7kmg\") pod \"dnsmasq-dns-78dd6ddcc-wmdhb\" (UID: \"42bb36e2-06db-4094-9888-fb1e75596d17\") " pod="openstack/dnsmasq-dns-78dd6ddcc-wmdhb" Oct 09 13:44:15 crc kubenswrapper[4762]: I1009 13:44:15.333042 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/42bb36e2-06db-4094-9888-fb1e75596d17-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-wmdhb\" (UID: \"42bb36e2-06db-4094-9888-fb1e75596d17\") " pod="openstack/dnsmasq-dns-78dd6ddcc-wmdhb" Oct 09 13:44:15 crc kubenswrapper[4762]: I1009 13:44:15.333620 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/42bb36e2-06db-4094-9888-fb1e75596d17-config\") pod \"dnsmasq-dns-78dd6ddcc-wmdhb\" (UID: \"42bb36e2-06db-4094-9888-fb1e75596d17\") " pod="openstack/dnsmasq-dns-78dd6ddcc-wmdhb" Oct 09 13:44:15 crc kubenswrapper[4762]: I1009 13:44:15.350020 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-ndjxc" Oct 09 13:44:15 crc kubenswrapper[4762]: I1009 13:44:15.351378 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g7kmg\" (UniqueName: \"kubernetes.io/projected/42bb36e2-06db-4094-9888-fb1e75596d17-kube-api-access-g7kmg\") pod \"dnsmasq-dns-78dd6ddcc-wmdhb\" (UID: \"42bb36e2-06db-4094-9888-fb1e75596d17\") " pod="openstack/dnsmasq-dns-78dd6ddcc-wmdhb" Oct 09 13:44:15 crc kubenswrapper[4762]: I1009 13:44:15.473192 4762 util.go:30] "No sandbox for pod can be found. 
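
[editor's note] The volume entries above follow kubelet's reconciler pattern for each (pod, volume) pair: "VerifyControllerAttachedVolume started" (reconciler_common.go:245), then "MountVolume started" (reconciler_common.go:218), then "MountVolume.SetUp succeeded" (operation_generator.go:637). A hedged sketch for spotting volumes that started mounting but never report success; the regexes assume the escaped-quote formatting shown in this log:

    import re

    STARTED   = re.compile(r'MountVolume started for volume \\"([^"\\]+)\\".*pod="([^"]+)"')
    SUCCEEDED = re.compile(r'MountVolume\.SetUp succeeded for volume \\"([^"\\]+)\\".*pod="([^"]+)"')

    def pending_mounts(lines):
        # Track (pod, volume) pairs with a "started" line but, so far,
        # no matching "SetUp succeeded" line.
        pending = set()
        for line in lines:
            if (m := STARTED.search(line)):
                pending.add((m.group(2), m.group(1)))
            elif (m := SUCCEEDED.search(line)):
                pending.discard((m.group(2), m.group(1)))
        return pending

For the two dnsmasq pods above, every started volume reaches SetUp succeeded, so the pending set drains to empty. [end note]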
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-wmdhb" Oct 09 13:44:15 crc kubenswrapper[4762]: I1009 13:44:15.843880 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-ndjxc"] Oct 09 13:44:15 crc kubenswrapper[4762]: I1009 13:44:15.916100 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-ndjxc" event={"ID":"f335f513-ab5c-436e-9390-84315674ff94","Type":"ContainerStarted","Data":"61539a3fc60a63b3995226166eef6a75cd0267fc7d082ae1b105df210eac37f2"} Oct 09 13:44:15 crc kubenswrapper[4762]: I1009 13:44:15.936313 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-wmdhb"] Oct 09 13:44:15 crc kubenswrapper[4762]: W1009 13:44:15.940537 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod42bb36e2_06db_4094_9888_fb1e75596d17.slice/crio-e3f52ab8681fd4be37ce7293a0b4e75e193567d453c8af6c1c59d30aa514c46c WatchSource:0}: Error finding container e3f52ab8681fd4be37ce7293a0b4e75e193567d453c8af6c1c59d30aa514c46c: Status 404 returned error can't find the container with id e3f52ab8681fd4be37ce7293a0b4e75e193567d453c8af6c1c59d30aa514c46c Oct 09 13:44:16 crc kubenswrapper[4762]: I1009 13:44:16.868816 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-ndjxc"] Oct 09 13:44:16 crc kubenswrapper[4762]: I1009 13:44:16.898214 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-cmztv"] Oct 09 13:44:16 crc kubenswrapper[4762]: I1009 13:44:16.899659 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-cmztv" Oct 09 13:44:16 crc kubenswrapper[4762]: I1009 13:44:16.910118 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-cmztv"] Oct 09 13:44:16 crc kubenswrapper[4762]: I1009 13:44:16.943048 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-wmdhb" event={"ID":"42bb36e2-06db-4094-9888-fb1e75596d17","Type":"ContainerStarted","Data":"e3f52ab8681fd4be37ce7293a0b4e75e193567d453c8af6c1c59d30aa514c46c"} Oct 09 13:44:17 crc kubenswrapper[4762]: I1009 13:44:17.065886 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9935b4b6-437d-4d16-951c-a36c15ba5257-config\") pod \"dnsmasq-dns-666b6646f7-cmztv\" (UID: \"9935b4b6-437d-4d16-951c-a36c15ba5257\") " pod="openstack/dnsmasq-dns-666b6646f7-cmztv" Oct 09 13:44:17 crc kubenswrapper[4762]: I1009 13:44:17.065987 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9697c\" (UniqueName: \"kubernetes.io/projected/9935b4b6-437d-4d16-951c-a36c15ba5257-kube-api-access-9697c\") pod \"dnsmasq-dns-666b6646f7-cmztv\" (UID: \"9935b4b6-437d-4d16-951c-a36c15ba5257\") " pod="openstack/dnsmasq-dns-666b6646f7-cmztv" Oct 09 13:44:17 crc kubenswrapper[4762]: I1009 13:44:17.066049 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9935b4b6-437d-4d16-951c-a36c15ba5257-dns-svc\") pod \"dnsmasq-dns-666b6646f7-cmztv\" (UID: \"9935b4b6-437d-4d16-951c-a36c15ba5257\") " pod="openstack/dnsmasq-dns-666b6646f7-cmztv" Oct 09 13:44:17 crc kubenswrapper[4762]: I1009 13:44:17.167846 4762 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-9697c\" (UniqueName: \"kubernetes.io/projected/9935b4b6-437d-4d16-951c-a36c15ba5257-kube-api-access-9697c\") pod \"dnsmasq-dns-666b6646f7-cmztv\" (UID: \"9935b4b6-437d-4d16-951c-a36c15ba5257\") " pod="openstack/dnsmasq-dns-666b6646f7-cmztv" Oct 09 13:44:17 crc kubenswrapper[4762]: I1009 13:44:17.167888 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9935b4b6-437d-4d16-951c-a36c15ba5257-dns-svc\") pod \"dnsmasq-dns-666b6646f7-cmztv\" (UID: \"9935b4b6-437d-4d16-951c-a36c15ba5257\") " pod="openstack/dnsmasq-dns-666b6646f7-cmztv" Oct 09 13:44:17 crc kubenswrapper[4762]: I1009 13:44:17.167990 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9935b4b6-437d-4d16-951c-a36c15ba5257-config\") pod \"dnsmasq-dns-666b6646f7-cmztv\" (UID: \"9935b4b6-437d-4d16-951c-a36c15ba5257\") " pod="openstack/dnsmasq-dns-666b6646f7-cmztv" Oct 09 13:44:17 crc kubenswrapper[4762]: I1009 13:44:17.168941 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9935b4b6-437d-4d16-951c-a36c15ba5257-config\") pod \"dnsmasq-dns-666b6646f7-cmztv\" (UID: \"9935b4b6-437d-4d16-951c-a36c15ba5257\") " pod="openstack/dnsmasq-dns-666b6646f7-cmztv" Oct 09 13:44:17 crc kubenswrapper[4762]: I1009 13:44:17.169339 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9935b4b6-437d-4d16-951c-a36c15ba5257-dns-svc\") pod \"dnsmasq-dns-666b6646f7-cmztv\" (UID: \"9935b4b6-437d-4d16-951c-a36c15ba5257\") " pod="openstack/dnsmasq-dns-666b6646f7-cmztv" Oct 09 13:44:17 crc kubenswrapper[4762]: I1009 13:44:17.189397 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9697c\" (UniqueName: \"kubernetes.io/projected/9935b4b6-437d-4d16-951c-a36c15ba5257-kube-api-access-9697c\") pod \"dnsmasq-dns-666b6646f7-cmztv\" (UID: \"9935b4b6-437d-4d16-951c-a36c15ba5257\") " pod="openstack/dnsmasq-dns-666b6646f7-cmztv" Oct 09 13:44:17 crc kubenswrapper[4762]: I1009 13:44:17.230476 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-cmztv" Oct 09 13:44:17 crc kubenswrapper[4762]: I1009 13:44:17.602603 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-wmdhb"] Oct 09 13:44:17 crc kubenswrapper[4762]: I1009 13:44:17.641925 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-k5xnq"] Oct 09 13:44:17 crc kubenswrapper[4762]: I1009 13:44:17.644689 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-k5xnq" Oct 09 13:44:17 crc kubenswrapper[4762]: I1009 13:44:17.682413 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-k5xnq"] Oct 09 13:44:17 crc kubenswrapper[4762]: I1009 13:44:17.787572 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cb6e7fc4-f16d-49a1-888a-ad4a5c1a23d3-config\") pod \"dnsmasq-dns-57d769cc4f-k5xnq\" (UID: \"cb6e7fc4-f16d-49a1-888a-ad4a5c1a23d3\") " pod="openstack/dnsmasq-dns-57d769cc4f-k5xnq" Oct 09 13:44:17 crc kubenswrapper[4762]: I1009 13:44:17.787693 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n6qmw\" (UniqueName: \"kubernetes.io/projected/cb6e7fc4-f16d-49a1-888a-ad4a5c1a23d3-kube-api-access-n6qmw\") pod \"dnsmasq-dns-57d769cc4f-k5xnq\" (UID: \"cb6e7fc4-f16d-49a1-888a-ad4a5c1a23d3\") " pod="openstack/dnsmasq-dns-57d769cc4f-k5xnq" Oct 09 13:44:17 crc kubenswrapper[4762]: I1009 13:44:17.787785 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cb6e7fc4-f16d-49a1-888a-ad4a5c1a23d3-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-k5xnq\" (UID: \"cb6e7fc4-f16d-49a1-888a-ad4a5c1a23d3\") " pod="openstack/dnsmasq-dns-57d769cc4f-k5xnq" Oct 09 13:44:17 crc kubenswrapper[4762]: I1009 13:44:17.798511 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-cmztv"] Oct 09 13:44:17 crc kubenswrapper[4762]: I1009 13:44:17.888811 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n6qmw\" (UniqueName: \"kubernetes.io/projected/cb6e7fc4-f16d-49a1-888a-ad4a5c1a23d3-kube-api-access-n6qmw\") pod \"dnsmasq-dns-57d769cc4f-k5xnq\" (UID: \"cb6e7fc4-f16d-49a1-888a-ad4a5c1a23d3\") " pod="openstack/dnsmasq-dns-57d769cc4f-k5xnq" Oct 09 13:44:17 crc kubenswrapper[4762]: I1009 13:44:17.888904 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cb6e7fc4-f16d-49a1-888a-ad4a5c1a23d3-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-k5xnq\" (UID: \"cb6e7fc4-f16d-49a1-888a-ad4a5c1a23d3\") " pod="openstack/dnsmasq-dns-57d769cc4f-k5xnq" Oct 09 13:44:17 crc kubenswrapper[4762]: I1009 13:44:17.888969 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cb6e7fc4-f16d-49a1-888a-ad4a5c1a23d3-config\") pod \"dnsmasq-dns-57d769cc4f-k5xnq\" (UID: \"cb6e7fc4-f16d-49a1-888a-ad4a5c1a23d3\") " pod="openstack/dnsmasq-dns-57d769cc4f-k5xnq" Oct 09 13:44:17 crc kubenswrapper[4762]: I1009 13:44:17.890004 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cb6e7fc4-f16d-49a1-888a-ad4a5c1a23d3-config\") pod \"dnsmasq-dns-57d769cc4f-k5xnq\" (UID: \"cb6e7fc4-f16d-49a1-888a-ad4a5c1a23d3\") " pod="openstack/dnsmasq-dns-57d769cc4f-k5xnq" Oct 09 13:44:17 crc kubenswrapper[4762]: I1009 13:44:17.890018 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cb6e7fc4-f16d-49a1-888a-ad4a5c1a23d3-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-k5xnq\" (UID: \"cb6e7fc4-f16d-49a1-888a-ad4a5c1a23d3\") " pod="openstack/dnsmasq-dns-57d769cc4f-k5xnq" Oct 09 13:44:17 crc kubenswrapper[4762]: I1009 13:44:17.944739 
Oct 09 13:44:17 crc kubenswrapper[4762]: I1009 13:44:17.944739 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n6qmw\" (UniqueName: \"kubernetes.io/projected/cb6e7fc4-f16d-49a1-888a-ad4a5c1a23d3-kube-api-access-n6qmw\") pod \"dnsmasq-dns-57d769cc4f-k5xnq\" (UID: \"cb6e7fc4-f16d-49a1-888a-ad4a5c1a23d3\") " pod="openstack/dnsmasq-dns-57d769cc4f-k5xnq"
Oct 09 13:44:17 crc kubenswrapper[4762]: I1009 13:44:17.960713 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-cmztv" event={"ID":"9935b4b6-437d-4d16-951c-a36c15ba5257","Type":"ContainerStarted","Data":"0c365b189c17e144b42b804ec377c2b9622fdb57862d6d72de74b7e2402598a6"}
Oct 09 13:44:17 crc kubenswrapper[4762]: I1009 13:44:17.983853 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-k5xnq"
Oct 09 13:44:18 crc kubenswrapper[4762]: I1009 13:44:18.037858 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"]
Oct 09 13:44:18 crc kubenswrapper[4762]: I1009 13:44:18.040685 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Oct 09 13:44:18 crc kubenswrapper[4762]: I1009 13:44:18.048694 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf"
Oct 09 13:44:18 crc kubenswrapper[4762]: I1009 13:44:18.050342 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc"
Oct 09 13:44:18 crc kubenswrapper[4762]: I1009 13:44:18.050530 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-gp5q9"
Oct 09 13:44:18 crc kubenswrapper[4762]: I1009 13:44:18.051714 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data"
Oct 09 13:44:18 crc kubenswrapper[4762]: I1009 13:44:18.051939 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie"
Oct 09 13:44:18 crc kubenswrapper[4762]: I1009 13:44:18.052058 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user"
Oct 09 13:44:18 crc kubenswrapper[4762]: I1009 13:44:18.054743 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf"
Oct 09 13:44:18 crc kubenswrapper[4762]: I1009 13:44:18.067843 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"]
Oct 09 13:44:18 crc kubenswrapper[4762]: I1009 13:44:18.193676 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/0ca3d4c1-b9e5-4443-8102-7739602cbd2f-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"0ca3d4c1-b9e5-4443-8102-7739602cbd2f\") " pod="openstack/rabbitmq-server-0"
Oct 09 13:44:18 crc kubenswrapper[4762]: I1009 13:44:18.194041 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0ca3d4c1-b9e5-4443-8102-7739602cbd2f-config-data\") pod \"rabbitmq-server-0\" (UID: \"0ca3d4c1-b9e5-4443-8102-7739602cbd2f\") " pod="openstack/rabbitmq-server-0"
Oct 09 13:44:18 crc kubenswrapper[4762]: I1009 13:44:18.194091 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"rabbitmq-server-0\" (UID: \"0ca3d4c1-b9e5-4443-8102-7739602cbd2f\") " pod="openstack/rabbitmq-server-0"
Oct 09 13:44:18 crc kubenswrapper[4762]: I1009 13:44:18.194130 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/0ca3d4c1-b9e5-4443-8102-7739602cbd2f-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"0ca3d4c1-b9e5-4443-8102-7739602cbd2f\") " pod="openstack/rabbitmq-server-0"
Oct 09 13:44:18 crc kubenswrapper[4762]: I1009 13:44:18.194161 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/0ca3d4c1-b9e5-4443-8102-7739602cbd2f-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"0ca3d4c1-b9e5-4443-8102-7739602cbd2f\") " pod="openstack/rabbitmq-server-0"
Oct 09 13:44:18 crc kubenswrapper[4762]: I1009 13:44:18.194181 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/0ca3d4c1-b9e5-4443-8102-7739602cbd2f-server-conf\") pod \"rabbitmq-server-0\" (UID: \"0ca3d4c1-b9e5-4443-8102-7739602cbd2f\") " pod="openstack/rabbitmq-server-0"
Oct 09 13:44:18 crc kubenswrapper[4762]: I1009 13:44:18.194200 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/0ca3d4c1-b9e5-4443-8102-7739602cbd2f-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"0ca3d4c1-b9e5-4443-8102-7739602cbd2f\") " pod="openstack/rabbitmq-server-0"
Oct 09 13:44:18 crc kubenswrapper[4762]: I1009 13:44:18.194228 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7t98p\" (UniqueName: \"kubernetes.io/projected/0ca3d4c1-b9e5-4443-8102-7739602cbd2f-kube-api-access-7t98p\") pod \"rabbitmq-server-0\" (UID: \"0ca3d4c1-b9e5-4443-8102-7739602cbd2f\") " pod="openstack/rabbitmq-server-0"
Oct 09 13:44:18 crc kubenswrapper[4762]: I1009 13:44:18.194247 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/0ca3d4c1-b9e5-4443-8102-7739602cbd2f-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"0ca3d4c1-b9e5-4443-8102-7739602cbd2f\") " pod="openstack/rabbitmq-server-0"
Oct 09 13:44:18 crc kubenswrapper[4762]: I1009 13:44:18.194276 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/0ca3d4c1-b9e5-4443-8102-7739602cbd2f-pod-info\") pod \"rabbitmq-server-0\" (UID: \"0ca3d4c1-b9e5-4443-8102-7739602cbd2f\") " pod="openstack/rabbitmq-server-0"
Oct 09 13:44:18 crc kubenswrapper[4762]: I1009 13:44:18.194304 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/0ca3d4c1-b9e5-4443-8102-7739602cbd2f-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"0ca3d4c1-b9e5-4443-8102-7739602cbd2f\") " pod="openstack/rabbitmq-server-0"
Oct 09 13:44:18 crc kubenswrapper[4762]: I1009 13:44:18.295151 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/0ca3d4c1-b9e5-4443-8102-7739602cbd2f-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"0ca3d4c1-b9e5-4443-8102-7739602cbd2f\") " pod="openstack/rabbitmq-server-0"
Oct 09 13:44:18 crc kubenswrapper[4762]: I1009 13:44:18.295207 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/0ca3d4c1-b9e5-4443-8102-7739602cbd2f-pod-info\") pod \"rabbitmq-server-0\" (UID: \"0ca3d4c1-b9e5-4443-8102-7739602cbd2f\") " pod="openstack/rabbitmq-server-0"
Oct 09 13:44:18 crc kubenswrapper[4762]: I1009 13:44:18.295233 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7t98p\" (UniqueName: \"kubernetes.io/projected/0ca3d4c1-b9e5-4443-8102-7739602cbd2f-kube-api-access-7t98p\") pod \"rabbitmq-server-0\" (UID: \"0ca3d4c1-b9e5-4443-8102-7739602cbd2f\") " pod="openstack/rabbitmq-server-0"
Oct 09 13:44:18 crc kubenswrapper[4762]: I1009 13:44:18.295258 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/0ca3d4c1-b9e5-4443-8102-7739602cbd2f-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"0ca3d4c1-b9e5-4443-8102-7739602cbd2f\") " pod="openstack/rabbitmq-server-0"
Oct 09 13:44:18 crc kubenswrapper[4762]: I1009 13:44:18.295280 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/0ca3d4c1-b9e5-4443-8102-7739602cbd2f-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"0ca3d4c1-b9e5-4443-8102-7739602cbd2f\") " pod="openstack/rabbitmq-server-0"
Oct 09 13:44:18 crc kubenswrapper[4762]: I1009 13:44:18.295307 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0ca3d4c1-b9e5-4443-8102-7739602cbd2f-config-data\") pod \"rabbitmq-server-0\" (UID: \"0ca3d4c1-b9e5-4443-8102-7739602cbd2f\") " pod="openstack/rabbitmq-server-0"
Oct 09 13:44:18 crc kubenswrapper[4762]: I1009 13:44:18.295350 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"rabbitmq-server-0\" (UID: \"0ca3d4c1-b9e5-4443-8102-7739602cbd2f\") " pod="openstack/rabbitmq-server-0"
Oct 09 13:44:18 crc kubenswrapper[4762]: I1009 13:44:18.295380 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/0ca3d4c1-b9e5-4443-8102-7739602cbd2f-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"0ca3d4c1-b9e5-4443-8102-7739602cbd2f\") " pod="openstack/rabbitmq-server-0"
Oct 09 13:44:18 crc kubenswrapper[4762]: I1009 13:44:18.295406 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/0ca3d4c1-b9e5-4443-8102-7739602cbd2f-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"0ca3d4c1-b9e5-4443-8102-7739602cbd2f\") " pod="openstack/rabbitmq-server-0"
Oct 09 13:44:18 crc kubenswrapper[4762]: I1009 13:44:18.295424 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/0ca3d4c1-b9e5-4443-8102-7739602cbd2f-server-conf\") pod \"rabbitmq-server-0\" (UID: \"0ca3d4c1-b9e5-4443-8102-7739602cbd2f\") " pod="openstack/rabbitmq-server-0"
Oct 09 13:44:18 crc kubenswrapper[4762]: I1009 13:44:18.295441 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/0ca3d4c1-b9e5-4443-8102-7739602cbd2f-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"0ca3d4c1-b9e5-4443-8102-7739602cbd2f\") " pod="openstack/rabbitmq-server-0"
Oct 09 13:44:18 crc kubenswrapper[4762]: I1009 13:44:18.296024 4762 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"rabbitmq-server-0\" (UID: \"0ca3d4c1-b9e5-4443-8102-7739602cbd2f\") device mount path \"/mnt/openstack/pv06\"" pod="openstack/rabbitmq-server-0"
Oct 09 13:44:18 crc kubenswrapper[4762]: I1009 13:44:18.296304 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/0ca3d4c1-b9e5-4443-8102-7739602cbd2f-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"0ca3d4c1-b9e5-4443-8102-7739602cbd2f\") " pod="openstack/rabbitmq-server-0"
Oct 09 13:44:18 crc kubenswrapper[4762]: I1009 13:44:18.296043 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/0ca3d4c1-b9e5-4443-8102-7739602cbd2f-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"0ca3d4c1-b9e5-4443-8102-7739602cbd2f\") " pod="openstack/rabbitmq-server-0"
Oct 09 13:44:18 crc kubenswrapper[4762]: I1009 13:44:18.296499 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/0ca3d4c1-b9e5-4443-8102-7739602cbd2f-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"0ca3d4c1-b9e5-4443-8102-7739602cbd2f\") " pod="openstack/rabbitmq-server-0"
Oct 09 13:44:18 crc kubenswrapper[4762]: I1009 13:44:18.297276 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/0ca3d4c1-b9e5-4443-8102-7739602cbd2f-server-conf\") pod \"rabbitmq-server-0\" (UID: \"0ca3d4c1-b9e5-4443-8102-7739602cbd2f\") " pod="openstack/rabbitmq-server-0"
Oct 09 13:44:18 crc kubenswrapper[4762]: I1009 13:44:18.297406 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0ca3d4c1-b9e5-4443-8102-7739602cbd2f-config-data\") pod \"rabbitmq-server-0\" (UID: \"0ca3d4c1-b9e5-4443-8102-7739602cbd2f\") " pod="openstack/rabbitmq-server-0"
Oct 09 13:44:18 crc kubenswrapper[4762]: I1009 13:44:18.299230 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/0ca3d4c1-b9e5-4443-8102-7739602cbd2f-pod-info\") pod \"rabbitmq-server-0\" (UID: \"0ca3d4c1-b9e5-4443-8102-7739602cbd2f\") " pod="openstack/rabbitmq-server-0"
Oct 09 13:44:18 crc kubenswrapper[4762]: I1009 13:44:18.301114 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/0ca3d4c1-b9e5-4443-8102-7739602cbd2f-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"0ca3d4c1-b9e5-4443-8102-7739602cbd2f\") " pod="openstack/rabbitmq-server-0"
Oct 09 13:44:18 crc kubenswrapper[4762]: I1009 13:44:18.303734 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/0ca3d4c1-b9e5-4443-8102-7739602cbd2f-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"0ca3d4c1-b9e5-4443-8102-7739602cbd2f\") " pod="openstack/rabbitmq-server-0"
"MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/0ca3d4c1-b9e5-4443-8102-7739602cbd2f-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"0ca3d4c1-b9e5-4443-8102-7739602cbd2f\") " pod="openstack/rabbitmq-server-0" Oct 09 13:44:18 crc kubenswrapper[4762]: I1009 13:44:18.319975 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7t98p\" (UniqueName: \"kubernetes.io/projected/0ca3d4c1-b9e5-4443-8102-7739602cbd2f-kube-api-access-7t98p\") pod \"rabbitmq-server-0\" (UID: \"0ca3d4c1-b9e5-4443-8102-7739602cbd2f\") " pod="openstack/rabbitmq-server-0" Oct 09 13:44:18 crc kubenswrapper[4762]: I1009 13:44:18.341156 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"rabbitmq-server-0\" (UID: \"0ca3d4c1-b9e5-4443-8102-7739602cbd2f\") " pod="openstack/rabbitmq-server-0" Oct 09 13:44:18 crc kubenswrapper[4762]: I1009 13:44:18.416322 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 09 13:44:18 crc kubenswrapper[4762]: I1009 13:44:18.512215 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-k5xnq"] Oct 09 13:44:18 crc kubenswrapper[4762]: I1009 13:44:18.795441 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 09 13:44:18 crc kubenswrapper[4762]: I1009 13:44:18.796955 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Oct 09 13:44:18 crc kubenswrapper[4762]: I1009 13:44:18.799748 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Oct 09 13:44:18 crc kubenswrapper[4762]: I1009 13:44:18.799748 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Oct 09 13:44:18 crc kubenswrapper[4762]: I1009 13:44:18.800038 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Oct 09 13:44:18 crc kubenswrapper[4762]: I1009 13:44:18.800201 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-mfjft" Oct 09 13:44:18 crc kubenswrapper[4762]: I1009 13:44:18.800317 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Oct 09 13:44:18 crc kubenswrapper[4762]: I1009 13:44:18.800413 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Oct 09 13:44:18 crc kubenswrapper[4762]: I1009 13:44:18.805389 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Oct 09 13:44:18 crc kubenswrapper[4762]: I1009 13:44:18.815778 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 09 13:44:18 crc kubenswrapper[4762]: I1009 13:44:18.909071 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c3841f71-5204-469f-b755-e030281725d1-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"c3841f71-5204-469f-b755-e030281725d1\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 13:44:18 crc kubenswrapper[4762]: I1009 13:44:18.912978 4762 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c3841f71-5204-469f-b755-e030281725d1-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"c3841f71-5204-469f-b755-e030281725d1\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 13:44:18 crc kubenswrapper[4762]: I1009 13:44:18.913048 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c3841f71-5204-469f-b755-e030281725d1-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"c3841f71-5204-469f-b755-e030281725d1\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 13:44:18 crc kubenswrapper[4762]: I1009 13:44:18.913120 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c3841f71-5204-469f-b755-e030281725d1-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"c3841f71-5204-469f-b755-e030281725d1\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 13:44:18 crc kubenswrapper[4762]: I1009 13:44:18.913157 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c3841f71-5204-469f-b755-e030281725d1-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"c3841f71-5204-469f-b755-e030281725d1\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 13:44:18 crc kubenswrapper[4762]: I1009 13:44:18.913203 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"c3841f71-5204-469f-b755-e030281725d1\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 13:44:18 crc kubenswrapper[4762]: I1009 13:44:18.913233 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c3841f71-5204-469f-b755-e030281725d1-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"c3841f71-5204-469f-b755-e030281725d1\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 13:44:18 crc kubenswrapper[4762]: I1009 13:44:18.913293 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c3841f71-5204-469f-b755-e030281725d1-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"c3841f71-5204-469f-b755-e030281725d1\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 13:44:18 crc kubenswrapper[4762]: I1009 13:44:18.913344 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/c3841f71-5204-469f-b755-e030281725d1-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"c3841f71-5204-469f-b755-e030281725d1\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 13:44:18 crc kubenswrapper[4762]: I1009 13:44:18.913387 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c6txz\" (UniqueName: \"kubernetes.io/projected/c3841f71-5204-469f-b755-e030281725d1-kube-api-access-c6txz\") pod \"rabbitmq-cell1-server-0\" (UID: \"c3841f71-5204-469f-b755-e030281725d1\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 13:44:18 crc kubenswrapper[4762]: I1009 13:44:18.913451 
4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c3841f71-5204-469f-b755-e030281725d1-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"c3841f71-5204-469f-b755-e030281725d1\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 13:44:18 crc kubenswrapper[4762]: I1009 13:44:18.989549 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-k5xnq" event={"ID":"cb6e7fc4-f16d-49a1-888a-ad4a5c1a23d3","Type":"ContainerStarted","Data":"997feb87c4d79ebf88a3b2efd047b6a7cdff1693b404050db04bb6283ef76d4e"} Oct 09 13:44:19 crc kubenswrapper[4762]: I1009 13:44:19.014443 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c3841f71-5204-469f-b755-e030281725d1-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"c3841f71-5204-469f-b755-e030281725d1\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 13:44:19 crc kubenswrapper[4762]: I1009 13:44:19.014504 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c3841f71-5204-469f-b755-e030281725d1-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"c3841f71-5204-469f-b755-e030281725d1\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 13:44:19 crc kubenswrapper[4762]: I1009 13:44:19.014523 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c3841f71-5204-469f-b755-e030281725d1-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"c3841f71-5204-469f-b755-e030281725d1\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 13:44:19 crc kubenswrapper[4762]: I1009 13:44:19.014543 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c3841f71-5204-469f-b755-e030281725d1-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"c3841f71-5204-469f-b755-e030281725d1\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 13:44:19 crc kubenswrapper[4762]: I1009 13:44:19.014565 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c3841f71-5204-469f-b755-e030281725d1-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"c3841f71-5204-469f-b755-e030281725d1\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 13:44:19 crc kubenswrapper[4762]: I1009 13:44:19.014580 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"c3841f71-5204-469f-b755-e030281725d1\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 13:44:19 crc kubenswrapper[4762]: I1009 13:44:19.014598 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c3841f71-5204-469f-b755-e030281725d1-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"c3841f71-5204-469f-b755-e030281725d1\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 13:44:19 crc kubenswrapper[4762]: I1009 13:44:19.014619 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c3841f71-5204-469f-b755-e030281725d1-server-conf\") pod 
\"rabbitmq-cell1-server-0\" (UID: \"c3841f71-5204-469f-b755-e030281725d1\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 13:44:19 crc kubenswrapper[4762]: I1009 13:44:19.014650 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/c3841f71-5204-469f-b755-e030281725d1-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"c3841f71-5204-469f-b755-e030281725d1\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 13:44:19 crc kubenswrapper[4762]: I1009 13:44:19.014674 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c6txz\" (UniqueName: \"kubernetes.io/projected/c3841f71-5204-469f-b755-e030281725d1-kube-api-access-c6txz\") pod \"rabbitmq-cell1-server-0\" (UID: \"c3841f71-5204-469f-b755-e030281725d1\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 13:44:19 crc kubenswrapper[4762]: I1009 13:44:19.014694 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c3841f71-5204-469f-b755-e030281725d1-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"c3841f71-5204-469f-b755-e030281725d1\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 13:44:19 crc kubenswrapper[4762]: I1009 13:44:19.016544 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c3841f71-5204-469f-b755-e030281725d1-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"c3841f71-5204-469f-b755-e030281725d1\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 13:44:19 crc kubenswrapper[4762]: I1009 13:44:19.016921 4762 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"c3841f71-5204-469f-b755-e030281725d1\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/rabbitmq-cell1-server-0" Oct 09 13:44:19 crc kubenswrapper[4762]: I1009 13:44:19.017105 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c3841f71-5204-469f-b755-e030281725d1-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"c3841f71-5204-469f-b755-e030281725d1\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 13:44:19 crc kubenswrapper[4762]: I1009 13:44:19.017202 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c3841f71-5204-469f-b755-e030281725d1-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"c3841f71-5204-469f-b755-e030281725d1\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 13:44:19 crc kubenswrapper[4762]: I1009 13:44:19.017287 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c3841f71-5204-469f-b755-e030281725d1-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"c3841f71-5204-469f-b755-e030281725d1\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 13:44:19 crc kubenswrapper[4762]: I1009 13:44:19.017784 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c3841f71-5204-469f-b755-e030281725d1-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"c3841f71-5204-469f-b755-e030281725d1\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 13:44:19 crc kubenswrapper[4762]: 
I1009 13:44:19.022118 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c3841f71-5204-469f-b755-e030281725d1-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"c3841f71-5204-469f-b755-e030281725d1\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 13:44:19 crc kubenswrapper[4762]: I1009 13:44:19.023732 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c3841f71-5204-469f-b755-e030281725d1-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"c3841f71-5204-469f-b755-e030281725d1\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 13:44:19 crc kubenswrapper[4762]: I1009 13:44:19.024397 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/c3841f71-5204-469f-b755-e030281725d1-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"c3841f71-5204-469f-b755-e030281725d1\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 13:44:19 crc kubenswrapper[4762]: I1009 13:44:19.024690 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c3841f71-5204-469f-b755-e030281725d1-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"c3841f71-5204-469f-b755-e030281725d1\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 13:44:19 crc kubenswrapper[4762]: I1009 13:44:19.036224 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c6txz\" (UniqueName: \"kubernetes.io/projected/c3841f71-5204-469f-b755-e030281725d1-kube-api-access-c6txz\") pod \"rabbitmq-cell1-server-0\" (UID: \"c3841f71-5204-469f-b755-e030281725d1\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 13:44:19 crc kubenswrapper[4762]: I1009 13:44:19.053610 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 09 13:44:19 crc kubenswrapper[4762]: W1009 13:44:19.063827 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0ca3d4c1_b9e5_4443_8102_7739602cbd2f.slice/crio-775e620beb1c7d647ce166687e7f59bd5039abd62eae5c699def4466175fda6d WatchSource:0}: Error finding container 775e620beb1c7d647ce166687e7f59bd5039abd62eae5c699def4466175fda6d: Status 404 returned error can't find the container with id 775e620beb1c7d647ce166687e7f59bd5039abd62eae5c699def4466175fda6d Oct 09 13:44:19 crc kubenswrapper[4762]: I1009 13:44:19.069252 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"c3841f71-5204-469f-b755-e030281725d1\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 13:44:19 crc kubenswrapper[4762]: I1009 13:44:19.137002 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Oct 09 13:44:19 crc kubenswrapper[4762]: I1009 13:44:19.797170 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 09 13:44:19 crc kubenswrapper[4762]: I1009 13:44:19.988941 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"c3841f71-5204-469f-b755-e030281725d1","Type":"ContainerStarted","Data":"3d9a5ef4476cae012e05302d7f5d02db3213d38e25f45c4a0cebd9b3551c884d"} Oct 09 13:44:19 crc kubenswrapper[4762]: I1009 13:44:19.993282 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"0ca3d4c1-b9e5-4443-8102-7739602cbd2f","Type":"ContainerStarted","Data":"775e620beb1c7d647ce166687e7f59bd5039abd62eae5c699def4466175fda6d"} Oct 09 13:44:20 crc kubenswrapper[4762]: I1009 13:44:20.372768 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"] Oct 09 13:44:20 crc kubenswrapper[4762]: I1009 13:44:20.376513 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Oct 09 13:44:20 crc kubenswrapper[4762]: I1009 13:44:20.386789 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data" Oct 09 13:44:20 crc kubenswrapper[4762]: I1009 13:44:20.386854 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc" Oct 09 13:44:20 crc kubenswrapper[4762]: I1009 13:44:20.387168 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts" Oct 09 13:44:20 crc kubenswrapper[4762]: I1009 13:44:20.387296 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Oct 09 13:44:20 crc kubenswrapper[4762]: I1009 13:44:20.387958 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-qcr4p" Oct 09 13:44:20 crc kubenswrapper[4762]: I1009 13:44:20.393250 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle" Oct 09 13:44:20 crc kubenswrapper[4762]: I1009 13:44:20.393824 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Oct 09 13:44:20 crc kubenswrapper[4762]: I1009 13:44:20.475556 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e38c807b-a632-4a96-b228-c879b07fd461-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"e38c807b-a632-4a96-b228-c879b07fd461\") " pod="openstack/openstack-galera-0" Oct 09 13:44:20 crc kubenswrapper[4762]: I1009 13:44:20.475626 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"openstack-galera-0\" (UID: \"e38c807b-a632-4a96-b228-c879b07fd461\") " pod="openstack/openstack-galera-0" Oct 09 13:44:20 crc kubenswrapper[4762]: I1009 13:44:20.479514 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e38c807b-a632-4a96-b228-c879b07fd461-operator-scripts\") pod \"openstack-galera-0\" (UID: \"e38c807b-a632-4a96-b228-c879b07fd461\") " pod="openstack/openstack-galera-0" Oct 09 13:44:20 crc kubenswrapper[4762]: I1009 13:44:20.479578 4762 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/e38c807b-a632-4a96-b228-c879b07fd461-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"e38c807b-a632-4a96-b228-c879b07fd461\") " pod="openstack/openstack-galera-0" Oct 09 13:44:20 crc kubenswrapper[4762]: I1009 13:44:20.479618 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r65vn\" (UniqueName: \"kubernetes.io/projected/e38c807b-a632-4a96-b228-c879b07fd461-kube-api-access-r65vn\") pod \"openstack-galera-0\" (UID: \"e38c807b-a632-4a96-b228-c879b07fd461\") " pod="openstack/openstack-galera-0" Oct 09 13:44:20 crc kubenswrapper[4762]: I1009 13:44:20.479705 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/e38c807b-a632-4a96-b228-c879b07fd461-config-data-default\") pod \"openstack-galera-0\" (UID: \"e38c807b-a632-4a96-b228-c879b07fd461\") " pod="openstack/openstack-galera-0" Oct 09 13:44:20 crc kubenswrapper[4762]: I1009 13:44:20.479730 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/e38c807b-a632-4a96-b228-c879b07fd461-secrets\") pod \"openstack-galera-0\" (UID: \"e38c807b-a632-4a96-b228-c879b07fd461\") " pod="openstack/openstack-galera-0" Oct 09 13:44:20 crc kubenswrapper[4762]: I1009 13:44:20.479768 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/e38c807b-a632-4a96-b228-c879b07fd461-kolla-config\") pod \"openstack-galera-0\" (UID: \"e38c807b-a632-4a96-b228-c879b07fd461\") " pod="openstack/openstack-galera-0" Oct 09 13:44:20 crc kubenswrapper[4762]: I1009 13:44:20.479805 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/e38c807b-a632-4a96-b228-c879b07fd461-config-data-generated\") pod \"openstack-galera-0\" (UID: \"e38c807b-a632-4a96-b228-c879b07fd461\") " pod="openstack/openstack-galera-0" Oct 09 13:44:20 crc kubenswrapper[4762]: I1009 13:44:20.590406 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e38c807b-a632-4a96-b228-c879b07fd461-operator-scripts\") pod \"openstack-galera-0\" (UID: \"e38c807b-a632-4a96-b228-c879b07fd461\") " pod="openstack/openstack-galera-0" Oct 09 13:44:20 crc kubenswrapper[4762]: I1009 13:44:20.590456 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/e38c807b-a632-4a96-b228-c879b07fd461-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"e38c807b-a632-4a96-b228-c879b07fd461\") " pod="openstack/openstack-galera-0" Oct 09 13:44:20 crc kubenswrapper[4762]: I1009 13:44:20.590488 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r65vn\" (UniqueName: \"kubernetes.io/projected/e38c807b-a632-4a96-b228-c879b07fd461-kube-api-access-r65vn\") pod \"openstack-galera-0\" (UID: \"e38c807b-a632-4a96-b228-c879b07fd461\") " pod="openstack/openstack-galera-0" Oct 09 13:44:20 crc kubenswrapper[4762]: I1009 13:44:20.590535 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"config-data-default\" (UniqueName: \"kubernetes.io/configmap/e38c807b-a632-4a96-b228-c879b07fd461-config-data-default\") pod \"openstack-galera-0\" (UID: \"e38c807b-a632-4a96-b228-c879b07fd461\") " pod="openstack/openstack-galera-0" Oct 09 13:44:20 crc kubenswrapper[4762]: I1009 13:44:20.590561 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/e38c807b-a632-4a96-b228-c879b07fd461-secrets\") pod \"openstack-galera-0\" (UID: \"e38c807b-a632-4a96-b228-c879b07fd461\") " pod="openstack/openstack-galera-0" Oct 09 13:44:20 crc kubenswrapper[4762]: I1009 13:44:20.590591 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/e38c807b-a632-4a96-b228-c879b07fd461-kolla-config\") pod \"openstack-galera-0\" (UID: \"e38c807b-a632-4a96-b228-c879b07fd461\") " pod="openstack/openstack-galera-0" Oct 09 13:44:20 crc kubenswrapper[4762]: I1009 13:44:20.590618 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/e38c807b-a632-4a96-b228-c879b07fd461-config-data-generated\") pod \"openstack-galera-0\" (UID: \"e38c807b-a632-4a96-b228-c879b07fd461\") " pod="openstack/openstack-galera-0" Oct 09 13:44:20 crc kubenswrapper[4762]: I1009 13:44:20.590679 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e38c807b-a632-4a96-b228-c879b07fd461-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"e38c807b-a632-4a96-b228-c879b07fd461\") " pod="openstack/openstack-galera-0" Oct 09 13:44:20 crc kubenswrapper[4762]: I1009 13:44:20.590709 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"openstack-galera-0\" (UID: \"e38c807b-a632-4a96-b228-c879b07fd461\") " pod="openstack/openstack-galera-0" Oct 09 13:44:20 crc kubenswrapper[4762]: I1009 13:44:20.591094 4762 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"openstack-galera-0\" (UID: \"e38c807b-a632-4a96-b228-c879b07fd461\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/openstack-galera-0" Oct 09 13:44:20 crc kubenswrapper[4762]: I1009 13:44:20.592607 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e38c807b-a632-4a96-b228-c879b07fd461-operator-scripts\") pod \"openstack-galera-0\" (UID: \"e38c807b-a632-4a96-b228-c879b07fd461\") " pod="openstack/openstack-galera-0" Oct 09 13:44:20 crc kubenswrapper[4762]: I1009 13:44:20.594139 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/e38c807b-a632-4a96-b228-c879b07fd461-config-data-generated\") pod \"openstack-galera-0\" (UID: \"e38c807b-a632-4a96-b228-c879b07fd461\") " pod="openstack/openstack-galera-0" Oct 09 13:44:20 crc kubenswrapper[4762]: I1009 13:44:20.595030 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/e38c807b-a632-4a96-b228-c879b07fd461-kolla-config\") pod \"openstack-galera-0\" (UID: \"e38c807b-a632-4a96-b228-c879b07fd461\") " pod="openstack/openstack-galera-0" Oct 09 13:44:20 crc 
kubenswrapper[4762]: I1009 13:44:20.596727 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/e38c807b-a632-4a96-b228-c879b07fd461-config-data-default\") pod \"openstack-galera-0\" (UID: \"e38c807b-a632-4a96-b228-c879b07fd461\") " pod="openstack/openstack-galera-0" Oct 09 13:44:20 crc kubenswrapper[4762]: I1009 13:44:20.601000 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/e38c807b-a632-4a96-b228-c879b07fd461-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"e38c807b-a632-4a96-b228-c879b07fd461\") " pod="openstack/openstack-galera-0" Oct 09 13:44:20 crc kubenswrapper[4762]: I1009 13:44:20.601019 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/e38c807b-a632-4a96-b228-c879b07fd461-secrets\") pod \"openstack-galera-0\" (UID: \"e38c807b-a632-4a96-b228-c879b07fd461\") " pod="openstack/openstack-galera-0" Oct 09 13:44:20 crc kubenswrapper[4762]: I1009 13:44:20.611716 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r65vn\" (UniqueName: \"kubernetes.io/projected/e38c807b-a632-4a96-b228-c879b07fd461-kube-api-access-r65vn\") pod \"openstack-galera-0\" (UID: \"e38c807b-a632-4a96-b228-c879b07fd461\") " pod="openstack/openstack-galera-0" Oct 09 13:44:20 crc kubenswrapper[4762]: I1009 13:44:20.619481 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e38c807b-a632-4a96-b228-c879b07fd461-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"e38c807b-a632-4a96-b228-c879b07fd461\") " pod="openstack/openstack-galera-0" Oct 09 13:44:20 crc kubenswrapper[4762]: I1009 13:44:20.620019 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"openstack-galera-0\" (UID: \"e38c807b-a632-4a96-b228-c879b07fd461\") " pod="openstack/openstack-galera-0" Oct 09 13:44:20 crc kubenswrapper[4762]: I1009 13:44:20.727041 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Oct 09 13:44:21 crc kubenswrapper[4762]: I1009 13:44:21.342777 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Oct 09 13:44:21 crc kubenswrapper[4762]: I1009 13:44:21.579462 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Oct 09 13:44:21 crc kubenswrapper[4762]: I1009 13:44:21.581335 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-cell1-galera-0" Oct 09 13:44:21 crc kubenswrapper[4762]: I1009 13:44:21.584950 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Oct 09 13:44:21 crc kubenswrapper[4762]: I1009 13:44:21.585401 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Oct 09 13:44:21 crc kubenswrapper[4762]: I1009 13:44:21.585829 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-fshjw" Oct 09 13:44:21 crc kubenswrapper[4762]: I1009 13:44:21.586185 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Oct 09 13:44:21 crc kubenswrapper[4762]: I1009 13:44:21.596484 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Oct 09 13:44:21 crc kubenswrapper[4762]: I1009 13:44:21.713336 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4dc0b86c-7af2-4320-9b39-735213fb6609-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"4dc0b86c-7af2-4320-9b39-735213fb6609\") " pod="openstack/openstack-cell1-galera-0" Oct 09 13:44:21 crc kubenswrapper[4762]: I1009 13:44:21.713388 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/4dc0b86c-7af2-4320-9b39-735213fb6609-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"4dc0b86c-7af2-4320-9b39-735213fb6609\") " pod="openstack/openstack-cell1-galera-0" Oct 09 13:44:21 crc kubenswrapper[4762]: I1009 13:44:21.713417 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/4dc0b86c-7af2-4320-9b39-735213fb6609-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"4dc0b86c-7af2-4320-9b39-735213fb6609\") " pod="openstack/openstack-cell1-galera-0" Oct 09 13:44:21 crc kubenswrapper[4762]: I1009 13:44:21.713438 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4dc0b86c-7af2-4320-9b39-735213fb6609-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"4dc0b86c-7af2-4320-9b39-735213fb6609\") " pod="openstack/openstack-cell1-galera-0" Oct 09 13:44:21 crc kubenswrapper[4762]: I1009 13:44:21.713479 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/4dc0b86c-7af2-4320-9b39-735213fb6609-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"4dc0b86c-7af2-4320-9b39-735213fb6609\") " pod="openstack/openstack-cell1-galera-0" Oct 09 13:44:21 crc kubenswrapper[4762]: I1009 13:44:21.713535 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/4dc0b86c-7af2-4320-9b39-735213fb6609-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"4dc0b86c-7af2-4320-9b39-735213fb6609\") " pod="openstack/openstack-cell1-galera-0" Oct 09 13:44:21 crc kubenswrapper[4762]: I1009 13:44:21.713556 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t8k9m\" (UniqueName: 
\"kubernetes.io/projected/4dc0b86c-7af2-4320-9b39-735213fb6609-kube-api-access-t8k9m\") pod \"openstack-cell1-galera-0\" (UID: \"4dc0b86c-7af2-4320-9b39-735213fb6609\") " pod="openstack/openstack-cell1-galera-0" Oct 09 13:44:21 crc kubenswrapper[4762]: I1009 13:44:21.713584 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/4dc0b86c-7af2-4320-9b39-735213fb6609-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"4dc0b86c-7af2-4320-9b39-735213fb6609\") " pod="openstack/openstack-cell1-galera-0" Oct 09 13:44:21 crc kubenswrapper[4762]: I1009 13:44:21.713618 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"openstack-cell1-galera-0\" (UID: \"4dc0b86c-7af2-4320-9b39-735213fb6609\") " pod="openstack/openstack-cell1-galera-0" Oct 09 13:44:21 crc kubenswrapper[4762]: I1009 13:44:21.814956 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/4dc0b86c-7af2-4320-9b39-735213fb6609-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"4dc0b86c-7af2-4320-9b39-735213fb6609\") " pod="openstack/openstack-cell1-galera-0" Oct 09 13:44:21 crc kubenswrapper[4762]: I1009 13:44:21.815010 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/4dc0b86c-7af2-4320-9b39-735213fb6609-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"4dc0b86c-7af2-4320-9b39-735213fb6609\") " pod="openstack/openstack-cell1-galera-0" Oct 09 13:44:21 crc kubenswrapper[4762]: I1009 13:44:21.815035 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4dc0b86c-7af2-4320-9b39-735213fb6609-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"4dc0b86c-7af2-4320-9b39-735213fb6609\") " pod="openstack/openstack-cell1-galera-0" Oct 09 13:44:21 crc kubenswrapper[4762]: I1009 13:44:21.815076 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/4dc0b86c-7af2-4320-9b39-735213fb6609-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"4dc0b86c-7af2-4320-9b39-735213fb6609\") " pod="openstack/openstack-cell1-galera-0" Oct 09 13:44:21 crc kubenswrapper[4762]: I1009 13:44:21.815124 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/4dc0b86c-7af2-4320-9b39-735213fb6609-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"4dc0b86c-7af2-4320-9b39-735213fb6609\") " pod="openstack/openstack-cell1-galera-0" Oct 09 13:44:21 crc kubenswrapper[4762]: I1009 13:44:21.815146 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t8k9m\" (UniqueName: \"kubernetes.io/projected/4dc0b86c-7af2-4320-9b39-735213fb6609-kube-api-access-t8k9m\") pod \"openstack-cell1-galera-0\" (UID: \"4dc0b86c-7af2-4320-9b39-735213fb6609\") " pod="openstack/openstack-cell1-galera-0" Oct 09 13:44:21 crc kubenswrapper[4762]: I1009 13:44:21.815173 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/4dc0b86c-7af2-4320-9b39-735213fb6609-galera-tls-certs\") 
pod \"openstack-cell1-galera-0\" (UID: \"4dc0b86c-7af2-4320-9b39-735213fb6609\") " pod="openstack/openstack-cell1-galera-0" Oct 09 13:44:21 crc kubenswrapper[4762]: I1009 13:44:21.815210 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"openstack-cell1-galera-0\" (UID: \"4dc0b86c-7af2-4320-9b39-735213fb6609\") " pod="openstack/openstack-cell1-galera-0" Oct 09 13:44:21 crc kubenswrapper[4762]: I1009 13:44:21.815312 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4dc0b86c-7af2-4320-9b39-735213fb6609-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"4dc0b86c-7af2-4320-9b39-735213fb6609\") " pod="openstack/openstack-cell1-galera-0" Oct 09 13:44:21 crc kubenswrapper[4762]: I1009 13:44:21.817375 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4dc0b86c-7af2-4320-9b39-735213fb6609-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"4dc0b86c-7af2-4320-9b39-735213fb6609\") " pod="openstack/openstack-cell1-galera-0" Oct 09 13:44:21 crc kubenswrapper[4762]: I1009 13:44:21.817701 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/4dc0b86c-7af2-4320-9b39-735213fb6609-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"4dc0b86c-7af2-4320-9b39-735213fb6609\") " pod="openstack/openstack-cell1-galera-0" Oct 09 13:44:21 crc kubenswrapper[4762]: I1009 13:44:21.817708 4762 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"openstack-cell1-galera-0\" (UID: \"4dc0b86c-7af2-4320-9b39-735213fb6609\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/openstack-cell1-galera-0" Oct 09 13:44:21 crc kubenswrapper[4762]: I1009 13:44:21.817755 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/4dc0b86c-7af2-4320-9b39-735213fb6609-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"4dc0b86c-7af2-4320-9b39-735213fb6609\") " pod="openstack/openstack-cell1-galera-0" Oct 09 13:44:21 crc kubenswrapper[4762]: I1009 13:44:21.818239 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/4dc0b86c-7af2-4320-9b39-735213fb6609-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"4dc0b86c-7af2-4320-9b39-735213fb6609\") " pod="openstack/openstack-cell1-galera-0" Oct 09 13:44:21 crc kubenswrapper[4762]: I1009 13:44:21.834323 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4dc0b86c-7af2-4320-9b39-735213fb6609-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"4dc0b86c-7af2-4320-9b39-735213fb6609\") " pod="openstack/openstack-cell1-galera-0" Oct 09 13:44:21 crc kubenswrapper[4762]: I1009 13:44:21.834549 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/4dc0b86c-7af2-4320-9b39-735213fb6609-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"4dc0b86c-7af2-4320-9b39-735213fb6609\") " pod="openstack/openstack-cell1-galera-0" Oct 09 13:44:21 crc 
kubenswrapper[4762]: I1009 13:44:21.849294 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t8k9m\" (UniqueName: \"kubernetes.io/projected/4dc0b86c-7af2-4320-9b39-735213fb6609-kube-api-access-t8k9m\") pod \"openstack-cell1-galera-0\" (UID: \"4dc0b86c-7af2-4320-9b39-735213fb6609\") " pod="openstack/openstack-cell1-galera-0" Oct 09 13:44:21 crc kubenswrapper[4762]: I1009 13:44:21.863432 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Oct 09 13:44:21 crc kubenswrapper[4762]: I1009 13:44:21.863619 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/4dc0b86c-7af2-4320-9b39-735213fb6609-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"4dc0b86c-7af2-4320-9b39-735213fb6609\") " pod="openstack/openstack-cell1-galera-0" Oct 09 13:44:21 crc kubenswrapper[4762]: I1009 13:44:21.864747 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Oct 09 13:44:21 crc kubenswrapper[4762]: I1009 13:44:21.876229 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"openstack-cell1-galera-0\" (UID: \"4dc0b86c-7af2-4320-9b39-735213fb6609\") " pod="openstack/openstack-cell1-galera-0" Oct 09 13:44:21 crc kubenswrapper[4762]: I1009 13:44:21.882613 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc" Oct 09 13:44:21 crc kubenswrapper[4762]: I1009 13:44:21.882927 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-v74sc" Oct 09 13:44:21 crc kubenswrapper[4762]: I1009 13:44:21.886010 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Oct 09 13:44:21 crc kubenswrapper[4762]: I1009 13:44:21.895840 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Oct 09 13:44:21 crc kubenswrapper[4762]: I1009 13:44:21.924597 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-cell1-galera-0" Oct 09 13:44:22 crc kubenswrapper[4762]: I1009 13:44:22.025576 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/920bd70a-a807-4848-baf7-5bfc033838ee-kolla-config\") pod \"memcached-0\" (UID: \"920bd70a-a807-4848-baf7-5bfc033838ee\") " pod="openstack/memcached-0" Oct 09 13:44:22 crc kubenswrapper[4762]: I1009 13:44:22.025801 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/920bd70a-a807-4848-baf7-5bfc033838ee-config-data\") pod \"memcached-0\" (UID: \"920bd70a-a807-4848-baf7-5bfc033838ee\") " pod="openstack/memcached-0" Oct 09 13:44:22 crc kubenswrapper[4762]: I1009 13:44:22.025832 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/920bd70a-a807-4848-baf7-5bfc033838ee-combined-ca-bundle\") pod \"memcached-0\" (UID: \"920bd70a-a807-4848-baf7-5bfc033838ee\") " pod="openstack/memcached-0" Oct 09 13:44:22 crc kubenswrapper[4762]: I1009 13:44:22.025893 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-htmw5\" (UniqueName: \"kubernetes.io/projected/920bd70a-a807-4848-baf7-5bfc033838ee-kube-api-access-htmw5\") pod \"memcached-0\" (UID: \"920bd70a-a807-4848-baf7-5bfc033838ee\") " pod="openstack/memcached-0" Oct 09 13:44:22 crc kubenswrapper[4762]: I1009 13:44:22.025922 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/920bd70a-a807-4848-baf7-5bfc033838ee-memcached-tls-certs\") pod \"memcached-0\" (UID: \"920bd70a-a807-4848-baf7-5bfc033838ee\") " pod="openstack/memcached-0" Oct 09 13:44:22 crc kubenswrapper[4762]: I1009 13:44:22.127268 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/920bd70a-a807-4848-baf7-5bfc033838ee-config-data\") pod \"memcached-0\" (UID: \"920bd70a-a807-4848-baf7-5bfc033838ee\") " pod="openstack/memcached-0" Oct 09 13:44:22 crc kubenswrapper[4762]: I1009 13:44:22.127340 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/920bd70a-a807-4848-baf7-5bfc033838ee-combined-ca-bundle\") pod \"memcached-0\" (UID: \"920bd70a-a807-4848-baf7-5bfc033838ee\") " pod="openstack/memcached-0" Oct 09 13:44:22 crc kubenswrapper[4762]: I1009 13:44:22.127393 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-htmw5\" (UniqueName: \"kubernetes.io/projected/920bd70a-a807-4848-baf7-5bfc033838ee-kube-api-access-htmw5\") pod \"memcached-0\" (UID: \"920bd70a-a807-4848-baf7-5bfc033838ee\") " pod="openstack/memcached-0" Oct 09 13:44:22 crc kubenswrapper[4762]: I1009 13:44:22.127428 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/920bd70a-a807-4848-baf7-5bfc033838ee-memcached-tls-certs\") pod \"memcached-0\" (UID: \"920bd70a-a807-4848-baf7-5bfc033838ee\") " pod="openstack/memcached-0" Oct 09 13:44:22 crc kubenswrapper[4762]: I1009 13:44:22.127504 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" 
(UniqueName: \"kubernetes.io/configmap/920bd70a-a807-4848-baf7-5bfc033838ee-kolla-config\") pod \"memcached-0\" (UID: \"920bd70a-a807-4848-baf7-5bfc033838ee\") " pod="openstack/memcached-0" Oct 09 13:44:22 crc kubenswrapper[4762]: I1009 13:44:22.128652 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/920bd70a-a807-4848-baf7-5bfc033838ee-kolla-config\") pod \"memcached-0\" (UID: \"920bd70a-a807-4848-baf7-5bfc033838ee\") " pod="openstack/memcached-0" Oct 09 13:44:22 crc kubenswrapper[4762]: I1009 13:44:22.129204 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/920bd70a-a807-4848-baf7-5bfc033838ee-config-data\") pod \"memcached-0\" (UID: \"920bd70a-a807-4848-baf7-5bfc033838ee\") " pod="openstack/memcached-0" Oct 09 13:44:22 crc kubenswrapper[4762]: I1009 13:44:22.144171 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/920bd70a-a807-4848-baf7-5bfc033838ee-memcached-tls-certs\") pod \"memcached-0\" (UID: \"920bd70a-a807-4848-baf7-5bfc033838ee\") " pod="openstack/memcached-0" Oct 09 13:44:22 crc kubenswrapper[4762]: I1009 13:44:22.144254 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/920bd70a-a807-4848-baf7-5bfc033838ee-combined-ca-bundle\") pod \"memcached-0\" (UID: \"920bd70a-a807-4848-baf7-5bfc033838ee\") " pod="openstack/memcached-0" Oct 09 13:44:22 crc kubenswrapper[4762]: I1009 13:44:22.147711 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-htmw5\" (UniqueName: \"kubernetes.io/projected/920bd70a-a807-4848-baf7-5bfc033838ee-kube-api-access-htmw5\") pod \"memcached-0\" (UID: \"920bd70a-a807-4848-baf7-5bfc033838ee\") " pod="openstack/memcached-0" Oct 09 13:44:22 crc kubenswrapper[4762]: I1009 13:44:22.256399 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Oct 09 13:44:23 crc kubenswrapper[4762]: I1009 13:44:23.908496 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Oct 09 13:44:23 crc kubenswrapper[4762]: I1009 13:44:23.910848 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 09 13:44:23 crc kubenswrapper[4762]: I1009 13:44:23.914784 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-jdds6" Oct 09 13:44:23 crc kubenswrapper[4762]: I1009 13:44:23.919192 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 09 13:44:24 crc kubenswrapper[4762]: I1009 13:44:24.073328 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r5lq4\" (UniqueName: \"kubernetes.io/projected/ef0320a3-ddc0-42cb-9042-33b4b77994ff-kube-api-access-r5lq4\") pod \"kube-state-metrics-0\" (UID: \"ef0320a3-ddc0-42cb-9042-33b4b77994ff\") " pod="openstack/kube-state-metrics-0" Oct 09 13:44:24 crc kubenswrapper[4762]: I1009 13:44:24.176932 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r5lq4\" (UniqueName: \"kubernetes.io/projected/ef0320a3-ddc0-42cb-9042-33b4b77994ff-kube-api-access-r5lq4\") pod \"kube-state-metrics-0\" (UID: \"ef0320a3-ddc0-42cb-9042-33b4b77994ff\") " pod="openstack/kube-state-metrics-0" Oct 09 13:44:24 crc kubenswrapper[4762]: I1009 13:44:24.195068 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r5lq4\" (UniqueName: \"kubernetes.io/projected/ef0320a3-ddc0-42cb-9042-33b4b77994ff-kube-api-access-r5lq4\") pod \"kube-state-metrics-0\" (UID: \"ef0320a3-ddc0-42cb-9042-33b4b77994ff\") " pod="openstack/kube-state-metrics-0" Oct 09 13:44:24 crc kubenswrapper[4762]: I1009 13:44:24.241598 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 09 13:44:26 crc kubenswrapper[4762]: W1009 13:44:26.755974 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode38c807b_a632_4a96_b228_c879b07fd461.slice/crio-e9658bd7dbbbb81ae32e84f83cd68312caed346a1942cbb68070ed6bc06ac419 WatchSource:0}: Error finding container e9658bd7dbbbb81ae32e84f83cd68312caed346a1942cbb68070ed6bc06ac419: Status 404 returned error can't find the container with id e9658bd7dbbbb81ae32e84f83cd68312caed346a1942cbb68070ed6bc06ac419 Oct 09 13:44:27 crc kubenswrapper[4762]: I1009 13:44:27.108973 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"e38c807b-a632-4a96-b228-c879b07fd461","Type":"ContainerStarted","Data":"e9658bd7dbbbb81ae32e84f83cd68312caed346a1942cbb68070ed6bc06ac419"} Oct 09 13:44:28 crc kubenswrapper[4762]: I1009 13:44:28.085425 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-nknqr"] Oct 09 13:44:28 crc kubenswrapper[4762]: I1009 13:44:28.086585 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-nknqr" Oct 09 13:44:28 crc kubenswrapper[4762]: I1009 13:44:28.100117 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts" Oct 09 13:44:28 crc kubenswrapper[4762]: I1009 13:44:28.100431 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs" Oct 09 13:44:28 crc kubenswrapper[4762]: I1009 13:44:28.100684 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-tvf4n" Oct 09 13:44:28 crc kubenswrapper[4762]: I1009 13:44:28.118889 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-4rsdz"] Oct 09 13:44:28 crc kubenswrapper[4762]: I1009 13:44:28.121093 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-4rsdz" Oct 09 13:44:28 crc kubenswrapper[4762]: I1009 13:44:28.128928 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-nknqr"] Oct 09 13:44:28 crc kubenswrapper[4762]: I1009 13:44:28.140416 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-4rsdz"] Oct 09 13:44:28 crc kubenswrapper[4762]: I1009 13:44:28.252989 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/3d5e35ba-6450-49d9-907a-8a4f879a1b0f-var-log-ovn\") pod \"ovn-controller-nknqr\" (UID: \"3d5e35ba-6450-49d9-907a-8a4f879a1b0f\") " pod="openstack/ovn-controller-nknqr" Oct 09 13:44:28 crc kubenswrapper[4762]: I1009 13:44:28.253071 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/3d5e35ba-6450-49d9-907a-8a4f879a1b0f-var-run\") pod \"ovn-controller-nknqr\" (UID: \"3d5e35ba-6450-49d9-907a-8a4f879a1b0f\") " pod="openstack/ovn-controller-nknqr" Oct 09 13:44:28 crc kubenswrapper[4762]: I1009 13:44:28.253109 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/851f6b4a-bac4-4c7e-8d7b-46c7513269d9-var-run\") pod \"ovn-controller-ovs-4rsdz\" (UID: \"851f6b4a-bac4-4c7e-8d7b-46c7513269d9\") " pod="openstack/ovn-controller-ovs-4rsdz" Oct 09 13:44:28 crc kubenswrapper[4762]: I1009 13:44:28.253138 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3d5e35ba-6450-49d9-907a-8a4f879a1b0f-scripts\") pod \"ovn-controller-nknqr\" (UID: \"3d5e35ba-6450-49d9-907a-8a4f879a1b0f\") " pod="openstack/ovn-controller-nknqr" Oct 09 13:44:28 crc kubenswrapper[4762]: I1009 13:44:28.253213 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d5e35ba-6450-49d9-907a-8a4f879a1b0f-combined-ca-bundle\") pod \"ovn-controller-nknqr\" (UID: \"3d5e35ba-6450-49d9-907a-8a4f879a1b0f\") " pod="openstack/ovn-controller-nknqr" Oct 09 13:44:28 crc kubenswrapper[4762]: I1009 13:44:28.253278 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/3d5e35ba-6450-49d9-907a-8a4f879a1b0f-var-run-ovn\") pod \"ovn-controller-nknqr\" (UID: \"3d5e35ba-6450-49d9-907a-8a4f879a1b0f\") " pod="openstack/ovn-controller-nknqr" 
Oct 09 13:44:28 crc kubenswrapper[4762]: I1009 13:44:28.253316 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t5wkw\" (UniqueName: \"kubernetes.io/projected/3d5e35ba-6450-49d9-907a-8a4f879a1b0f-kube-api-access-t5wkw\") pod \"ovn-controller-nknqr\" (UID: \"3d5e35ba-6450-49d9-907a-8a4f879a1b0f\") " pod="openstack/ovn-controller-nknqr"
Oct 09 13:44:28 crc kubenswrapper[4762]: I1009 13:44:28.253340 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/851f6b4a-bac4-4c7e-8d7b-46c7513269d9-etc-ovs\") pod \"ovn-controller-ovs-4rsdz\" (UID: \"851f6b4a-bac4-4c7e-8d7b-46c7513269d9\") " pod="openstack/ovn-controller-ovs-4rsdz"
Oct 09 13:44:28 crc kubenswrapper[4762]: I1009 13:44:28.253377 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qlw5s\" (UniqueName: \"kubernetes.io/projected/851f6b4a-bac4-4c7e-8d7b-46c7513269d9-kube-api-access-qlw5s\") pod \"ovn-controller-ovs-4rsdz\" (UID: \"851f6b4a-bac4-4c7e-8d7b-46c7513269d9\") " pod="openstack/ovn-controller-ovs-4rsdz"
Oct 09 13:44:28 crc kubenswrapper[4762]: I1009 13:44:28.253405 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/3d5e35ba-6450-49d9-907a-8a4f879a1b0f-ovn-controller-tls-certs\") pod \"ovn-controller-nknqr\" (UID: \"3d5e35ba-6450-49d9-907a-8a4f879a1b0f\") " pod="openstack/ovn-controller-nknqr"
Oct 09 13:44:28 crc kubenswrapper[4762]: I1009 13:44:28.253565 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/851f6b4a-bac4-4c7e-8d7b-46c7513269d9-var-log\") pod \"ovn-controller-ovs-4rsdz\" (UID: \"851f6b4a-bac4-4c7e-8d7b-46c7513269d9\") " pod="openstack/ovn-controller-ovs-4rsdz"
Oct 09 13:44:28 crc kubenswrapper[4762]: I1009 13:44:28.253621 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/851f6b4a-bac4-4c7e-8d7b-46c7513269d9-var-lib\") pod \"ovn-controller-ovs-4rsdz\" (UID: \"851f6b4a-bac4-4c7e-8d7b-46c7513269d9\") " pod="openstack/ovn-controller-ovs-4rsdz"
Oct 09 13:44:28 crc kubenswrapper[4762]: I1009 13:44:28.253730 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/851f6b4a-bac4-4c7e-8d7b-46c7513269d9-scripts\") pod \"ovn-controller-ovs-4rsdz\" (UID: \"851f6b4a-bac4-4c7e-8d7b-46c7513269d9\") " pod="openstack/ovn-controller-ovs-4rsdz"
Oct 09 13:44:28 crc kubenswrapper[4762]: I1009 13:44:28.369599 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/3d5e35ba-6450-49d9-907a-8a4f879a1b0f-var-log-ovn\") pod \"ovn-controller-nknqr\" (UID: \"3d5e35ba-6450-49d9-907a-8a4f879a1b0f\") " pod="openstack/ovn-controller-nknqr"
Oct 09 13:44:28 crc kubenswrapper[4762]: I1009 13:44:28.369668 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/3d5e35ba-6450-49d9-907a-8a4f879a1b0f-var-run\") pod \"ovn-controller-nknqr\" (UID: \"3d5e35ba-6450-49d9-907a-8a4f879a1b0f\") " pod="openstack/ovn-controller-nknqr"
Oct 09 13:44:28 crc kubenswrapper[4762]: I1009 13:44:28.369690 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/851f6b4a-bac4-4c7e-8d7b-46c7513269d9-var-run\") pod \"ovn-controller-ovs-4rsdz\" (UID: \"851f6b4a-bac4-4c7e-8d7b-46c7513269d9\") " pod="openstack/ovn-controller-ovs-4rsdz"
Oct 09 13:44:28 crc kubenswrapper[4762]: I1009 13:44:28.369708 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3d5e35ba-6450-49d9-907a-8a4f879a1b0f-scripts\") pod \"ovn-controller-nknqr\" (UID: \"3d5e35ba-6450-49d9-907a-8a4f879a1b0f\") " pod="openstack/ovn-controller-nknqr"
Oct 09 13:44:28 crc kubenswrapper[4762]: I1009 13:44:28.369737 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d5e35ba-6450-49d9-907a-8a4f879a1b0f-combined-ca-bundle\") pod \"ovn-controller-nknqr\" (UID: \"3d5e35ba-6450-49d9-907a-8a4f879a1b0f\") " pod="openstack/ovn-controller-nknqr"
Oct 09 13:44:28 crc kubenswrapper[4762]: I1009 13:44:28.369760 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/3d5e35ba-6450-49d9-907a-8a4f879a1b0f-var-run-ovn\") pod \"ovn-controller-nknqr\" (UID: \"3d5e35ba-6450-49d9-907a-8a4f879a1b0f\") " pod="openstack/ovn-controller-nknqr"
Oct 09 13:44:28 crc kubenswrapper[4762]: I1009 13:44:28.369778 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t5wkw\" (UniqueName: \"kubernetes.io/projected/3d5e35ba-6450-49d9-907a-8a4f879a1b0f-kube-api-access-t5wkw\") pod \"ovn-controller-nknqr\" (UID: \"3d5e35ba-6450-49d9-907a-8a4f879a1b0f\") " pod="openstack/ovn-controller-nknqr"
Oct 09 13:44:28 crc kubenswrapper[4762]: I1009 13:44:28.369795 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/851f6b4a-bac4-4c7e-8d7b-46c7513269d9-etc-ovs\") pod \"ovn-controller-ovs-4rsdz\" (UID: \"851f6b4a-bac4-4c7e-8d7b-46c7513269d9\") " pod="openstack/ovn-controller-ovs-4rsdz"
Oct 09 13:44:28 crc kubenswrapper[4762]: I1009 13:44:28.369822 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qlw5s\" (UniqueName: \"kubernetes.io/projected/851f6b4a-bac4-4c7e-8d7b-46c7513269d9-kube-api-access-qlw5s\") pod \"ovn-controller-ovs-4rsdz\" (UID: \"851f6b4a-bac4-4c7e-8d7b-46c7513269d9\") " pod="openstack/ovn-controller-ovs-4rsdz"
Oct 09 13:44:28 crc kubenswrapper[4762]: I1009 13:44:28.369840 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/3d5e35ba-6450-49d9-907a-8a4f879a1b0f-ovn-controller-tls-certs\") pod \"ovn-controller-nknqr\" (UID: \"3d5e35ba-6450-49d9-907a-8a4f879a1b0f\") " pod="openstack/ovn-controller-nknqr"
Oct 09 13:44:28 crc kubenswrapper[4762]: I1009 13:44:28.369862 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/851f6b4a-bac4-4c7e-8d7b-46c7513269d9-var-log\") pod \"ovn-controller-ovs-4rsdz\" (UID: \"851f6b4a-bac4-4c7e-8d7b-46c7513269d9\") " pod="openstack/ovn-controller-ovs-4rsdz"
Oct 09 13:44:28 crc kubenswrapper[4762]: I1009 13:44:28.369876 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/851f6b4a-bac4-4c7e-8d7b-46c7513269d9-var-lib\") pod \"ovn-controller-ovs-4rsdz\" (UID: \"851f6b4a-bac4-4c7e-8d7b-46c7513269d9\") " pod="openstack/ovn-controller-ovs-4rsdz"
Oct 09 13:44:28 crc kubenswrapper[4762]: I1009 13:44:28.369897 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/851f6b4a-bac4-4c7e-8d7b-46c7513269d9-scripts\") pod \"ovn-controller-ovs-4rsdz\" (UID: \"851f6b4a-bac4-4c7e-8d7b-46c7513269d9\") " pod="openstack/ovn-controller-ovs-4rsdz"
Oct 09 13:44:28 crc kubenswrapper[4762]: I1009 13:44:28.372029 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/851f6b4a-bac4-4c7e-8d7b-46c7513269d9-etc-ovs\") pod \"ovn-controller-ovs-4rsdz\" (UID: \"851f6b4a-bac4-4c7e-8d7b-46c7513269d9\") " pod="openstack/ovn-controller-ovs-4rsdz"
Oct 09 13:44:28 crc kubenswrapper[4762]: I1009 13:44:28.372165 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/3d5e35ba-6450-49d9-907a-8a4f879a1b0f-var-log-ovn\") pod \"ovn-controller-nknqr\" (UID: \"3d5e35ba-6450-49d9-907a-8a4f879a1b0f\") " pod="openstack/ovn-controller-nknqr"
Oct 09 13:44:28 crc kubenswrapper[4762]: I1009 13:44:28.372237 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/851f6b4a-bac4-4c7e-8d7b-46c7513269d9-var-log\") pod \"ovn-controller-ovs-4rsdz\" (UID: \"851f6b4a-bac4-4c7e-8d7b-46c7513269d9\") " pod="openstack/ovn-controller-ovs-4rsdz"
Oct 09 13:44:28 crc kubenswrapper[4762]: I1009 13:44:28.372282 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/3d5e35ba-6450-49d9-907a-8a4f879a1b0f-var-run-ovn\") pod \"ovn-controller-nknqr\" (UID: \"3d5e35ba-6450-49d9-907a-8a4f879a1b0f\") " pod="openstack/ovn-controller-nknqr"
Oct 09 13:44:28 crc kubenswrapper[4762]: I1009 13:44:28.372340 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/851f6b4a-bac4-4c7e-8d7b-46c7513269d9-var-lib\") pod \"ovn-controller-ovs-4rsdz\" (UID: \"851f6b4a-bac4-4c7e-8d7b-46c7513269d9\") " pod="openstack/ovn-controller-ovs-4rsdz"
Oct 09 13:44:28 crc kubenswrapper[4762]: I1009 13:44:28.372385 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/3d5e35ba-6450-49d9-907a-8a4f879a1b0f-var-run\") pod \"ovn-controller-nknqr\" (UID: \"3d5e35ba-6450-49d9-907a-8a4f879a1b0f\") " pod="openstack/ovn-controller-nknqr"
Oct 09 13:44:28 crc kubenswrapper[4762]: I1009 13:44:28.372389 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/851f6b4a-bac4-4c7e-8d7b-46c7513269d9-var-run\") pod \"ovn-controller-ovs-4rsdz\" (UID: \"851f6b4a-bac4-4c7e-8d7b-46c7513269d9\") " pod="openstack/ovn-controller-ovs-4rsdz"
Oct 09 13:44:28 crc kubenswrapper[4762]: I1009 13:44:28.376511 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3d5e35ba-6450-49d9-907a-8a4f879a1b0f-scripts\") pod \"ovn-controller-nknqr\" (UID: \"3d5e35ba-6450-49d9-907a-8a4f879a1b0f\") " pod="openstack/ovn-controller-nknqr"
Oct 09 13:44:28 crc kubenswrapper[4762]: I1009 13:44:28.378002 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/3d5e35ba-6450-49d9-907a-8a4f879a1b0f-ovn-controller-tls-certs\") pod \"ovn-controller-nknqr\" (UID: \"3d5e35ba-6450-49d9-907a-8a4f879a1b0f\") " pod="openstack/ovn-controller-nknqr"
Oct 09 13:44:28 crc kubenswrapper[4762]: I1009 13:44:28.382553 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/851f6b4a-bac4-4c7e-8d7b-46c7513269d9-scripts\") pod \"ovn-controller-ovs-4rsdz\" (UID: \"851f6b4a-bac4-4c7e-8d7b-46c7513269d9\") " pod="openstack/ovn-controller-ovs-4rsdz"
Oct 09 13:44:28 crc kubenswrapper[4762]: I1009 13:44:28.391447 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d5e35ba-6450-49d9-907a-8a4f879a1b0f-combined-ca-bundle\") pod \"ovn-controller-nknqr\" (UID: \"3d5e35ba-6450-49d9-907a-8a4f879a1b0f\") " pod="openstack/ovn-controller-nknqr"
Oct 09 13:44:28 crc kubenswrapper[4762]: I1009 13:44:28.393197 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t5wkw\" (UniqueName: \"kubernetes.io/projected/3d5e35ba-6450-49d9-907a-8a4f879a1b0f-kube-api-access-t5wkw\") pod \"ovn-controller-nknqr\" (UID: \"3d5e35ba-6450-49d9-907a-8a4f879a1b0f\") " pod="openstack/ovn-controller-nknqr"
Oct 09 13:44:28 crc kubenswrapper[4762]: I1009 13:44:28.396977 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qlw5s\" (UniqueName: \"kubernetes.io/projected/851f6b4a-bac4-4c7e-8d7b-46c7513269d9-kube-api-access-qlw5s\") pod \"ovn-controller-ovs-4rsdz\" (UID: \"851f6b4a-bac4-4c7e-8d7b-46c7513269d9\") " pod="openstack/ovn-controller-ovs-4rsdz"
Oct 09 13:44:28 crc kubenswrapper[4762]: I1009 13:44:28.417902 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-nknqr"
Oct 09 13:44:28 crc kubenswrapper[4762]: I1009 13:44:28.447475 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-4rsdz"
Oct 09 13:44:29 crc kubenswrapper[4762]: I1009 13:44:29.964273 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"]
Oct 09 13:44:29 crc kubenswrapper[4762]: I1009 13:44:29.966081 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0"
Oct 09 13:44:29 crc kubenswrapper[4762]: I1009 13:44:29.969453 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts"
Oct 09 13:44:29 crc kubenswrapper[4762]: I1009 13:44:29.969733 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs"
Oct 09 13:44:29 crc kubenswrapper[4762]: I1009 13:44:29.970151 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config"
Oct 09 13:44:29 crc kubenswrapper[4762]: I1009 13:44:29.972511 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics"
Oct 09 13:44:29 crc kubenswrapper[4762]: I1009 13:44:29.975947 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-vdv5l"
Oct 09 13:44:29 crc kubenswrapper[4762]: I1009 13:44:29.986279 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"]
Oct 09 13:44:30 crc kubenswrapper[4762]: I1009 13:44:30.094737 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/73168d57-30d8-4389-aa93-cacc6b07f705-config\") pod \"ovsdbserver-nb-0\" (UID: \"73168d57-30d8-4389-aa93-cacc6b07f705\") " pod="openstack/ovsdbserver-nb-0"
Oct 09 13:44:30 crc kubenswrapper[4762]: I1009 13:44:30.094816 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/73168d57-30d8-4389-aa93-cacc6b07f705-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"73168d57-30d8-4389-aa93-cacc6b07f705\") " pod="openstack/ovsdbserver-nb-0"
Oct 09 13:44:30 crc kubenswrapper[4762]: I1009 13:44:30.094847 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/73168d57-30d8-4389-aa93-cacc6b07f705-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"73168d57-30d8-4389-aa93-cacc6b07f705\") " pod="openstack/ovsdbserver-nb-0"
Oct 09 13:44:30 crc kubenswrapper[4762]: I1009 13:44:30.094972 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/73168d57-30d8-4389-aa93-cacc6b07f705-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"73168d57-30d8-4389-aa93-cacc6b07f705\") " pod="openstack/ovsdbserver-nb-0"
Oct 09 13:44:30 crc kubenswrapper[4762]: I1009 13:44:30.095063 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f92mx\" (UniqueName: \"kubernetes.io/projected/73168d57-30d8-4389-aa93-cacc6b07f705-kube-api-access-f92mx\") pod \"ovsdbserver-nb-0\" (UID: \"73168d57-30d8-4389-aa93-cacc6b07f705\") " pod="openstack/ovsdbserver-nb-0"
Oct 09 13:44:30 crc kubenswrapper[4762]: I1009 13:44:30.095094 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/73168d57-30d8-4389-aa93-cacc6b07f705-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"73168d57-30d8-4389-aa93-cacc6b07f705\") " pod="openstack/ovsdbserver-nb-0"
Oct 09 13:44:30 crc kubenswrapper[4762]: I1009 13:44:30.095114 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"ovsdbserver-nb-0\" (UID: \"73168d57-30d8-4389-aa93-cacc6b07f705\") " pod="openstack/ovsdbserver-nb-0"
Oct 09 13:44:30 crc kubenswrapper[4762]: I1009 13:44:30.095140 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73168d57-30d8-4389-aa93-cacc6b07f705-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"73168d57-30d8-4389-aa93-cacc6b07f705\") " pod="openstack/ovsdbserver-nb-0"
Oct 09 13:44:30 crc kubenswrapper[4762]: I1009 13:44:30.196242 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f92mx\" (UniqueName: \"kubernetes.io/projected/73168d57-30d8-4389-aa93-cacc6b07f705-kube-api-access-f92mx\") pod \"ovsdbserver-nb-0\" (UID: \"73168d57-30d8-4389-aa93-cacc6b07f705\") " pod="openstack/ovsdbserver-nb-0"
Oct 09 13:44:30 crc kubenswrapper[4762]: I1009 13:44:30.196308 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/73168d57-30d8-4389-aa93-cacc6b07f705-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"73168d57-30d8-4389-aa93-cacc6b07f705\") " pod="openstack/ovsdbserver-nb-0"
Oct 09 13:44:30 crc kubenswrapper[4762]: I1009 13:44:30.196332 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"ovsdbserver-nb-0\" (UID: \"73168d57-30d8-4389-aa93-cacc6b07f705\") " pod="openstack/ovsdbserver-nb-0"
Oct 09 13:44:30 crc kubenswrapper[4762]: I1009 13:44:30.196353 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73168d57-30d8-4389-aa93-cacc6b07f705-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"73168d57-30d8-4389-aa93-cacc6b07f705\") " pod="openstack/ovsdbserver-nb-0"
Oct 09 13:44:30 crc kubenswrapper[4762]: I1009 13:44:30.196392 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/73168d57-30d8-4389-aa93-cacc6b07f705-config\") pod \"ovsdbserver-nb-0\" (UID: \"73168d57-30d8-4389-aa93-cacc6b07f705\") " pod="openstack/ovsdbserver-nb-0"
Oct 09 13:44:30 crc kubenswrapper[4762]: I1009 13:44:30.196411 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/73168d57-30d8-4389-aa93-cacc6b07f705-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"73168d57-30d8-4389-aa93-cacc6b07f705\") " pod="openstack/ovsdbserver-nb-0"
Oct 09 13:44:30 crc kubenswrapper[4762]: I1009 13:44:30.196426 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/73168d57-30d8-4389-aa93-cacc6b07f705-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"73168d57-30d8-4389-aa93-cacc6b07f705\") " pod="openstack/ovsdbserver-nb-0"
Oct 09 13:44:30 crc kubenswrapper[4762]: I1009 13:44:30.196454 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/73168d57-30d8-4389-aa93-cacc6b07f705-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"73168d57-30d8-4389-aa93-cacc6b07f705\") " pod="openstack/ovsdbserver-nb-0"
Oct 09 13:44:30 crc kubenswrapper[4762]: I1009
13:44:30.197034 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/73168d57-30d8-4389-aa93-cacc6b07f705-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"73168d57-30d8-4389-aa93-cacc6b07f705\") " pod="openstack/ovsdbserver-nb-0" Oct 09 13:44:30 crc kubenswrapper[4762]: I1009 13:44:30.197812 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/73168d57-30d8-4389-aa93-cacc6b07f705-config\") pod \"ovsdbserver-nb-0\" (UID: \"73168d57-30d8-4389-aa93-cacc6b07f705\") " pod="openstack/ovsdbserver-nb-0" Oct 09 13:44:30 crc kubenswrapper[4762]: I1009 13:44:30.197941 4762 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"ovsdbserver-nb-0\" (UID: \"73168d57-30d8-4389-aa93-cacc6b07f705\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/ovsdbserver-nb-0" Oct 09 13:44:30 crc kubenswrapper[4762]: I1009 13:44:30.198970 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/73168d57-30d8-4389-aa93-cacc6b07f705-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"73168d57-30d8-4389-aa93-cacc6b07f705\") " pod="openstack/ovsdbserver-nb-0" Oct 09 13:44:30 crc kubenswrapper[4762]: I1009 13:44:30.203362 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/73168d57-30d8-4389-aa93-cacc6b07f705-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"73168d57-30d8-4389-aa93-cacc6b07f705\") " pod="openstack/ovsdbserver-nb-0" Oct 09 13:44:30 crc kubenswrapper[4762]: I1009 13:44:30.204075 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/73168d57-30d8-4389-aa93-cacc6b07f705-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"73168d57-30d8-4389-aa93-cacc6b07f705\") " pod="openstack/ovsdbserver-nb-0" Oct 09 13:44:30 crc kubenswrapper[4762]: I1009 13:44:30.207176 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73168d57-30d8-4389-aa93-cacc6b07f705-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"73168d57-30d8-4389-aa93-cacc6b07f705\") " pod="openstack/ovsdbserver-nb-0" Oct 09 13:44:30 crc kubenswrapper[4762]: I1009 13:44:30.215044 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f92mx\" (UniqueName: \"kubernetes.io/projected/73168d57-30d8-4389-aa93-cacc6b07f705-kube-api-access-f92mx\") pod \"ovsdbserver-nb-0\" (UID: \"73168d57-30d8-4389-aa93-cacc6b07f705\") " pod="openstack/ovsdbserver-nb-0" Oct 09 13:44:30 crc kubenswrapper[4762]: I1009 13:44:30.221328 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"ovsdbserver-nb-0\" (UID: \"73168d57-30d8-4389-aa93-cacc6b07f705\") " pod="openstack/ovsdbserver-nb-0" Oct 09 13:44:30 crc kubenswrapper[4762]: I1009 13:44:30.290627 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-0" Oct 09 13:44:31 crc kubenswrapper[4762]: I1009 13:44:31.559843 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"] Oct 09 13:44:31 crc kubenswrapper[4762]: I1009 13:44:31.561329 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Oct 09 13:44:31 crc kubenswrapper[4762]: I1009 13:44:31.566310 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-55mlx" Oct 09 13:44:31 crc kubenswrapper[4762]: I1009 13:44:31.566624 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Oct 09 13:44:31 crc kubenswrapper[4762]: I1009 13:44:31.566926 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Oct 09 13:44:31 crc kubenswrapper[4762]: I1009 13:44:31.567182 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs" Oct 09 13:44:31 crc kubenswrapper[4762]: I1009 13:44:31.576118 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Oct 09 13:44:31 crc kubenswrapper[4762]: I1009 13:44:31.720938 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/de9e8701-0980-4e1d-beb6-bc897f6a3e5f-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"de9e8701-0980-4e1d-beb6-bc897f6a3e5f\") " pod="openstack/ovsdbserver-sb-0" Oct 09 13:44:31 crc kubenswrapper[4762]: I1009 13:44:31.721005 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de9e8701-0980-4e1d-beb6-bc897f6a3e5f-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"de9e8701-0980-4e1d-beb6-bc897f6a3e5f\") " pod="openstack/ovsdbserver-sb-0" Oct 09 13:44:31 crc kubenswrapper[4762]: I1009 13:44:31.721053 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/de9e8701-0980-4e1d-beb6-bc897f6a3e5f-config\") pod \"ovsdbserver-sb-0\" (UID: \"de9e8701-0980-4e1d-beb6-bc897f6a3e5f\") " pod="openstack/ovsdbserver-sb-0" Oct 09 13:44:31 crc kubenswrapper[4762]: I1009 13:44:31.721110 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/de9e8701-0980-4e1d-beb6-bc897f6a3e5f-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"de9e8701-0980-4e1d-beb6-bc897f6a3e5f\") " pod="openstack/ovsdbserver-sb-0" Oct 09 13:44:31 crc kubenswrapper[4762]: I1009 13:44:31.721170 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/de9e8701-0980-4e1d-beb6-bc897f6a3e5f-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"de9e8701-0980-4e1d-beb6-bc897f6a3e5f\") " pod="openstack/ovsdbserver-sb-0" Oct 09 13:44:31 crc kubenswrapper[4762]: I1009 13:44:31.721197 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"ovsdbserver-sb-0\" (UID: \"de9e8701-0980-4e1d-beb6-bc897f6a3e5f\") " pod="openstack/ovsdbserver-sb-0" Oct 09 13:44:31 crc 
kubenswrapper[4762]: I1009 13:44:31.721220 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lgkz4\" (UniqueName: \"kubernetes.io/projected/de9e8701-0980-4e1d-beb6-bc897f6a3e5f-kube-api-access-lgkz4\") pod \"ovsdbserver-sb-0\" (UID: \"de9e8701-0980-4e1d-beb6-bc897f6a3e5f\") " pod="openstack/ovsdbserver-sb-0" Oct 09 13:44:31 crc kubenswrapper[4762]: I1009 13:44:31.721243 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/de9e8701-0980-4e1d-beb6-bc897f6a3e5f-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"de9e8701-0980-4e1d-beb6-bc897f6a3e5f\") " pod="openstack/ovsdbserver-sb-0" Oct 09 13:44:31 crc kubenswrapper[4762]: I1009 13:44:31.822946 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/de9e8701-0980-4e1d-beb6-bc897f6a3e5f-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"de9e8701-0980-4e1d-beb6-bc897f6a3e5f\") " pod="openstack/ovsdbserver-sb-0" Oct 09 13:44:31 crc kubenswrapper[4762]: I1009 13:44:31.823021 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/de9e8701-0980-4e1d-beb6-bc897f6a3e5f-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"de9e8701-0980-4e1d-beb6-bc897f6a3e5f\") " pod="openstack/ovsdbserver-sb-0" Oct 09 13:44:31 crc kubenswrapper[4762]: I1009 13:44:31.823039 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"ovsdbserver-sb-0\" (UID: \"de9e8701-0980-4e1d-beb6-bc897f6a3e5f\") " pod="openstack/ovsdbserver-sb-0" Oct 09 13:44:31 crc kubenswrapper[4762]: I1009 13:44:31.823057 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lgkz4\" (UniqueName: \"kubernetes.io/projected/de9e8701-0980-4e1d-beb6-bc897f6a3e5f-kube-api-access-lgkz4\") pod \"ovsdbserver-sb-0\" (UID: \"de9e8701-0980-4e1d-beb6-bc897f6a3e5f\") " pod="openstack/ovsdbserver-sb-0" Oct 09 13:44:31 crc kubenswrapper[4762]: I1009 13:44:31.823074 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/de9e8701-0980-4e1d-beb6-bc897f6a3e5f-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"de9e8701-0980-4e1d-beb6-bc897f6a3e5f\") " pod="openstack/ovsdbserver-sb-0" Oct 09 13:44:31 crc kubenswrapper[4762]: I1009 13:44:31.823099 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/de9e8701-0980-4e1d-beb6-bc897f6a3e5f-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"de9e8701-0980-4e1d-beb6-bc897f6a3e5f\") " pod="openstack/ovsdbserver-sb-0" Oct 09 13:44:31 crc kubenswrapper[4762]: I1009 13:44:31.823129 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de9e8701-0980-4e1d-beb6-bc897f6a3e5f-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"de9e8701-0980-4e1d-beb6-bc897f6a3e5f\") " pod="openstack/ovsdbserver-sb-0" Oct 09 13:44:31 crc kubenswrapper[4762]: I1009 13:44:31.823161 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/de9e8701-0980-4e1d-beb6-bc897f6a3e5f-config\") pod \"ovsdbserver-sb-0\" (UID: \"de9e8701-0980-4e1d-beb6-bc897f6a3e5f\") " pod="openstack/ovsdbserver-sb-0" Oct 09 13:44:31 crc kubenswrapper[4762]: I1009 13:44:31.824751 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/de9e8701-0980-4e1d-beb6-bc897f6a3e5f-config\") pod \"ovsdbserver-sb-0\" (UID: \"de9e8701-0980-4e1d-beb6-bc897f6a3e5f\") " pod="openstack/ovsdbserver-sb-0" Oct 09 13:44:31 crc kubenswrapper[4762]: I1009 13:44:31.824828 4762 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"ovsdbserver-sb-0\" (UID: \"de9e8701-0980-4e1d-beb6-bc897f6a3e5f\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/ovsdbserver-sb-0" Oct 09 13:44:31 crc kubenswrapper[4762]: I1009 13:44:31.825060 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/de9e8701-0980-4e1d-beb6-bc897f6a3e5f-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"de9e8701-0980-4e1d-beb6-bc897f6a3e5f\") " pod="openstack/ovsdbserver-sb-0" Oct 09 13:44:31 crc kubenswrapper[4762]: I1009 13:44:31.825951 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/de9e8701-0980-4e1d-beb6-bc897f6a3e5f-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"de9e8701-0980-4e1d-beb6-bc897f6a3e5f\") " pod="openstack/ovsdbserver-sb-0" Oct 09 13:44:31 crc kubenswrapper[4762]: I1009 13:44:31.834124 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/de9e8701-0980-4e1d-beb6-bc897f6a3e5f-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"de9e8701-0980-4e1d-beb6-bc897f6a3e5f\") " pod="openstack/ovsdbserver-sb-0" Oct 09 13:44:31 crc kubenswrapper[4762]: I1009 13:44:31.834261 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/de9e8701-0980-4e1d-beb6-bc897f6a3e5f-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"de9e8701-0980-4e1d-beb6-bc897f6a3e5f\") " pod="openstack/ovsdbserver-sb-0" Oct 09 13:44:31 crc kubenswrapper[4762]: I1009 13:44:31.834522 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de9e8701-0980-4e1d-beb6-bc897f6a3e5f-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"de9e8701-0980-4e1d-beb6-bc897f6a3e5f\") " pod="openstack/ovsdbserver-sb-0" Oct 09 13:44:31 crc kubenswrapper[4762]: I1009 13:44:31.850123 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lgkz4\" (UniqueName: \"kubernetes.io/projected/de9e8701-0980-4e1d-beb6-bc897f6a3e5f-kube-api-access-lgkz4\") pod \"ovsdbserver-sb-0\" (UID: \"de9e8701-0980-4e1d-beb6-bc897f6a3e5f\") " pod="openstack/ovsdbserver-sb-0" Oct 09 13:44:31 crc kubenswrapper[4762]: I1009 13:44:31.860135 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"ovsdbserver-sb-0\" (UID: \"de9e8701-0980-4e1d-beb6-bc897f6a3e5f\") " pod="openstack/ovsdbserver-sb-0" Oct 09 13:44:31 crc kubenswrapper[4762]: I1009 13:44:31.890002 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-0" Oct 09 13:44:52 crc kubenswrapper[4762]: E1009 13:44:52.420739 4762 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified" Oct 09 13:44:52 crc kubenswrapper[4762]: E1009 13:44:52.421415 4762 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:setup-container,Image:quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified,Command:[sh -c cp /tmp/erlang-cookie-secret/.erlang.cookie /var/lib/rabbitmq/.erlang.cookie && chmod 600 /var/lib/rabbitmq/.erlang.cookie ; cp /tmp/rabbitmq-plugins/enabled_plugins /operator/enabled_plugins ; echo '[default]' > /var/lib/rabbitmq/.rabbitmqadmin.conf && sed -e 's/default_user/username/' -e 's/default_pass/password/' /tmp/default_user.conf >> /var/lib/rabbitmq/.rabbitmqadmin.conf && chmod 600 /var/lib/rabbitmq/.rabbitmqadmin.conf ; sleep 30],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Requests:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:plugins-conf,ReadOnly:false,MountPath:/tmp/rabbitmq-plugins/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-erlang-cookie,ReadOnly:false,MountPath:/var/lib/rabbitmq/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:erlang-cookie-secret,ReadOnly:false,MountPath:/tmp/erlang-cookie-secret/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-plugins,ReadOnly:false,MountPath:/operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:persistence,ReadOnly:false,MountPath:/var/lib/rabbitmq/mnesia/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-confd,ReadOnly:false,MountPath:/tmp/default_user.conf,SubPath:default_user.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-c6txz,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cell1-server-0_openstack(c3841f71-5204-469f-b755-e030281725d1): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 09 13:44:52 crc kubenswrapper[4762]: E1009 13:44:52.422623 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" 
pod="openstack/rabbitmq-cell1-server-0" podUID="c3841f71-5204-469f-b755-e030281725d1" Oct 09 13:44:53 crc kubenswrapper[4762]: E1009 13:44:53.229288 4762 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Oct 09 13:44:53 crc kubenswrapper[4762]: E1009 13:44:53.229529 4762 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n68chd6h679hbfh55fhc6h5ffh5d8h94h56ch589hb4hc5h57bh677hcdh655h8dh667h675h654h66ch567h8fh659h5b4h675h566h55bh54h67dh6dq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-9697c,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-666b6646f7-cmztv_openstack(9935b4b6-437d-4d16-951c-a36c15ba5257): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 09 13:44:53 crc kubenswrapper[4762]: E1009 13:44:53.230951 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-666b6646f7-cmztv" podUID="9935b4b6-437d-4d16-951c-a36c15ba5257" Oct 09 13:44:53 crc kubenswrapper[4762]: E1009 13:44:53.297443 4762 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Oct 09 13:44:53 crc kubenswrapper[4762]: E1009 13:44:53.297685 4762 kuberuntime_manager.go:1274] "Unhandled Error" err="init container 
&Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:ndfhb5h667h568h584h5f9h58dh565h664h587h597h577h64bh5c4h66fh647hbdh68ch5c5h68dh686h5f7h64hd7hc6h55fh57bh98h57fh87h5fh57fq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-g7kmg,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-78dd6ddcc-wmdhb_openstack(42bb36e2-06db-4094-9888-fb1e75596d17): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 09 13:44:53 crc kubenswrapper[4762]: E1009 13:44:53.299856 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-78dd6ddcc-wmdhb" podUID="42bb36e2-06db-4094-9888-fb1e75596d17" Oct 09 13:44:53 crc kubenswrapper[4762]: E1009 13:44:53.308242 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified\\\"\"" pod="openstack/rabbitmq-cell1-server-0" podUID="c3841f71-5204-469f-b755-e030281725d1" Oct 09 13:44:53 crc kubenswrapper[4762]: E1009 13:44:53.311987 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified\\\"\"" pod="openstack/dnsmasq-dns-666b6646f7-cmztv" podUID="9935b4b6-437d-4d16-951c-a36c15ba5257" Oct 09 13:44:53 crc kubenswrapper[4762]: E1009 13:44:53.384404 4762 log.go:32] "PullImage from image service failed" err="rpc error: code = 
Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified" Oct 09 13:44:53 crc kubenswrapper[4762]: E1009 13:44:53.384931 4762 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:setup-container,Image:quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified,Command:[sh -c cp /tmp/erlang-cookie-secret/.erlang.cookie /var/lib/rabbitmq/.erlang.cookie && chmod 600 /var/lib/rabbitmq/.erlang.cookie ; cp /tmp/rabbitmq-plugins/enabled_plugins /operator/enabled_plugins ; echo '[default]' > /var/lib/rabbitmq/.rabbitmqadmin.conf && sed -e 's/default_user/username/' -e 's/default_pass/password/' /tmp/default_user.conf >> /var/lib/rabbitmq/.rabbitmqadmin.conf && chmod 600 /var/lib/rabbitmq/.rabbitmqadmin.conf ; sleep 30],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Requests:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:plugins-conf,ReadOnly:false,MountPath:/tmp/rabbitmq-plugins/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-erlang-cookie,ReadOnly:false,MountPath:/var/lib/rabbitmq/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:erlang-cookie-secret,ReadOnly:false,MountPath:/tmp/erlang-cookie-secret/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-plugins,ReadOnly:false,MountPath:/operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:persistence,ReadOnly:false,MountPath:/var/lib/rabbitmq/mnesia/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-confd,ReadOnly:false,MountPath:/tmp/default_user.conf,SubPath:default_user.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-7t98p,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-server-0_openstack(0ca3d4c1-b9e5-4443-8102-7739602cbd2f): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 09 13:44:53 crc kubenswrapper[4762]: E1009 13:44:53.388653 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/rabbitmq-server-0" podUID="0ca3d4c1-b9e5-4443-8102-7739602cbd2f" Oct 09 13:44:53 crc kubenswrapper[4762]: E1009 13:44:53.456442 4762 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled 
desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Oct 09 13:44:53 crc kubenswrapper[4762]: E1009 13:44:53.456580 4762 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nffh5bdhf4h5f8h79h55h77h58fh56dh7bh6fh578hbch55dh68h56bhd9h65dh57ch658hc9h566h666h688h58h65dh684h5d7h6ch575h5d6h88q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-8f5nr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-675f4bcbfc-ndjxc_openstack(f335f513-ab5c-436e-9390-84315674ff94): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 09 13:44:53 crc kubenswrapper[4762]: E1009 13:44:53.457838 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-675f4bcbfc-ndjxc" podUID="f335f513-ab5c-436e-9390-84315674ff94" Oct 09 13:44:53 crc kubenswrapper[4762]: E1009 13:44:53.565671 4762 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Oct 09 13:44:53 crc kubenswrapper[4762]: E1009 13:44:53.565845 4762 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n659h4h664hbh658h587h67ch89h587h8fh679hc6hf9h55fh644h5d5h698h68dh5cdh5ffh669h54ch9h689hb8hd4h5bfhd8h5d7h5fh665h574q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-n6qmw,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-57d769cc4f-k5xnq_openstack(cb6e7fc4-f16d-49a1-888a-ad4a5c1a23d3): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 09 13:44:53 crc kubenswrapper[4762]: E1009 13:44:53.566962 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-57d769cc4f-k5xnq" podUID="cb6e7fc4-f16d-49a1-888a-ad4a5c1a23d3" Oct 09 13:44:54 crc kubenswrapper[4762]: E1009 13:44:54.314790 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified\\\"\"" pod="openstack/rabbitmq-server-0" podUID="0ca3d4c1-b9e5-4443-8102-7739602cbd2f" Oct 09 13:44:54 crc kubenswrapper[4762]: E1009 13:44:54.315615 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified\\\"\"" pod="openstack/dnsmasq-dns-57d769cc4f-k5xnq" podUID="cb6e7fc4-f16d-49a1-888a-ad4a5c1a23d3" Oct 09 13:44:55 crc kubenswrapper[4762]: E1009 13:44:55.176436 4762 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-mariadb:current-podified" Oct 09 13:44:55 crc kubenswrapper[4762]: E1009 13:44:55.176594 4762 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:mysql-bootstrap,Image:quay.io/podified-antelope-centos9/openstack-mariadb:current-podified,Command:[bash 
/var/lib/operator-scripts/mysql_bootstrap.sh],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:True,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:DB_ROOT_PASSWORD,Value:,ValueFrom:&EnvVarSource{FieldRef:nil,ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:&SecretKeySelector{LocalObjectReference:LocalObjectReference{Name:osp-secret,},Key:DbRootPassword,Optional:nil,},},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:mysql-db,ReadOnly:false,MountPath:/var/lib/mysql,SubPath:mysql,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-default,ReadOnly:true,MountPath:/var/lib/config-data/default,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-generated,ReadOnly:false,MountPath:/var/lib/config-data/generated,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:secrets,ReadOnly:true,MountPath:/var/lib/secrets,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:operator-scripts,ReadOnly:true,MountPath:/var/lib/operator-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kolla-config,ReadOnly:true,MountPath:/var/lib/kolla/config_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-r65vn,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-galera-0_openstack(e38c807b-a632-4a96-b228-c879b07fd461): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 09 13:44:55 crc kubenswrapper[4762]: E1009 13:44:55.177735 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/openstack-galera-0" podUID="e38c807b-a632-4a96-b228-c879b07fd461" Oct 09 13:44:55 crc kubenswrapper[4762]: I1009 13:44:55.320286 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-wmdhb" Oct 09 13:44:55 crc kubenswrapper[4762]: I1009 13:44:55.322276 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-wmdhb" event={"ID":"42bb36e2-06db-4094-9888-fb1e75596d17","Type":"ContainerDied","Data":"e3f52ab8681fd4be37ce7293a0b4e75e193567d453c8af6c1c59d30aa514c46c"} Oct 09 13:44:55 crc kubenswrapper[4762]: I1009 13:44:55.323567 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-ndjxc" event={"ID":"f335f513-ab5c-436e-9390-84315674ff94","Type":"ContainerDied","Data":"61539a3fc60a63b3995226166eef6a75cd0267fc7d082ae1b105df210eac37f2"} Oct 09 13:44:55 crc kubenswrapper[4762]: I1009 13:44:55.323597 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="61539a3fc60a63b3995226166eef6a75cd0267fc7d082ae1b105df210eac37f2" Oct 09 13:44:55 crc kubenswrapper[4762]: E1009 13:44:55.326803 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-mariadb:current-podified\\\"\"" pod="openstack/openstack-galera-0" podUID="e38c807b-a632-4a96-b228-c879b07fd461" Oct 09 13:44:55 crc kubenswrapper[4762]: I1009 13:44:55.331762 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-ndjxc" Oct 09 13:44:55 crc kubenswrapper[4762]: I1009 13:44:55.482729 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8f5nr\" (UniqueName: \"kubernetes.io/projected/f335f513-ab5c-436e-9390-84315674ff94-kube-api-access-8f5nr\") pod \"f335f513-ab5c-436e-9390-84315674ff94\" (UID: \"f335f513-ab5c-436e-9390-84315674ff94\") " Oct 09 13:44:55 crc kubenswrapper[4762]: I1009 13:44:55.482849 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/42bb36e2-06db-4094-9888-fb1e75596d17-config\") pod \"42bb36e2-06db-4094-9888-fb1e75596d17\" (UID: \"42bb36e2-06db-4094-9888-fb1e75596d17\") " Oct 09 13:44:55 crc kubenswrapper[4762]: I1009 13:44:55.482915 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g7kmg\" (UniqueName: \"kubernetes.io/projected/42bb36e2-06db-4094-9888-fb1e75596d17-kube-api-access-g7kmg\") pod \"42bb36e2-06db-4094-9888-fb1e75596d17\" (UID: \"42bb36e2-06db-4094-9888-fb1e75596d17\") " Oct 09 13:44:55 crc kubenswrapper[4762]: I1009 13:44:55.483036 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/42bb36e2-06db-4094-9888-fb1e75596d17-dns-svc\") pod \"42bb36e2-06db-4094-9888-fb1e75596d17\" (UID: \"42bb36e2-06db-4094-9888-fb1e75596d17\") " Oct 09 13:44:55 crc kubenswrapper[4762]: I1009 13:44:55.483086 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f335f513-ab5c-436e-9390-84315674ff94-config\") pod \"f335f513-ab5c-436e-9390-84315674ff94\" (UID: \"f335f513-ab5c-436e-9390-84315674ff94\") " Oct 09 13:44:55 crc kubenswrapper[4762]: I1009 13:44:55.483817 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/42bb36e2-06db-4094-9888-fb1e75596d17-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "42bb36e2-06db-4094-9888-fb1e75596d17" (UID: 
"42bb36e2-06db-4094-9888-fb1e75596d17"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:44:55 crc kubenswrapper[4762]: I1009 13:44:55.483976 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f335f513-ab5c-436e-9390-84315674ff94-config" (OuterVolumeSpecName: "config") pod "f335f513-ab5c-436e-9390-84315674ff94" (UID: "f335f513-ab5c-436e-9390-84315674ff94"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:44:55 crc kubenswrapper[4762]: I1009 13:44:55.484071 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/42bb36e2-06db-4094-9888-fb1e75596d17-config" (OuterVolumeSpecName: "config") pod "42bb36e2-06db-4094-9888-fb1e75596d17" (UID: "42bb36e2-06db-4094-9888-fb1e75596d17"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:44:55 crc kubenswrapper[4762]: I1009 13:44:55.494740 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f335f513-ab5c-436e-9390-84315674ff94-kube-api-access-8f5nr" (OuterVolumeSpecName: "kube-api-access-8f5nr") pod "f335f513-ab5c-436e-9390-84315674ff94" (UID: "f335f513-ab5c-436e-9390-84315674ff94"). InnerVolumeSpecName "kube-api-access-8f5nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:44:55 crc kubenswrapper[4762]: I1009 13:44:55.495247 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/42bb36e2-06db-4094-9888-fb1e75596d17-kube-api-access-g7kmg" (OuterVolumeSpecName: "kube-api-access-g7kmg") pod "42bb36e2-06db-4094-9888-fb1e75596d17" (UID: "42bb36e2-06db-4094-9888-fb1e75596d17"). InnerVolumeSpecName "kube-api-access-g7kmg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:44:55 crc kubenswrapper[4762]: I1009 13:44:55.499175 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Oct 09 13:44:55 crc kubenswrapper[4762]: I1009 13:44:55.585193 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g7kmg\" (UniqueName: \"kubernetes.io/projected/42bb36e2-06db-4094-9888-fb1e75596d17-kube-api-access-g7kmg\") on node \"crc\" DevicePath \"\"" Oct 09 13:44:55 crc kubenswrapper[4762]: I1009 13:44:55.585242 4762 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/42bb36e2-06db-4094-9888-fb1e75596d17-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 09 13:44:55 crc kubenswrapper[4762]: I1009 13:44:55.585251 4762 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f335f513-ab5c-436e-9390-84315674ff94-config\") on node \"crc\" DevicePath \"\"" Oct 09 13:44:55 crc kubenswrapper[4762]: I1009 13:44:55.585262 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8f5nr\" (UniqueName: \"kubernetes.io/projected/f335f513-ab5c-436e-9390-84315674ff94-kube-api-access-8f5nr\") on node \"crc\" DevicePath \"\"" Oct 09 13:44:55 crc kubenswrapper[4762]: I1009 13:44:55.585273 4762 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/42bb36e2-06db-4094-9888-fb1e75596d17-config\") on node \"crc\" DevicePath \"\"" Oct 09 13:44:55 crc kubenswrapper[4762]: I1009 13:44:55.775224 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 09 13:44:55 crc kubenswrapper[4762]: W1009 13:44:55.776781 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podef0320a3_ddc0_42cb_9042_33b4b77994ff.slice/crio-d5cc3e0ce1a759c00b21f89bcbc61168b82aa52e8425afa7df6ed9b8da450f04 WatchSource:0}: Error finding container d5cc3e0ce1a759c00b21f89bcbc61168b82aa52e8425afa7df6ed9b8da450f04: Status 404 returned error can't find the container with id d5cc3e0ce1a759c00b21f89bcbc61168b82aa52e8425afa7df6ed9b8da450f04 Oct 09 13:44:55 crc kubenswrapper[4762]: I1009 13:44:55.782609 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Oct 09 13:44:55 crc kubenswrapper[4762]: I1009 13:44:55.875461 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Oct 09 13:44:55 crc kubenswrapper[4762]: W1009 13:44:55.875958 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podde9e8701_0980_4e1d_beb6_bc897f6a3e5f.slice/crio-afadb27054548ec5c1a1ea1a13baf54bdf344c50057a7d9b08e7e7f59856187c WatchSource:0}: Error finding container afadb27054548ec5c1a1ea1a13baf54bdf344c50057a7d9b08e7e7f59856187c: Status 404 returned error can't find the container with id afadb27054548ec5c1a1ea1a13baf54bdf344c50057a7d9b08e7e7f59856187c Oct 09 13:44:55 crc kubenswrapper[4762]: I1009 13:44:55.910711 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-nknqr"] Oct 09 13:44:55 crc kubenswrapper[4762]: W1009 13:44:55.912844 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3d5e35ba_6450_49d9_907a_8a4f879a1b0f.slice/crio-40ab3abaf5c944fa37f1a8226f9533563b7062110e452f079c2892753e6cee7a 
WatchSource:0}: Error finding container 40ab3abaf5c944fa37f1a8226f9533563b7062110e452f079c2892753e6cee7a: Status 404 returned error can't find the container with id 40ab3abaf5c944fa37f1a8226f9533563b7062110e452f079c2892753e6cee7a
Oct 09 13:44:56 crc kubenswrapper[4762]: I1009 13:44:56.331199 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"de9e8701-0980-4e1d-beb6-bc897f6a3e5f","Type":"ContainerStarted","Data":"afadb27054548ec5c1a1ea1a13baf54bdf344c50057a7d9b08e7e7f59856187c"}
Oct 09 13:44:56 crc kubenswrapper[4762]: I1009 13:44:56.332335 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"4dc0b86c-7af2-4320-9b39-735213fb6609","Type":"ContainerStarted","Data":"c7a9da66efad426817ea74dbc6681a4f39048959f992f44b2f18582d8ac3a70e"}
Oct 09 13:44:56 crc kubenswrapper[4762]: I1009 13:44:56.333305 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"920bd70a-a807-4848-baf7-5bfc033838ee","Type":"ContainerStarted","Data":"81d25646935e58f5833b53ef70abc85a4cc338babc0b018008136ef95aa4706a"}
Oct 09 13:44:56 crc kubenswrapper[4762]: I1009 13:44:56.334235 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-nknqr" event={"ID":"3d5e35ba-6450-49d9-907a-8a4f879a1b0f","Type":"ContainerStarted","Data":"40ab3abaf5c944fa37f1a8226f9533563b7062110e452f079c2892753e6cee7a"}
Oct 09 13:44:56 crc kubenswrapper[4762]: I1009 13:44:56.335567 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"ef0320a3-ddc0-42cb-9042-33b4b77994ff","Type":"ContainerStarted","Data":"d5cc3e0ce1a759c00b21f89bcbc61168b82aa52e8425afa7df6ed9b8da450f04"}
Oct 09 13:44:56 crc kubenswrapper[4762]: I1009 13:44:56.335596 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-ndjxc"
Oct 09 13:44:56 crc kubenswrapper[4762]: I1009 13:44:56.335604 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-wmdhb"
Oct 09 13:44:56 crc kubenswrapper[4762]: I1009 13:44:56.394400 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-wmdhb"]
Oct 09 13:44:56 crc kubenswrapper[4762]: I1009 13:44:56.401600 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-wmdhb"]
Oct 09 13:44:56 crc kubenswrapper[4762]: I1009 13:44:56.431604 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-ndjxc"]
Oct 09 13:44:56 crc kubenswrapper[4762]: I1009 13:44:56.438944 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-ndjxc"]
Oct 09 13:44:56 crc kubenswrapper[4762]: I1009 13:44:56.549690 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"]
Oct 09 13:44:56 crc kubenswrapper[4762]: I1009 13:44:56.899982 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-4rsdz"]
Oct 09 13:44:56 crc kubenswrapper[4762]: I1009 13:44:56.975093 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="42bb36e2-06db-4094-9888-fb1e75596d17" path="/var/lib/kubelet/pods/42bb36e2-06db-4094-9888-fb1e75596d17/volumes"
Oct 09 13:44:56 crc kubenswrapper[4762]: I1009 13:44:56.975544 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f335f513-ab5c-436e-9390-84315674ff94" path="/var/lib/kubelet/pods/f335f513-ab5c-436e-9390-84315674ff94/volumes"
Oct 09 13:44:57 crc kubenswrapper[4762]: I1009 13:44:57.344358 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"73168d57-30d8-4389-aa93-cacc6b07f705","Type":"ContainerStarted","Data":"5e79c6927b095750dce54ac6928fba74a4204a733b4c09a34ff738ab33af6f0a"}
Oct 09 13:44:57 crc kubenswrapper[4762]: I1009 13:44:57.345596 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-4rsdz" event={"ID":"851f6b4a-bac4-4c7e-8d7b-46c7513269d9","Type":"ContainerStarted","Data":"b6c377c3a60adc4a6b1a248142e8579e2d3076e0550d7fe8a5f03267112dd2a7"}
Oct 09 13:45:00 crc kubenswrapper[4762]: I1009 13:45:00.148675 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29333625-ggp7p"]
Oct 09 13:45:00 crc kubenswrapper[4762]: I1009 13:45:00.151585 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29333625-ggp7p"
Oct 09 13:45:00 crc kubenswrapper[4762]: I1009 13:45:00.153736 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Oct 09 13:45:00 crc kubenswrapper[4762]: I1009 13:45:00.154480 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Oct 09 13:45:00 crc kubenswrapper[4762]: I1009 13:45:00.160718 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29333625-ggp7p"]
Oct 09 13:45:00 crc kubenswrapper[4762]: I1009 13:45:00.264322 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qxv6m\" (UniqueName: \"kubernetes.io/projected/c95067a5-19b0-4c94-9331-b820ca60432f-kube-api-access-qxv6m\") pod \"collect-profiles-29333625-ggp7p\" (UID: \"c95067a5-19b0-4c94-9331-b820ca60432f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333625-ggp7p"
Oct 09 13:45:00 crc kubenswrapper[4762]: I1009 13:45:00.264433 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c95067a5-19b0-4c94-9331-b820ca60432f-secret-volume\") pod \"collect-profiles-29333625-ggp7p\" (UID: \"c95067a5-19b0-4c94-9331-b820ca60432f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333625-ggp7p"
Oct 09 13:45:00 crc kubenswrapper[4762]: I1009 13:45:00.264670 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c95067a5-19b0-4c94-9331-b820ca60432f-config-volume\") pod \"collect-profiles-29333625-ggp7p\" (UID: \"c95067a5-19b0-4c94-9331-b820ca60432f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333625-ggp7p"
Oct 09 13:45:00 crc kubenswrapper[4762]: I1009 13:45:00.365617 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c95067a5-19b0-4c94-9331-b820ca60432f-config-volume\") pod \"collect-profiles-29333625-ggp7p\" (UID: \"c95067a5-19b0-4c94-9331-b820ca60432f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333625-ggp7p"
Oct 09 13:45:00 crc kubenswrapper[4762]: I1009 13:45:00.365697 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qxv6m\" (UniqueName: \"kubernetes.io/projected/c95067a5-19b0-4c94-9331-b820ca60432f-kube-api-access-qxv6m\") pod \"collect-profiles-29333625-ggp7p\" (UID: \"c95067a5-19b0-4c94-9331-b820ca60432f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333625-ggp7p"
Oct 09 13:45:00 crc kubenswrapper[4762]: I1009 13:45:00.365741 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c95067a5-19b0-4c94-9331-b820ca60432f-secret-volume\") pod \"collect-profiles-29333625-ggp7p\" (UID: \"c95067a5-19b0-4c94-9331-b820ca60432f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333625-ggp7p"
Oct 09 13:45:00 crc kubenswrapper[4762]: I1009 13:45:00.369449 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c95067a5-19b0-4c94-9331-b820ca60432f-config-volume\") pod \"collect-profiles-29333625-ggp7p\" (UID: \"c95067a5-19b0-4c94-9331-b820ca60432f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333625-ggp7p"
Oct 09 13:45:00 crc kubenswrapper[4762]: I1009 13:45:00.383308 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c95067a5-19b0-4c94-9331-b820ca60432f-secret-volume\") pod \"collect-profiles-29333625-ggp7p\" (UID: \"c95067a5-19b0-4c94-9331-b820ca60432f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333625-ggp7p"
Oct 09 13:45:00 crc kubenswrapper[4762]: I1009 13:45:00.385320 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qxv6m\" (UniqueName: \"kubernetes.io/projected/c95067a5-19b0-4c94-9331-b820ca60432f-kube-api-access-qxv6m\") pod \"collect-profiles-29333625-ggp7p\" (UID: \"c95067a5-19b0-4c94-9331-b820ca60432f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333625-ggp7p"
Oct 09 13:45:00 crc kubenswrapper[4762]: I1009 13:45:00.487967 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29333625-ggp7p"
Oct 09 13:45:03 crc kubenswrapper[4762]: I1009 13:45:03.370403 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29333625-ggp7p"]
Oct 09 13:45:04 crc kubenswrapper[4762]: I1009 13:45:04.413458 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29333625-ggp7p" event={"ID":"c95067a5-19b0-4c94-9331-b820ca60432f","Type":"ContainerStarted","Data":"33dffa1a636d25e742d95d1f09209b2150b485b37bce467d2612def9b0161438"}
Oct 09 13:45:05 crc kubenswrapper[4762]: I1009 13:45:05.449149 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0"
Oct 09 13:45:05 crc kubenswrapper[4762]: I1009 13:45:05.482015 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=37.911403667 podStartE2EDuration="44.481996068s" podCreationTimestamp="2025-10-09 13:44:21 +0000 UTC" firstStartedPulling="2025-10-09 13:44:55.799870877 +0000 UTC m=+1171.573661916" lastFinishedPulling="2025-10-09 13:45:02.370463278 +0000 UTC m=+1178.144254317" observedRunningTime="2025-10-09 13:45:05.477987771 +0000 UTC m=+1181.251778810" watchObservedRunningTime="2025-10-09 13:45:05.481996068 +0000 UTC m=+1181.255787097"
Oct 09 13:45:06 crc kubenswrapper[4762]: I1009 13:45:06.464720 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"ef0320a3-ddc0-42cb-9042-33b4b77994ff","Type":"ContainerStarted","Data":"5b7d21cfa348f2da8a838bfcb268895e7e0fb5e13834121d2230a5e12208a9f4"}
Oct 09 13:45:06 crc kubenswrapper[4762]: I1009 13:45:06.465214 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0"
Oct 09 13:45:06 crc kubenswrapper[4762]: I1009 13:45:06.469127 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"de9e8701-0980-4e1d-beb6-bc897f6a3e5f","Type":"ContainerStarted","Data":"0298fedbb6423e6e1f4c3a0b9c472563bf0d4a5e060e57f78e885a92bc20aaf1"}
Oct 09 13:45:06 crc kubenswrapper[4762]: I1009 13:45:06.471153 4762 generic.go:334] "Generic (PLEG): container finished" podID="c95067a5-19b0-4c94-9331-b820ca60432f" containerID="ef1b1c79ac5170da5cd3679764a9beb1b0772009b17ff33dca8c1980d5a4f560" exitCode=0
Oct 09 13:45:06 crc kubenswrapper[4762]: I1009 13:45:06.471452 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29333625-ggp7p" event={"ID":"c95067a5-19b0-4c94-9331-b820ca60432f","Type":"ContainerDied","Data":"ef1b1c79ac5170da5cd3679764a9beb1b0772009b17ff33dca8c1980d5a4f560"}
Oct 09 13:45:06 crc kubenswrapper[4762]: I1009 13:45:06.474158 4762 generic.go:334] "Generic (PLEG): container finished" podID="851f6b4a-bac4-4c7e-8d7b-46c7513269d9" containerID="9440d2119ea995666bf2fdbfde3704960362a7821c57d356599df491a1018a5b" exitCode=0
Oct 09 13:45:06 crc kubenswrapper[4762]: I1009 13:45:06.474827 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-4rsdz" event={"ID":"851f6b4a-bac4-4c7e-8d7b-46c7513269d9","Type":"ContainerDied","Data":"9440d2119ea995666bf2fdbfde3704960362a7821c57d356599df491a1018a5b"}
Oct 09 13:45:06 crc kubenswrapper[4762]: I1009 13:45:06.492988 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"4dc0b86c-7af2-4320-9b39-735213fb6609","Type":"ContainerStarted","Data":"6791b06acce090a85fe3858d54be64ff77dbb596394dad11205a3c2d3f6e66f2"}
Oct 09 13:45:06 crc kubenswrapper[4762]: I1009 13:45:06.494122 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=34.046275541 podStartE2EDuration="43.494101308s" podCreationTimestamp="2025-10-09 13:44:23 +0000 UTC" firstStartedPulling="2025-10-09 13:44:55.779657729 +0000 UTC m=+1171.553448768" lastFinishedPulling="2025-10-09 13:45:05.227483486 +0000 UTC m=+1181.001274535" observedRunningTime="2025-10-09 13:45:06.47872932 +0000 UTC m=+1182.252520359" watchObservedRunningTime="2025-10-09 13:45:06.494101308 +0000 UTC m=+1182.267892347"
Oct 09 13:45:06 crc kubenswrapper[4762]: I1009 13:45:06.497235 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"920bd70a-a807-4848-baf7-5bfc033838ee","Type":"ContainerStarted","Data":"c8d8d117d7a7cc7c3b031a470b0ee1bca2341ab9daabe576a4e456084be8e7f2"}
Oct 09 13:45:06 crc kubenswrapper[4762]: I1009 13:45:06.500055 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-nknqr" event={"ID":"3d5e35ba-6450-49d9-907a-8a4f879a1b0f","Type":"ContainerStarted","Data":"3512052ad89cd6f81eb49dade5d8f066e313de741dc524a06c830f4ecbb4c187"}
Oct 09 13:45:06 crc kubenswrapper[4762]: I1009 13:45:06.500350 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-nknqr"
Oct 09 13:45:06 crc kubenswrapper[4762]: I1009 13:45:06.504371 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"73168d57-30d8-4389-aa93-cacc6b07f705","Type":"ContainerStarted","Data":"cb88f4bb766dd77797353f3b4da21fe59806a861fbfa0151f974260158e77591"}
Oct 09 13:45:06 crc kubenswrapper[4762]: I1009 13:45:06.534708 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-nknqr" podStartSLOduration=31.473829948 podStartE2EDuration="38.534691726s" podCreationTimestamp="2025-10-09 13:44:28 +0000 UTC" firstStartedPulling="2025-10-09 13:44:55.914710808 +0000 UTC m=+1171.688501847" lastFinishedPulling="2025-10-09 13:45:02.975572586 +0000 UTC m=+1178.749363625" observedRunningTime="2025-10-09 13:45:06.532763115 +0000 UTC m=+1182.306554174" watchObservedRunningTime="2025-10-09 13:45:06.534691726 +0000 UTC m=+1182.308482765"
Oct 09 13:45:07 crc kubenswrapper[4762]: I1009 13:45:07.518803 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-4rsdz" event={"ID":"851f6b4a-bac4-4c7e-8d7b-46c7513269d9","Type":"ContainerStarted","Data":"6adc8ac732e2e99b70d366ead1fb8b6aac18a135339e217a7db01f59192c2ee6"}
Oct 09 13:45:07 crc kubenswrapper[4762]: I1009 13:45:07.519407 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-4rsdz"
Oct 09 13:45:07 crc kubenswrapper[4762]: I1009 13:45:07.519428 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-4rsdz" event={"ID":"851f6b4a-bac4-4c7e-8d7b-46c7513269d9","Type":"ContainerStarted","Data":"8f00e71744c4d358c802b16ce5f508b3f36e83f09d97012b8034c4e61e6b235c"}
Oct 09 13:45:07 crc kubenswrapper[4762]: I1009 13:45:07.519479 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-4rsdz"
Oct 09 13:45:07 crc kubenswrapper[4762]: I1009 13:45:07.541333 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-4rsdz" podStartSLOduration=34.339206627 podStartE2EDuration="39.54131398s" podCreationTimestamp="2025-10-09 13:44:28 +0000 UTC" firstStartedPulling="2025-10-09 13:44:56.893751519 +0000 UTC m=+1172.667542558" lastFinishedPulling="2025-10-09 13:45:02.095858872 +0000 UTC m=+1177.869649911" observedRunningTime="2025-10-09 13:45:07.540050678 +0000 UTC m=+1183.313841737" watchObservedRunningTime="2025-10-09 13:45:07.54131398 +0000 UTC m=+1183.315105019"
Oct 09 13:45:08 crc kubenswrapper[4762]: I1009 13:45:08.525286 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"0ca3d4c1-b9e5-4443-8102-7739602cbd2f","Type":"ContainerStarted","Data":"6d253aec4527a1cce29420f394ed34441df96bec1ff09c961c18414c6cae23c3"}
Oct 09 13:45:08 crc kubenswrapper[4762]: I1009 13:45:08.674609 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29333625-ggp7p"
Oct 09 13:45:08 crc kubenswrapper[4762]: I1009 13:45:08.798663 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c95067a5-19b0-4c94-9331-b820ca60432f-secret-volume\") pod \"c95067a5-19b0-4c94-9331-b820ca60432f\" (UID: \"c95067a5-19b0-4c94-9331-b820ca60432f\") "
Oct 09 13:45:08 crc kubenswrapper[4762]: I1009 13:45:08.798806 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c95067a5-19b0-4c94-9331-b820ca60432f-config-volume\") pod \"c95067a5-19b0-4c94-9331-b820ca60432f\" (UID: \"c95067a5-19b0-4c94-9331-b820ca60432f\") "
Oct 09 13:45:08 crc kubenswrapper[4762]: I1009 13:45:08.798929 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qxv6m\" (UniqueName: \"kubernetes.io/projected/c95067a5-19b0-4c94-9331-b820ca60432f-kube-api-access-qxv6m\") pod \"c95067a5-19b0-4c94-9331-b820ca60432f\" (UID: \"c95067a5-19b0-4c94-9331-b820ca60432f\") "
Oct 09 13:45:08 crc kubenswrapper[4762]: I1009 13:45:08.799522 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c95067a5-19b0-4c94-9331-b820ca60432f-config-volume" (OuterVolumeSpecName: "config-volume") pod "c95067a5-19b0-4c94-9331-b820ca60432f" (UID: "c95067a5-19b0-4c94-9331-b820ca60432f"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 09 13:45:08 crc kubenswrapper[4762]: I1009 13:45:08.804776 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c95067a5-19b0-4c94-9331-b820ca60432f-kube-api-access-qxv6m" (OuterVolumeSpecName: "kube-api-access-qxv6m") pod "c95067a5-19b0-4c94-9331-b820ca60432f" (UID: "c95067a5-19b0-4c94-9331-b820ca60432f"). InnerVolumeSpecName "kube-api-access-qxv6m". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 09 13:45:08 crc kubenswrapper[4762]: I1009 13:45:08.807227 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c95067a5-19b0-4c94-9331-b820ca60432f-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "c95067a5-19b0-4c94-9331-b820ca60432f" (UID: "c95067a5-19b0-4c94-9331-b820ca60432f"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 09 13:45:08 crc kubenswrapper[4762]: I1009 13:45:08.900617 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qxv6m\" (UniqueName: \"kubernetes.io/projected/c95067a5-19b0-4c94-9331-b820ca60432f-kube-api-access-qxv6m\") on node \"crc\" DevicePath \"\""
Oct 09 13:45:08 crc kubenswrapper[4762]: I1009 13:45:08.900673 4762 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c95067a5-19b0-4c94-9331-b820ca60432f-secret-volume\") on node \"crc\" DevicePath \"\""
Oct 09 13:45:08 crc kubenswrapper[4762]: I1009 13:45:08.900683 4762 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c95067a5-19b0-4c94-9331-b820ca60432f-config-volume\") on node \"crc\" DevicePath \"\""
Oct 09 13:45:09 crc kubenswrapper[4762]: I1009 13:45:09.533523 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29333625-ggp7p" event={"ID":"c95067a5-19b0-4c94-9331-b820ca60432f","Type":"ContainerDied","Data":"33dffa1a636d25e742d95d1f09209b2150b485b37bce467d2612def9b0161438"}
Oct 09 13:45:09 crc kubenswrapper[4762]: I1009 13:45:09.534919 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="33dffa1a636d25e742d95d1f09209b2150b485b37bce467d2612def9b0161438"
Oct 09 13:45:09 crc kubenswrapper[4762]: I1009 13:45:09.533547 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29333625-ggp7p"
Oct 09 13:45:10 crc kubenswrapper[4762]: I1009 13:45:10.543725 4762 generic.go:334] "Generic (PLEG): container finished" podID="4dc0b86c-7af2-4320-9b39-735213fb6609" containerID="6791b06acce090a85fe3858d54be64ff77dbb596394dad11205a3c2d3f6e66f2" exitCode=0
Oct 09 13:45:10 crc kubenswrapper[4762]: I1009 13:45:10.543831 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"4dc0b86c-7af2-4320-9b39-735213fb6609","Type":"ContainerDied","Data":"6791b06acce090a85fe3858d54be64ff77dbb596394dad11205a3c2d3f6e66f2"}
Oct 09 13:45:11 crc kubenswrapper[4762]: I1009 13:45:11.554203 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"c3841f71-5204-469f-b755-e030281725d1","Type":"ContainerStarted","Data":"89d0be629bcb0cac06f9243b7e363d8a395f63ea55dd81e464a984f45d7e31cc"}
Oct 09 13:45:12 crc kubenswrapper[4762]: I1009 13:45:12.258895 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0"
Oct 09 13:45:14 crc kubenswrapper[4762]: I1009 13:45:14.249965 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0"
Oct 09 13:45:14 crc kubenswrapper[4762]: I1009 13:45:14.254883 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-cmztv"]
Oct 09 13:45:14 crc kubenswrapper[4762]: I1009 13:45:14.317385 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7cb5889db5-v5jd5"]
Oct 09 13:45:14 crc kubenswrapper[4762]: E1009 13:45:14.317722 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c95067a5-19b0-4c94-9331-b820ca60432f" containerName="collect-profiles"
Oct 09 13:45:14 crc kubenswrapper[4762]: I1009 13:45:14.317740 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="c95067a5-19b0-4c94-9331-b820ca60432f" containerName="collect-profiles"
Oct 09 13:45:14 crc kubenswrapper[4762]: I1009 13:45:14.317930 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="c95067a5-19b0-4c94-9331-b820ca60432f" containerName="collect-profiles"
Oct 09 13:45:14 crc kubenswrapper[4762]: I1009 13:45:14.318753 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7cb5889db5-v5jd5"
Oct 09 13:45:14 crc kubenswrapper[4762]: I1009 13:45:14.331674 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7cb5889db5-v5jd5"]
Oct 09 13:45:14 crc kubenswrapper[4762]: I1009 13:45:14.384493 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c82f1ee5-3ab3-4672-b5a6-bc0e94a795a6-dns-svc\") pod \"dnsmasq-dns-7cb5889db5-v5jd5\" (UID: \"c82f1ee5-3ab3-4672-b5a6-bc0e94a795a6\") " pod="openstack/dnsmasq-dns-7cb5889db5-v5jd5"
Oct 09 13:45:14 crc kubenswrapper[4762]: I1009 13:45:14.384567 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c82f1ee5-3ab3-4672-b5a6-bc0e94a795a6-config\") pod \"dnsmasq-dns-7cb5889db5-v5jd5\" (UID: \"c82f1ee5-3ab3-4672-b5a6-bc0e94a795a6\") " pod="openstack/dnsmasq-dns-7cb5889db5-v5jd5"
Oct 09 13:45:14 crc kubenswrapper[4762]: I1009 13:45:14.384677 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7p72c\" (UniqueName: \"kubernetes.io/projected/c82f1ee5-3ab3-4672-b5a6-bc0e94a795a6-kube-api-access-7p72c\") pod \"dnsmasq-dns-7cb5889db5-v5jd5\" (UID: \"c82f1ee5-3ab3-4672-b5a6-bc0e94a795a6\") " pod="openstack/dnsmasq-dns-7cb5889db5-v5jd5"
Oct 09 13:45:14 crc kubenswrapper[4762]: I1009 13:45:14.486799 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7p72c\" (UniqueName: \"kubernetes.io/projected/c82f1ee5-3ab3-4672-b5a6-bc0e94a795a6-kube-api-access-7p72c\") pod \"dnsmasq-dns-7cb5889db5-v5jd5\" (UID: \"c82f1ee5-3ab3-4672-b5a6-bc0e94a795a6\") " pod="openstack/dnsmasq-dns-7cb5889db5-v5jd5"
Oct 09 13:45:14 crc kubenswrapper[4762]: I1009 13:45:14.486916 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c82f1ee5-3ab3-4672-b5a6-bc0e94a795a6-dns-svc\") pod \"dnsmasq-dns-7cb5889db5-v5jd5\" (UID: \"c82f1ee5-3ab3-4672-b5a6-bc0e94a795a6\") " pod="openstack/dnsmasq-dns-7cb5889db5-v5jd5"
Oct 09 13:45:14 crc kubenswrapper[4762]: I1009 13:45:14.486958 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c82f1ee5-3ab3-4672-b5a6-bc0e94a795a6-config\") pod \"dnsmasq-dns-7cb5889db5-v5jd5\" (UID: \"c82f1ee5-3ab3-4672-b5a6-bc0e94a795a6\") " pod="openstack/dnsmasq-dns-7cb5889db5-v5jd5"
Oct 09 13:45:14 crc kubenswrapper[4762]: I1009 13:45:14.488154 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c82f1ee5-3ab3-4672-b5a6-bc0e94a795a6-dns-svc\") pod \"dnsmasq-dns-7cb5889db5-v5jd5\" (UID: \"c82f1ee5-3ab3-4672-b5a6-bc0e94a795a6\") " pod="openstack/dnsmasq-dns-7cb5889db5-v5jd5"
Oct 09 13:45:14 crc kubenswrapper[4762]: I1009 13:45:14.488227 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c82f1ee5-3ab3-4672-b5a6-bc0e94a795a6-config\") pod \"dnsmasq-dns-7cb5889db5-v5jd5\" (UID: \"c82f1ee5-3ab3-4672-b5a6-bc0e94a795a6\") " pod="openstack/dnsmasq-dns-7cb5889db5-v5jd5"
Oct 09 13:45:14 crc kubenswrapper[4762]: I1009 13:45:14.511574 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7p72c\" (UniqueName: \"kubernetes.io/projected/c82f1ee5-3ab3-4672-b5a6-bc0e94a795a6-kube-api-access-7p72c\") pod \"dnsmasq-dns-7cb5889db5-v5jd5\" (UID: \"c82f1ee5-3ab3-4672-b5a6-bc0e94a795a6\") " pod="openstack/dnsmasq-dns-7cb5889db5-v5jd5"
Oct 09 13:45:14 crc kubenswrapper[4762]: I1009 13:45:14.598071 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"73168d57-30d8-4389-aa93-cacc6b07f705","Type":"ContainerStarted","Data":"a8a5046654c8bc5cfa5fef4fa4b6f0c7fc095d9255b7f318a934a8f57d41e38e"}
Oct 09 13:45:14 crc kubenswrapper[4762]: I1009 13:45:14.599675 4762 generic.go:334] "Generic (PLEG): container finished" podID="9935b4b6-437d-4d16-951c-a36c15ba5257" containerID="debbb243edd04dad867565827b15b04756a9fa31f122e53edf1e1f46f31c74d4" exitCode=0
Oct 09 13:45:14 crc kubenswrapper[4762]: I1009 13:45:14.599737 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-cmztv" event={"ID":"9935b4b6-437d-4d16-951c-a36c15ba5257","Type":"ContainerDied","Data":"debbb243edd04dad867565827b15b04756a9fa31f122e53edf1e1f46f31c74d4"}
Oct 09 13:45:14 crc kubenswrapper[4762]: I1009 13:45:14.601441 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"de9e8701-0980-4e1d-beb6-bc897f6a3e5f","Type":"ContainerStarted","Data":"bfd114b69745f91fa863504ab2154e63a593800c553e012f664f20178ef3e182"}
Oct 09 13:45:14 crc kubenswrapper[4762]: I1009 13:45:14.605915 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"4dc0b86c-7af2-4320-9b39-735213fb6609","Type":"ContainerStarted","Data":"1b9d384b3a344bc54503bc4fd31ef99fc9dc1c64d44d9dde65cb139bc35f38fd"}
Oct 09 13:45:14 crc kubenswrapper[4762]: I1009 13:45:14.608334 4762 generic.go:334] "Generic (PLEG): container finished" podID="cb6e7fc4-f16d-49a1-888a-ad4a5c1a23d3" containerID="daf6d7e235ca94ad2cb9db06fe720a1cc7f3b3cf63d0c837f680c02ea90ad491" exitCode=0
Oct 09 13:45:14 crc kubenswrapper[4762]: I1009 13:45:14.608384 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-k5xnq" event={"ID":"cb6e7fc4-f16d-49a1-888a-ad4a5c1a23d3","Type":"ContainerDied","Data":"daf6d7e235ca94ad2cb9db06fe720a1cc7f3b3cf63d0c837f680c02ea90ad491"}
Oct 09 13:45:14 crc kubenswrapper[4762]: I1009 13:45:14.610496 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"e38c807b-a632-4a96-b228-c879b07fd461","Type":"ContainerStarted","Data":"58840ed91cd949b1dbb746ac2bf81613b6b2a147a6bbeb09ef17561caa2428d1"}
Oct 09 13:45:14 crc kubenswrapper[4762]: I1009 13:45:14.623773 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=29.551353411 podStartE2EDuration="46.623750812s" podCreationTimestamp="2025-10-09 13:44:28 +0000 UTC" firstStartedPulling="2025-10-09 13:44:56.560269149 +0000 UTC m=+1172.334060188" lastFinishedPulling="2025-10-09 13:45:13.63266655 +0000 UTC m=+1189.406457589" observedRunningTime="2025-10-09 13:45:14.620157256 +0000 UTC m=+1190.393948295" watchObservedRunningTime="2025-10-09 13:45:14.623750812 +0000 UTC m=+1190.397541861"
Oct 09 13:45:14 crc kubenswrapper[4762]: I1009 13:45:14.641205 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7cb5889db5-v5jd5"
Oct 09 13:45:14 crc kubenswrapper[4762]: I1009 13:45:14.651979 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=47.794542349 podStartE2EDuration="54.651952821s" podCreationTimestamp="2025-10-09 13:44:20 +0000 UTC" firstStartedPulling="2025-10-09 13:44:55.5109452 +0000 UTC m=+1171.284736239" lastFinishedPulling="2025-10-09 13:45:02.368355672 +0000 UTC m=+1178.142146711" observedRunningTime="2025-10-09 13:45:14.644588135 +0000 UTC m=+1190.418379174" watchObservedRunningTime="2025-10-09 13:45:14.651952821 +0000 UTC m=+1190.425743870"
Oct 09 13:45:14 crc kubenswrapper[4762]: I1009 13:45:14.667859 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=27.278988465 podStartE2EDuration="44.667842533s" podCreationTimestamp="2025-10-09 13:44:30 +0000 UTC" firstStartedPulling="2025-10-09 13:44:55.878322171 +0000 UTC m=+1171.652113210" lastFinishedPulling="2025-10-09 13:45:13.267176239 +0000 UTC m=+1189.040967278" observedRunningTime="2025-10-09 13:45:14.665679606 +0000 UTC m=+1190.439470665" watchObservedRunningTime="2025-10-09 13:45:14.667842533 +0000 UTC m=+1190.441633562"
Oct 09 13:45:14 crc kubenswrapper[4762]: E1009 13:45:14.862843 4762 log.go:32] "CreateContainer in sandbox from runtime service failed" err=<
Oct 09 13:45:14 crc kubenswrapper[4762]: rpc error: code = Unknown desc = container create failed: mount `/var/lib/kubelet/pods/cb6e7fc4-f16d-49a1-888a-ad4a5c1a23d3/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory
Oct 09 13:45:14 crc kubenswrapper[4762]: > podSandboxID="997feb87c4d79ebf88a3b2efd047b6a7cdff1693b404050db04bb6283ef76d4e"
Oct 09 13:45:14 crc kubenswrapper[4762]: E1009 13:45:14.863309 4762 kuberuntime_manager.go:1274] "Unhandled Error" err=<
Oct 09 13:45:14 crc kubenswrapper[4762]: container &Container{Name:dnsmasq-dns,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n659h4h664hbh658h587h67ch89h587h8fh679hc6hf9h55fh644h5d5h698h68dh5cdh5ffh669h54ch9h689hb8hd4h5bfhd8h5d7h5fh665h574q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-n6qmw,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 5353 },Host:,},GRPC:nil,},InitialDelaySeconds:3,TimeoutSeconds:5,PeriodSeconds:3,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 5353 },Host:,},GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-57d769cc4f-k5xnq_openstack(cb6e7fc4-f16d-49a1-888a-ad4a5c1a23d3): CreateContainerError: container create failed: mount `/var/lib/kubelet/pods/cb6e7fc4-f16d-49a1-888a-ad4a5c1a23d3/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory
Oct 09 13:45:14 crc kubenswrapper[4762]: > logger="UnhandledError"
Oct 09 13:45:14 crc kubenswrapper[4762]: E1009 13:45:14.868723 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"dnsmasq-dns\" with CreateContainerError: \"container create failed: mount `/var/lib/kubelet/pods/cb6e7fc4-f16d-49a1-888a-ad4a5c1a23d3/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory\\n\"" pod="openstack/dnsmasq-dns-57d769cc4f-k5xnq" podUID="cb6e7fc4-f16d-49a1-888a-ad4a5c1a23d3"
Oct 09 13:45:14 crc kubenswrapper[4762]: I1009 13:45:14.899587 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-cmztv"
Oct 09 13:45:14 crc kubenswrapper[4762]: I1009 13:45:14.996349 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9935b4b6-437d-4d16-951c-a36c15ba5257-config\") pod \"9935b4b6-437d-4d16-951c-a36c15ba5257\" (UID: \"9935b4b6-437d-4d16-951c-a36c15ba5257\") "
Oct 09 13:45:14 crc kubenswrapper[4762]: I1009 13:45:14.996472 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9697c\" (UniqueName: \"kubernetes.io/projected/9935b4b6-437d-4d16-951c-a36c15ba5257-kube-api-access-9697c\") pod \"9935b4b6-437d-4d16-951c-a36c15ba5257\" (UID: \"9935b4b6-437d-4d16-951c-a36c15ba5257\") "
Oct 09 13:45:14 crc kubenswrapper[4762]: I1009 13:45:14.996507 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9935b4b6-437d-4d16-951c-a36c15ba5257-dns-svc\") pod \"9935b4b6-437d-4d16-951c-a36c15ba5257\" (UID: \"9935b4b6-437d-4d16-951c-a36c15ba5257\") "
Oct 09 13:45:15 crc kubenswrapper[4762]: I1009 13:45:15.003791 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9935b4b6-437d-4d16-951c-a36c15ba5257-kube-api-access-9697c" (OuterVolumeSpecName: "kube-api-access-9697c") pod "9935b4b6-437d-4d16-951c-a36c15ba5257" (UID: "9935b4b6-437d-4d16-951c-a36c15ba5257"). InnerVolumeSpecName "kube-api-access-9697c". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 09 13:45:15 crc kubenswrapper[4762]: I1009 13:45:15.018579 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9935b4b6-437d-4d16-951c-a36c15ba5257-config" (OuterVolumeSpecName: "config") pod "9935b4b6-437d-4d16-951c-a36c15ba5257" (UID: "9935b4b6-437d-4d16-951c-a36c15ba5257"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 09 13:45:15 crc kubenswrapper[4762]: I1009 13:45:15.019320 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9935b4b6-437d-4d16-951c-a36c15ba5257-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "9935b4b6-437d-4d16-951c-a36c15ba5257" (UID: "9935b4b6-437d-4d16-951c-a36c15ba5257"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 09 13:45:15 crc kubenswrapper[4762]: I1009 13:45:15.098004 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9697c\" (UniqueName: \"kubernetes.io/projected/9935b4b6-437d-4d16-951c-a36c15ba5257-kube-api-access-9697c\") on node \"crc\" DevicePath \"\""
Oct 09 13:45:15 crc kubenswrapper[4762]: I1009 13:45:15.098034 4762 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9935b4b6-437d-4d16-951c-a36c15ba5257-dns-svc\") on node \"crc\" DevicePath \"\""
Oct 09 13:45:15 crc kubenswrapper[4762]: I1009 13:45:15.098043 4762 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9935b4b6-437d-4d16-951c-a36c15ba5257-config\") on node \"crc\" DevicePath \"\""
Oct 09 13:45:15 crc kubenswrapper[4762]: I1009 13:45:15.183109 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7cb5889db5-v5jd5"]
Oct 09 13:45:15 crc kubenswrapper[4762]: W1009 13:45:15.183834 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc82f1ee5_3ab3_4672_b5a6_bc0e94a795a6.slice/crio-992bc2e251746d601d64a53f7d285a6e53b7299280a2dde2ae89b6c4b05b649f WatchSource:0}: Error finding container 992bc2e251746d601d64a53f7d285a6e53b7299280a2dde2ae89b6c4b05b649f: Status 404 returned error can't find the container with id 992bc2e251746d601d64a53f7d285a6e53b7299280a2dde2ae89b6c4b05b649f
Oct 09 13:45:15 crc kubenswrapper[4762]: I1009 13:45:15.291801 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0"
Oct 09 13:45:15 crc kubenswrapper[4762]: I1009 13:45:15.291854 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0"
Oct 09 13:45:15 crc kubenswrapper[4762]: I1009 13:45:15.330134 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0"
Oct 09 13:45:15 crc kubenswrapper[4762]: I1009 13:45:15.388946 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-storage-0"]
Oct 09 13:45:15 crc kubenswrapper[4762]: E1009 13:45:15.389264 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9935b4b6-437d-4d16-951c-a36c15ba5257" containerName="init"
Oct 09 13:45:15 crc kubenswrapper[4762]: I1009 13:45:15.389280 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="9935b4b6-437d-4d16-951c-a36c15ba5257" containerName="init"
Oct 09 13:45:15 crc kubenswrapper[4762]: I1009 13:45:15.389465 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="9935b4b6-437d-4d16-951c-a36c15ba5257" containerName="init"
Oct 09 13:45:15 crc kubenswrapper[4762]: I1009 13:45:15.394681 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0"
Oct 09 13:45:15 crc kubenswrapper[4762]: I1009 13:45:15.396429 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-files"
Oct 09 13:45:15 crc kubenswrapper[4762]: I1009 13:45:15.397478 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-conf"
Oct 09 13:45:15 crc kubenswrapper[4762]: I1009 13:45:15.397577 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-storage-config-data"
Oct 09 13:45:15 crc kubenswrapper[4762]: I1009 13:45:15.397966 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-swift-dockercfg-fq9bh"
Oct 09 13:45:15 crc kubenswrapper[4762]: I1009 13:45:15.405860 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"]
Oct 09 13:45:15 crc kubenswrapper[4762]: I1009 13:45:15.504106 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/1477cefa-db6e-45d7-adb1-d7c1369a3610-cache\") pod \"swift-storage-0\" (UID: \"1477cefa-db6e-45d7-adb1-d7c1369a3610\") " pod="openstack/swift-storage-0"
Oct 09 13:45:15 crc kubenswrapper[4762]: I1009 13:45:15.504211 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-67d79\" (UniqueName: \"kubernetes.io/projected/1477cefa-db6e-45d7-adb1-d7c1369a3610-kube-api-access-67d79\") pod \"swift-storage-0\" (UID: \"1477cefa-db6e-45d7-adb1-d7c1369a3610\") " pod="openstack/swift-storage-0"
Oct 09 13:45:15 crc kubenswrapper[4762]: I1009 13:45:15.504251 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"swift-storage-0\" (UID: \"1477cefa-db6e-45d7-adb1-d7c1369a3610\") " pod="openstack/swift-storage-0"
Oct 09 13:45:15 crc kubenswrapper[4762]: I1009 13:45:15.504331 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/1477cefa-db6e-45d7-adb1-d7c1369a3610-lock\") pod \"swift-storage-0\" (UID: \"1477cefa-db6e-45d7-adb1-d7c1369a3610\") " pod="openstack/swift-storage-0"
Oct 09 13:45:15 crc kubenswrapper[4762]: I1009 13:45:15.504400 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/1477cefa-db6e-45d7-adb1-d7c1369a3610-etc-swift\") pod \"swift-storage-0\" (UID: \"1477cefa-db6e-45d7-adb1-d7c1369a3610\") " pod="openstack/swift-storage-0"
Oct 09 13:45:15 crc kubenswrapper[4762]: I1009 13:45:15.606144 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-67d79\" (UniqueName: \"kubernetes.io/projected/1477cefa-db6e-45d7-adb1-d7c1369a3610-kube-api-access-67d79\") pod \"swift-storage-0\" (UID: \"1477cefa-db6e-45d7-adb1-d7c1369a3610\") " pod="openstack/swift-storage-0"
Oct 09 13:45:15 crc kubenswrapper[4762]: I1009 13:45:15.606208 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"swift-storage-0\" (UID: \"1477cefa-db6e-45d7-adb1-d7c1369a3610\") " pod="openstack/swift-storage-0"
Oct 09 13:45:15 crc kubenswrapper[4762]: I1009 13:45:15.606272 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/1477cefa-db6e-45d7-adb1-d7c1369a3610-lock\") pod \"swift-storage-0\" (UID: \"1477cefa-db6e-45d7-adb1-d7c1369a3610\") " pod="openstack/swift-storage-0"
Oct 09 13:45:15 crc kubenswrapper[4762]: I1009 13:45:15.606333 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/1477cefa-db6e-45d7-adb1-d7c1369a3610-etc-swift\") pod \"swift-storage-0\" (UID: \"1477cefa-db6e-45d7-adb1-d7c1369a3610\") " pod="openstack/swift-storage-0"
Oct 09 13:45:15 crc kubenswrapper[4762]: I1009 13:45:15.606356 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/1477cefa-db6e-45d7-adb1-d7c1369a3610-cache\") pod \"swift-storage-0\" (UID: \"1477cefa-db6e-45d7-adb1-d7c1369a3610\") " pod="openstack/swift-storage-0"
Oct 09 13:45:15 crc kubenswrapper[4762]: E1009 13:45:15.606831 4762 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found
Oct 09 13:45:15 crc kubenswrapper[4762]: E1009 13:45:15.606870 4762 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found
Oct 09 13:45:15 crc kubenswrapper[4762]: I1009 13:45:15.606924 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/1477cefa-db6e-45d7-adb1-d7c1369a3610-cache\") pod \"swift-storage-0\" (UID: \"1477cefa-db6e-45d7-adb1-d7c1369a3610\") " pod="openstack/swift-storage-0"
Oct 09 13:45:15 crc kubenswrapper[4762]: E1009 13:45:15.606940 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/1477cefa-db6e-45d7-adb1-d7c1369a3610-etc-swift podName:1477cefa-db6e-45d7-adb1-d7c1369a3610 nodeName:}" failed. No retries permitted until 2025-10-09 13:45:16.106914933 +0000 UTC m=+1191.880705972 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/1477cefa-db6e-45d7-adb1-d7c1369a3610-etc-swift") pod "swift-storage-0" (UID: "1477cefa-db6e-45d7-adb1-d7c1369a3610") : configmap "swift-ring-files" not found
Oct 09 13:45:15 crc kubenswrapper[4762]: I1009 13:45:15.607051 4762 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"swift-storage-0\" (UID: \"1477cefa-db6e-45d7-adb1-d7c1369a3610\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/swift-storage-0"
Oct 09 13:45:15 crc kubenswrapper[4762]: I1009 13:45:15.607065 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/1477cefa-db6e-45d7-adb1-d7c1369a3610-lock\") pod \"swift-storage-0\" (UID: \"1477cefa-db6e-45d7-adb1-d7c1369a3610\") " pod="openstack/swift-storage-0"
Oct 09 13:45:15 crc kubenswrapper[4762]: I1009 13:45:15.618398 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-cmztv" event={"ID":"9935b4b6-437d-4d16-951c-a36c15ba5257","Type":"ContainerDied","Data":"0c365b189c17e144b42b804ec377c2b9622fdb57862d6d72de74b7e2402598a6"}
Oct 09 13:45:15 crc kubenswrapper[4762]: I1009 13:45:15.618442 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-cmztv"
Oct 09 13:45:15 crc kubenswrapper[4762]: I1009 13:45:15.618455 4762 scope.go:117] "RemoveContainer" containerID="debbb243edd04dad867565827b15b04756a9fa31f122e53edf1e1f46f31c74d4"
Oct 09 13:45:15 crc kubenswrapper[4762]: I1009 13:45:15.619955 4762 generic.go:334] "Generic (PLEG): container finished" podID="c82f1ee5-3ab3-4672-b5a6-bc0e94a795a6" containerID="367fdeecab669fbc19f9deb8cd556da8681a8c7e0a55c6d2cb7f3228e94732ec" exitCode=0
Oct 09 13:45:15 crc kubenswrapper[4762]: I1009 13:45:15.619982 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7cb5889db5-v5jd5" event={"ID":"c82f1ee5-3ab3-4672-b5a6-bc0e94a795a6","Type":"ContainerDied","Data":"367fdeecab669fbc19f9deb8cd556da8681a8c7e0a55c6d2cb7f3228e94732ec"}
Oct 09 13:45:15 crc kubenswrapper[4762]: I1009 13:45:15.620016 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7cb5889db5-v5jd5" event={"ID":"c82f1ee5-3ab3-4672-b5a6-bc0e94a795a6","Type":"ContainerStarted","Data":"992bc2e251746d601d64a53f7d285a6e53b7299280a2dde2ae89b6c4b05b649f"}
Oct 09 13:45:15 crc kubenswrapper[4762]: I1009 13:45:15.626471 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-67d79\" (UniqueName: \"kubernetes.io/projected/1477cefa-db6e-45d7-adb1-d7c1369a3610-kube-api-access-67d79\") pod \"swift-storage-0\" (UID: \"1477cefa-db6e-45d7-adb1-d7c1369a3610\") " pod="openstack/swift-storage-0"
Oct 09 13:45:15 crc kubenswrapper[4762]: I1009 13:45:15.630231 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"swift-storage-0\" (UID: \"1477cefa-db6e-45d7-adb1-d7c1369a3610\") " pod="openstack/swift-storage-0"
Oct 09 13:45:15 crc kubenswrapper[4762]: I1009 13:45:15.661100 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0"
Oct 09 13:45:15 crc kubenswrapper[4762]: I1009 13:45:15.743980 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-cmztv"]
Oct 09 13:45:15 crc kubenswrapper[4762]: I1009 13:45:15.763153 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-cmztv"]
Oct 09 13:45:15 crc kubenswrapper[4762]: I1009 13:45:15.845362 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-k5xnq"]
Oct 09 13:45:15 crc kubenswrapper[4762]: I1009 13:45:15.878845 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-57d65f699f-xqzkv"]
Oct 09 13:45:15 crc kubenswrapper[4762]: I1009 13:45:15.880289 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d65f699f-xqzkv"
Oct 09 13:45:15 crc kubenswrapper[4762]: I1009 13:45:15.886044 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb"
Oct 09 13:45:15 crc kubenswrapper[4762]: I1009 13:45:15.894063 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d65f699f-xqzkv"]
Oct 09 13:45:15 crc kubenswrapper[4762]: I1009 13:45:15.927104 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-tdgxl"]
Oct 09 13:45:15 crc kubenswrapper[4762]: I1009 13:45:15.928157 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-tdgxl"
Oct 09 13:45:15 crc kubenswrapper[4762]: I1009 13:45:15.930958 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data"
Oct 09 13:45:15 crc kubenswrapper[4762]: I1009 13:45:15.931005 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-config-data"
Oct 09 13:45:15 crc kubenswrapper[4762]: I1009 13:45:15.932290 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-scripts"
Oct 09 13:45:15 crc kubenswrapper[4762]: I1009 13:45:15.949797 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-tdgxl"]
Oct 09 13:45:15 crc kubenswrapper[4762]: I1009 13:45:15.998018 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-df85s"]
Oct 09 13:45:15 crc kubenswrapper[4762]: I1009 13:45:15.999319 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-df85s"
Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.005077 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config"
Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.011803 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/44b61157-b706-44e2-9ca9-a5709e68f92c-etc-swift\") pod \"swift-ring-rebalance-tdgxl\" (UID: \"44b61157-b706-44e2-9ca9-a5709e68f92c\") " pod="openstack/swift-ring-rebalance-tdgxl"
Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.011850 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/44b61157-b706-44e2-9ca9-a5709e68f92c-combined-ca-bundle\") pod \"swift-ring-rebalance-tdgxl\" (UID: \"44b61157-b706-44e2-9ca9-a5709e68f92c\") " pod="openstack/swift-ring-rebalance-tdgxl"
Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.011901 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e97796df-8a16-4687-b6c1-23b06accd1e7-config\") pod \"dnsmasq-dns-57d65f699f-xqzkv\" (UID: \"e97796df-8a16-4687-b6c1-23b06accd1e7\") " pod="openstack/dnsmasq-dns-57d65f699f-xqzkv"
Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.011921 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e97796df-8a16-4687-b6c1-23b06accd1e7-ovsdbserver-nb\") pod \"dnsmasq-dns-57d65f699f-xqzkv\" (UID: \"e97796df-8a16-4687-b6c1-23b06accd1e7\") " pod="openstack/dnsmasq-dns-57d65f699f-xqzkv"
Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.011989 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/44b61157-b706-44e2-9ca9-a5709e68f92c-scripts\") pod \"swift-ring-rebalance-tdgxl\" (UID: \"44b61157-b706-44e2-9ca9-a5709e68f92c\") " pod="openstack/swift-ring-rebalance-tdgxl"
Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.012025 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e97796df-8a16-4687-b6c1-23b06accd1e7-dns-svc\") pod \"dnsmasq-dns-57d65f699f-xqzkv\" (UID: \"e97796df-8a16-4687-b6c1-23b06accd1e7\") " pod="openstack/dnsmasq-dns-57d65f699f-xqzkv"
Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.012078 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5hlb4\" (UniqueName: \"kubernetes.io/projected/e97796df-8a16-4687-b6c1-23b06accd1e7-kube-api-access-5hlb4\") pod \"dnsmasq-dns-57d65f699f-xqzkv\" (UID: \"e97796df-8a16-4687-b6c1-23b06accd1e7\") " pod="openstack/dnsmasq-dns-57d65f699f-xqzkv"
Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.012104 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/44b61157-b706-44e2-9ca9-a5709e68f92c-dispersionconf\") pod \"swift-ring-rebalance-tdgxl\" (UID: \"44b61157-b706-44e2-9ca9-a5709e68f92c\") " pod="openstack/swift-ring-rebalance-tdgxl"
Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.012945 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/44b61157-b706-44e2-9ca9-a5709e68f92c-ring-data-devices\") pod \"swift-ring-rebalance-tdgxl\" (UID: \"44b61157-b706-44e2-9ca9-a5709e68f92c\") " pod="openstack/swift-ring-rebalance-tdgxl"
Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.013039 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/44b61157-b706-44e2-9ca9-a5709e68f92c-swiftconf\") pod \"swift-ring-rebalance-tdgxl\" (UID: \"44b61157-b706-44e2-9ca9-a5709e68f92c\") " pod="openstack/swift-ring-rebalance-tdgxl"
Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.013072 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rk5jn\" (UniqueName: \"kubernetes.io/projected/44b61157-b706-44e2-9ca9-a5709e68f92c-kube-api-access-rk5jn\") pod \"swift-ring-rebalance-tdgxl\" (UID: \"44b61157-b706-44e2-9ca9-a5709e68f92c\") " pod="openstack/swift-ring-rebalance-tdgxl"
Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.018489 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-df85s"]
Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.114784 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/44b61157-b706-44e2-9ca9-a5709e68f92c-scripts\") pod \"swift-ring-rebalance-tdgxl\" (UID: \"44b61157-b706-44e2-9ca9-a5709e68f92c\") " pod="openstack/swift-ring-rebalance-tdgxl"
Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.114854 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e97796df-8a16-4687-b6c1-23b06accd1e7-dns-svc\") pod \"dnsmasq-dns-57d65f699f-xqzkv\" (UID: \"e97796df-8a16-4687-b6c1-23b06accd1e7\") " pod="openstack/dnsmasq-dns-57d65f699f-xqzkv"
Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.114906 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5hlb4\" (UniqueName: \"kubernetes.io/projected/e97796df-8a16-4687-b6c1-23b06accd1e7-kube-api-access-5hlb4\") pod \"dnsmasq-dns-57d65f699f-xqzkv\" (UID: \"e97796df-8a16-4687-b6c1-23b06accd1e7\") " pod="openstack/dnsmasq-dns-57d65f699f-xqzkv"
Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.114942 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/39780bc9-4ec7-4578-b64e-40c2a1bba06c-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-df85s\" (UID: \"39780bc9-4ec7-4578-b64e-40c2a1bba06c\") " pod="openstack/ovn-controller-metrics-df85s"
Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.114964 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/39780bc9-4ec7-4578-b64e-40c2a1bba06c-combined-ca-bundle\") pod \"ovn-controller-metrics-df85s\" (UID: \"39780bc9-4ec7-4578-b64e-40c2a1bba06c\") " pod="openstack/ovn-controller-metrics-df85s"
Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.114989 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/44b61157-b706-44e2-9ca9-a5709e68f92c-dispersionconf\") pod \"swift-ring-rebalance-tdgxl\" (UID: \"44b61157-b706-44e2-9ca9-a5709e68f92c\") " pod="openstack/swift-ring-rebalance-tdgxl"
Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.115016 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/44b61157-b706-44e2-9ca9-a5709e68f92c-ring-data-devices\") pod \"swift-ring-rebalance-tdgxl\" (UID: \"44b61157-b706-44e2-9ca9-a5709e68f92c\") " pod="openstack/swift-ring-rebalance-tdgxl"
Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.115045 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rk5jn\" (UniqueName: \"kubernetes.io/projected/44b61157-b706-44e2-9ca9-a5709e68f92c-kube-api-access-rk5jn\") pod \"swift-ring-rebalance-tdgxl\" (UID: \"44b61157-b706-44e2-9ca9-a5709e68f92c\") " pod="openstack/swift-ring-rebalance-tdgxl"
Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.115066 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/44b61157-b706-44e2-9ca9-a5709e68f92c-swiftconf\") pod \"swift-ring-rebalance-tdgxl\" (UID: \"44b61157-b706-44e2-9ca9-a5709e68f92c\") " pod="openstack/swift-ring-rebalance-tdgxl"
Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.115168 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/1477cefa-db6e-45d7-adb1-d7c1369a3610-etc-swift\") pod \"swift-storage-0\" (UID: \"1477cefa-db6e-45d7-adb1-d7c1369a3610\") " pod="openstack/swift-storage-0"
Oct 09 13:45:16 crc kubenswrapper[4762]: E1009 13:45:16.115393 4762 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found
Oct 09 13:45:16 crc kubenswrapper[4762]: E1009 13:45:16.115407 4762 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found
Oct 09 13:45:16 crc kubenswrapper[4762]: E1009 13:45:16.115454 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/1477cefa-db6e-45d7-adb1-d7c1369a3610-etc-swift podName:1477cefa-db6e-45d7-adb1-d7c1369a3610 nodeName:}" failed. No retries permitted until 2025-10-09 13:45:17.115440444 +0000 UTC m=+1192.889231483 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/1477cefa-db6e-45d7-adb1-d7c1369a3610-etc-swift") pod "swift-storage-0" (UID: "1477cefa-db6e-45d7-adb1-d7c1369a3610") : configmap "swift-ring-files" not found
Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.115754 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/39780bc9-4ec7-4578-b64e-40c2a1bba06c-config\") pod \"ovn-controller-metrics-df85s\" (UID: \"39780bc9-4ec7-4578-b64e-40c2a1bba06c\") " pod="openstack/ovn-controller-metrics-df85s"
Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.115796 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/39780bc9-4ec7-4578-b64e-40c2a1bba06c-ovs-rundir\") pod \"ovn-controller-metrics-df85s\" (UID: \"39780bc9-4ec7-4578-b64e-40c2a1bba06c\") " pod="openstack/ovn-controller-metrics-df85s"
Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.115815 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8cnq5\" (UniqueName: \"kubernetes.io/projected/39780bc9-4ec7-4578-b64e-40c2a1bba06c-kube-api-access-8cnq5\") pod \"ovn-controller-metrics-df85s\" (UID: \"39780bc9-4ec7-4578-b64e-40c2a1bba06c\") " pod="openstack/ovn-controller-metrics-df85s"
Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.115843 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/44b61157-b706-44e2-9ca9-a5709e68f92c-etc-swift\") pod \"swift-ring-rebalance-tdgxl\" (UID: \"44b61157-b706-44e2-9ca9-a5709e68f92c\") " pod="openstack/swift-ring-rebalance-tdgxl"
Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.115866 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/44b61157-b706-44e2-9ca9-a5709e68f92c-combined-ca-bundle\") pod \"swift-ring-rebalance-tdgxl\" (UID: \"44b61157-b706-44e2-9ca9-a5709e68f92c\") " pod="openstack/swift-ring-rebalance-tdgxl"
Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.115898 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/39780bc9-4ec7-4578-b64e-40c2a1bba06c-ovn-rundir\") pod \"ovn-controller-metrics-df85s\" (UID: \"39780bc9-4ec7-4578-b64e-40c2a1bba06c\") " pod="openstack/ovn-controller-metrics-df85s"
Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.115926 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e97796df-8a16-4687-b6c1-23b06accd1e7-config\") pod \"dnsmasq-dns-57d65f699f-xqzkv\" (UID: \"e97796df-8a16-4687-b6c1-23b06accd1e7\") " pod="openstack/dnsmasq-dns-57d65f699f-xqzkv"
Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.115941 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e97796df-8a16-4687-b6c1-23b06accd1e7-ovsdbserver-nb\") pod \"dnsmasq-dns-57d65f699f-xqzkv\" (UID: \"e97796df-8a16-4687-b6c1-23b06accd1e7\") " pod="openstack/dnsmasq-dns-57d65f699f-xqzkv"
Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.116054 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e97796df-8a16-4687-b6c1-23b06accd1e7-dns-svc\") pod \"dnsmasq-dns-57d65f699f-xqzkv\" (UID: \"e97796df-8a16-4687-b6c1-23b06accd1e7\") " pod="openstack/dnsmasq-dns-57d65f699f-xqzkv"
Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.116082 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/44b61157-b706-44e2-9ca9-a5709e68f92c-scripts\") pod \"swift-ring-rebalance-tdgxl\" (UID: \"44b61157-b706-44e2-9ca9-a5709e68f92c\") " pod="openstack/swift-ring-rebalance-tdgxl"
Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.116428 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/44b61157-b706-44e2-9ca9-a5709e68f92c-etc-swift\") pod \"swift-ring-rebalance-tdgxl\" (UID: \"44b61157-b706-44e2-9ca9-a5709e68f92c\") " pod="openstack/swift-ring-rebalance-tdgxl"
Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.116864 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e97796df-8a16-4687-b6c1-23b06accd1e7-config\") pod \"dnsmasq-dns-57d65f699f-xqzkv\" (UID: \"e97796df-8a16-4687-b6c1-23b06accd1e7\") " pod="openstack/dnsmasq-dns-57d65f699f-xqzkv"
Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.117288 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/44b61157-b706-44e2-9ca9-a5709e68f92c-ring-data-devices\") pod \"swift-ring-rebalance-tdgxl\" (UID: \"44b61157-b706-44e2-9ca9-a5709e68f92c\") " pod="openstack/swift-ring-rebalance-tdgxl"
Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.119497 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e97796df-8a16-4687-b6c1-23b06accd1e7-ovsdbserver-nb\") pod \"dnsmasq-dns-57d65f699f-xqzkv\" (UID: \"e97796df-8a16-4687-b6c1-23b06accd1e7\") " pod="openstack/dnsmasq-dns-57d65f699f-xqzkv"
Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.121561 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/44b61157-b706-44e2-9ca9-a5709e68f92c-dispersionconf\") pod \"swift-ring-rebalance-tdgxl\" (UID: \"44b61157-b706-44e2-9ca9-a5709e68f92c\") " pod="openstack/swift-ring-rebalance-tdgxl"
Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.121707 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/44b61157-b706-44e2-9ca9-a5709e68f92c-swiftconf\") pod \"swift-ring-rebalance-tdgxl\" (UID: \"44b61157-b706-44e2-9ca9-a5709e68f92c\") " pod="openstack/swift-ring-rebalance-tdgxl"
Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.124137 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/44b61157-b706-44e2-9ca9-a5709e68f92c-combined-ca-bundle\") pod \"swift-ring-rebalance-tdgxl\" (UID: \"44b61157-b706-44e2-9ca9-a5709e68f92c\") " pod="openstack/swift-ring-rebalance-tdgxl"
Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.130814 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rk5jn\" (UniqueName: \"kubernetes.io/projected/44b61157-b706-44e2-9ca9-a5709e68f92c-kube-api-access-rk5jn\") pod \"swift-ring-rebalance-tdgxl\" (UID: \"44b61157-b706-44e2-9ca9-a5709e68f92c\") "
pod="openstack/swift-ring-rebalance-tdgxl" Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.134498 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5hlb4\" (UniqueName: \"kubernetes.io/projected/e97796df-8a16-4687-b6c1-23b06accd1e7-kube-api-access-5hlb4\") pod \"dnsmasq-dns-57d65f699f-xqzkv\" (UID: \"e97796df-8a16-4687-b6c1-23b06accd1e7\") " pod="openstack/dnsmasq-dns-57d65f699f-xqzkv" Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.204589 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d65f699f-xqzkv" Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.218027 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/39780bc9-4ec7-4578-b64e-40c2a1bba06c-config\") pod \"ovn-controller-metrics-df85s\" (UID: \"39780bc9-4ec7-4578-b64e-40c2a1bba06c\") " pod="openstack/ovn-controller-metrics-df85s" Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.218083 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/39780bc9-4ec7-4578-b64e-40c2a1bba06c-ovs-rundir\") pod \"ovn-controller-metrics-df85s\" (UID: \"39780bc9-4ec7-4578-b64e-40c2a1bba06c\") " pod="openstack/ovn-controller-metrics-df85s" Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.218113 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8cnq5\" (UniqueName: \"kubernetes.io/projected/39780bc9-4ec7-4578-b64e-40c2a1bba06c-kube-api-access-8cnq5\") pod \"ovn-controller-metrics-df85s\" (UID: \"39780bc9-4ec7-4578-b64e-40c2a1bba06c\") " pod="openstack/ovn-controller-metrics-df85s" Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.218154 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/39780bc9-4ec7-4578-b64e-40c2a1bba06c-ovn-rundir\") pod \"ovn-controller-metrics-df85s\" (UID: \"39780bc9-4ec7-4578-b64e-40c2a1bba06c\") " pod="openstack/ovn-controller-metrics-df85s" Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.218292 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/39780bc9-4ec7-4578-b64e-40c2a1bba06c-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-df85s\" (UID: \"39780bc9-4ec7-4578-b64e-40c2a1bba06c\") " pod="openstack/ovn-controller-metrics-df85s" Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.218317 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/39780bc9-4ec7-4578-b64e-40c2a1bba06c-combined-ca-bundle\") pod \"ovn-controller-metrics-df85s\" (UID: \"39780bc9-4ec7-4578-b64e-40c2a1bba06c\") " pod="openstack/ovn-controller-metrics-df85s" Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.219892 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/39780bc9-4ec7-4578-b64e-40c2a1bba06c-ovn-rundir\") pod \"ovn-controller-metrics-df85s\" (UID: \"39780bc9-4ec7-4578-b64e-40c2a1bba06c\") " pod="openstack/ovn-controller-metrics-df85s" Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.220325 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: 
\"kubernetes.io/host-path/39780bc9-4ec7-4578-b64e-40c2a1bba06c-ovs-rundir\") pod \"ovn-controller-metrics-df85s\" (UID: \"39780bc9-4ec7-4578-b64e-40c2a1bba06c\") " pod="openstack/ovn-controller-metrics-df85s" Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.220899 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/39780bc9-4ec7-4578-b64e-40c2a1bba06c-config\") pod \"ovn-controller-metrics-df85s\" (UID: \"39780bc9-4ec7-4578-b64e-40c2a1bba06c\") " pod="openstack/ovn-controller-metrics-df85s" Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.225437 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/39780bc9-4ec7-4578-b64e-40c2a1bba06c-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-df85s\" (UID: \"39780bc9-4ec7-4578-b64e-40c2a1bba06c\") " pod="openstack/ovn-controller-metrics-df85s" Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.227877 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/39780bc9-4ec7-4578-b64e-40c2a1bba06c-combined-ca-bundle\") pod \"ovn-controller-metrics-df85s\" (UID: \"39780bc9-4ec7-4578-b64e-40c2a1bba06c\") " pod="openstack/ovn-controller-metrics-df85s" Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.246538 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8cnq5\" (UniqueName: \"kubernetes.io/projected/39780bc9-4ec7-4578-b64e-40c2a1bba06c-kube-api-access-8cnq5\") pod \"ovn-controller-metrics-df85s\" (UID: \"39780bc9-4ec7-4578-b64e-40c2a1bba06c\") " pod="openstack/ovn-controller-metrics-df85s" Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.249753 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-tdgxl" Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.289557 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-k5xnq" Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.317804 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7cb5889db5-v5jd5"] Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.325767 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-df85s" Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.348829 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-bwvnp"] Oct 09 13:45:16 crc kubenswrapper[4762]: E1009 13:45:16.349801 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cb6e7fc4-f16d-49a1-888a-ad4a5c1a23d3" containerName="init" Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.349821 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb6e7fc4-f16d-49a1-888a-ad4a5c1a23d3" containerName="init" Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.350415 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="cb6e7fc4-f16d-49a1-888a-ad4a5c1a23d3" containerName="init" Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.352779 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-b8fbc5445-bwvnp" Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.356171 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb" Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.361567 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-bwvnp"] Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.426167 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cb6e7fc4-f16d-49a1-888a-ad4a5c1a23d3-dns-svc\") pod \"cb6e7fc4-f16d-49a1-888a-ad4a5c1a23d3\" (UID: \"cb6e7fc4-f16d-49a1-888a-ad4a5c1a23d3\") " Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.426610 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n6qmw\" (UniqueName: \"kubernetes.io/projected/cb6e7fc4-f16d-49a1-888a-ad4a5c1a23d3-kube-api-access-n6qmw\") pod \"cb6e7fc4-f16d-49a1-888a-ad4a5c1a23d3\" (UID: \"cb6e7fc4-f16d-49a1-888a-ad4a5c1a23d3\") " Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.426672 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cb6e7fc4-f16d-49a1-888a-ad4a5c1a23d3-config\") pod \"cb6e7fc4-f16d-49a1-888a-ad4a5c1a23d3\" (UID: \"cb6e7fc4-f16d-49a1-888a-ad4a5c1a23d3\") " Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.427622 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ee34933d-92fc-4723-b232-481e48b27ebf-ovsdbserver-sb\") pod \"dnsmasq-dns-b8fbc5445-bwvnp\" (UID: \"ee34933d-92fc-4723-b232-481e48b27ebf\") " pod="openstack/dnsmasq-dns-b8fbc5445-bwvnp" Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.427734 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ee34933d-92fc-4723-b232-481e48b27ebf-config\") pod \"dnsmasq-dns-b8fbc5445-bwvnp\" (UID: \"ee34933d-92fc-4723-b232-481e48b27ebf\") " pod="openstack/dnsmasq-dns-b8fbc5445-bwvnp" Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.427918 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bh8dz\" (UniqueName: \"kubernetes.io/projected/ee34933d-92fc-4723-b232-481e48b27ebf-kube-api-access-bh8dz\") pod \"dnsmasq-dns-b8fbc5445-bwvnp\" (UID: \"ee34933d-92fc-4723-b232-481e48b27ebf\") " pod="openstack/dnsmasq-dns-b8fbc5445-bwvnp" Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.428001 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ee34933d-92fc-4723-b232-481e48b27ebf-dns-svc\") pod \"dnsmasq-dns-b8fbc5445-bwvnp\" (UID: \"ee34933d-92fc-4723-b232-481e48b27ebf\") " pod="openstack/dnsmasq-dns-b8fbc5445-bwvnp" Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.428025 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ee34933d-92fc-4723-b232-481e48b27ebf-ovsdbserver-nb\") pod \"dnsmasq-dns-b8fbc5445-bwvnp\" (UID: \"ee34933d-92fc-4723-b232-481e48b27ebf\") " pod="openstack/dnsmasq-dns-b8fbc5445-bwvnp" Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.430454 4762 operation_generator.go:803] 
UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cb6e7fc4-f16d-49a1-888a-ad4a5c1a23d3-kube-api-access-n6qmw" (OuterVolumeSpecName: "kube-api-access-n6qmw") pod "cb6e7fc4-f16d-49a1-888a-ad4a5c1a23d3" (UID: "cb6e7fc4-f16d-49a1-888a-ad4a5c1a23d3"). InnerVolumeSpecName "kube-api-access-n6qmw". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.475142 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cb6e7fc4-f16d-49a1-888a-ad4a5c1a23d3-config" (OuterVolumeSpecName: "config") pod "cb6e7fc4-f16d-49a1-888a-ad4a5c1a23d3" (UID: "cb6e7fc4-f16d-49a1-888a-ad4a5c1a23d3"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.485631 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cb6e7fc4-f16d-49a1-888a-ad4a5c1a23d3-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "cb6e7fc4-f16d-49a1-888a-ad4a5c1a23d3" (UID: "cb6e7fc4-f16d-49a1-888a-ad4a5c1a23d3"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.529797 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bh8dz\" (UniqueName: \"kubernetes.io/projected/ee34933d-92fc-4723-b232-481e48b27ebf-kube-api-access-bh8dz\") pod \"dnsmasq-dns-b8fbc5445-bwvnp\" (UID: \"ee34933d-92fc-4723-b232-481e48b27ebf\") " pod="openstack/dnsmasq-dns-b8fbc5445-bwvnp" Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.529887 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ee34933d-92fc-4723-b232-481e48b27ebf-dns-svc\") pod \"dnsmasq-dns-b8fbc5445-bwvnp\" (UID: \"ee34933d-92fc-4723-b232-481e48b27ebf\") " pod="openstack/dnsmasq-dns-b8fbc5445-bwvnp" Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.529914 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ee34933d-92fc-4723-b232-481e48b27ebf-ovsdbserver-nb\") pod \"dnsmasq-dns-b8fbc5445-bwvnp\" (UID: \"ee34933d-92fc-4723-b232-481e48b27ebf\") " pod="openstack/dnsmasq-dns-b8fbc5445-bwvnp" Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.530002 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ee34933d-92fc-4723-b232-481e48b27ebf-ovsdbserver-sb\") pod \"dnsmasq-dns-b8fbc5445-bwvnp\" (UID: \"ee34933d-92fc-4723-b232-481e48b27ebf\") " pod="openstack/dnsmasq-dns-b8fbc5445-bwvnp" Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.530068 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ee34933d-92fc-4723-b232-481e48b27ebf-config\") pod \"dnsmasq-dns-b8fbc5445-bwvnp\" (UID: \"ee34933d-92fc-4723-b232-481e48b27ebf\") " pod="openstack/dnsmasq-dns-b8fbc5445-bwvnp" Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.530125 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n6qmw\" (UniqueName: \"kubernetes.io/projected/cb6e7fc4-f16d-49a1-888a-ad4a5c1a23d3-kube-api-access-n6qmw\") on node \"crc\" DevicePath \"\"" Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.530139 4762 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/cb6e7fc4-f16d-49a1-888a-ad4a5c1a23d3-config\") on node \"crc\" DevicePath \"\"" Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.530153 4762 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cb6e7fc4-f16d-49a1-888a-ad4a5c1a23d3-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.531144 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ee34933d-92fc-4723-b232-481e48b27ebf-config\") pod \"dnsmasq-dns-b8fbc5445-bwvnp\" (UID: \"ee34933d-92fc-4723-b232-481e48b27ebf\") " pod="openstack/dnsmasq-dns-b8fbc5445-bwvnp" Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.532020 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ee34933d-92fc-4723-b232-481e48b27ebf-dns-svc\") pod \"dnsmasq-dns-b8fbc5445-bwvnp\" (UID: \"ee34933d-92fc-4723-b232-481e48b27ebf\") " pod="openstack/dnsmasq-dns-b8fbc5445-bwvnp" Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.532757 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ee34933d-92fc-4723-b232-481e48b27ebf-ovsdbserver-nb\") pod \"dnsmasq-dns-b8fbc5445-bwvnp\" (UID: \"ee34933d-92fc-4723-b232-481e48b27ebf\") " pod="openstack/dnsmasq-dns-b8fbc5445-bwvnp" Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.533851 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ee34933d-92fc-4723-b232-481e48b27ebf-ovsdbserver-sb\") pod \"dnsmasq-dns-b8fbc5445-bwvnp\" (UID: \"ee34933d-92fc-4723-b232-481e48b27ebf\") " pod="openstack/dnsmasq-dns-b8fbc5445-bwvnp" Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.562500 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bh8dz\" (UniqueName: \"kubernetes.io/projected/ee34933d-92fc-4723-b232-481e48b27ebf-kube-api-access-bh8dz\") pod \"dnsmasq-dns-b8fbc5445-bwvnp\" (UID: \"ee34933d-92fc-4723-b232-481e48b27ebf\") " pod="openstack/dnsmasq-dns-b8fbc5445-bwvnp" Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.618244 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d65f699f-xqzkv"] Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.637431 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d65f699f-xqzkv" event={"ID":"e97796df-8a16-4687-b6c1-23b06accd1e7","Type":"ContainerStarted","Data":"baea267c0d40861da6a8239f04f4d397d783e23a9051327840f1c30d9e0d5bf2"} Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.667234 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7cb5889db5-v5jd5" event={"ID":"c82f1ee5-3ab3-4672-b5a6-bc0e94a795a6","Type":"ContainerStarted","Data":"70e23d10bd165c86a501ebc577110b5d12858bba61ed0dcd70d8a3773a41a5f6"} Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.668226 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7cb5889db5-v5jd5" Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.673159 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-k5xnq" event={"ID":"cb6e7fc4-f16d-49a1-888a-ad4a5c1a23d3","Type":"ContainerDied","Data":"997feb87c4d79ebf88a3b2efd047b6a7cdff1693b404050db04bb6283ef76d4e"} Oct 09 13:45:16 crc 
kubenswrapper[4762]: I1009 13:45:16.673211 4762 scope.go:117] "RemoveContainer" containerID="daf6d7e235ca94ad2cb9db06fe720a1cc7f3b3cf63d0c837f680c02ea90ad491" Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.673801 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-k5xnq" Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.690099 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b8fbc5445-bwvnp" Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.692246 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7cb5889db5-v5jd5" podStartSLOduration=2.692225678 podStartE2EDuration="2.692225678s" podCreationTimestamp="2025-10-09 13:45:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:45:16.690067152 +0000 UTC m=+1192.463858201" watchObservedRunningTime="2025-10-09 13:45:16.692225678 +0000 UTC m=+1192.466016727" Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.872688 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-k5xnq"] Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.889786 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-k5xnq"] Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.891727 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0" Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.892504 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0" Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.985952 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9935b4b6-437d-4d16-951c-a36c15ba5257" path="/var/lib/kubelet/pods/9935b4b6-437d-4d16-951c-a36c15ba5257/volumes" Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.986498 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cb6e7fc4-f16d-49a1-888a-ad4a5c1a23d3" path="/var/lib/kubelet/pods/cb6e7fc4-f16d-49a1-888a-ad4a5c1a23d3/volumes" Oct 09 13:45:16 crc kubenswrapper[4762]: I1009 13:45:16.995455 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-tdgxl"] Oct 09 13:45:17 crc kubenswrapper[4762]: I1009 13:45:17.012461 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0" Oct 09 13:45:17 crc kubenswrapper[4762]: I1009 13:45:17.039919 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-df85s"] Oct 09 13:45:17 crc kubenswrapper[4762]: W1009 13:45:17.079278 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod39780bc9_4ec7_4578_b64e_40c2a1bba06c.slice/crio-e78b96ed754c9e223cf93c384cfd32a3c3239d321ebceaf3c67c3080ef7e0eba WatchSource:0}: Error finding container e78b96ed754c9e223cf93c384cfd32a3c3239d321ebceaf3c67c3080ef7e0eba: Status 404 returned error can't find the container with id e78b96ed754c9e223cf93c384cfd32a3c3239d321ebceaf3c67c3080ef7e0eba Oct 09 13:45:17 crc kubenswrapper[4762]: I1009 13:45:17.149970 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/1477cefa-db6e-45d7-adb1-d7c1369a3610-etc-swift\") pod 
\"swift-storage-0\" (UID: \"1477cefa-db6e-45d7-adb1-d7c1369a3610\") " pod="openstack/swift-storage-0" Oct 09 13:45:17 crc kubenswrapper[4762]: E1009 13:45:17.150219 4762 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Oct 09 13:45:17 crc kubenswrapper[4762]: E1009 13:45:17.150258 4762 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Oct 09 13:45:17 crc kubenswrapper[4762]: E1009 13:45:17.150326 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/1477cefa-db6e-45d7-adb1-d7c1369a3610-etc-swift podName:1477cefa-db6e-45d7-adb1-d7c1369a3610 nodeName:}" failed. No retries permitted until 2025-10-09 13:45:19.150304929 +0000 UTC m=+1194.924095968 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/1477cefa-db6e-45d7-adb1-d7c1369a3610-etc-swift") pod "swift-storage-0" (UID: "1477cefa-db6e-45d7-adb1-d7c1369a3610") : configmap "swift-ring-files" not found Oct 09 13:45:17 crc kubenswrapper[4762]: I1009 13:45:17.402517 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-bwvnp"] Oct 09 13:45:17 crc kubenswrapper[4762]: I1009 13:45:17.681470 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-tdgxl" event={"ID":"44b61157-b706-44e2-9ca9-a5709e68f92c","Type":"ContainerStarted","Data":"2ff74bdec45ab1f2829ff66b92f63e9639903e954be91dfb0bad631370af9b5c"} Oct 09 13:45:17 crc kubenswrapper[4762]: I1009 13:45:17.683681 4762 generic.go:334] "Generic (PLEG): container finished" podID="e97796df-8a16-4687-b6c1-23b06accd1e7" containerID="886cf1623ea017fa655589d278bcc9f23470dbfe8f19c0dcff65ed6d5df954a4" exitCode=0 Oct 09 13:45:17 crc kubenswrapper[4762]: I1009 13:45:17.683754 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d65f699f-xqzkv" event={"ID":"e97796df-8a16-4687-b6c1-23b06accd1e7","Type":"ContainerDied","Data":"886cf1623ea017fa655589d278bcc9f23470dbfe8f19c0dcff65ed6d5df954a4"} Oct 09 13:45:17 crc kubenswrapper[4762]: I1009 13:45:17.685053 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-bwvnp" event={"ID":"ee34933d-92fc-4723-b232-481e48b27ebf","Type":"ContainerStarted","Data":"160016cea90eb609a06808b538868a6cc195fe8aa12abf272fc370a0ce203279"} Oct 09 13:45:17 crc kubenswrapper[4762]: I1009 13:45:17.688373 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-df85s" event={"ID":"39780bc9-4ec7-4578-b64e-40c2a1bba06c","Type":"ContainerStarted","Data":"e78b96ed754c9e223cf93c384cfd32a3c3239d321ebceaf3c67c3080ef7e0eba"} Oct 09 13:45:17 crc kubenswrapper[4762]: I1009 13:45:17.698900 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7cb5889db5-v5jd5" podUID="c82f1ee5-3ab3-4672-b5a6-bc0e94a795a6" containerName="dnsmasq-dns" containerID="cri-o://70e23d10bd165c86a501ebc577110b5d12858bba61ed0dcd70d8a3773a41a5f6" gracePeriod=10 Oct 09 13:45:17 crc kubenswrapper[4762]: I1009 13:45:17.794989 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0" Oct 09 13:45:17 crc kubenswrapper[4762]: I1009 13:45:17.980519 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"] Oct 09 13:45:17 crc kubenswrapper[4762]: I1009 13:45:17.982167 4762 util.go:30] "No sandbox 
for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Oct 09 13:45:17 crc kubenswrapper[4762]: I1009 13:45:17.985524 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config" Oct 09 13:45:17 crc kubenswrapper[4762]: I1009 13:45:17.985605 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts" Oct 09 13:45:17 crc kubenswrapper[4762]: I1009 13:45:17.987031 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-n2bj9" Oct 09 13:45:17 crc kubenswrapper[4762]: I1009 13:45:17.989405 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs" Oct 09 13:45:17 crc kubenswrapper[4762]: I1009 13:45:17.995614 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Oct 09 13:45:18 crc kubenswrapper[4762]: I1009 13:45:18.103066 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/39888a9b-c1cd-496e-b44e-a27212faac74-config\") pod \"ovn-northd-0\" (UID: \"39888a9b-c1cd-496e-b44e-a27212faac74\") " pod="openstack/ovn-northd-0" Oct 09 13:45:18 crc kubenswrapper[4762]: I1009 13:45:18.103136 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/39888a9b-c1cd-496e-b44e-a27212faac74-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"39888a9b-c1cd-496e-b44e-a27212faac74\") " pod="openstack/ovn-northd-0" Oct 09 13:45:18 crc kubenswrapper[4762]: I1009 13:45:18.103163 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/39888a9b-c1cd-496e-b44e-a27212faac74-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"39888a9b-c1cd-496e-b44e-a27212faac74\") " pod="openstack/ovn-northd-0" Oct 09 13:45:18 crc kubenswrapper[4762]: I1009 13:45:18.103187 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tw9n2\" (UniqueName: \"kubernetes.io/projected/39888a9b-c1cd-496e-b44e-a27212faac74-kube-api-access-tw9n2\") pod \"ovn-northd-0\" (UID: \"39888a9b-c1cd-496e-b44e-a27212faac74\") " pod="openstack/ovn-northd-0" Oct 09 13:45:18 crc kubenswrapper[4762]: I1009 13:45:18.103210 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/39888a9b-c1cd-496e-b44e-a27212faac74-scripts\") pod \"ovn-northd-0\" (UID: \"39888a9b-c1cd-496e-b44e-a27212faac74\") " pod="openstack/ovn-northd-0" Oct 09 13:45:18 crc kubenswrapper[4762]: I1009 13:45:18.103234 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/39888a9b-c1cd-496e-b44e-a27212faac74-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"39888a9b-c1cd-496e-b44e-a27212faac74\") " pod="openstack/ovn-northd-0" Oct 09 13:45:18 crc kubenswrapper[4762]: I1009 13:45:18.103301 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/39888a9b-c1cd-496e-b44e-a27212faac74-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"39888a9b-c1cd-496e-b44e-a27212faac74\") " pod="openstack/ovn-northd-0" Oct 09 13:45:18 
Oct 09 13:45:18 crc kubenswrapper[4762]: I1009 13:45:18.204945 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/39888a9b-c1cd-496e-b44e-a27212faac74-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"39888a9b-c1cd-496e-b44e-a27212faac74\") " pod="openstack/ovn-northd-0"
Oct 09 13:45:18 crc kubenswrapper[4762]: I1009 13:45:18.205127 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/39888a9b-c1cd-496e-b44e-a27212faac74-config\") pod \"ovn-northd-0\" (UID: \"39888a9b-c1cd-496e-b44e-a27212faac74\") " pod="openstack/ovn-northd-0"
Oct 09 13:45:18 crc kubenswrapper[4762]: I1009 13:45:18.205173 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/39888a9b-c1cd-496e-b44e-a27212faac74-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"39888a9b-c1cd-496e-b44e-a27212faac74\") " pod="openstack/ovn-northd-0"
Oct 09 13:45:18 crc kubenswrapper[4762]: I1009 13:45:18.205197 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/39888a9b-c1cd-496e-b44e-a27212faac74-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"39888a9b-c1cd-496e-b44e-a27212faac74\") " pod="openstack/ovn-northd-0"
Oct 09 13:45:18 crc kubenswrapper[4762]: I1009 13:45:18.205221 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tw9n2\" (UniqueName: \"kubernetes.io/projected/39888a9b-c1cd-496e-b44e-a27212faac74-kube-api-access-tw9n2\") pod \"ovn-northd-0\" (UID: \"39888a9b-c1cd-496e-b44e-a27212faac74\") " pod="openstack/ovn-northd-0"
Oct 09 13:45:18 crc kubenswrapper[4762]: I1009 13:45:18.205252 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/39888a9b-c1cd-496e-b44e-a27212faac74-scripts\") pod \"ovn-northd-0\" (UID: \"39888a9b-c1cd-496e-b44e-a27212faac74\") " pod="openstack/ovn-northd-0"
Oct 09 13:45:18 crc kubenswrapper[4762]: I1009 13:45:18.205277 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/39888a9b-c1cd-496e-b44e-a27212faac74-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"39888a9b-c1cd-496e-b44e-a27212faac74\") " pod="openstack/ovn-northd-0"
Oct 09 13:45:18 crc kubenswrapper[4762]: I1009 13:45:18.207278 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/39888a9b-c1cd-496e-b44e-a27212faac74-config\") pod \"ovn-northd-0\" (UID: \"39888a9b-c1cd-496e-b44e-a27212faac74\") " pod="openstack/ovn-northd-0"
Oct 09 13:45:18 crc kubenswrapper[4762]: I1009 13:45:18.208248 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/39888a9b-c1cd-496e-b44e-a27212faac74-scripts\") pod \"ovn-northd-0\" (UID: \"39888a9b-c1cd-496e-b44e-a27212faac74\") " pod="openstack/ovn-northd-0"
Oct 09 13:45:18 crc kubenswrapper[4762]: I1009 13:45:18.208566 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/39888a9b-c1cd-496e-b44e-a27212faac74-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"39888a9b-c1cd-496e-b44e-a27212faac74\") " pod="openstack/ovn-northd-0"
Oct 09 13:45:18 crc kubenswrapper[4762]: I1009 13:45:18.211300 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/39888a9b-c1cd-496e-b44e-a27212faac74-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"39888a9b-c1cd-496e-b44e-a27212faac74\") " pod="openstack/ovn-northd-0"
Oct 09 13:45:18 crc kubenswrapper[4762]: I1009 13:45:18.216436 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/39888a9b-c1cd-496e-b44e-a27212faac74-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"39888a9b-c1cd-496e-b44e-a27212faac74\") " pod="openstack/ovn-northd-0"
Oct 09 13:45:18 crc kubenswrapper[4762]: I1009 13:45:18.220666 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/39888a9b-c1cd-496e-b44e-a27212faac74-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"39888a9b-c1cd-496e-b44e-a27212faac74\") " pod="openstack/ovn-northd-0"
Oct 09 13:45:18 crc kubenswrapper[4762]: I1009 13:45:18.228625 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tw9n2\" (UniqueName: \"kubernetes.io/projected/39888a9b-c1cd-496e-b44e-a27212faac74-kube-api-access-tw9n2\") pod \"ovn-northd-0\" (UID: \"39888a9b-c1cd-496e-b44e-a27212faac74\") " pod="openstack/ovn-northd-0"
Oct 09 13:45:18 crc kubenswrapper[4762]: I1009 13:45:18.323296 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7cb5889db5-v5jd5"
Oct 09 13:45:18 crc kubenswrapper[4762]: I1009 13:45:18.334797 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0"
Oct 09 13:45:18 crc kubenswrapper[4762]: I1009 13:45:18.510414 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c82f1ee5-3ab3-4672-b5a6-bc0e94a795a6-dns-svc\") pod \"c82f1ee5-3ab3-4672-b5a6-bc0e94a795a6\" (UID: \"c82f1ee5-3ab3-4672-b5a6-bc0e94a795a6\") "
Oct 09 13:45:18 crc kubenswrapper[4762]: I1009 13:45:18.510590 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c82f1ee5-3ab3-4672-b5a6-bc0e94a795a6-config\") pod \"c82f1ee5-3ab3-4672-b5a6-bc0e94a795a6\" (UID: \"c82f1ee5-3ab3-4672-b5a6-bc0e94a795a6\") "
Oct 09 13:45:18 crc kubenswrapper[4762]: I1009 13:45:18.510626 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7p72c\" (UniqueName: \"kubernetes.io/projected/c82f1ee5-3ab3-4672-b5a6-bc0e94a795a6-kube-api-access-7p72c\") pod \"c82f1ee5-3ab3-4672-b5a6-bc0e94a795a6\" (UID: \"c82f1ee5-3ab3-4672-b5a6-bc0e94a795a6\") "
Oct 09 13:45:18 crc kubenswrapper[4762]: I1009 13:45:18.518284 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c82f1ee5-3ab3-4672-b5a6-bc0e94a795a6-kube-api-access-7p72c" (OuterVolumeSpecName: "kube-api-access-7p72c") pod "c82f1ee5-3ab3-4672-b5a6-bc0e94a795a6" (UID: "c82f1ee5-3ab3-4672-b5a6-bc0e94a795a6"). InnerVolumeSpecName "kube-api-access-7p72c". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 09 13:45:18 crc kubenswrapper[4762]: I1009 13:45:18.567500 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c82f1ee5-3ab3-4672-b5a6-bc0e94a795a6-config" (OuterVolumeSpecName: "config") pod "c82f1ee5-3ab3-4672-b5a6-bc0e94a795a6" (UID: "c82f1ee5-3ab3-4672-b5a6-bc0e94a795a6"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 09 13:45:18 crc kubenswrapper[4762]: I1009 13:45:18.573081 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c82f1ee5-3ab3-4672-b5a6-bc0e94a795a6-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "c82f1ee5-3ab3-4672-b5a6-bc0e94a795a6" (UID: "c82f1ee5-3ab3-4672-b5a6-bc0e94a795a6"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 09 13:45:18 crc kubenswrapper[4762]: I1009 13:45:18.612568 4762 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c82f1ee5-3ab3-4672-b5a6-bc0e94a795a6-config\") on node \"crc\" DevicePath \"\""
Oct 09 13:45:18 crc kubenswrapper[4762]: I1009 13:45:18.612616 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7p72c\" (UniqueName: \"kubernetes.io/projected/c82f1ee5-3ab3-4672-b5a6-bc0e94a795a6-kube-api-access-7p72c\") on node \"crc\" DevicePath \"\""
Oct 09 13:45:18 crc kubenswrapper[4762]: I1009 13:45:18.612652 4762 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c82f1ee5-3ab3-4672-b5a6-bc0e94a795a6-dns-svc\") on node \"crc\" DevicePath \"\""
Oct 09 13:45:18 crc kubenswrapper[4762]: I1009 13:45:18.714457 4762 generic.go:334] "Generic (PLEG): container finished" podID="c82f1ee5-3ab3-4672-b5a6-bc0e94a795a6" containerID="70e23d10bd165c86a501ebc577110b5d12858bba61ed0dcd70d8a3773a41a5f6" exitCode=0
Oct 09 13:45:18 crc kubenswrapper[4762]: I1009 13:45:18.714510 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7cb5889db5-v5jd5"
Oct 09 13:45:18 crc kubenswrapper[4762]: I1009 13:45:18.714545 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7cb5889db5-v5jd5" event={"ID":"c82f1ee5-3ab3-4672-b5a6-bc0e94a795a6","Type":"ContainerDied","Data":"70e23d10bd165c86a501ebc577110b5d12858bba61ed0dcd70d8a3773a41a5f6"}
Oct 09 13:45:18 crc kubenswrapper[4762]: I1009 13:45:18.714573 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7cb5889db5-v5jd5" event={"ID":"c82f1ee5-3ab3-4672-b5a6-bc0e94a795a6","Type":"ContainerDied","Data":"992bc2e251746d601d64a53f7d285a6e53b7299280a2dde2ae89b6c4b05b649f"}
Oct 09 13:45:18 crc kubenswrapper[4762]: I1009 13:45:18.714590 4762 scope.go:117] "RemoveContainer" containerID="70e23d10bd165c86a501ebc577110b5d12858bba61ed0dcd70d8a3773a41a5f6"
Oct 09 13:45:18 crc kubenswrapper[4762]: I1009 13:45:18.733061 4762 generic.go:334] "Generic (PLEG): container finished" podID="e38c807b-a632-4a96-b228-c879b07fd461" containerID="58840ed91cd949b1dbb746ac2bf81613b6b2a147a6bbeb09ef17561caa2428d1" exitCode=0
Oct 09 13:45:18 crc kubenswrapper[4762]: I1009 13:45:18.733190 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"e38c807b-a632-4a96-b228-c879b07fd461","Type":"ContainerDied","Data":"58840ed91cd949b1dbb746ac2bf81613b6b2a147a6bbeb09ef17561caa2428d1"}
Oct 09 13:45:18 crc kubenswrapper[4762]: I1009 13:45:18.737746 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d65f699f-xqzkv" event={"ID":"e97796df-8a16-4687-b6c1-23b06accd1e7","Type":"ContainerStarted","Data":"509d14b3a51e628c5017507a8850a29bc53e7a3cad82ce0a5fe4d62b3645cfe0"}
Oct 09 13:45:18 crc kubenswrapper[4762]: I1009 13:45:18.737859 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-57d65f699f-xqzkv"
Oct 09 13:45:18 crc kubenswrapper[4762]: I1009 13:45:18.742054 4762 generic.go:334] "Generic (PLEG): container finished" podID="ee34933d-92fc-4723-b232-481e48b27ebf" containerID="ef313c2cd5701a12d34f7ce3e2ab234a47acd9b4d50c85250f453a2b3c4cfda2" exitCode=0
Oct 09 13:45:18 crc kubenswrapper[4762]: I1009 13:45:18.742125 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-bwvnp" event={"ID":"ee34933d-92fc-4723-b232-481e48b27ebf","Type":"ContainerDied","Data":"ef313c2cd5701a12d34f7ce3e2ab234a47acd9b4d50c85250f453a2b3c4cfda2"}
Oct 09 13:45:18 crc kubenswrapper[4762]: I1009 13:45:18.759819 4762 scope.go:117] "RemoveContainer" containerID="367fdeecab669fbc19f9deb8cd556da8681a8c7e0a55c6d2cb7f3228e94732ec"
Oct 09 13:45:18 crc kubenswrapper[4762]: I1009 13:45:18.760193 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-df85s" event={"ID":"39780bc9-4ec7-4578-b64e-40c2a1bba06c","Type":"ContainerStarted","Data":"832e14666bf13cb6c348c0af2b44c1f63087d249a79e7a68cc967f3268d56c21"}
Oct 09 13:45:18 crc kubenswrapper[4762]: I1009 13:45:18.804110 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7cb5889db5-v5jd5"]
Oct 09 13:45:18 crc kubenswrapper[4762]: I1009 13:45:18.815586 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7cb5889db5-v5jd5"]
Oct 09 13:45:18 crc kubenswrapper[4762]: I1009 13:45:18.819372 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-57d65f699f-xqzkv" podStartSLOduration=3.819354282 podStartE2EDuration="3.819354282s" podCreationTimestamp="2025-10-09 13:45:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:45:18.807019836 +0000 UTC m=+1194.580810875" watchObservedRunningTime="2025-10-09 13:45:18.819354282 +0000 UTC m=+1194.593145321"
podStartE2EDuration="3.819354282s" podCreationTimestamp="2025-10-09 13:45:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:45:18.807019836 +0000 UTC m=+1194.580810875" watchObservedRunningTime="2025-10-09 13:45:18.819354282 +0000 UTC m=+1194.593145321" Oct 09 13:45:18 crc kubenswrapper[4762]: I1009 13:45:18.838678 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Oct 09 13:45:18 crc kubenswrapper[4762]: I1009 13:45:18.859527 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-df85s" podStartSLOduration=3.859499209 podStartE2EDuration="3.859499209s" podCreationTimestamp="2025-10-09 13:45:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:45:18.843725731 +0000 UTC m=+1194.617516790" watchObservedRunningTime="2025-10-09 13:45:18.859499209 +0000 UTC m=+1194.633290248" Oct 09 13:45:18 crc kubenswrapper[4762]: I1009 13:45:18.907578 4762 scope.go:117] "RemoveContainer" containerID="70e23d10bd165c86a501ebc577110b5d12858bba61ed0dcd70d8a3773a41a5f6" Oct 09 13:45:18 crc kubenswrapper[4762]: E1009 13:45:18.916012 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"70e23d10bd165c86a501ebc577110b5d12858bba61ed0dcd70d8a3773a41a5f6\": container with ID starting with 70e23d10bd165c86a501ebc577110b5d12858bba61ed0dcd70d8a3773a41a5f6 not found: ID does not exist" containerID="70e23d10bd165c86a501ebc577110b5d12858bba61ed0dcd70d8a3773a41a5f6" Oct 09 13:45:18 crc kubenswrapper[4762]: I1009 13:45:18.916113 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"70e23d10bd165c86a501ebc577110b5d12858bba61ed0dcd70d8a3773a41a5f6"} err="failed to get container status \"70e23d10bd165c86a501ebc577110b5d12858bba61ed0dcd70d8a3773a41a5f6\": rpc error: code = NotFound desc = could not find container \"70e23d10bd165c86a501ebc577110b5d12858bba61ed0dcd70d8a3773a41a5f6\": container with ID starting with 70e23d10bd165c86a501ebc577110b5d12858bba61ed0dcd70d8a3773a41a5f6 not found: ID does not exist" Oct 09 13:45:18 crc kubenswrapper[4762]: I1009 13:45:18.916151 4762 scope.go:117] "RemoveContainer" containerID="367fdeecab669fbc19f9deb8cd556da8681a8c7e0a55c6d2cb7f3228e94732ec" Oct 09 13:45:18 crc kubenswrapper[4762]: E1009 13:45:18.916660 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"367fdeecab669fbc19f9deb8cd556da8681a8c7e0a55c6d2cb7f3228e94732ec\": container with ID starting with 367fdeecab669fbc19f9deb8cd556da8681a8c7e0a55c6d2cb7f3228e94732ec not found: ID does not exist" containerID="367fdeecab669fbc19f9deb8cd556da8681a8c7e0a55c6d2cb7f3228e94732ec" Oct 09 13:45:18 crc kubenswrapper[4762]: I1009 13:45:18.916685 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"367fdeecab669fbc19f9deb8cd556da8681a8c7e0a55c6d2cb7f3228e94732ec"} err="failed to get container status \"367fdeecab669fbc19f9deb8cd556da8681a8c7e0a55c6d2cb7f3228e94732ec\": rpc error: code = NotFound desc = could not find container \"367fdeecab669fbc19f9deb8cd556da8681a8c7e0a55c6d2cb7f3228e94732ec\": container with ID starting with 367fdeecab669fbc19f9deb8cd556da8681a8c7e0a55c6d2cb7f3228e94732ec not found: ID does not exist" Oct 
09 13:45:18 crc kubenswrapper[4762]: W1009 13:45:18.932626 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod39888a9b_c1cd_496e_b44e_a27212faac74.slice/crio-bebe6b7cbd2ad19cd9fc4c39b721ba76216c7c2cf923ba0ba6d7f8fff28445c8 WatchSource:0}: Error finding container bebe6b7cbd2ad19cd9fc4c39b721ba76216c7c2cf923ba0ba6d7f8fff28445c8: Status 404 returned error can't find the container with id bebe6b7cbd2ad19cd9fc4c39b721ba76216c7c2cf923ba0ba6d7f8fff28445c8 Oct 09 13:45:18 crc kubenswrapper[4762]: I1009 13:45:18.979187 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c82f1ee5-3ab3-4672-b5a6-bc0e94a795a6" path="/var/lib/kubelet/pods/c82f1ee5-3ab3-4672-b5a6-bc0e94a795a6/volumes" Oct 09 13:45:19 crc kubenswrapper[4762]: I1009 13:45:19.227030 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/1477cefa-db6e-45d7-adb1-d7c1369a3610-etc-swift\") pod \"swift-storage-0\" (UID: \"1477cefa-db6e-45d7-adb1-d7c1369a3610\") " pod="openstack/swift-storage-0" Oct 09 13:45:19 crc kubenswrapper[4762]: E1009 13:45:19.227247 4762 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Oct 09 13:45:19 crc kubenswrapper[4762]: E1009 13:45:19.227282 4762 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Oct 09 13:45:19 crc kubenswrapper[4762]: E1009 13:45:19.227348 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/1477cefa-db6e-45d7-adb1-d7c1369a3610-etc-swift podName:1477cefa-db6e-45d7-adb1-d7c1369a3610 nodeName:}" failed. No retries permitted until 2025-10-09 13:45:23.227325622 +0000 UTC m=+1199.001116661 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/1477cefa-db6e-45d7-adb1-d7c1369a3610-etc-swift") pod "swift-storage-0" (UID: "1477cefa-db6e-45d7-adb1-d7c1369a3610") : configmap "swift-ring-files" not found Oct 09 13:45:19 crc kubenswrapper[4762]: I1009 13:45:19.784202 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"e38c807b-a632-4a96-b228-c879b07fd461","Type":"ContainerStarted","Data":"f8ca509efc7c7613f58a2ef8f0eeb38491a89997a7b401df47a25b5237e27acd"} Oct 09 13:45:19 crc kubenswrapper[4762]: I1009 13:45:19.787294 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-bwvnp" event={"ID":"ee34933d-92fc-4723-b232-481e48b27ebf","Type":"ContainerStarted","Data":"23bc8d382d6857bb6b1145361ed8df721c39db5896196c008d510fde383469bd"} Oct 09 13:45:19 crc kubenswrapper[4762]: I1009 13:45:19.787440 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-b8fbc5445-bwvnp" Oct 09 13:45:19 crc kubenswrapper[4762]: I1009 13:45:19.789661 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"39888a9b-c1cd-496e-b44e-a27212faac74","Type":"ContainerStarted","Data":"bebe6b7cbd2ad19cd9fc4c39b721ba76216c7c2cf923ba0ba6d7f8fff28445c8"} Oct 09 13:45:19 crc kubenswrapper[4762]: I1009 13:45:19.810314 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=-9223371976.044483 podStartE2EDuration="1m0.810291671s" podCreationTimestamp="2025-10-09 13:44:19 +0000 UTC" firstStartedPulling="2025-10-09 13:44:26.759582808 +0000 UTC m=+1142.533373847" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:45:19.806083189 +0000 UTC m=+1195.579874228" watchObservedRunningTime="2025-10-09 13:45:19.810291671 +0000 UTC m=+1195.584082710" Oct 09 13:45:19 crc kubenswrapper[4762]: I1009 13:45:19.833890 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-b8fbc5445-bwvnp" podStartSLOduration=3.833867167 podStartE2EDuration="3.833867167s" podCreationTimestamp="2025-10-09 13:45:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:45:19.826482521 +0000 UTC m=+1195.600273560" watchObservedRunningTime="2025-10-09 13:45:19.833867167 +0000 UTC m=+1195.607658206" Oct 09 13:45:20 crc kubenswrapper[4762]: I1009 13:45:20.727808 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0" Oct 09 13:45:20 crc kubenswrapper[4762]: I1009 13:45:20.730457 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0" Oct 09 13:45:21 crc kubenswrapper[4762]: I1009 13:45:21.926090 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0" Oct 09 13:45:21 crc kubenswrapper[4762]: I1009 13:45:21.926466 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0" Oct 09 13:45:21 crc kubenswrapper[4762]: I1009 13:45:21.986846 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0" Oct 09 13:45:22 crc kubenswrapper[4762]: I1009 13:45:22.826730 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-tdgxl" 
event={"ID":"44b61157-b706-44e2-9ca9-a5709e68f92c","Type":"ContainerStarted","Data":"07fd9344064d49467a79ce1c38657c51dcfa85dbdc1652c4a2c59fb99501a1e0"} Oct 09 13:45:22 crc kubenswrapper[4762]: I1009 13:45:22.829292 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"39888a9b-c1cd-496e-b44e-a27212faac74","Type":"ContainerStarted","Data":"5b0ad70d278446d3548567be96b4c62877cd198d62cd13385055a63295deb65a"} Oct 09 13:45:22 crc kubenswrapper[4762]: I1009 13:45:22.829336 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"39888a9b-c1cd-496e-b44e-a27212faac74","Type":"ContainerStarted","Data":"4162553b0af34403f13cf39cadfd4b05aeaf6a4b793292efa6af66dcae48120e"} Oct 09 13:45:22 crc kubenswrapper[4762]: I1009 13:45:22.848580 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-ring-rebalance-tdgxl" podStartSLOduration=3.14625028 podStartE2EDuration="7.848555913s" podCreationTimestamp="2025-10-09 13:45:15 +0000 UTC" firstStartedPulling="2025-10-09 13:45:17.016697519 +0000 UTC m=+1192.790488558" lastFinishedPulling="2025-10-09 13:45:21.719003152 +0000 UTC m=+1197.492794191" observedRunningTime="2025-10-09 13:45:22.847012373 +0000 UTC m=+1198.620803412" watchObservedRunningTime="2025-10-09 13:45:22.848555913 +0000 UTC m=+1198.622346952" Oct 09 13:45:22 crc kubenswrapper[4762]: I1009 13:45:22.883889 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=2.9856409189999997 podStartE2EDuration="5.883871291s" podCreationTimestamp="2025-10-09 13:45:17 +0000 UTC" firstStartedPulling="2025-10-09 13:45:18.944259972 +0000 UTC m=+1194.718051011" lastFinishedPulling="2025-10-09 13:45:21.842490344 +0000 UTC m=+1197.616281383" observedRunningTime="2025-10-09 13:45:22.868232716 +0000 UTC m=+1198.642023755" watchObservedRunningTime="2025-10-09 13:45:22.883871291 +0000 UTC m=+1198.657662330" Oct 09 13:45:22 crc kubenswrapper[4762]: I1009 13:45:22.885607 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0" Oct 09 13:45:23 crc kubenswrapper[4762]: I1009 13:45:23.302977 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/1477cefa-db6e-45d7-adb1-d7c1369a3610-etc-swift\") pod \"swift-storage-0\" (UID: \"1477cefa-db6e-45d7-adb1-d7c1369a3610\") " pod="openstack/swift-storage-0" Oct 09 13:45:23 crc kubenswrapper[4762]: E1009 13:45:23.303245 4762 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Oct 09 13:45:23 crc kubenswrapper[4762]: E1009 13:45:23.303853 4762 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Oct 09 13:45:23 crc kubenswrapper[4762]: E1009 13:45:23.304010 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/1477cefa-db6e-45d7-adb1-d7c1369a3610-etc-swift podName:1477cefa-db6e-45d7-adb1-d7c1369a3610 nodeName:}" failed. No retries permitted until 2025-10-09 13:45:31.303989733 +0000 UTC m=+1207.077780772 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/1477cefa-db6e-45d7-adb1-d7c1369a3610-etc-swift") pod "swift-storage-0" (UID: "1477cefa-db6e-45d7-adb1-d7c1369a3610") : configmap "swift-ring-files" not found Oct 09 13:45:23 crc kubenswrapper[4762]: I1009 13:45:23.336493 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0" Oct 09 13:45:24 crc kubenswrapper[4762]: I1009 13:45:24.797131 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0" Oct 09 13:45:24 crc kubenswrapper[4762]: I1009 13:45:24.856576 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0" Oct 09 13:45:26 crc kubenswrapper[4762]: I1009 13:45:26.205798 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-57d65f699f-xqzkv" Oct 09 13:45:26 crc kubenswrapper[4762]: I1009 13:45:26.693023 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-b8fbc5445-bwvnp" Oct 09 13:45:26 crc kubenswrapper[4762]: I1009 13:45:26.761304 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d65f699f-xqzkv"] Oct 09 13:45:26 crc kubenswrapper[4762]: I1009 13:45:26.858400 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-57d65f699f-xqzkv" podUID="e97796df-8a16-4687-b6c1-23b06accd1e7" containerName="dnsmasq-dns" containerID="cri-o://509d14b3a51e628c5017507a8850a29bc53e7a3cad82ce0a5fe4d62b3645cfe0" gracePeriod=10 Oct 09 13:45:27 crc kubenswrapper[4762]: I1009 13:45:27.336268 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d65f699f-xqzkv" Oct 09 13:45:27 crc kubenswrapper[4762]: I1009 13:45:27.376090 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e97796df-8a16-4687-b6c1-23b06accd1e7-config\") pod \"e97796df-8a16-4687-b6c1-23b06accd1e7\" (UID: \"e97796df-8a16-4687-b6c1-23b06accd1e7\") " Oct 09 13:45:27 crc kubenswrapper[4762]: I1009 13:45:27.376149 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e97796df-8a16-4687-b6c1-23b06accd1e7-dns-svc\") pod \"e97796df-8a16-4687-b6c1-23b06accd1e7\" (UID: \"e97796df-8a16-4687-b6c1-23b06accd1e7\") " Oct 09 13:45:27 crc kubenswrapper[4762]: I1009 13:45:27.376190 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e97796df-8a16-4687-b6c1-23b06accd1e7-ovsdbserver-nb\") pod \"e97796df-8a16-4687-b6c1-23b06accd1e7\" (UID: \"e97796df-8a16-4687-b6c1-23b06accd1e7\") " Oct 09 13:45:27 crc kubenswrapper[4762]: I1009 13:45:27.376222 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5hlb4\" (UniqueName: \"kubernetes.io/projected/e97796df-8a16-4687-b6c1-23b06accd1e7-kube-api-access-5hlb4\") pod \"e97796df-8a16-4687-b6c1-23b06accd1e7\" (UID: \"e97796df-8a16-4687-b6c1-23b06accd1e7\") " Oct 09 13:45:27 crc kubenswrapper[4762]: I1009 13:45:27.383009 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e97796df-8a16-4687-b6c1-23b06accd1e7-kube-api-access-5hlb4" (OuterVolumeSpecName: "kube-api-access-5hlb4") pod "e97796df-8a16-4687-b6c1-23b06accd1e7" (UID: 
"e97796df-8a16-4687-b6c1-23b06accd1e7"). InnerVolumeSpecName "kube-api-access-5hlb4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:45:27 crc kubenswrapper[4762]: I1009 13:45:27.415767 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e97796df-8a16-4687-b6c1-23b06accd1e7-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "e97796df-8a16-4687-b6c1-23b06accd1e7" (UID: "e97796df-8a16-4687-b6c1-23b06accd1e7"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:45:27 crc kubenswrapper[4762]: I1009 13:45:27.415751 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e97796df-8a16-4687-b6c1-23b06accd1e7-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "e97796df-8a16-4687-b6c1-23b06accd1e7" (UID: "e97796df-8a16-4687-b6c1-23b06accd1e7"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:45:27 crc kubenswrapper[4762]: I1009 13:45:27.418521 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e97796df-8a16-4687-b6c1-23b06accd1e7-config" (OuterVolumeSpecName: "config") pod "e97796df-8a16-4687-b6c1-23b06accd1e7" (UID: "e97796df-8a16-4687-b6c1-23b06accd1e7"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:45:27 crc kubenswrapper[4762]: I1009 13:45:27.478553 4762 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e97796df-8a16-4687-b6c1-23b06accd1e7-config\") on node \"crc\" DevicePath \"\"" Oct 09 13:45:27 crc kubenswrapper[4762]: I1009 13:45:27.478594 4762 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e97796df-8a16-4687-b6c1-23b06accd1e7-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 09 13:45:27 crc kubenswrapper[4762]: I1009 13:45:27.478607 4762 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e97796df-8a16-4687-b6c1-23b06accd1e7-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 09 13:45:27 crc kubenswrapper[4762]: I1009 13:45:27.478621 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5hlb4\" (UniqueName: \"kubernetes.io/projected/e97796df-8a16-4687-b6c1-23b06accd1e7-kube-api-access-5hlb4\") on node \"crc\" DevicePath \"\"" Oct 09 13:45:27 crc kubenswrapper[4762]: I1009 13:45:27.868122 4762 generic.go:334] "Generic (PLEG): container finished" podID="e97796df-8a16-4687-b6c1-23b06accd1e7" containerID="509d14b3a51e628c5017507a8850a29bc53e7a3cad82ce0a5fe4d62b3645cfe0" exitCode=0 Oct 09 13:45:27 crc kubenswrapper[4762]: I1009 13:45:27.868165 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-57d65f699f-xqzkv" Oct 09 13:45:27 crc kubenswrapper[4762]: I1009 13:45:27.868172 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d65f699f-xqzkv" event={"ID":"e97796df-8a16-4687-b6c1-23b06accd1e7","Type":"ContainerDied","Data":"509d14b3a51e628c5017507a8850a29bc53e7a3cad82ce0a5fe4d62b3645cfe0"} Oct 09 13:45:27 crc kubenswrapper[4762]: I1009 13:45:27.868268 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d65f699f-xqzkv" event={"ID":"e97796df-8a16-4687-b6c1-23b06accd1e7","Type":"ContainerDied","Data":"baea267c0d40861da6a8239f04f4d397d783e23a9051327840f1c30d9e0d5bf2"} Oct 09 13:45:27 crc kubenswrapper[4762]: I1009 13:45:27.868311 4762 scope.go:117] "RemoveContainer" containerID="509d14b3a51e628c5017507a8850a29bc53e7a3cad82ce0a5fe4d62b3645cfe0" Oct 09 13:45:27 crc kubenswrapper[4762]: I1009 13:45:27.897947 4762 scope.go:117] "RemoveContainer" containerID="886cf1623ea017fa655589d278bcc9f23470dbfe8f19c0dcff65ed6d5df954a4" Oct 09 13:45:27 crc kubenswrapper[4762]: I1009 13:45:27.905189 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d65f699f-xqzkv"] Oct 09 13:45:27 crc kubenswrapper[4762]: I1009 13:45:27.920995 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-57d65f699f-xqzkv"] Oct 09 13:45:27 crc kubenswrapper[4762]: I1009 13:45:27.932811 4762 scope.go:117] "RemoveContainer" containerID="509d14b3a51e628c5017507a8850a29bc53e7a3cad82ce0a5fe4d62b3645cfe0" Oct 09 13:45:27 crc kubenswrapper[4762]: E1009 13:45:27.933219 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"509d14b3a51e628c5017507a8850a29bc53e7a3cad82ce0a5fe4d62b3645cfe0\": container with ID starting with 509d14b3a51e628c5017507a8850a29bc53e7a3cad82ce0a5fe4d62b3645cfe0 not found: ID does not exist" containerID="509d14b3a51e628c5017507a8850a29bc53e7a3cad82ce0a5fe4d62b3645cfe0" Oct 09 13:45:27 crc kubenswrapper[4762]: I1009 13:45:27.933274 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"509d14b3a51e628c5017507a8850a29bc53e7a3cad82ce0a5fe4d62b3645cfe0"} err="failed to get container status \"509d14b3a51e628c5017507a8850a29bc53e7a3cad82ce0a5fe4d62b3645cfe0\": rpc error: code = NotFound desc = could not find container \"509d14b3a51e628c5017507a8850a29bc53e7a3cad82ce0a5fe4d62b3645cfe0\": container with ID starting with 509d14b3a51e628c5017507a8850a29bc53e7a3cad82ce0a5fe4d62b3645cfe0 not found: ID does not exist" Oct 09 13:45:27 crc kubenswrapper[4762]: I1009 13:45:27.933303 4762 scope.go:117] "RemoveContainer" containerID="886cf1623ea017fa655589d278bcc9f23470dbfe8f19c0dcff65ed6d5df954a4" Oct 09 13:45:27 crc kubenswrapper[4762]: E1009 13:45:27.933557 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"886cf1623ea017fa655589d278bcc9f23470dbfe8f19c0dcff65ed6d5df954a4\": container with ID starting with 886cf1623ea017fa655589d278bcc9f23470dbfe8f19c0dcff65ed6d5df954a4 not found: ID does not exist" containerID="886cf1623ea017fa655589d278bcc9f23470dbfe8f19c0dcff65ed6d5df954a4" Oct 09 13:45:27 crc kubenswrapper[4762]: I1009 13:45:27.933586 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"886cf1623ea017fa655589d278bcc9f23470dbfe8f19c0dcff65ed6d5df954a4"} err="failed to get container status 
\"886cf1623ea017fa655589d278bcc9f23470dbfe8f19c0dcff65ed6d5df954a4\": rpc error: code = NotFound desc = could not find container \"886cf1623ea017fa655589d278bcc9f23470dbfe8f19c0dcff65ed6d5df954a4\": container with ID starting with 886cf1623ea017fa655589d278bcc9f23470dbfe8f19c0dcff65ed6d5df954a4 not found: ID does not exist" Oct 09 13:45:28 crc kubenswrapper[4762]: I1009 13:45:28.973843 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e97796df-8a16-4687-b6c1-23b06accd1e7" path="/var/lib/kubelet/pods/e97796df-8a16-4687-b6c1-23b06accd1e7/volumes" Oct 09 13:45:30 crc kubenswrapper[4762]: E1009 13:45:30.481267 4762 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod44b61157_b706_44e2_9ca9_a5709e68f92c.slice/crio-07fd9344064d49467a79ce1c38657c51dcfa85dbdc1652c4a2c59fb99501a1e0.scope\": RecentStats: unable to find data in memory cache]" Oct 09 13:45:31 crc kubenswrapper[4762]: I1009 13:45:31.341848 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/1477cefa-db6e-45d7-adb1-d7c1369a3610-etc-swift\") pod \"swift-storage-0\" (UID: \"1477cefa-db6e-45d7-adb1-d7c1369a3610\") " pod="openstack/swift-storage-0" Oct 09 13:45:31 crc kubenswrapper[4762]: I1009 13:45:31.349869 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/1477cefa-db6e-45d7-adb1-d7c1369a3610-etc-swift\") pod \"swift-storage-0\" (UID: \"1477cefa-db6e-45d7-adb1-d7c1369a3610\") " pod="openstack/swift-storage-0" Oct 09 13:45:31 crc kubenswrapper[4762]: I1009 13:45:31.637033 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-storage-0" Oct 09 13:45:31 crc kubenswrapper[4762]: I1009 13:45:31.854360 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-cwprf"] Oct 09 13:45:31 crc kubenswrapper[4762]: E1009 13:45:31.855183 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c82f1ee5-3ab3-4672-b5a6-bc0e94a795a6" containerName="init" Oct 09 13:45:31 crc kubenswrapper[4762]: I1009 13:45:31.855202 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="c82f1ee5-3ab3-4672-b5a6-bc0e94a795a6" containerName="init" Oct 09 13:45:31 crc kubenswrapper[4762]: E1009 13:45:31.855222 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e97796df-8a16-4687-b6c1-23b06accd1e7" containerName="init" Oct 09 13:45:31 crc kubenswrapper[4762]: I1009 13:45:31.855229 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="e97796df-8a16-4687-b6c1-23b06accd1e7" containerName="init" Oct 09 13:45:31 crc kubenswrapper[4762]: E1009 13:45:31.855269 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e97796df-8a16-4687-b6c1-23b06accd1e7" containerName="dnsmasq-dns" Oct 09 13:45:31 crc kubenswrapper[4762]: I1009 13:45:31.855279 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="e97796df-8a16-4687-b6c1-23b06accd1e7" containerName="dnsmasq-dns" Oct 09 13:45:31 crc kubenswrapper[4762]: E1009 13:45:31.855297 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c82f1ee5-3ab3-4672-b5a6-bc0e94a795a6" containerName="dnsmasq-dns" Oct 09 13:45:31 crc kubenswrapper[4762]: I1009 13:45:31.855303 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="c82f1ee5-3ab3-4672-b5a6-bc0e94a795a6" containerName="dnsmasq-dns" Oct 09 13:45:31 crc kubenswrapper[4762]: I1009 13:45:31.855516 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="c82f1ee5-3ab3-4672-b5a6-bc0e94a795a6" containerName="dnsmasq-dns" Oct 09 13:45:31 crc kubenswrapper[4762]: I1009 13:45:31.855540 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="e97796df-8a16-4687-b6c1-23b06accd1e7" containerName="dnsmasq-dns" Oct 09 13:45:31 crc kubenswrapper[4762]: I1009 13:45:31.859003 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-cwprf" Oct 09 13:45:31 crc kubenswrapper[4762]: I1009 13:45:31.863783 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-cwprf"] Oct 09 13:45:31 crc kubenswrapper[4762]: I1009 13:45:31.903901 4762 generic.go:334] "Generic (PLEG): container finished" podID="44b61157-b706-44e2-9ca9-a5709e68f92c" containerID="07fd9344064d49467a79ce1c38657c51dcfa85dbdc1652c4a2c59fb99501a1e0" exitCode=0 Oct 09 13:45:31 crc kubenswrapper[4762]: I1009 13:45:31.904007 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-tdgxl" event={"ID":"44b61157-b706-44e2-9ca9-a5709e68f92c","Type":"ContainerDied","Data":"07fd9344064d49467a79ce1c38657c51dcfa85dbdc1652c4a2c59fb99501a1e0"} Oct 09 13:45:31 crc kubenswrapper[4762]: I1009 13:45:31.951129 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bhn9d\" (UniqueName: \"kubernetes.io/projected/f82e3f43-b73d-41d9-8ce1-c466e29055a5-kube-api-access-bhn9d\") pod \"keystone-db-create-cwprf\" (UID: \"f82e3f43-b73d-41d9-8ce1-c466e29055a5\") " pod="openstack/keystone-db-create-cwprf" Oct 09 13:45:32 crc kubenswrapper[4762]: I1009 13:45:32.052573 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bhn9d\" (UniqueName: \"kubernetes.io/projected/f82e3f43-b73d-41d9-8ce1-c466e29055a5-kube-api-access-bhn9d\") pod \"keystone-db-create-cwprf\" (UID: \"f82e3f43-b73d-41d9-8ce1-c466e29055a5\") " pod="openstack/keystone-db-create-cwprf" Oct 09 13:45:32 crc kubenswrapper[4762]: I1009 13:45:32.067529 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-l6zst"] Oct 09 13:45:32 crc kubenswrapper[4762]: I1009 13:45:32.068779 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-l6zst" Oct 09 13:45:32 crc kubenswrapper[4762]: I1009 13:45:32.076555 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-l6zst"] Oct 09 13:45:32 crc kubenswrapper[4762]: I1009 13:45:32.081173 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bhn9d\" (UniqueName: \"kubernetes.io/projected/f82e3f43-b73d-41d9-8ce1-c466e29055a5-kube-api-access-bhn9d\") pod \"keystone-db-create-cwprf\" (UID: \"f82e3f43-b73d-41d9-8ce1-c466e29055a5\") " pod="openstack/keystone-db-create-cwprf" Oct 09 13:45:32 crc kubenswrapper[4762]: I1009 13:45:32.154421 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-55bf4\" (UniqueName: \"kubernetes.io/projected/3e6d85af-3e7b-4f48-a462-e365a2dfa511-kube-api-access-55bf4\") pod \"placement-db-create-l6zst\" (UID: \"3e6d85af-3e7b-4f48-a462-e365a2dfa511\") " pod="openstack/placement-db-create-l6zst" Oct 09 13:45:32 crc kubenswrapper[4762]: I1009 13:45:32.180034 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-cwprf" Oct 09 13:45:32 crc kubenswrapper[4762]: I1009 13:45:32.190625 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Oct 09 13:45:32 crc kubenswrapper[4762]: W1009 13:45:32.201111 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1477cefa_db6e_45d7_adb1_d7c1369a3610.slice/crio-d6d7950663f653c7876dea9d334c7f0696daafd78e50903800dd678990bb04e2 WatchSource:0}: Error finding container d6d7950663f653c7876dea9d334c7f0696daafd78e50903800dd678990bb04e2: Status 404 returned error can't find the container with id d6d7950663f653c7876dea9d334c7f0696daafd78e50903800dd678990bb04e2 Oct 09 13:45:32 crc kubenswrapper[4762]: I1009 13:45:32.256069 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-55bf4\" (UniqueName: \"kubernetes.io/projected/3e6d85af-3e7b-4f48-a462-e365a2dfa511-kube-api-access-55bf4\") pod \"placement-db-create-l6zst\" (UID: \"3e6d85af-3e7b-4f48-a462-e365a2dfa511\") " pod="openstack/placement-db-create-l6zst" Oct 09 13:45:32 crc kubenswrapper[4762]: I1009 13:45:32.276982 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-55bf4\" (UniqueName: \"kubernetes.io/projected/3e6d85af-3e7b-4f48-a462-e365a2dfa511-kube-api-access-55bf4\") pod \"placement-db-create-l6zst\" (UID: \"3e6d85af-3e7b-4f48-a462-e365a2dfa511\") " pod="openstack/placement-db-create-l6zst" Oct 09 13:45:32 crc kubenswrapper[4762]: I1009 13:45:32.406713 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-z9z8b"] Oct 09 13:45:32 crc kubenswrapper[4762]: I1009 13:45:32.407770 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-z9z8b" Oct 09 13:45:32 crc kubenswrapper[4762]: I1009 13:45:32.420042 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-l6zst" Oct 09 13:45:32 crc kubenswrapper[4762]: I1009 13:45:32.433726 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-z9z8b"] Oct 09 13:45:32 crc kubenswrapper[4762]: I1009 13:45:32.460674 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t8n7c\" (UniqueName: \"kubernetes.io/projected/38d0e5fa-4372-413b-8a13-b3fb22f22463-kube-api-access-t8n7c\") pod \"glance-db-create-z9z8b\" (UID: \"38d0e5fa-4372-413b-8a13-b3fb22f22463\") " pod="openstack/glance-db-create-z9z8b" Oct 09 13:45:32 crc kubenswrapper[4762]: I1009 13:45:32.561876 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t8n7c\" (UniqueName: \"kubernetes.io/projected/38d0e5fa-4372-413b-8a13-b3fb22f22463-kube-api-access-t8n7c\") pod \"glance-db-create-z9z8b\" (UID: \"38d0e5fa-4372-413b-8a13-b3fb22f22463\") " pod="openstack/glance-db-create-z9z8b" Oct 09 13:45:32 crc kubenswrapper[4762]: I1009 13:45:32.583032 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t8n7c\" (UniqueName: \"kubernetes.io/projected/38d0e5fa-4372-413b-8a13-b3fb22f22463-kube-api-access-t8n7c\") pod \"glance-db-create-z9z8b\" (UID: \"38d0e5fa-4372-413b-8a13-b3fb22f22463\") " pod="openstack/glance-db-create-z9z8b" Oct 09 13:45:32 crc kubenswrapper[4762]: I1009 13:45:32.632582 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-cwprf"] Oct 09 13:45:32 crc kubenswrapper[4762]: I1009 13:45:32.732991 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-z9z8b" Oct 09 13:45:32 crc kubenswrapper[4762]: I1009 13:45:32.860133 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-l6zst"] Oct 09 13:45:32 crc kubenswrapper[4762]: W1009 13:45:32.868676 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3e6d85af_3e7b_4f48_a462_e365a2dfa511.slice/crio-3e623b3c73bdc9cf78c039567fe35550a2aa57d86efaac8c18939e3726a8bb4c WatchSource:0}: Error finding container 3e623b3c73bdc9cf78c039567fe35550a2aa57d86efaac8c18939e3726a8bb4c: Status 404 returned error can't find the container with id 3e623b3c73bdc9cf78c039567fe35550a2aa57d86efaac8c18939e3726a8bb4c Oct 09 13:45:32 crc kubenswrapper[4762]: I1009 13:45:32.913044 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-cwprf" event={"ID":"f82e3f43-b73d-41d9-8ce1-c466e29055a5","Type":"ContainerStarted","Data":"8197ea145f121161917d25925736be3b3600d5eb97e70ac0f8390022efbc57c4"} Oct 09 13:45:32 crc kubenswrapper[4762]: I1009 13:45:32.914986 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-l6zst" event={"ID":"3e6d85af-3e7b-4f48-a462-e365a2dfa511","Type":"ContainerStarted","Data":"3e623b3c73bdc9cf78c039567fe35550a2aa57d86efaac8c18939e3726a8bb4c"} Oct 09 13:45:32 crc kubenswrapper[4762]: I1009 13:45:32.917271 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"1477cefa-db6e-45d7-adb1-d7c1369a3610","Type":"ContainerStarted","Data":"d6d7950663f653c7876dea9d334c7f0696daafd78e50903800dd678990bb04e2"} Oct 09 13:45:33 crc kubenswrapper[4762]: I1009 13:45:33.197691 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-z9z8b"] Oct 09 13:45:33 crc 
kubenswrapper[4762]: W1009 13:45:33.206841 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod38d0e5fa_4372_413b_8a13_b3fb22f22463.slice/crio-372276b52d8f0be8adbcc3e838b102ec5bd38c8205c7fde5ae5d5c54bce981c4 WatchSource:0}: Error finding container 372276b52d8f0be8adbcc3e838b102ec5bd38c8205c7fde5ae5d5c54bce981c4: Status 404 returned error can't find the container with id 372276b52d8f0be8adbcc3e838b102ec5bd38c8205c7fde5ae5d5c54bce981c4 Oct 09 13:45:33 crc kubenswrapper[4762]: I1009 13:45:33.237754 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-tdgxl" Oct 09 13:45:33 crc kubenswrapper[4762]: I1009 13:45:33.274490 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/44b61157-b706-44e2-9ca9-a5709e68f92c-swiftconf\") pod \"44b61157-b706-44e2-9ca9-a5709e68f92c\" (UID: \"44b61157-b706-44e2-9ca9-a5709e68f92c\") " Oct 09 13:45:33 crc kubenswrapper[4762]: I1009 13:45:33.274599 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/44b61157-b706-44e2-9ca9-a5709e68f92c-scripts\") pod \"44b61157-b706-44e2-9ca9-a5709e68f92c\" (UID: \"44b61157-b706-44e2-9ca9-a5709e68f92c\") " Oct 09 13:45:33 crc kubenswrapper[4762]: I1009 13:45:33.274668 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/44b61157-b706-44e2-9ca9-a5709e68f92c-ring-data-devices\") pod \"44b61157-b706-44e2-9ca9-a5709e68f92c\" (UID: \"44b61157-b706-44e2-9ca9-a5709e68f92c\") " Oct 09 13:45:33 crc kubenswrapper[4762]: I1009 13:45:33.274711 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/44b61157-b706-44e2-9ca9-a5709e68f92c-dispersionconf\") pod \"44b61157-b706-44e2-9ca9-a5709e68f92c\" (UID: \"44b61157-b706-44e2-9ca9-a5709e68f92c\") " Oct 09 13:45:33 crc kubenswrapper[4762]: I1009 13:45:33.275657 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rk5jn\" (UniqueName: \"kubernetes.io/projected/44b61157-b706-44e2-9ca9-a5709e68f92c-kube-api-access-rk5jn\") pod \"44b61157-b706-44e2-9ca9-a5709e68f92c\" (UID: \"44b61157-b706-44e2-9ca9-a5709e68f92c\") " Oct 09 13:45:33 crc kubenswrapper[4762]: I1009 13:45:33.275707 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/44b61157-b706-44e2-9ca9-a5709e68f92c-combined-ca-bundle\") pod \"44b61157-b706-44e2-9ca9-a5709e68f92c\" (UID: \"44b61157-b706-44e2-9ca9-a5709e68f92c\") " Oct 09 13:45:33 crc kubenswrapper[4762]: I1009 13:45:33.275759 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/44b61157-b706-44e2-9ca9-a5709e68f92c-etc-swift\") pod \"44b61157-b706-44e2-9ca9-a5709e68f92c\" (UID: \"44b61157-b706-44e2-9ca9-a5709e68f92c\") " Oct 09 13:45:33 crc kubenswrapper[4762]: I1009 13:45:33.275745 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/44b61157-b706-44e2-9ca9-a5709e68f92c-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "44b61157-b706-44e2-9ca9-a5709e68f92c" (UID: "44b61157-b706-44e2-9ca9-a5709e68f92c"). 
InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:45:33 crc kubenswrapper[4762]: I1009 13:45:33.276153 4762 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/44b61157-b706-44e2-9ca9-a5709e68f92c-ring-data-devices\") on node \"crc\" DevicePath \"\"" Oct 09 13:45:33 crc kubenswrapper[4762]: I1009 13:45:33.276856 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/44b61157-b706-44e2-9ca9-a5709e68f92c-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "44b61157-b706-44e2-9ca9-a5709e68f92c" (UID: "44b61157-b706-44e2-9ca9-a5709e68f92c"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 13:45:33 crc kubenswrapper[4762]: I1009 13:45:33.279185 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44b61157-b706-44e2-9ca9-a5709e68f92c-kube-api-access-rk5jn" (OuterVolumeSpecName: "kube-api-access-rk5jn") pod "44b61157-b706-44e2-9ca9-a5709e68f92c" (UID: "44b61157-b706-44e2-9ca9-a5709e68f92c"). InnerVolumeSpecName "kube-api-access-rk5jn". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:45:33 crc kubenswrapper[4762]: I1009 13:45:33.284790 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/44b61157-b706-44e2-9ca9-a5709e68f92c-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "44b61157-b706-44e2-9ca9-a5709e68f92c" (UID: "44b61157-b706-44e2-9ca9-a5709e68f92c"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:45:33 crc kubenswrapper[4762]: I1009 13:45:33.298372 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/44b61157-b706-44e2-9ca9-a5709e68f92c-scripts" (OuterVolumeSpecName: "scripts") pod "44b61157-b706-44e2-9ca9-a5709e68f92c" (UID: "44b61157-b706-44e2-9ca9-a5709e68f92c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:45:33 crc kubenswrapper[4762]: I1009 13:45:33.301790 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/44b61157-b706-44e2-9ca9-a5709e68f92c-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "44b61157-b706-44e2-9ca9-a5709e68f92c" (UID: "44b61157-b706-44e2-9ca9-a5709e68f92c"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:45:33 crc kubenswrapper[4762]: I1009 13:45:33.302340 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/44b61157-b706-44e2-9ca9-a5709e68f92c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "44b61157-b706-44e2-9ca9-a5709e68f92c" (UID: "44b61157-b706-44e2-9ca9-a5709e68f92c"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:45:33 crc kubenswrapper[4762]: I1009 13:45:33.377721 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rk5jn\" (UniqueName: \"kubernetes.io/projected/44b61157-b706-44e2-9ca9-a5709e68f92c-kube-api-access-rk5jn\") on node \"crc\" DevicePath \"\"" Oct 09 13:45:33 crc kubenswrapper[4762]: I1009 13:45:33.377760 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/44b61157-b706-44e2-9ca9-a5709e68f92c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 13:45:33 crc kubenswrapper[4762]: I1009 13:45:33.377771 4762 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/44b61157-b706-44e2-9ca9-a5709e68f92c-etc-swift\") on node \"crc\" DevicePath \"\"" Oct 09 13:45:33 crc kubenswrapper[4762]: I1009 13:45:33.377780 4762 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/44b61157-b706-44e2-9ca9-a5709e68f92c-swiftconf\") on node \"crc\" DevicePath \"\"" Oct 09 13:45:33 crc kubenswrapper[4762]: I1009 13:45:33.377788 4762 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/44b61157-b706-44e2-9ca9-a5709e68f92c-scripts\") on node \"crc\" DevicePath \"\"" Oct 09 13:45:33 crc kubenswrapper[4762]: I1009 13:45:33.377796 4762 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/44b61157-b706-44e2-9ca9-a5709e68f92c-dispersionconf\") on node \"crc\" DevicePath \"\"" Oct 09 13:45:33 crc kubenswrapper[4762]: I1009 13:45:33.392676 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Oct 09 13:45:33 crc kubenswrapper[4762]: I1009 13:45:33.925123 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-z9z8b" event={"ID":"38d0e5fa-4372-413b-8a13-b3fb22f22463","Type":"ContainerStarted","Data":"52514876727e3f8cb7f78f04e85f8828644db736dd64bbb55eb9ec12c84a6a78"} Oct 09 13:45:33 crc kubenswrapper[4762]: I1009 13:45:33.925502 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-z9z8b" event={"ID":"38d0e5fa-4372-413b-8a13-b3fb22f22463","Type":"ContainerStarted","Data":"372276b52d8f0be8adbcc3e838b102ec5bd38c8205c7fde5ae5d5c54bce981c4"} Oct 09 13:45:33 crc kubenswrapper[4762]: I1009 13:45:33.926633 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-cwprf" event={"ID":"f82e3f43-b73d-41d9-8ce1-c466e29055a5","Type":"ContainerStarted","Data":"fd71be3f0a78bec16c0fab0dce3a39484a5e76d6011729edbd85cf32e87818f4"} Oct 09 13:45:33 crc kubenswrapper[4762]: I1009 13:45:33.929173 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-l6zst" event={"ID":"3e6d85af-3e7b-4f48-a462-e365a2dfa511","Type":"ContainerStarted","Data":"fa1145d18f0fbec16d02d4bb83ae8cb23c753337e49c0597f6370eaa7a5f4d1e"} Oct 09 13:45:33 crc kubenswrapper[4762]: I1009 13:45:33.932783 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-tdgxl" event={"ID":"44b61157-b706-44e2-9ca9-a5709e68f92c","Type":"ContainerDied","Data":"2ff74bdec45ab1f2829ff66b92f63e9639903e954be91dfb0bad631370af9b5c"} Oct 09 13:45:33 crc kubenswrapper[4762]: I1009 13:45:33.932832 4762 pod_container_deletor.go:80] "Container not found in pod's containers" 
containerID="2ff74bdec45ab1f2829ff66b92f63e9639903e954be91dfb0bad631370af9b5c" Oct 09 13:45:33 crc kubenswrapper[4762]: I1009 13:45:33.932852 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-tdgxl" Oct 09 13:45:33 crc kubenswrapper[4762]: I1009 13:45:33.944703 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-create-z9z8b" podStartSLOduration=1.942567527 podStartE2EDuration="1.942567527s" podCreationTimestamp="2025-10-09 13:45:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:45:33.940872601 +0000 UTC m=+1209.714663650" watchObservedRunningTime="2025-10-09 13:45:33.942567527 +0000 UTC m=+1209.716358566" Oct 09 13:45:33 crc kubenswrapper[4762]: I1009 13:45:33.957779 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-create-cwprf" podStartSLOduration=2.95775857 podStartE2EDuration="2.95775857s" podCreationTimestamp="2025-10-09 13:45:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:45:33.956364483 +0000 UTC m=+1209.730155522" watchObservedRunningTime="2025-10-09 13:45:33.95775857 +0000 UTC m=+1209.731549609" Oct 09 13:45:33 crc kubenswrapper[4762]: I1009 13:45:33.980383 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-create-l6zst" podStartSLOduration=1.980358501 podStartE2EDuration="1.980358501s" podCreationTimestamp="2025-10-09 13:45:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:45:33.974504025 +0000 UTC m=+1209.748295074" watchObservedRunningTime="2025-10-09 13:45:33.980358501 +0000 UTC m=+1209.754149550" Oct 09 13:45:34 crc kubenswrapper[4762]: I1009 13:45:34.941457 4762 generic.go:334] "Generic (PLEG): container finished" podID="f82e3f43-b73d-41d9-8ce1-c466e29055a5" containerID="fd71be3f0a78bec16c0fab0dce3a39484a5e76d6011729edbd85cf32e87818f4" exitCode=0 Oct 09 13:45:34 crc kubenswrapper[4762]: I1009 13:45:34.941517 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-cwprf" event={"ID":"f82e3f43-b73d-41d9-8ce1-c466e29055a5","Type":"ContainerDied","Data":"fd71be3f0a78bec16c0fab0dce3a39484a5e76d6011729edbd85cf32e87818f4"} Oct 09 13:45:36 crc kubenswrapper[4762]: I1009 13:45:36.399666 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-cwprf" Oct 09 13:45:36 crc kubenswrapper[4762]: I1009 13:45:36.533094 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bhn9d\" (UniqueName: \"kubernetes.io/projected/f82e3f43-b73d-41d9-8ce1-c466e29055a5-kube-api-access-bhn9d\") pod \"f82e3f43-b73d-41d9-8ce1-c466e29055a5\" (UID: \"f82e3f43-b73d-41d9-8ce1-c466e29055a5\") " Oct 09 13:45:36 crc kubenswrapper[4762]: I1009 13:45:36.538781 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f82e3f43-b73d-41d9-8ce1-c466e29055a5-kube-api-access-bhn9d" (OuterVolumeSpecName: "kube-api-access-bhn9d") pod "f82e3f43-b73d-41d9-8ce1-c466e29055a5" (UID: "f82e3f43-b73d-41d9-8ce1-c466e29055a5"). InnerVolumeSpecName "kube-api-access-bhn9d". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:45:36 crc kubenswrapper[4762]: I1009 13:45:36.635957 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bhn9d\" (UniqueName: \"kubernetes.io/projected/f82e3f43-b73d-41d9-8ce1-c466e29055a5-kube-api-access-bhn9d\") on node \"crc\" DevicePath \"\"" Oct 09 13:45:36 crc kubenswrapper[4762]: I1009 13:45:36.957986 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-cwprf" event={"ID":"f82e3f43-b73d-41d9-8ce1-c466e29055a5","Type":"ContainerDied","Data":"8197ea145f121161917d25925736be3b3600d5eb97e70ac0f8390022efbc57c4"} Oct 09 13:45:36 crc kubenswrapper[4762]: I1009 13:45:36.958024 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8197ea145f121161917d25925736be3b3600d5eb97e70ac0f8390022efbc57c4" Oct 09 13:45:36 crc kubenswrapper[4762]: I1009 13:45:36.958039 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-cwprf" Oct 09 13:45:38 crc kubenswrapper[4762]: I1009 13:45:38.455228 4762 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-nknqr" podUID="3d5e35ba-6450-49d9-907a-8a4f879a1b0f" containerName="ovn-controller" probeResult="failure" output=< Oct 09 13:45:38 crc kubenswrapper[4762]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Oct 09 13:45:38 crc kubenswrapper[4762]: > Oct 09 13:45:38 crc kubenswrapper[4762]: I1009 13:45:38.484710 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-4rsdz" Oct 09 13:45:38 crc kubenswrapper[4762]: I1009 13:45:38.493157 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-4rsdz" Oct 09 13:45:38 crc kubenswrapper[4762]: I1009 13:45:38.701861 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-nknqr-config-4ksvn"] Oct 09 13:45:38 crc kubenswrapper[4762]: E1009 13:45:38.702474 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f82e3f43-b73d-41d9-8ce1-c466e29055a5" containerName="mariadb-database-create" Oct 09 13:45:38 crc kubenswrapper[4762]: I1009 13:45:38.702510 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="f82e3f43-b73d-41d9-8ce1-c466e29055a5" containerName="mariadb-database-create" Oct 09 13:45:38 crc kubenswrapper[4762]: E1009 13:45:38.702566 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="44b61157-b706-44e2-9ca9-a5709e68f92c" containerName="swift-ring-rebalance" Oct 09 13:45:38 crc kubenswrapper[4762]: I1009 13:45:38.702596 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="44b61157-b706-44e2-9ca9-a5709e68f92c" containerName="swift-ring-rebalance" Oct 09 13:45:38 crc kubenswrapper[4762]: I1009 13:45:38.702957 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="44b61157-b706-44e2-9ca9-a5709e68f92c" containerName="swift-ring-rebalance" Oct 09 13:45:38 crc kubenswrapper[4762]: I1009 13:45:38.703032 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="f82e3f43-b73d-41d9-8ce1-c466e29055a5" containerName="mariadb-database-create" Oct 09 13:45:38 crc kubenswrapper[4762]: I1009 13:45:38.704231 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-nknqr-config-4ksvn" Oct 09 13:45:38 crc kubenswrapper[4762]: I1009 13:45:38.708271 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Oct 09 13:45:38 crc kubenswrapper[4762]: I1009 13:45:38.713180 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-nknqr-config-4ksvn"] Oct 09 13:45:38 crc kubenswrapper[4762]: I1009 13:45:38.768810 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xhsc7\" (UniqueName: \"kubernetes.io/projected/78444e09-7364-4b61-953c-14f01df9870d-kube-api-access-xhsc7\") pod \"ovn-controller-nknqr-config-4ksvn\" (UID: \"78444e09-7364-4b61-953c-14f01df9870d\") " pod="openstack/ovn-controller-nknqr-config-4ksvn" Oct 09 13:45:38 crc kubenswrapper[4762]: I1009 13:45:38.768882 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/78444e09-7364-4b61-953c-14f01df9870d-additional-scripts\") pod \"ovn-controller-nknqr-config-4ksvn\" (UID: \"78444e09-7364-4b61-953c-14f01df9870d\") " pod="openstack/ovn-controller-nknqr-config-4ksvn" Oct 09 13:45:38 crc kubenswrapper[4762]: I1009 13:45:38.768952 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/78444e09-7364-4b61-953c-14f01df9870d-var-run\") pod \"ovn-controller-nknqr-config-4ksvn\" (UID: \"78444e09-7364-4b61-953c-14f01df9870d\") " pod="openstack/ovn-controller-nknqr-config-4ksvn" Oct 09 13:45:38 crc kubenswrapper[4762]: I1009 13:45:38.769060 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/78444e09-7364-4b61-953c-14f01df9870d-var-run-ovn\") pod \"ovn-controller-nknqr-config-4ksvn\" (UID: \"78444e09-7364-4b61-953c-14f01df9870d\") " pod="openstack/ovn-controller-nknqr-config-4ksvn" Oct 09 13:45:38 crc kubenswrapper[4762]: I1009 13:45:38.769088 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/78444e09-7364-4b61-953c-14f01df9870d-var-log-ovn\") pod \"ovn-controller-nknqr-config-4ksvn\" (UID: \"78444e09-7364-4b61-953c-14f01df9870d\") " pod="openstack/ovn-controller-nknqr-config-4ksvn" Oct 09 13:45:38 crc kubenswrapper[4762]: I1009 13:45:38.769110 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/78444e09-7364-4b61-953c-14f01df9870d-scripts\") pod \"ovn-controller-nknqr-config-4ksvn\" (UID: \"78444e09-7364-4b61-953c-14f01df9870d\") " pod="openstack/ovn-controller-nknqr-config-4ksvn" Oct 09 13:45:38 crc kubenswrapper[4762]: I1009 13:45:38.870807 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/78444e09-7364-4b61-953c-14f01df9870d-var-run\") pod \"ovn-controller-nknqr-config-4ksvn\" (UID: \"78444e09-7364-4b61-953c-14f01df9870d\") " pod="openstack/ovn-controller-nknqr-config-4ksvn" Oct 09 13:45:38 crc kubenswrapper[4762]: I1009 13:45:38.870909 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/78444e09-7364-4b61-953c-14f01df9870d-var-run-ovn\") pod 
\"ovn-controller-nknqr-config-4ksvn\" (UID: \"78444e09-7364-4b61-953c-14f01df9870d\") " pod="openstack/ovn-controller-nknqr-config-4ksvn" Oct 09 13:45:38 crc kubenswrapper[4762]: I1009 13:45:38.870939 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/78444e09-7364-4b61-953c-14f01df9870d-var-log-ovn\") pod \"ovn-controller-nknqr-config-4ksvn\" (UID: \"78444e09-7364-4b61-953c-14f01df9870d\") " pod="openstack/ovn-controller-nknqr-config-4ksvn" Oct 09 13:45:38 crc kubenswrapper[4762]: I1009 13:45:38.870967 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/78444e09-7364-4b61-953c-14f01df9870d-scripts\") pod \"ovn-controller-nknqr-config-4ksvn\" (UID: \"78444e09-7364-4b61-953c-14f01df9870d\") " pod="openstack/ovn-controller-nknqr-config-4ksvn" Oct 09 13:45:38 crc kubenswrapper[4762]: I1009 13:45:38.871036 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xhsc7\" (UniqueName: \"kubernetes.io/projected/78444e09-7364-4b61-953c-14f01df9870d-kube-api-access-xhsc7\") pod \"ovn-controller-nknqr-config-4ksvn\" (UID: \"78444e09-7364-4b61-953c-14f01df9870d\") " pod="openstack/ovn-controller-nknqr-config-4ksvn" Oct 09 13:45:38 crc kubenswrapper[4762]: I1009 13:45:38.871069 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/78444e09-7364-4b61-953c-14f01df9870d-additional-scripts\") pod \"ovn-controller-nknqr-config-4ksvn\" (UID: \"78444e09-7364-4b61-953c-14f01df9870d\") " pod="openstack/ovn-controller-nknqr-config-4ksvn" Oct 09 13:45:38 crc kubenswrapper[4762]: I1009 13:45:38.871181 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/78444e09-7364-4b61-953c-14f01df9870d-var-run\") pod \"ovn-controller-nknqr-config-4ksvn\" (UID: \"78444e09-7364-4b61-953c-14f01df9870d\") " pod="openstack/ovn-controller-nknqr-config-4ksvn" Oct 09 13:45:38 crc kubenswrapper[4762]: I1009 13:45:38.871240 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/78444e09-7364-4b61-953c-14f01df9870d-var-log-ovn\") pod \"ovn-controller-nknqr-config-4ksvn\" (UID: \"78444e09-7364-4b61-953c-14f01df9870d\") " pod="openstack/ovn-controller-nknqr-config-4ksvn" Oct 09 13:45:38 crc kubenswrapper[4762]: I1009 13:45:38.871237 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/78444e09-7364-4b61-953c-14f01df9870d-var-run-ovn\") pod \"ovn-controller-nknqr-config-4ksvn\" (UID: \"78444e09-7364-4b61-953c-14f01df9870d\") " pod="openstack/ovn-controller-nknqr-config-4ksvn" Oct 09 13:45:38 crc kubenswrapper[4762]: I1009 13:45:38.871916 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/78444e09-7364-4b61-953c-14f01df9870d-additional-scripts\") pod \"ovn-controller-nknqr-config-4ksvn\" (UID: \"78444e09-7364-4b61-953c-14f01df9870d\") " pod="openstack/ovn-controller-nknqr-config-4ksvn" Oct 09 13:45:38 crc kubenswrapper[4762]: I1009 13:45:38.873429 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/78444e09-7364-4b61-953c-14f01df9870d-scripts\") pod 
\"ovn-controller-nknqr-config-4ksvn\" (UID: \"78444e09-7364-4b61-953c-14f01df9870d\") " pod="openstack/ovn-controller-nknqr-config-4ksvn" Oct 09 13:45:38 crc kubenswrapper[4762]: I1009 13:45:38.898344 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xhsc7\" (UniqueName: \"kubernetes.io/projected/78444e09-7364-4b61-953c-14f01df9870d-kube-api-access-xhsc7\") pod \"ovn-controller-nknqr-config-4ksvn\" (UID: \"78444e09-7364-4b61-953c-14f01df9870d\") " pod="openstack/ovn-controller-nknqr-config-4ksvn" Oct 09 13:45:38 crc kubenswrapper[4762]: I1009 13:45:38.977718 4762 generic.go:334] "Generic (PLEG): container finished" podID="38d0e5fa-4372-413b-8a13-b3fb22f22463" containerID="52514876727e3f8cb7f78f04e85f8828644db736dd64bbb55eb9ec12c84a6a78" exitCode=0 Oct 09 13:45:38 crc kubenswrapper[4762]: I1009 13:45:38.980267 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-z9z8b" event={"ID":"38d0e5fa-4372-413b-8a13-b3fb22f22463","Type":"ContainerDied","Data":"52514876727e3f8cb7f78f04e85f8828644db736dd64bbb55eb9ec12c84a6a78"} Oct 09 13:45:38 crc kubenswrapper[4762]: I1009 13:45:38.981435 4762 generic.go:334] "Generic (PLEG): container finished" podID="3e6d85af-3e7b-4f48-a462-e365a2dfa511" containerID="fa1145d18f0fbec16d02d4bb83ae8cb23c753337e49c0597f6370eaa7a5f4d1e" exitCode=0 Oct 09 13:45:38 crc kubenswrapper[4762]: I1009 13:45:38.981523 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-l6zst" event={"ID":"3e6d85af-3e7b-4f48-a462-e365a2dfa511","Type":"ContainerDied","Data":"fa1145d18f0fbec16d02d4bb83ae8cb23c753337e49c0597f6370eaa7a5f4d1e"} Oct 09 13:45:39 crc kubenswrapper[4762]: I1009 13:45:39.022275 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-nknqr-config-4ksvn" Oct 09 13:45:39 crc kubenswrapper[4762]: I1009 13:45:39.609925 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-nknqr-config-4ksvn"] Oct 09 13:45:39 crc kubenswrapper[4762]: W1009 13:45:39.618407 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod78444e09_7364_4b61_953c_14f01df9870d.slice/crio-9897f8507b891fbda27ce7cc598d7094cc3ea4a0f7c9dc505b0c0e096a03a18e WatchSource:0}: Error finding container 9897f8507b891fbda27ce7cc598d7094cc3ea4a0f7c9dc505b0c0e096a03a18e: Status 404 returned error can't find the container with id 9897f8507b891fbda27ce7cc598d7094cc3ea4a0f7c9dc505b0c0e096a03a18e Oct 09 13:45:39 crc kubenswrapper[4762]: I1009 13:45:39.995379 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"1477cefa-db6e-45d7-adb1-d7c1369a3610","Type":"ContainerStarted","Data":"27f0202dba594a2b3dbd7fb92b50964f812c6b9752cc21fbcc955dd912b04bce"} Oct 09 13:45:39 crc kubenswrapper[4762]: I1009 13:45:39.995716 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"1477cefa-db6e-45d7-adb1-d7c1369a3610","Type":"ContainerStarted","Data":"8e56d6db5bd3ec01caa26962d4f73795f23978e73a54ad53603e87c2d20b3d95"} Oct 09 13:45:39 crc kubenswrapper[4762]: I1009 13:45:39.995726 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"1477cefa-db6e-45d7-adb1-d7c1369a3610","Type":"ContainerStarted","Data":"00b505833ef1de865656bd407236b581616bea0b032dd2063522f8209a228b80"} Oct 09 13:45:39 crc kubenswrapper[4762]: I1009 13:45:39.998069 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-nknqr-config-4ksvn" event={"ID":"78444e09-7364-4b61-953c-14f01df9870d","Type":"ContainerStarted","Data":"faba4f945d7fe718ff65fefc602b76d4fc0d02184c4e3eebec8453abeee78454"} Oct 09 13:45:39 crc kubenswrapper[4762]: I1009 13:45:39.998121 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-nknqr-config-4ksvn" event={"ID":"78444e09-7364-4b61-953c-14f01df9870d","Type":"ContainerStarted","Data":"9897f8507b891fbda27ce7cc598d7094cc3ea4a0f7c9dc505b0c0e096a03a18e"} Oct 09 13:45:40 crc kubenswrapper[4762]: I1009 13:45:40.016786 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-nknqr-config-4ksvn" podStartSLOduration=2.01676541 podStartE2EDuration="2.01676541s" podCreationTimestamp="2025-10-09 13:45:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:45:40.015473806 +0000 UTC m=+1215.789264845" watchObservedRunningTime="2025-10-09 13:45:40.01676541 +0000 UTC m=+1215.790556449" Oct 09 13:45:40 crc kubenswrapper[4762]: I1009 13:45:40.439605 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-l6zst" Oct 09 13:45:40 crc kubenswrapper[4762]: I1009 13:45:40.448918 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-z9z8b" Oct 09 13:45:40 crc kubenswrapper[4762]: I1009 13:45:40.503386 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-55bf4\" (UniqueName: \"kubernetes.io/projected/3e6d85af-3e7b-4f48-a462-e365a2dfa511-kube-api-access-55bf4\") pod \"3e6d85af-3e7b-4f48-a462-e365a2dfa511\" (UID: \"3e6d85af-3e7b-4f48-a462-e365a2dfa511\") " Oct 09 13:45:40 crc kubenswrapper[4762]: I1009 13:45:40.503613 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t8n7c\" (UniqueName: \"kubernetes.io/projected/38d0e5fa-4372-413b-8a13-b3fb22f22463-kube-api-access-t8n7c\") pod \"38d0e5fa-4372-413b-8a13-b3fb22f22463\" (UID: \"38d0e5fa-4372-413b-8a13-b3fb22f22463\") " Oct 09 13:45:40 crc kubenswrapper[4762]: I1009 13:45:40.510568 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3e6d85af-3e7b-4f48-a462-e365a2dfa511-kube-api-access-55bf4" (OuterVolumeSpecName: "kube-api-access-55bf4") pod "3e6d85af-3e7b-4f48-a462-e365a2dfa511" (UID: "3e6d85af-3e7b-4f48-a462-e365a2dfa511"). InnerVolumeSpecName "kube-api-access-55bf4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:45:40 crc kubenswrapper[4762]: I1009 13:45:40.511142 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/38d0e5fa-4372-413b-8a13-b3fb22f22463-kube-api-access-t8n7c" (OuterVolumeSpecName: "kube-api-access-t8n7c") pod "38d0e5fa-4372-413b-8a13-b3fb22f22463" (UID: "38d0e5fa-4372-413b-8a13-b3fb22f22463"). InnerVolumeSpecName "kube-api-access-t8n7c". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:45:40 crc kubenswrapper[4762]: I1009 13:45:40.605425 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t8n7c\" (UniqueName: \"kubernetes.io/projected/38d0e5fa-4372-413b-8a13-b3fb22f22463-kube-api-access-t8n7c\") on node \"crc\" DevicePath \"\"" Oct 09 13:45:40 crc kubenswrapper[4762]: I1009 13:45:40.605466 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-55bf4\" (UniqueName: \"kubernetes.io/projected/3e6d85af-3e7b-4f48-a462-e365a2dfa511-kube-api-access-55bf4\") on node \"crc\" DevicePath \"\"" Oct 09 13:45:40 crc kubenswrapper[4762]: E1009 13:45:40.696246 4762 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0ca3d4c1_b9e5_4443_8102_7739602cbd2f.slice/crio-conmon-6d253aec4527a1cce29420f394ed34441df96bec1ff09c961c18414c6cae23c3.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0ca3d4c1_b9e5_4443_8102_7739602cbd2f.slice/crio-6d253aec4527a1cce29420f394ed34441df96bec1ff09c961c18414c6cae23c3.scope\": RecentStats: unable to find data in memory cache]" Oct 09 13:45:41 crc kubenswrapper[4762]: I1009 13:45:41.006421 4762 generic.go:334] "Generic (PLEG): container finished" podID="0ca3d4c1-b9e5-4443-8102-7739602cbd2f" containerID="6d253aec4527a1cce29420f394ed34441df96bec1ff09c961c18414c6cae23c3" exitCode=0 Oct 09 13:45:41 crc kubenswrapper[4762]: I1009 13:45:41.006571 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"0ca3d4c1-b9e5-4443-8102-7739602cbd2f","Type":"ContainerDied","Data":"6d253aec4527a1cce29420f394ed34441df96bec1ff09c961c18414c6cae23c3"} Oct 09 13:45:41 crc kubenswrapper[4762]: I1009 
13:45:41.008220 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-l6zst" event={"ID":"3e6d85af-3e7b-4f48-a462-e365a2dfa511","Type":"ContainerDied","Data":"3e623b3c73bdc9cf78c039567fe35550a2aa57d86efaac8c18939e3726a8bb4c"} Oct 09 13:45:41 crc kubenswrapper[4762]: I1009 13:45:41.008275 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3e623b3c73bdc9cf78c039567fe35550a2aa57d86efaac8c18939e3726a8bb4c" Oct 09 13:45:41 crc kubenswrapper[4762]: I1009 13:45:41.008234 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-l6zst" Oct 09 13:45:41 crc kubenswrapper[4762]: I1009 13:45:41.013168 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"1477cefa-db6e-45d7-adb1-d7c1369a3610","Type":"ContainerStarted","Data":"eb84e1b30f872410009e03d937f21e5a5aac63306bed8cebe6ff44f437abd217"} Oct 09 13:45:41 crc kubenswrapper[4762]: I1009 13:45:41.014842 4762 generic.go:334] "Generic (PLEG): container finished" podID="78444e09-7364-4b61-953c-14f01df9870d" containerID="faba4f945d7fe718ff65fefc602b76d4fc0d02184c4e3eebec8453abeee78454" exitCode=0 Oct 09 13:45:41 crc kubenswrapper[4762]: I1009 13:45:41.014912 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-nknqr-config-4ksvn" event={"ID":"78444e09-7364-4b61-953c-14f01df9870d","Type":"ContainerDied","Data":"faba4f945d7fe718ff65fefc602b76d4fc0d02184c4e3eebec8453abeee78454"} Oct 09 13:45:41 crc kubenswrapper[4762]: I1009 13:45:41.016736 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-z9z8b" event={"ID":"38d0e5fa-4372-413b-8a13-b3fb22f22463","Type":"ContainerDied","Data":"372276b52d8f0be8adbcc3e838b102ec5bd38c8205c7fde5ae5d5c54bce981c4"} Oct 09 13:45:41 crc kubenswrapper[4762]: I1009 13:45:41.016791 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="372276b52d8f0be8adbcc3e838b102ec5bd38c8205c7fde5ae5d5c54bce981c4" Oct 09 13:45:41 crc kubenswrapper[4762]: I1009 13:45:41.016864 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-z9z8b" Oct 09 13:45:41 crc kubenswrapper[4762]: I1009 13:45:41.969373 4762 patch_prober.go:28] interesting pod/machine-config-daemon-5v6hv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 13:45:41 crc kubenswrapper[4762]: I1009 13:45:41.969754 4762 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 13:45:42 crc kubenswrapper[4762]: I1009 13:45:42.027157 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"0ca3d4c1-b9e5-4443-8102-7739602cbd2f","Type":"ContainerStarted","Data":"f1abbdb910acded618c7b329467c1d59b4d7f9365598dbf3d11919c8b70ec8ef"} Oct 09 13:45:42 crc kubenswrapper[4762]: I1009 13:45:42.027620 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Oct 09 13:45:42 crc kubenswrapper[4762]: I1009 13:45:42.060265 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=37.292803838 podStartE2EDuration="1m25.060240882s" podCreationTimestamp="2025-10-09 13:44:17 +0000 UTC" firstStartedPulling="2025-10-09 13:44:19.066126643 +0000 UTC m=+1134.839917682" lastFinishedPulling="2025-10-09 13:45:06.833563687 +0000 UTC m=+1182.607354726" observedRunningTime="2025-10-09 13:45:42.056487133 +0000 UTC m=+1217.830278172" watchObservedRunningTime="2025-10-09 13:45:42.060240882 +0000 UTC m=+1217.834031941" Oct 09 13:45:42 crc kubenswrapper[4762]: I1009 13:45:42.390713 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-nknqr-config-4ksvn" Oct 09 13:45:42 crc kubenswrapper[4762]: I1009 13:45:42.438402 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/78444e09-7364-4b61-953c-14f01df9870d-var-run-ovn\") pod \"78444e09-7364-4b61-953c-14f01df9870d\" (UID: \"78444e09-7364-4b61-953c-14f01df9870d\") " Oct 09 13:45:42 crc kubenswrapper[4762]: I1009 13:45:42.438547 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/78444e09-7364-4b61-953c-14f01df9870d-var-run\") pod \"78444e09-7364-4b61-953c-14f01df9870d\" (UID: \"78444e09-7364-4b61-953c-14f01df9870d\") " Oct 09 13:45:42 crc kubenswrapper[4762]: I1009 13:45:42.438619 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/78444e09-7364-4b61-953c-14f01df9870d-scripts\") pod \"78444e09-7364-4b61-953c-14f01df9870d\" (UID: \"78444e09-7364-4b61-953c-14f01df9870d\") " Oct 09 13:45:42 crc kubenswrapper[4762]: I1009 13:45:42.445374 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/78444e09-7364-4b61-953c-14f01df9870d-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "78444e09-7364-4b61-953c-14f01df9870d" (UID: "78444e09-7364-4b61-953c-14f01df9870d"). InnerVolumeSpecName "var-run-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 13:45:42 crc kubenswrapper[4762]: I1009 13:45:42.445475 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xhsc7\" (UniqueName: \"kubernetes.io/projected/78444e09-7364-4b61-953c-14f01df9870d-kube-api-access-xhsc7\") pod \"78444e09-7364-4b61-953c-14f01df9870d\" (UID: \"78444e09-7364-4b61-953c-14f01df9870d\") " Oct 09 13:45:42 crc kubenswrapper[4762]: I1009 13:45:42.445603 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/78444e09-7364-4b61-953c-14f01df9870d-var-log-ovn\") pod \"78444e09-7364-4b61-953c-14f01df9870d\" (UID: \"78444e09-7364-4b61-953c-14f01df9870d\") " Oct 09 13:45:42 crc kubenswrapper[4762]: I1009 13:45:42.445658 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/78444e09-7364-4b61-953c-14f01df9870d-additional-scripts\") pod \"78444e09-7364-4b61-953c-14f01df9870d\" (UID: \"78444e09-7364-4b61-953c-14f01df9870d\") " Oct 09 13:45:42 crc kubenswrapper[4762]: I1009 13:45:42.445383 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/78444e09-7364-4b61-953c-14f01df9870d-var-run" (OuterVolumeSpecName: "var-run") pod "78444e09-7364-4b61-953c-14f01df9870d" (UID: "78444e09-7364-4b61-953c-14f01df9870d"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 13:45:42 crc kubenswrapper[4762]: I1009 13:45:42.446461 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/78444e09-7364-4b61-953c-14f01df9870d-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "78444e09-7364-4b61-953c-14f01df9870d" (UID: "78444e09-7364-4b61-953c-14f01df9870d"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 13:45:42 crc kubenswrapper[4762]: I1009 13:45:42.446979 4762 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/78444e09-7364-4b61-953c-14f01df9870d-var-run-ovn\") on node \"crc\" DevicePath \"\"" Oct 09 13:45:42 crc kubenswrapper[4762]: I1009 13:45:42.447839 4762 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/78444e09-7364-4b61-953c-14f01df9870d-var-run\") on node \"crc\" DevicePath \"\"" Oct 09 13:45:42 crc kubenswrapper[4762]: I1009 13:45:42.447866 4762 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/78444e09-7364-4b61-953c-14f01df9870d-var-log-ovn\") on node \"crc\" DevicePath \"\"" Oct 09 13:45:42 crc kubenswrapper[4762]: I1009 13:45:42.447018 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/78444e09-7364-4b61-953c-14f01df9870d-scripts" (OuterVolumeSpecName: "scripts") pod "78444e09-7364-4b61-953c-14f01df9870d" (UID: "78444e09-7364-4b61-953c-14f01df9870d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:45:42 crc kubenswrapper[4762]: I1009 13:45:42.447816 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/78444e09-7364-4b61-953c-14f01df9870d-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "78444e09-7364-4b61-953c-14f01df9870d" (UID: "78444e09-7364-4b61-953c-14f01df9870d"). 
InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:45:42 crc kubenswrapper[4762]: I1009 13:45:42.454498 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/78444e09-7364-4b61-953c-14f01df9870d-kube-api-access-xhsc7" (OuterVolumeSpecName: "kube-api-access-xhsc7") pod "78444e09-7364-4b61-953c-14f01df9870d" (UID: "78444e09-7364-4b61-953c-14f01df9870d"). InnerVolumeSpecName "kube-api-access-xhsc7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:45:42 crc kubenswrapper[4762]: I1009 13:45:42.549960 4762 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/78444e09-7364-4b61-953c-14f01df9870d-scripts\") on node \"crc\" DevicePath \"\"" Oct 09 13:45:42 crc kubenswrapper[4762]: I1009 13:45:42.550033 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xhsc7\" (UniqueName: \"kubernetes.io/projected/78444e09-7364-4b61-953c-14f01df9870d-kube-api-access-xhsc7\") on node \"crc\" DevicePath \"\"" Oct 09 13:45:42 crc kubenswrapper[4762]: I1009 13:45:42.550050 4762 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/78444e09-7364-4b61-953c-14f01df9870d-additional-scripts\") on node \"crc\" DevicePath \"\"" Oct 09 13:45:42 crc kubenswrapper[4762]: I1009 13:45:42.551701 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-7662-account-create-zsqv2"] Oct 09 13:45:42 crc kubenswrapper[4762]: E1009 13:45:42.552104 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78444e09-7364-4b61-953c-14f01df9870d" containerName="ovn-config" Oct 09 13:45:42 crc kubenswrapper[4762]: I1009 13:45:42.552180 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="78444e09-7364-4b61-953c-14f01df9870d" containerName="ovn-config" Oct 09 13:45:42 crc kubenswrapper[4762]: E1009 13:45:42.552264 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="38d0e5fa-4372-413b-8a13-b3fb22f22463" containerName="mariadb-database-create" Oct 09 13:45:42 crc kubenswrapper[4762]: I1009 13:45:42.552326 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="38d0e5fa-4372-413b-8a13-b3fb22f22463" containerName="mariadb-database-create" Oct 09 13:45:42 crc kubenswrapper[4762]: E1009 13:45:42.552383 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e6d85af-3e7b-4f48-a462-e365a2dfa511" containerName="mariadb-database-create" Oct 09 13:45:42 crc kubenswrapper[4762]: I1009 13:45:42.552432 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e6d85af-3e7b-4f48-a462-e365a2dfa511" containerName="mariadb-database-create" Oct 09 13:45:42 crc kubenswrapper[4762]: I1009 13:45:42.552681 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="78444e09-7364-4b61-953c-14f01df9870d" containerName="ovn-config" Oct 09 13:45:42 crc kubenswrapper[4762]: I1009 13:45:42.552793 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="38d0e5fa-4372-413b-8a13-b3fb22f22463" containerName="mariadb-database-create" Oct 09 13:45:42 crc kubenswrapper[4762]: I1009 13:45:42.552900 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="3e6d85af-3e7b-4f48-a462-e365a2dfa511" containerName="mariadb-database-create" Oct 09 13:45:42 crc kubenswrapper[4762]: I1009 13:45:42.553502 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-7662-account-create-zsqv2" Oct 09 13:45:42 crc kubenswrapper[4762]: I1009 13:45:42.556307 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret" Oct 09 13:45:42 crc kubenswrapper[4762]: I1009 13:45:42.561948 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-7662-account-create-zsqv2"] Oct 09 13:45:42 crc kubenswrapper[4762]: I1009 13:45:42.651077 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wngcv\" (UniqueName: \"kubernetes.io/projected/761002a8-1d69-4d8e-b2ff-78e1b853b54d-kube-api-access-wngcv\") pod \"glance-7662-account-create-zsqv2\" (UID: \"761002a8-1d69-4d8e-b2ff-78e1b853b54d\") " pod="openstack/glance-7662-account-create-zsqv2" Oct 09 13:45:42 crc kubenswrapper[4762]: I1009 13:45:42.752655 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wngcv\" (UniqueName: \"kubernetes.io/projected/761002a8-1d69-4d8e-b2ff-78e1b853b54d-kube-api-access-wngcv\") pod \"glance-7662-account-create-zsqv2\" (UID: \"761002a8-1d69-4d8e-b2ff-78e1b853b54d\") " pod="openstack/glance-7662-account-create-zsqv2" Oct 09 13:45:42 crc kubenswrapper[4762]: I1009 13:45:42.775494 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wngcv\" (UniqueName: \"kubernetes.io/projected/761002a8-1d69-4d8e-b2ff-78e1b853b54d-kube-api-access-wngcv\") pod \"glance-7662-account-create-zsqv2\" (UID: \"761002a8-1d69-4d8e-b2ff-78e1b853b54d\") " pod="openstack/glance-7662-account-create-zsqv2" Oct 09 13:45:42 crc kubenswrapper[4762]: I1009 13:45:42.870833 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-7662-account-create-zsqv2" Oct 09 13:45:43 crc kubenswrapper[4762]: I1009 13:45:43.038981 4762 generic.go:334] "Generic (PLEG): container finished" podID="c3841f71-5204-469f-b755-e030281725d1" containerID="89d0be629bcb0cac06f9243b7e363d8a395f63ea55dd81e464a984f45d7e31cc" exitCode=0 Oct 09 13:45:43 crc kubenswrapper[4762]: I1009 13:45:43.039068 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"c3841f71-5204-469f-b755-e030281725d1","Type":"ContainerDied","Data":"89d0be629bcb0cac06f9243b7e363d8a395f63ea55dd81e464a984f45d7e31cc"} Oct 09 13:45:43 crc kubenswrapper[4762]: I1009 13:45:43.045887 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-nknqr-config-4ksvn" event={"ID":"78444e09-7364-4b61-953c-14f01df9870d","Type":"ContainerDied","Data":"9897f8507b891fbda27ce7cc598d7094cc3ea4a0f7c9dc505b0c0e096a03a18e"} Oct 09 13:45:43 crc kubenswrapper[4762]: I1009 13:45:43.045922 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-nknqr-config-4ksvn" Oct 09 13:45:43 crc kubenswrapper[4762]: I1009 13:45:43.045930 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9897f8507b891fbda27ce7cc598d7094cc3ea4a0f7c9dc505b0c0e096a03a18e" Oct 09 13:45:43 crc kubenswrapper[4762]: I1009 13:45:43.159072 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-nknqr-config-4ksvn"] Oct 09 13:45:43 crc kubenswrapper[4762]: I1009 13:45:43.165444 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-nknqr-config-4ksvn"] Oct 09 13:45:43 crc kubenswrapper[4762]: I1009 13:45:43.282464 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-7662-account-create-zsqv2"] Oct 09 13:45:43 crc kubenswrapper[4762]: W1009 13:45:43.287370 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod761002a8_1d69_4d8e_b2ff_78e1b853b54d.slice/crio-d2c6c6c483e8fc25bda9f5fd8aef05618fe983c469efcba74dfc07c8cebe6d07 WatchSource:0}: Error finding container d2c6c6c483e8fc25bda9f5fd8aef05618fe983c469efcba74dfc07c8cebe6d07: Status 404 returned error can't find the container with id d2c6c6c483e8fc25bda9f5fd8aef05618fe983c469efcba74dfc07c8cebe6d07 Oct 09 13:45:43 crc kubenswrapper[4762]: I1009 13:45:43.473741 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-nknqr" Oct 09 13:45:44 crc kubenswrapper[4762]: I1009 13:45:44.055085 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"c3841f71-5204-469f-b755-e030281725d1","Type":"ContainerStarted","Data":"7e81d9e72b263dd6748137f8d9a685b369c4a166ceded69631770333e18be646"} Oct 09 13:45:44 crc kubenswrapper[4762]: I1009 13:45:44.055317 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Oct 09 13:45:44 crc kubenswrapper[4762]: I1009 13:45:44.056620 4762 generic.go:334] "Generic (PLEG): container finished" podID="761002a8-1d69-4d8e-b2ff-78e1b853b54d" containerID="641a0b1793de5d15fd2bf3ee9403e19a637a304d2521c7dd1ef6f5c4e8a5eee4" exitCode=0 Oct 09 13:45:44 crc kubenswrapper[4762]: I1009 13:45:44.056689 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-7662-account-create-zsqv2" event={"ID":"761002a8-1d69-4d8e-b2ff-78e1b853b54d","Type":"ContainerDied","Data":"641a0b1793de5d15fd2bf3ee9403e19a637a304d2521c7dd1ef6f5c4e8a5eee4"} Oct 09 13:45:44 crc kubenswrapper[4762]: I1009 13:45:44.056713 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-7662-account-create-zsqv2" event={"ID":"761002a8-1d69-4d8e-b2ff-78e1b853b54d","Type":"ContainerStarted","Data":"d2c6c6c483e8fc25bda9f5fd8aef05618fe983c469efcba74dfc07c8cebe6d07"} Oct 09 13:45:44 crc kubenswrapper[4762]: I1009 13:45:44.091575 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=-9223371949.763224 podStartE2EDuration="1m27.091552222s" podCreationTimestamp="2025-10-09 13:44:17 +0000 UTC" firstStartedPulling="2025-10-09 13:44:19.824100611 +0000 UTC m=+1135.597891660" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:45:44.086106617 +0000 UTC m=+1219.859897666" watchObservedRunningTime="2025-10-09 13:45:44.091552222 +0000 UTC m=+1219.865343261" Oct 09 13:45:44 crc kubenswrapper[4762]: I1009 13:45:44.996705 
4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="78444e09-7364-4b61-953c-14f01df9870d" path="/var/lib/kubelet/pods/78444e09-7364-4b61-953c-14f01df9870d/volumes" Oct 09 13:45:45 crc kubenswrapper[4762]: I1009 13:45:45.070850 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"1477cefa-db6e-45d7-adb1-d7c1369a3610","Type":"ContainerStarted","Data":"567e831eb11e3bbc96e4ae494a3a48f3ce30d0330873e35b1584ad729d384384"} Oct 09 13:45:45 crc kubenswrapper[4762]: I1009 13:45:45.070930 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"1477cefa-db6e-45d7-adb1-d7c1369a3610","Type":"ContainerStarted","Data":"acd57c56e1503069670b1070c75ca8274489494e90451ef9ccda9ba23b32fa18"} Oct 09 13:45:45 crc kubenswrapper[4762]: I1009 13:45:45.070950 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"1477cefa-db6e-45d7-adb1-d7c1369a3610","Type":"ContainerStarted","Data":"97e103b14e33cb5dbaaed4498f0256e5eaa30d65d0cbbbc37cdddb35a31cab45"} Oct 09 13:45:45 crc kubenswrapper[4762]: I1009 13:45:45.070959 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"1477cefa-db6e-45d7-adb1-d7c1369a3610","Type":"ContainerStarted","Data":"4d36fbb2c5b4d9e5e823bb89be5de822963abc13a4e641b17eb2acd134030bad"} Oct 09 13:45:45 crc kubenswrapper[4762]: I1009 13:45:45.407902 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-7662-account-create-zsqv2" Oct 09 13:45:45 crc kubenswrapper[4762]: I1009 13:45:45.508054 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wngcv\" (UniqueName: \"kubernetes.io/projected/761002a8-1d69-4d8e-b2ff-78e1b853b54d-kube-api-access-wngcv\") pod \"761002a8-1d69-4d8e-b2ff-78e1b853b54d\" (UID: \"761002a8-1d69-4d8e-b2ff-78e1b853b54d\") " Oct 09 13:45:45 crc kubenswrapper[4762]: I1009 13:45:45.513846 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/761002a8-1d69-4d8e-b2ff-78e1b853b54d-kube-api-access-wngcv" (OuterVolumeSpecName: "kube-api-access-wngcv") pod "761002a8-1d69-4d8e-b2ff-78e1b853b54d" (UID: "761002a8-1d69-4d8e-b2ff-78e1b853b54d"). InnerVolumeSpecName "kube-api-access-wngcv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:45:45 crc kubenswrapper[4762]: I1009 13:45:45.610064 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wngcv\" (UniqueName: \"kubernetes.io/projected/761002a8-1d69-4d8e-b2ff-78e1b853b54d-kube-api-access-wngcv\") on node \"crc\" DevicePath \"\"" Oct 09 13:45:46 crc kubenswrapper[4762]: I1009 13:45:46.079572 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-7662-account-create-zsqv2" event={"ID":"761002a8-1d69-4d8e-b2ff-78e1b853b54d","Type":"ContainerDied","Data":"d2c6c6c483e8fc25bda9f5fd8aef05618fe983c469efcba74dfc07c8cebe6d07"} Oct 09 13:45:46 crc kubenswrapper[4762]: I1009 13:45:46.079612 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-7662-account-create-zsqv2" Oct 09 13:45:46 crc kubenswrapper[4762]: I1009 13:45:46.079619 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d2c6c6c483e8fc25bda9f5fd8aef05618fe983c469efcba74dfc07c8cebe6d07" Oct 09 13:45:47 crc kubenswrapper[4762]: I1009 13:45:47.090776 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"1477cefa-db6e-45d7-adb1-d7c1369a3610","Type":"ContainerStarted","Data":"e850af1b142da8d66827cd67251e451fbb6e56f0984b429a296cc6470f0641a5"} Oct 09 13:45:47 crc kubenswrapper[4762]: I1009 13:45:47.091309 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"1477cefa-db6e-45d7-adb1-d7c1369a3610","Type":"ContainerStarted","Data":"e106dddaface5bda97c181d6c83b5407d6b119fe5aa03418fc12646ee1fe1b99"} Oct 09 13:45:47 crc kubenswrapper[4762]: I1009 13:45:47.091321 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"1477cefa-db6e-45d7-adb1-d7c1369a3610","Type":"ContainerStarted","Data":"8fdc84bf366cf69541cde7674950b5c46694290b85f776bf10157a1b99e79676"} Oct 09 13:45:47 crc kubenswrapper[4762]: I1009 13:45:47.091329 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"1477cefa-db6e-45d7-adb1-d7c1369a3610","Type":"ContainerStarted","Data":"5c71985be4769cc8dca8656fd224116d6cfd9ba38fac16e449710cfac07fed62"} Oct 09 13:45:47 crc kubenswrapper[4762]: I1009 13:45:47.769430 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-hl7q6"] Oct 09 13:45:47 crc kubenswrapper[4762]: E1009 13:45:47.769842 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="761002a8-1d69-4d8e-b2ff-78e1b853b54d" containerName="mariadb-account-create" Oct 09 13:45:47 crc kubenswrapper[4762]: I1009 13:45:47.769861 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="761002a8-1d69-4d8e-b2ff-78e1b853b54d" containerName="mariadb-account-create" Oct 09 13:45:47 crc kubenswrapper[4762]: I1009 13:45:47.770058 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="761002a8-1d69-4d8e-b2ff-78e1b853b54d" containerName="mariadb-account-create" Oct 09 13:45:47 crc kubenswrapper[4762]: I1009 13:45:47.772567 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-hl7q6" Oct 09 13:45:47 crc kubenswrapper[4762]: I1009 13:45:47.786868 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Oct 09 13:45:47 crc kubenswrapper[4762]: I1009 13:45:47.787021 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-tsjl8" Oct 09 13:45:47 crc kubenswrapper[4762]: I1009 13:45:47.796356 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-hl7q6"] Oct 09 13:45:47 crc kubenswrapper[4762]: I1009 13:45:47.845475 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8cf35063-94f0-4dd9-9282-060a7d5360cc-config-data\") pod \"glance-db-sync-hl7q6\" (UID: \"8cf35063-94f0-4dd9-9282-060a7d5360cc\") " pod="openstack/glance-db-sync-hl7q6" Oct 09 13:45:47 crc kubenswrapper[4762]: I1009 13:45:47.845519 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/8cf35063-94f0-4dd9-9282-060a7d5360cc-db-sync-config-data\") pod \"glance-db-sync-hl7q6\" (UID: \"8cf35063-94f0-4dd9-9282-060a7d5360cc\") " pod="openstack/glance-db-sync-hl7q6" Oct 09 13:45:47 crc kubenswrapper[4762]: I1009 13:45:47.845606 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bvjxd\" (UniqueName: \"kubernetes.io/projected/8cf35063-94f0-4dd9-9282-060a7d5360cc-kube-api-access-bvjxd\") pod \"glance-db-sync-hl7q6\" (UID: \"8cf35063-94f0-4dd9-9282-060a7d5360cc\") " pod="openstack/glance-db-sync-hl7q6" Oct 09 13:45:47 crc kubenswrapper[4762]: I1009 13:45:47.845630 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8cf35063-94f0-4dd9-9282-060a7d5360cc-combined-ca-bundle\") pod \"glance-db-sync-hl7q6\" (UID: \"8cf35063-94f0-4dd9-9282-060a7d5360cc\") " pod="openstack/glance-db-sync-hl7q6" Oct 09 13:45:47 crc kubenswrapper[4762]: I1009 13:45:47.947158 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bvjxd\" (UniqueName: \"kubernetes.io/projected/8cf35063-94f0-4dd9-9282-060a7d5360cc-kube-api-access-bvjxd\") pod \"glance-db-sync-hl7q6\" (UID: \"8cf35063-94f0-4dd9-9282-060a7d5360cc\") " pod="openstack/glance-db-sync-hl7q6" Oct 09 13:45:47 crc kubenswrapper[4762]: I1009 13:45:47.947500 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8cf35063-94f0-4dd9-9282-060a7d5360cc-combined-ca-bundle\") pod \"glance-db-sync-hl7q6\" (UID: \"8cf35063-94f0-4dd9-9282-060a7d5360cc\") " pod="openstack/glance-db-sync-hl7q6" Oct 09 13:45:47 crc kubenswrapper[4762]: I1009 13:45:47.947588 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8cf35063-94f0-4dd9-9282-060a7d5360cc-config-data\") pod \"glance-db-sync-hl7q6\" (UID: \"8cf35063-94f0-4dd9-9282-060a7d5360cc\") " pod="openstack/glance-db-sync-hl7q6" Oct 09 13:45:47 crc kubenswrapper[4762]: I1009 13:45:47.947619 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/8cf35063-94f0-4dd9-9282-060a7d5360cc-db-sync-config-data\") pod 
\"glance-db-sync-hl7q6\" (UID: \"8cf35063-94f0-4dd9-9282-060a7d5360cc\") " pod="openstack/glance-db-sync-hl7q6" Oct 09 13:45:47 crc kubenswrapper[4762]: I1009 13:45:47.953436 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/8cf35063-94f0-4dd9-9282-060a7d5360cc-db-sync-config-data\") pod \"glance-db-sync-hl7q6\" (UID: \"8cf35063-94f0-4dd9-9282-060a7d5360cc\") " pod="openstack/glance-db-sync-hl7q6" Oct 09 13:45:47 crc kubenswrapper[4762]: I1009 13:45:47.953556 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8cf35063-94f0-4dd9-9282-060a7d5360cc-combined-ca-bundle\") pod \"glance-db-sync-hl7q6\" (UID: \"8cf35063-94f0-4dd9-9282-060a7d5360cc\") " pod="openstack/glance-db-sync-hl7q6" Oct 09 13:45:47 crc kubenswrapper[4762]: I1009 13:45:47.957081 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8cf35063-94f0-4dd9-9282-060a7d5360cc-config-data\") pod \"glance-db-sync-hl7q6\" (UID: \"8cf35063-94f0-4dd9-9282-060a7d5360cc\") " pod="openstack/glance-db-sync-hl7q6" Oct 09 13:45:47 crc kubenswrapper[4762]: I1009 13:45:47.980873 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bvjxd\" (UniqueName: \"kubernetes.io/projected/8cf35063-94f0-4dd9-9282-060a7d5360cc-kube-api-access-bvjxd\") pod \"glance-db-sync-hl7q6\" (UID: \"8cf35063-94f0-4dd9-9282-060a7d5360cc\") " pod="openstack/glance-db-sync-hl7q6" Oct 09 13:45:48 crc kubenswrapper[4762]: I1009 13:45:48.106573 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"1477cefa-db6e-45d7-adb1-d7c1369a3610","Type":"ContainerStarted","Data":"72d5721934c08b9523e1f7661fd0cc7022a05c9c3860815c11f3195afcfb75a0"} Oct 09 13:45:48 crc kubenswrapper[4762]: I1009 13:45:48.106624 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"1477cefa-db6e-45d7-adb1-d7c1369a3610","Type":"ContainerStarted","Data":"66b743c3cccfcded94f471aad6c8c7c490c328d23eeb054b9a959b77e635f85a"} Oct 09 13:45:48 crc kubenswrapper[4762]: I1009 13:45:48.148043 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-hl7q6" Oct 09 13:45:48 crc kubenswrapper[4762]: I1009 13:45:48.711220 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-hl7q6"] Oct 09 13:45:48 crc kubenswrapper[4762]: W1009 13:45:48.714299 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8cf35063_94f0_4dd9_9282_060a7d5360cc.slice/crio-b12bfd42be7ad8c8354dd3efb6fc9a53216f744ab311bb679ad20cf1fef8b4a7 WatchSource:0}: Error finding container b12bfd42be7ad8c8354dd3efb6fc9a53216f744ab311bb679ad20cf1fef8b4a7: Status 404 returned error can't find the container with id b12bfd42be7ad8c8354dd3efb6fc9a53216f744ab311bb679ad20cf1fef8b4a7 Oct 09 13:45:49 crc kubenswrapper[4762]: I1009 13:45:49.118858 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"1477cefa-db6e-45d7-adb1-d7c1369a3610","Type":"ContainerStarted","Data":"eacaa8b5d9b84164146ff7a584e9c725d4bb02cd996fc577efdabba2e1ba13b2"} Oct 09 13:45:49 crc kubenswrapper[4762]: I1009 13:45:49.122291 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-hl7q6" event={"ID":"8cf35063-94f0-4dd9-9282-060a7d5360cc","Type":"ContainerStarted","Data":"b12bfd42be7ad8c8354dd3efb6fc9a53216f744ab311bb679ad20cf1fef8b4a7"} Oct 09 13:45:49 crc kubenswrapper[4762]: I1009 13:45:49.158081 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-storage-0" podStartSLOduration=21.10646123 podStartE2EDuration="35.158032622s" podCreationTimestamp="2025-10-09 13:45:14 +0000 UTC" firstStartedPulling="2025-10-09 13:45:32.207696773 +0000 UTC m=+1207.981487812" lastFinishedPulling="2025-10-09 13:45:46.259268165 +0000 UTC m=+1222.033059204" observedRunningTime="2025-10-09 13:45:49.14968179 +0000 UTC m=+1224.923472839" watchObservedRunningTime="2025-10-09 13:45:49.158032622 +0000 UTC m=+1224.931823661" Oct 09 13:45:49 crc kubenswrapper[4762]: I1009 13:45:49.417393 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6d5b6d6b67-6qflh"] Oct 09 13:45:49 crc kubenswrapper[4762]: I1009 13:45:49.419077 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6d5b6d6b67-6qflh" Oct 09 13:45:49 crc kubenswrapper[4762]: I1009 13:45:49.420979 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-swift-storage-0" Oct 09 13:45:49 crc kubenswrapper[4762]: I1009 13:45:49.434748 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6d5b6d6b67-6qflh"] Oct 09 13:45:49 crc kubenswrapper[4762]: I1009 13:45:49.472227 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7j6bf\" (UniqueName: \"kubernetes.io/projected/15ffec36-732e-48f3-b5b8-52038bc8da8a-kube-api-access-7j6bf\") pod \"dnsmasq-dns-6d5b6d6b67-6qflh\" (UID: \"15ffec36-732e-48f3-b5b8-52038bc8da8a\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-6qflh" Oct 09 13:45:49 crc kubenswrapper[4762]: I1009 13:45:49.472290 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/15ffec36-732e-48f3-b5b8-52038bc8da8a-ovsdbserver-sb\") pod \"dnsmasq-dns-6d5b6d6b67-6qflh\" (UID: \"15ffec36-732e-48f3-b5b8-52038bc8da8a\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-6qflh" Oct 09 13:45:49 crc kubenswrapper[4762]: I1009 13:45:49.472320 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/15ffec36-732e-48f3-b5b8-52038bc8da8a-config\") pod \"dnsmasq-dns-6d5b6d6b67-6qflh\" (UID: \"15ffec36-732e-48f3-b5b8-52038bc8da8a\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-6qflh" Oct 09 13:45:49 crc kubenswrapper[4762]: I1009 13:45:49.472336 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/15ffec36-732e-48f3-b5b8-52038bc8da8a-dns-swift-storage-0\") pod \"dnsmasq-dns-6d5b6d6b67-6qflh\" (UID: \"15ffec36-732e-48f3-b5b8-52038bc8da8a\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-6qflh" Oct 09 13:45:49 crc kubenswrapper[4762]: I1009 13:45:49.472359 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/15ffec36-732e-48f3-b5b8-52038bc8da8a-ovsdbserver-nb\") pod \"dnsmasq-dns-6d5b6d6b67-6qflh\" (UID: \"15ffec36-732e-48f3-b5b8-52038bc8da8a\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-6qflh" Oct 09 13:45:49 crc kubenswrapper[4762]: I1009 13:45:49.472383 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/15ffec36-732e-48f3-b5b8-52038bc8da8a-dns-svc\") pod \"dnsmasq-dns-6d5b6d6b67-6qflh\" (UID: \"15ffec36-732e-48f3-b5b8-52038bc8da8a\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-6qflh" Oct 09 13:45:49 crc kubenswrapper[4762]: I1009 13:45:49.573991 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7j6bf\" (UniqueName: \"kubernetes.io/projected/15ffec36-732e-48f3-b5b8-52038bc8da8a-kube-api-access-7j6bf\") pod \"dnsmasq-dns-6d5b6d6b67-6qflh\" (UID: \"15ffec36-732e-48f3-b5b8-52038bc8da8a\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-6qflh" Oct 09 13:45:49 crc kubenswrapper[4762]: I1009 13:45:49.574067 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/15ffec36-732e-48f3-b5b8-52038bc8da8a-ovsdbserver-sb\") pod \"dnsmasq-dns-6d5b6d6b67-6qflh\" (UID: 
\"15ffec36-732e-48f3-b5b8-52038bc8da8a\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-6qflh" Oct 09 13:45:49 crc kubenswrapper[4762]: I1009 13:45:49.574095 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/15ffec36-732e-48f3-b5b8-52038bc8da8a-config\") pod \"dnsmasq-dns-6d5b6d6b67-6qflh\" (UID: \"15ffec36-732e-48f3-b5b8-52038bc8da8a\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-6qflh" Oct 09 13:45:49 crc kubenswrapper[4762]: I1009 13:45:49.574112 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/15ffec36-732e-48f3-b5b8-52038bc8da8a-dns-swift-storage-0\") pod \"dnsmasq-dns-6d5b6d6b67-6qflh\" (UID: \"15ffec36-732e-48f3-b5b8-52038bc8da8a\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-6qflh" Oct 09 13:45:49 crc kubenswrapper[4762]: I1009 13:45:49.574152 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/15ffec36-732e-48f3-b5b8-52038bc8da8a-ovsdbserver-nb\") pod \"dnsmasq-dns-6d5b6d6b67-6qflh\" (UID: \"15ffec36-732e-48f3-b5b8-52038bc8da8a\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-6qflh" Oct 09 13:45:49 crc kubenswrapper[4762]: I1009 13:45:49.574185 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/15ffec36-732e-48f3-b5b8-52038bc8da8a-dns-svc\") pod \"dnsmasq-dns-6d5b6d6b67-6qflh\" (UID: \"15ffec36-732e-48f3-b5b8-52038bc8da8a\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-6qflh" Oct 09 13:45:49 crc kubenswrapper[4762]: I1009 13:45:49.575168 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/15ffec36-732e-48f3-b5b8-52038bc8da8a-ovsdbserver-nb\") pod \"dnsmasq-dns-6d5b6d6b67-6qflh\" (UID: \"15ffec36-732e-48f3-b5b8-52038bc8da8a\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-6qflh" Oct 09 13:45:49 crc kubenswrapper[4762]: I1009 13:45:49.575275 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/15ffec36-732e-48f3-b5b8-52038bc8da8a-dns-swift-storage-0\") pod \"dnsmasq-dns-6d5b6d6b67-6qflh\" (UID: \"15ffec36-732e-48f3-b5b8-52038bc8da8a\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-6qflh" Oct 09 13:45:49 crc kubenswrapper[4762]: I1009 13:45:49.575344 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/15ffec36-732e-48f3-b5b8-52038bc8da8a-ovsdbserver-sb\") pod \"dnsmasq-dns-6d5b6d6b67-6qflh\" (UID: \"15ffec36-732e-48f3-b5b8-52038bc8da8a\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-6qflh" Oct 09 13:45:49 crc kubenswrapper[4762]: I1009 13:45:49.575384 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/15ffec36-732e-48f3-b5b8-52038bc8da8a-config\") pod \"dnsmasq-dns-6d5b6d6b67-6qflh\" (UID: \"15ffec36-732e-48f3-b5b8-52038bc8da8a\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-6qflh" Oct 09 13:45:49 crc kubenswrapper[4762]: I1009 13:45:49.575514 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/15ffec36-732e-48f3-b5b8-52038bc8da8a-dns-svc\") pod \"dnsmasq-dns-6d5b6d6b67-6qflh\" (UID: \"15ffec36-732e-48f3-b5b8-52038bc8da8a\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-6qflh" Oct 09 13:45:49 crc kubenswrapper[4762]: 
I1009 13:45:49.597196 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7j6bf\" (UniqueName: \"kubernetes.io/projected/15ffec36-732e-48f3-b5b8-52038bc8da8a-kube-api-access-7j6bf\") pod \"dnsmasq-dns-6d5b6d6b67-6qflh\" (UID: \"15ffec36-732e-48f3-b5b8-52038bc8da8a\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-6qflh" Oct 09 13:45:49 crc kubenswrapper[4762]: I1009 13:45:49.739702 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6d5b6d6b67-6qflh" Oct 09 13:45:50 crc kubenswrapper[4762]: I1009 13:45:50.338308 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6d5b6d6b67-6qflh"] Oct 09 13:45:51 crc kubenswrapper[4762]: I1009 13:45:51.144038 4762 generic.go:334] "Generic (PLEG): container finished" podID="15ffec36-732e-48f3-b5b8-52038bc8da8a" containerID="5c6c11f71c1b131886d6d9889dab31b0d63ead94f5e9e79f72323cf2e6d95dd1" exitCode=0 Oct 09 13:45:51 crc kubenswrapper[4762]: I1009 13:45:51.144142 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6d5b6d6b67-6qflh" event={"ID":"15ffec36-732e-48f3-b5b8-52038bc8da8a","Type":"ContainerDied","Data":"5c6c11f71c1b131886d6d9889dab31b0d63ead94f5e9e79f72323cf2e6d95dd1"} Oct 09 13:45:51 crc kubenswrapper[4762]: I1009 13:45:51.144703 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6d5b6d6b67-6qflh" event={"ID":"15ffec36-732e-48f3-b5b8-52038bc8da8a","Type":"ContainerStarted","Data":"61f4b21a80281fa2ba2d959b8494f14efaf9863927899db1a224295f0cfcfdb2"} Oct 09 13:45:51 crc kubenswrapper[4762]: I1009 13:45:51.873165 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-7a22-account-create-8jpv8"] Oct 09 13:45:51 crc kubenswrapper[4762]: I1009 13:45:51.876963 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-7a22-account-create-8jpv8" Oct 09 13:45:51 crc kubenswrapper[4762]: I1009 13:45:51.883669 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Oct 09 13:45:51 crc kubenswrapper[4762]: I1009 13:45:51.889461 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-7a22-account-create-8jpv8"] Oct 09 13:45:52 crc kubenswrapper[4762]: I1009 13:45:52.021166 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x72qj\" (UniqueName: \"kubernetes.io/projected/fda4b3e7-20b8-4d16-8ccd-d0e4ce4c2f92-kube-api-access-x72qj\") pod \"keystone-7a22-account-create-8jpv8\" (UID: \"fda4b3e7-20b8-4d16-8ccd-d0e4ce4c2f92\") " pod="openstack/keystone-7a22-account-create-8jpv8" Oct 09 13:45:52 crc kubenswrapper[4762]: I1009 13:45:52.122551 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x72qj\" (UniqueName: \"kubernetes.io/projected/fda4b3e7-20b8-4d16-8ccd-d0e4ce4c2f92-kube-api-access-x72qj\") pod \"keystone-7a22-account-create-8jpv8\" (UID: \"fda4b3e7-20b8-4d16-8ccd-d0e4ce4c2f92\") " pod="openstack/keystone-7a22-account-create-8jpv8" Oct 09 13:45:52 crc kubenswrapper[4762]: I1009 13:45:52.150995 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x72qj\" (UniqueName: \"kubernetes.io/projected/fda4b3e7-20b8-4d16-8ccd-d0e4ce4c2f92-kube-api-access-x72qj\") pod \"keystone-7a22-account-create-8jpv8\" (UID: \"fda4b3e7-20b8-4d16-8ccd-d0e4ce4c2f92\") " pod="openstack/keystone-7a22-account-create-8jpv8" Oct 09 13:45:52 crc kubenswrapper[4762]: I1009 13:45:52.155040 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6d5b6d6b67-6qflh" event={"ID":"15ffec36-732e-48f3-b5b8-52038bc8da8a","Type":"ContainerStarted","Data":"1a1d7f56dafc809021ef97bf0a3dd8ebb7abbcae63d2e822e02f7c3c37e43677"} Oct 09 13:45:52 crc kubenswrapper[4762]: I1009 13:45:52.155215 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6d5b6d6b67-6qflh" Oct 09 13:45:52 crc kubenswrapper[4762]: I1009 13:45:52.187877 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6d5b6d6b67-6qflh" podStartSLOduration=3.18785439 podStartE2EDuration="3.18785439s" podCreationTimestamp="2025-10-09 13:45:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:45:52.171979118 +0000 UTC m=+1227.945770157" watchObservedRunningTime="2025-10-09 13:45:52.18785439 +0000 UTC m=+1227.961645429" Oct 09 13:45:52 crc kubenswrapper[4762]: I1009 13:45:52.198475 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-7a22-account-create-8jpv8" Oct 09 13:45:52 crc kubenswrapper[4762]: I1009 13:45:52.201219 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-d6ff-account-create-zwpvv"] Oct 09 13:45:52 crc kubenswrapper[4762]: I1009 13:45:52.202458 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-d6ff-account-create-zwpvv" Oct 09 13:45:52 crc kubenswrapper[4762]: I1009 13:45:52.206070 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret" Oct 09 13:45:52 crc kubenswrapper[4762]: I1009 13:45:52.209116 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-d6ff-account-create-zwpvv"] Oct 09 13:45:52 crc kubenswrapper[4762]: I1009 13:45:52.325786 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dpvcm\" (UniqueName: \"kubernetes.io/projected/11408afa-4f7f-46b2-b499-91ab1e966c35-kube-api-access-dpvcm\") pod \"placement-d6ff-account-create-zwpvv\" (UID: \"11408afa-4f7f-46b2-b499-91ab1e966c35\") " pod="openstack/placement-d6ff-account-create-zwpvv" Oct 09 13:45:52 crc kubenswrapper[4762]: I1009 13:45:52.428147 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dpvcm\" (UniqueName: \"kubernetes.io/projected/11408afa-4f7f-46b2-b499-91ab1e966c35-kube-api-access-dpvcm\") pod \"placement-d6ff-account-create-zwpvv\" (UID: \"11408afa-4f7f-46b2-b499-91ab1e966c35\") " pod="openstack/placement-d6ff-account-create-zwpvv" Oct 09 13:45:52 crc kubenswrapper[4762]: I1009 13:45:52.447221 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dpvcm\" (UniqueName: \"kubernetes.io/projected/11408afa-4f7f-46b2-b499-91ab1e966c35-kube-api-access-dpvcm\") pod \"placement-d6ff-account-create-zwpvv\" (UID: \"11408afa-4f7f-46b2-b499-91ab1e966c35\") " pod="openstack/placement-d6ff-account-create-zwpvv" Oct 09 13:45:52 crc kubenswrapper[4762]: I1009 13:45:52.603156 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-d6ff-account-create-zwpvv" Oct 09 13:45:52 crc kubenswrapper[4762]: I1009 13:45:52.640365 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-7a22-account-create-8jpv8"] Oct 09 13:45:52 crc kubenswrapper[4762]: W1009 13:45:52.669541 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfda4b3e7_20b8_4d16_8ccd_d0e4ce4c2f92.slice/crio-574c2f9e85c3287c8ed881f7ddf244bd78728778c34b4f429ba870b9c22a3d2d WatchSource:0}: Error finding container 574c2f9e85c3287c8ed881f7ddf244bd78728778c34b4f429ba870b9c22a3d2d: Status 404 returned error can't find the container with id 574c2f9e85c3287c8ed881f7ddf244bd78728778c34b4f429ba870b9c22a3d2d Oct 09 13:45:53 crc kubenswrapper[4762]: I1009 13:45:53.071230 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-d6ff-account-create-zwpvv"] Oct 09 13:45:53 crc kubenswrapper[4762]: W1009 13:45:53.079844 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod11408afa_4f7f_46b2_b499_91ab1e966c35.slice/crio-e6348a1f9e995c6fd768b235aeda530e5bf574fd207398d463b45b5da9e16e20 WatchSource:0}: Error finding container e6348a1f9e995c6fd768b235aeda530e5bf574fd207398d463b45b5da9e16e20: Status 404 returned error can't find the container with id e6348a1f9e995c6fd768b235aeda530e5bf574fd207398d463b45b5da9e16e20 Oct 09 13:45:53 crc kubenswrapper[4762]: I1009 13:45:53.167880 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-d6ff-account-create-zwpvv" event={"ID":"11408afa-4f7f-46b2-b499-91ab1e966c35","Type":"ContainerStarted","Data":"e6348a1f9e995c6fd768b235aeda530e5bf574fd207398d463b45b5da9e16e20"} Oct 09 13:45:53 crc kubenswrapper[4762]: I1009 13:45:53.172160 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-7a22-account-create-8jpv8" event={"ID":"fda4b3e7-20b8-4d16-8ccd-d0e4ce4c2f92","Type":"ContainerStarted","Data":"cc91c2ff33d27f9343274041d04cb4122ff55b695f477c0add7a66b6376d852e"} Oct 09 13:45:53 crc kubenswrapper[4762]: I1009 13:45:53.172544 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-7a22-account-create-8jpv8" event={"ID":"fda4b3e7-20b8-4d16-8ccd-d0e4ce4c2f92","Type":"ContainerStarted","Data":"574c2f9e85c3287c8ed881f7ddf244bd78728778c34b4f429ba870b9c22a3d2d"} Oct 09 13:45:54 crc kubenswrapper[4762]: I1009 13:45:54.180306 4762 generic.go:334] "Generic (PLEG): container finished" podID="fda4b3e7-20b8-4d16-8ccd-d0e4ce4c2f92" containerID="cc91c2ff33d27f9343274041d04cb4122ff55b695f477c0add7a66b6376d852e" exitCode=0 Oct 09 13:45:54 crc kubenswrapper[4762]: I1009 13:45:54.180827 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-7a22-account-create-8jpv8" event={"ID":"fda4b3e7-20b8-4d16-8ccd-d0e4ce4c2f92","Type":"ContainerDied","Data":"cc91c2ff33d27f9343274041d04cb4122ff55b695f477c0add7a66b6376d852e"} Oct 09 13:45:54 crc kubenswrapper[4762]: I1009 13:45:54.182371 4762 generic.go:334] "Generic (PLEG): container finished" podID="11408afa-4f7f-46b2-b499-91ab1e966c35" containerID="3eb5d46a0831bc9a671cd336434c6355d1e71a3f3e87d654901e51d659111b04" exitCode=0 Oct 09 13:45:54 crc kubenswrapper[4762]: I1009 13:45:54.182402 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-d6ff-account-create-zwpvv" 
event={"ID":"11408afa-4f7f-46b2-b499-91ab1e966c35","Type":"ContainerDied","Data":"3eb5d46a0831bc9a671cd336434c6355d1e71a3f3e87d654901e51d659111b04"} Oct 09 13:45:58 crc kubenswrapper[4762]: I1009 13:45:58.420955 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Oct 09 13:45:58 crc kubenswrapper[4762]: I1009 13:45:58.750095 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-snp5x"] Oct 09 13:45:58 crc kubenswrapper[4762]: I1009 13:45:58.751290 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-snp5x" Oct 09 13:45:58 crc kubenswrapper[4762]: I1009 13:45:58.757858 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-snp5x"] Oct 09 13:45:58 crc kubenswrapper[4762]: I1009 13:45:58.851867 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-xdg42"] Oct 09 13:45:58 crc kubenswrapper[4762]: I1009 13:45:58.853091 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-xdg42" Oct 09 13:45:58 crc kubenswrapper[4762]: I1009 13:45:58.860872 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-xdg42"] Oct 09 13:45:58 crc kubenswrapper[4762]: I1009 13:45:58.937131 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kcqtx\" (UniqueName: \"kubernetes.io/projected/de566a3c-21e4-4b4d-b555-2b040ea2b719-kube-api-access-kcqtx\") pod \"cinder-db-create-snp5x\" (UID: \"de566a3c-21e4-4b4d-b555-2b040ea2b719\") " pod="openstack/cinder-db-create-snp5x" Oct 09 13:45:59 crc kubenswrapper[4762]: I1009 13:45:59.039210 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lpfzj\" (UniqueName: \"kubernetes.io/projected/a0d76f37-0b45-48f1-bbc1-c70ab359ee4f-kube-api-access-lpfzj\") pod \"barbican-db-create-xdg42\" (UID: \"a0d76f37-0b45-48f1-bbc1-c70ab359ee4f\") " pod="openstack/barbican-db-create-xdg42" Oct 09 13:45:59 crc kubenswrapper[4762]: I1009 13:45:59.039305 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kcqtx\" (UniqueName: \"kubernetes.io/projected/de566a3c-21e4-4b4d-b555-2b040ea2b719-kube-api-access-kcqtx\") pod \"cinder-db-create-snp5x\" (UID: \"de566a3c-21e4-4b4d-b555-2b040ea2b719\") " pod="openstack/cinder-db-create-snp5x" Oct 09 13:45:59 crc kubenswrapper[4762]: I1009 13:45:59.065680 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-lh6rx"] Oct 09 13:45:59 crc kubenswrapper[4762]: I1009 13:45:59.070747 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-lh6rx" Oct 09 13:45:59 crc kubenswrapper[4762]: I1009 13:45:59.072489 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kcqtx\" (UniqueName: \"kubernetes.io/projected/de566a3c-21e4-4b4d-b555-2b040ea2b719-kube-api-access-kcqtx\") pod \"cinder-db-create-snp5x\" (UID: \"de566a3c-21e4-4b4d-b555-2b040ea2b719\") " pod="openstack/cinder-db-create-snp5x" Oct 09 13:45:59 crc kubenswrapper[4762]: I1009 13:45:59.076688 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-lh6rx"] Oct 09 13:45:59 crc kubenswrapper[4762]: I1009 13:45:59.140850 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Oct 09 13:45:59 crc kubenswrapper[4762]: I1009 13:45:59.149089 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lpfzj\" (UniqueName: \"kubernetes.io/projected/a0d76f37-0b45-48f1-bbc1-c70ab359ee4f-kube-api-access-lpfzj\") pod \"barbican-db-create-xdg42\" (UID: \"a0d76f37-0b45-48f1-bbc1-c70ab359ee4f\") " pod="openstack/barbican-db-create-xdg42" Oct 09 13:45:59 crc kubenswrapper[4762]: I1009 13:45:59.165483 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lpfzj\" (UniqueName: \"kubernetes.io/projected/a0d76f37-0b45-48f1-bbc1-c70ab359ee4f-kube-api-access-lpfzj\") pod \"barbican-db-create-xdg42\" (UID: \"a0d76f37-0b45-48f1-bbc1-c70ab359ee4f\") " pod="openstack/barbican-db-create-xdg42" Oct 09 13:45:59 crc kubenswrapper[4762]: I1009 13:45:59.175058 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-xdg42" Oct 09 13:45:59 crc kubenswrapper[4762]: I1009 13:45:59.252958 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nznh8\" (UniqueName: \"kubernetes.io/projected/deef8d3d-5ef0-44fd-94db-aa27c317b167-kube-api-access-nznh8\") pod \"neutron-db-create-lh6rx\" (UID: \"deef8d3d-5ef0-44fd-94db-aa27c317b167\") " pod="openstack/neutron-db-create-lh6rx" Oct 09 13:45:59 crc kubenswrapper[4762]: I1009 13:45:59.354162 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nznh8\" (UniqueName: \"kubernetes.io/projected/deef8d3d-5ef0-44fd-94db-aa27c317b167-kube-api-access-nznh8\") pod \"neutron-db-create-lh6rx\" (UID: \"deef8d3d-5ef0-44fd-94db-aa27c317b167\") " pod="openstack/neutron-db-create-lh6rx" Oct 09 13:45:59 crc kubenswrapper[4762]: I1009 13:45:59.369879 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-snp5x" Oct 09 13:45:59 crc kubenswrapper[4762]: I1009 13:45:59.379535 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nznh8\" (UniqueName: \"kubernetes.io/projected/deef8d3d-5ef0-44fd-94db-aa27c317b167-kube-api-access-nznh8\") pod \"neutron-db-create-lh6rx\" (UID: \"deef8d3d-5ef0-44fd-94db-aa27c317b167\") " pod="openstack/neutron-db-create-lh6rx" Oct 09 13:45:59 crc kubenswrapper[4762]: I1009 13:45:59.427027 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-lh6rx" Oct 09 13:45:59 crc kubenswrapper[4762]: I1009 13:45:59.740856 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6d5b6d6b67-6qflh" Oct 09 13:45:59 crc kubenswrapper[4762]: I1009 13:45:59.797885 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-bwvnp"] Oct 09 13:45:59 crc kubenswrapper[4762]: I1009 13:45:59.798107 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-b8fbc5445-bwvnp" podUID="ee34933d-92fc-4723-b232-481e48b27ebf" containerName="dnsmasq-dns" containerID="cri-o://23bc8d382d6857bb6b1145361ed8df721c39db5896196c008d510fde383469bd" gracePeriod=10 Oct 09 13:46:01 crc kubenswrapper[4762]: I1009 13:46:01.691659 4762 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-b8fbc5445-bwvnp" podUID="ee34933d-92fc-4723-b232-481e48b27ebf" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.117:5353: connect: connection refused" Oct 09 13:46:02 crc kubenswrapper[4762]: I1009 13:46:02.261438 4762 generic.go:334] "Generic (PLEG): container finished" podID="ee34933d-92fc-4723-b232-481e48b27ebf" containerID="23bc8d382d6857bb6b1145361ed8df721c39db5896196c008d510fde383469bd" exitCode=0 Oct 09 13:46:02 crc kubenswrapper[4762]: I1009 13:46:02.261542 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-bwvnp" event={"ID":"ee34933d-92fc-4723-b232-481e48b27ebf","Type":"ContainerDied","Data":"23bc8d382d6857bb6b1145361ed8df721c39db5896196c008d510fde383469bd"} Oct 09 13:46:02 crc kubenswrapper[4762]: I1009 13:46:02.807366 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-d6ff-account-create-zwpvv" Oct 09 13:46:02 crc kubenswrapper[4762]: I1009 13:46:02.820582 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-7a22-account-create-8jpv8" Oct 09 13:46:03 crc kubenswrapper[4762]: I1009 13:46:02.867602 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b8fbc5445-bwvnp" Oct 09 13:46:03 crc kubenswrapper[4762]: I1009 13:46:02.921690 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dpvcm\" (UniqueName: \"kubernetes.io/projected/11408afa-4f7f-46b2-b499-91ab1e966c35-kube-api-access-dpvcm\") pod \"11408afa-4f7f-46b2-b499-91ab1e966c35\" (UID: \"11408afa-4f7f-46b2-b499-91ab1e966c35\") " Oct 09 13:46:03 crc kubenswrapper[4762]: I1009 13:46:02.922140 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x72qj\" (UniqueName: \"kubernetes.io/projected/fda4b3e7-20b8-4d16-8ccd-d0e4ce4c2f92-kube-api-access-x72qj\") pod \"fda4b3e7-20b8-4d16-8ccd-d0e4ce4c2f92\" (UID: \"fda4b3e7-20b8-4d16-8ccd-d0e4ce4c2f92\") " Oct 09 13:46:03 crc kubenswrapper[4762]: I1009 13:46:02.926501 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda4b3e7-20b8-4d16-8ccd-d0e4ce4c2f92-kube-api-access-x72qj" (OuterVolumeSpecName: "kube-api-access-x72qj") pod "fda4b3e7-20b8-4d16-8ccd-d0e4ce4c2f92" (UID: "fda4b3e7-20b8-4d16-8ccd-d0e4ce4c2f92"). InnerVolumeSpecName "kube-api-access-x72qj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:46:03 crc kubenswrapper[4762]: I1009 13:46:02.927008 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/11408afa-4f7f-46b2-b499-91ab1e966c35-kube-api-access-dpvcm" (OuterVolumeSpecName: "kube-api-access-dpvcm") pod "11408afa-4f7f-46b2-b499-91ab1e966c35" (UID: "11408afa-4f7f-46b2-b499-91ab1e966c35"). InnerVolumeSpecName "kube-api-access-dpvcm". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:46:03 crc kubenswrapper[4762]: I1009 13:46:03.023895 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ee34933d-92fc-4723-b232-481e48b27ebf-ovsdbserver-sb\") pod \"ee34933d-92fc-4723-b232-481e48b27ebf\" (UID: \"ee34933d-92fc-4723-b232-481e48b27ebf\") " Oct 09 13:46:03 crc kubenswrapper[4762]: I1009 13:46:03.023976 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ee34933d-92fc-4723-b232-481e48b27ebf-config\") pod \"ee34933d-92fc-4723-b232-481e48b27ebf\" (UID: \"ee34933d-92fc-4723-b232-481e48b27ebf\") " Oct 09 13:46:03 crc kubenswrapper[4762]: I1009 13:46:03.024010 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ee34933d-92fc-4723-b232-481e48b27ebf-dns-svc\") pod \"ee34933d-92fc-4723-b232-481e48b27ebf\" (UID: \"ee34933d-92fc-4723-b232-481e48b27ebf\") " Oct 09 13:46:03 crc kubenswrapper[4762]: I1009 13:46:03.024045 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ee34933d-92fc-4723-b232-481e48b27ebf-ovsdbserver-nb\") pod \"ee34933d-92fc-4723-b232-481e48b27ebf\" (UID: \"ee34933d-92fc-4723-b232-481e48b27ebf\") " Oct 09 13:46:03 crc kubenswrapper[4762]: I1009 13:46:03.024081 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bh8dz\" (UniqueName: \"kubernetes.io/projected/ee34933d-92fc-4723-b232-481e48b27ebf-kube-api-access-bh8dz\") pod \"ee34933d-92fc-4723-b232-481e48b27ebf\" (UID: \"ee34933d-92fc-4723-b232-481e48b27ebf\") " Oct 09 13:46:03 crc kubenswrapper[4762]: I1009 13:46:03.024398 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dpvcm\" (UniqueName: \"kubernetes.io/projected/11408afa-4f7f-46b2-b499-91ab1e966c35-kube-api-access-dpvcm\") on node \"crc\" DevicePath \"\"" Oct 09 13:46:03 crc kubenswrapper[4762]: I1009 13:46:03.024409 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x72qj\" (UniqueName: \"kubernetes.io/projected/fda4b3e7-20b8-4d16-8ccd-d0e4ce4c2f92-kube-api-access-x72qj\") on node \"crc\" DevicePath \"\"" Oct 09 13:46:03 crc kubenswrapper[4762]: I1009 13:46:03.029972 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ee34933d-92fc-4723-b232-481e48b27ebf-kube-api-access-bh8dz" (OuterVolumeSpecName: "kube-api-access-bh8dz") pod "ee34933d-92fc-4723-b232-481e48b27ebf" (UID: "ee34933d-92fc-4723-b232-481e48b27ebf"). InnerVolumeSpecName "kube-api-access-bh8dz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:46:03 crc kubenswrapper[4762]: I1009 13:46:03.074387 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ee34933d-92fc-4723-b232-481e48b27ebf-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "ee34933d-92fc-4723-b232-481e48b27ebf" (UID: "ee34933d-92fc-4723-b232-481e48b27ebf"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:46:03 crc kubenswrapper[4762]: I1009 13:46:03.074397 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ee34933d-92fc-4723-b232-481e48b27ebf-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "ee34933d-92fc-4723-b232-481e48b27ebf" (UID: "ee34933d-92fc-4723-b232-481e48b27ebf"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:46:03 crc kubenswrapper[4762]: I1009 13:46:03.075511 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ee34933d-92fc-4723-b232-481e48b27ebf-config" (OuterVolumeSpecName: "config") pod "ee34933d-92fc-4723-b232-481e48b27ebf" (UID: "ee34933d-92fc-4723-b232-481e48b27ebf"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:46:03 crc kubenswrapper[4762]: I1009 13:46:03.077529 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ee34933d-92fc-4723-b232-481e48b27ebf-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "ee34933d-92fc-4723-b232-481e48b27ebf" (UID: "ee34933d-92fc-4723-b232-481e48b27ebf"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:46:03 crc kubenswrapper[4762]: I1009 13:46:03.125658 4762 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ee34933d-92fc-4723-b232-481e48b27ebf-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 09 13:46:03 crc kubenswrapper[4762]: I1009 13:46:03.125697 4762 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ee34933d-92fc-4723-b232-481e48b27ebf-config\") on node \"crc\" DevicePath \"\"" Oct 09 13:46:03 crc kubenswrapper[4762]: I1009 13:46:03.125709 4762 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ee34933d-92fc-4723-b232-481e48b27ebf-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 09 13:46:03 crc kubenswrapper[4762]: I1009 13:46:03.125718 4762 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ee34933d-92fc-4723-b232-481e48b27ebf-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 09 13:46:03 crc kubenswrapper[4762]: I1009 13:46:03.125731 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bh8dz\" (UniqueName: \"kubernetes.io/projected/ee34933d-92fc-4723-b232-481e48b27ebf-kube-api-access-bh8dz\") on node \"crc\" DevicePath \"\"" Oct 09 13:46:03 crc kubenswrapper[4762]: I1009 13:46:03.275828 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-bwvnp" event={"ID":"ee34933d-92fc-4723-b232-481e48b27ebf","Type":"ContainerDied","Data":"160016cea90eb609a06808b538868a6cc195fe8aa12abf272fc370a0ce203279"} Oct 09 13:46:03 crc kubenswrapper[4762]: I1009 13:46:03.275851 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-b8fbc5445-bwvnp" Oct 09 13:46:03 crc kubenswrapper[4762]: I1009 13:46:03.275884 4762 scope.go:117] "RemoveContainer" containerID="23bc8d382d6857bb6b1145361ed8df721c39db5896196c008d510fde383469bd" Oct 09 13:46:03 crc kubenswrapper[4762]: I1009 13:46:03.280658 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-d6ff-account-create-zwpvv" event={"ID":"11408afa-4f7f-46b2-b499-91ab1e966c35","Type":"ContainerDied","Data":"e6348a1f9e995c6fd768b235aeda530e5bf574fd207398d463b45b5da9e16e20"} Oct 09 13:46:03 crc kubenswrapper[4762]: I1009 13:46:03.280700 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e6348a1f9e995c6fd768b235aeda530e5bf574fd207398d463b45b5da9e16e20" Oct 09 13:46:03 crc kubenswrapper[4762]: I1009 13:46:03.280769 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-d6ff-account-create-zwpvv" Oct 09 13:46:03 crc kubenswrapper[4762]: I1009 13:46:03.284061 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-7a22-account-create-8jpv8" Oct 09 13:46:03 crc kubenswrapper[4762]: I1009 13:46:03.283974 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-7a22-account-create-8jpv8" event={"ID":"fda4b3e7-20b8-4d16-8ccd-d0e4ce4c2f92","Type":"ContainerDied","Data":"574c2f9e85c3287c8ed881f7ddf244bd78728778c34b4f429ba870b9c22a3d2d"} Oct 09 13:46:03 crc kubenswrapper[4762]: I1009 13:46:03.284744 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="574c2f9e85c3287c8ed881f7ddf244bd78728778c34b4f429ba870b9c22a3d2d" Oct 09 13:46:03 crc kubenswrapper[4762]: I1009 13:46:03.318311 4762 scope.go:117] "RemoveContainer" containerID="ef313c2cd5701a12d34f7ce3e2ab234a47acd9b4d50c85250f453a2b3c4cfda2" Oct 09 13:46:03 crc kubenswrapper[4762]: I1009 13:46:03.338324 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-bwvnp"] Oct 09 13:46:03 crc kubenswrapper[4762]: I1009 13:46:03.345208 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-bwvnp"] Oct 09 13:46:03 crc kubenswrapper[4762]: I1009 13:46:03.590001 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-snp5x"] Oct 09 13:46:03 crc kubenswrapper[4762]: I1009 13:46:03.813970 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-xdg42"] Oct 09 13:46:03 crc kubenswrapper[4762]: W1009 13:46:03.818553 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda0d76f37_0b45_48f1_bbc1_c70ab359ee4f.slice/crio-6f0ee0f89fc8ee0f815546616bc529ed934ca0d09768dc0227b13c52ae5a2bd3 WatchSource:0}: Error finding container 6f0ee0f89fc8ee0f815546616bc529ed934ca0d09768dc0227b13c52ae5a2bd3: Status 404 returned error can't find the container with id 6f0ee0f89fc8ee0f815546616bc529ed934ca0d09768dc0227b13c52ae5a2bd3 Oct 09 13:46:03 crc kubenswrapper[4762]: I1009 13:46:03.820179 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-lh6rx"] Oct 09 13:46:03 crc kubenswrapper[4762]: W1009 13:46:03.821302 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddeef8d3d_5ef0_44fd_94db_aa27c317b167.slice/crio-8a00c4c7141ad602c1738357fb826f01e2e7ed2972d2ee0e7b27cc9105399eb0 
WatchSource:0}: Error finding container 8a00c4c7141ad602c1738357fb826f01e2e7ed2972d2ee0e7b27cc9105399eb0: Status 404 returned error can't find the container with id 8a00c4c7141ad602c1738357fb826f01e2e7ed2972d2ee0e7b27cc9105399eb0 Oct 09 13:46:04 crc kubenswrapper[4762]: I1009 13:46:04.295364 4762 generic.go:334] "Generic (PLEG): container finished" podID="a0d76f37-0b45-48f1-bbc1-c70ab359ee4f" containerID="381f6321a6d83ee16e8b9bd0f85a0bb37b99c646715a13175d6e9c46ecaae4c1" exitCode=0 Oct 09 13:46:04 crc kubenswrapper[4762]: I1009 13:46:04.295477 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-xdg42" event={"ID":"a0d76f37-0b45-48f1-bbc1-c70ab359ee4f","Type":"ContainerDied","Data":"381f6321a6d83ee16e8b9bd0f85a0bb37b99c646715a13175d6e9c46ecaae4c1"} Oct 09 13:46:04 crc kubenswrapper[4762]: I1009 13:46:04.295820 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-xdg42" event={"ID":"a0d76f37-0b45-48f1-bbc1-c70ab359ee4f","Type":"ContainerStarted","Data":"6f0ee0f89fc8ee0f815546616bc529ed934ca0d09768dc0227b13c52ae5a2bd3"} Oct 09 13:46:04 crc kubenswrapper[4762]: I1009 13:46:04.299577 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-hl7q6" event={"ID":"8cf35063-94f0-4dd9-9282-060a7d5360cc","Type":"ContainerStarted","Data":"7f1165287ed36d82cdee80b95268e900e24a818c960dcdc79f1d39b4329d1267"} Oct 09 13:46:04 crc kubenswrapper[4762]: I1009 13:46:04.301224 4762 generic.go:334] "Generic (PLEG): container finished" podID="de566a3c-21e4-4b4d-b555-2b040ea2b719" containerID="be7ce0db056b3871e268563364574b23f09b81e310d7357a309159870c23c165" exitCode=0 Oct 09 13:46:04 crc kubenswrapper[4762]: I1009 13:46:04.301270 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-snp5x" event={"ID":"de566a3c-21e4-4b4d-b555-2b040ea2b719","Type":"ContainerDied","Data":"be7ce0db056b3871e268563364574b23f09b81e310d7357a309159870c23c165"} Oct 09 13:46:04 crc kubenswrapper[4762]: I1009 13:46:04.301457 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-snp5x" event={"ID":"de566a3c-21e4-4b4d-b555-2b040ea2b719","Type":"ContainerStarted","Data":"a973c86d46aa0e848ba4d80f8ddb494143dc3e43bf1cf25ff0ab199355e355f5"} Oct 09 13:46:04 crc kubenswrapper[4762]: I1009 13:46:04.306560 4762 generic.go:334] "Generic (PLEG): container finished" podID="deef8d3d-5ef0-44fd-94db-aa27c317b167" containerID="39a0ee651bbcd5526e2c7c628c3e0a8cf4e760912db0d28876e7f3c2c1c52b75" exitCode=0 Oct 09 13:46:04 crc kubenswrapper[4762]: I1009 13:46:04.306607 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-lh6rx" event={"ID":"deef8d3d-5ef0-44fd-94db-aa27c317b167","Type":"ContainerDied","Data":"39a0ee651bbcd5526e2c7c628c3e0a8cf4e760912db0d28876e7f3c2c1c52b75"} Oct 09 13:46:04 crc kubenswrapper[4762]: I1009 13:46:04.306646 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-lh6rx" event={"ID":"deef8d3d-5ef0-44fd-94db-aa27c317b167","Type":"ContainerStarted","Data":"8a00c4c7141ad602c1738357fb826f01e2e7ed2972d2ee0e7b27cc9105399eb0"} Oct 09 13:46:04 crc kubenswrapper[4762]: I1009 13:46:04.355071 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-hl7q6" podStartSLOduration=3.314053425 podStartE2EDuration="17.355044623s" podCreationTimestamp="2025-10-09 13:45:47 +0000 UTC" firstStartedPulling="2025-10-09 13:45:48.716395937 +0000 UTC m=+1224.490186976" 
lastFinishedPulling="2025-10-09 13:46:02.757387135 +0000 UTC m=+1238.531178174" observedRunningTime="2025-10-09 13:46:04.351438605 +0000 UTC m=+1240.125229664" watchObservedRunningTime="2025-10-09 13:46:04.355044623 +0000 UTC m=+1240.128835662" Oct 09 13:46:04 crc kubenswrapper[4762]: I1009 13:46:04.977517 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ee34933d-92fc-4723-b232-481e48b27ebf" path="/var/lib/kubelet/pods/ee34933d-92fc-4723-b232-481e48b27ebf/volumes" Oct 09 13:46:05 crc kubenswrapper[4762]: I1009 13:46:05.571389 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-lh6rx" Oct 09 13:46:05 crc kubenswrapper[4762]: I1009 13:46:05.674101 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nznh8\" (UniqueName: \"kubernetes.io/projected/deef8d3d-5ef0-44fd-94db-aa27c317b167-kube-api-access-nznh8\") pod \"deef8d3d-5ef0-44fd-94db-aa27c317b167\" (UID: \"deef8d3d-5ef0-44fd-94db-aa27c317b167\") " Oct 09 13:46:05 crc kubenswrapper[4762]: I1009 13:46:05.681733 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/deef8d3d-5ef0-44fd-94db-aa27c317b167-kube-api-access-nznh8" (OuterVolumeSpecName: "kube-api-access-nznh8") pod "deef8d3d-5ef0-44fd-94db-aa27c317b167" (UID: "deef8d3d-5ef0-44fd-94db-aa27c317b167"). InnerVolumeSpecName "kube-api-access-nznh8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:46:05 crc kubenswrapper[4762]: I1009 13:46:05.719167 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-snp5x" Oct 09 13:46:05 crc kubenswrapper[4762]: I1009 13:46:05.726627 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-xdg42" Oct 09 13:46:05 crc kubenswrapper[4762]: I1009 13:46:05.776269 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nznh8\" (UniqueName: \"kubernetes.io/projected/deef8d3d-5ef0-44fd-94db-aa27c317b167-kube-api-access-nznh8\") on node \"crc\" DevicePath \"\"" Oct 09 13:46:05 crc kubenswrapper[4762]: I1009 13:46:05.877034 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lpfzj\" (UniqueName: \"kubernetes.io/projected/a0d76f37-0b45-48f1-bbc1-c70ab359ee4f-kube-api-access-lpfzj\") pod \"a0d76f37-0b45-48f1-bbc1-c70ab359ee4f\" (UID: \"a0d76f37-0b45-48f1-bbc1-c70ab359ee4f\") " Oct 09 13:46:05 crc kubenswrapper[4762]: I1009 13:46:05.877189 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kcqtx\" (UniqueName: \"kubernetes.io/projected/de566a3c-21e4-4b4d-b555-2b040ea2b719-kube-api-access-kcqtx\") pod \"de566a3c-21e4-4b4d-b555-2b040ea2b719\" (UID: \"de566a3c-21e4-4b4d-b555-2b040ea2b719\") " Oct 09 13:46:05 crc kubenswrapper[4762]: I1009 13:46:05.880085 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0d76f37-0b45-48f1-bbc1-c70ab359ee4f-kube-api-access-lpfzj" (OuterVolumeSpecName: "kube-api-access-lpfzj") pod "a0d76f37-0b45-48f1-bbc1-c70ab359ee4f" (UID: "a0d76f37-0b45-48f1-bbc1-c70ab359ee4f"). InnerVolumeSpecName "kube-api-access-lpfzj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:46:05 crc kubenswrapper[4762]: I1009 13:46:05.881184 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/de566a3c-21e4-4b4d-b555-2b040ea2b719-kube-api-access-kcqtx" (OuterVolumeSpecName: "kube-api-access-kcqtx") pod "de566a3c-21e4-4b4d-b555-2b040ea2b719" (UID: "de566a3c-21e4-4b4d-b555-2b040ea2b719"). InnerVolumeSpecName "kube-api-access-kcqtx". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:46:05 crc kubenswrapper[4762]: I1009 13:46:05.978902 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lpfzj\" (UniqueName: \"kubernetes.io/projected/a0d76f37-0b45-48f1-bbc1-c70ab359ee4f-kube-api-access-lpfzj\") on node \"crc\" DevicePath \"\"" Oct 09 13:46:05 crc kubenswrapper[4762]: I1009 13:46:05.980064 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kcqtx\" (UniqueName: \"kubernetes.io/projected/de566a3c-21e4-4b4d-b555-2b040ea2b719-kube-api-access-kcqtx\") on node \"crc\" DevicePath \"\"" Oct 09 13:46:06 crc kubenswrapper[4762]: I1009 13:46:06.331313 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-lh6rx" event={"ID":"deef8d3d-5ef0-44fd-94db-aa27c317b167","Type":"ContainerDied","Data":"8a00c4c7141ad602c1738357fb826f01e2e7ed2972d2ee0e7b27cc9105399eb0"} Oct 09 13:46:06 crc kubenswrapper[4762]: I1009 13:46:06.331338 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-lh6rx" Oct 09 13:46:06 crc kubenswrapper[4762]: I1009 13:46:06.331345 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8a00c4c7141ad602c1738357fb826f01e2e7ed2972d2ee0e7b27cc9105399eb0" Oct 09 13:46:06 crc kubenswrapper[4762]: I1009 13:46:06.333430 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-xdg42" Oct 09 13:46:06 crc kubenswrapper[4762]: I1009 13:46:06.333483 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-xdg42" event={"ID":"a0d76f37-0b45-48f1-bbc1-c70ab359ee4f","Type":"ContainerDied","Data":"6f0ee0f89fc8ee0f815546616bc529ed934ca0d09768dc0227b13c52ae5a2bd3"} Oct 09 13:46:06 crc kubenswrapper[4762]: I1009 13:46:06.333577 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6f0ee0f89fc8ee0f815546616bc529ed934ca0d09768dc0227b13c52ae5a2bd3" Oct 09 13:46:06 crc kubenswrapper[4762]: I1009 13:46:06.335109 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-snp5x" event={"ID":"de566a3c-21e4-4b4d-b555-2b040ea2b719","Type":"ContainerDied","Data":"a973c86d46aa0e848ba4d80f8ddb494143dc3e43bf1cf25ff0ab199355e355f5"} Oct 09 13:46:06 crc kubenswrapper[4762]: I1009 13:46:06.335154 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a973c86d46aa0e848ba4d80f8ddb494143dc3e43bf1cf25ff0ab199355e355f5" Oct 09 13:46:06 crc kubenswrapper[4762]: I1009 13:46:06.335189 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-snp5x" Oct 09 13:46:07 crc kubenswrapper[4762]: I1009 13:46:07.546662 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-4cm5b"] Oct 09 13:46:07 crc kubenswrapper[4762]: E1009 13:46:07.547313 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="deef8d3d-5ef0-44fd-94db-aa27c317b167" containerName="mariadb-database-create" Oct 09 13:46:07 crc kubenswrapper[4762]: I1009 13:46:07.547326 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="deef8d3d-5ef0-44fd-94db-aa27c317b167" containerName="mariadb-database-create" Oct 09 13:46:07 crc kubenswrapper[4762]: E1009 13:46:07.547337 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de566a3c-21e4-4b4d-b555-2b040ea2b719" containerName="mariadb-database-create" Oct 09 13:46:07 crc kubenswrapper[4762]: I1009 13:46:07.547342 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="de566a3c-21e4-4b4d-b555-2b040ea2b719" containerName="mariadb-database-create" Oct 09 13:46:07 crc kubenswrapper[4762]: E1009 13:46:07.547353 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee34933d-92fc-4723-b232-481e48b27ebf" containerName="dnsmasq-dns" Oct 09 13:46:07 crc kubenswrapper[4762]: I1009 13:46:07.547359 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee34933d-92fc-4723-b232-481e48b27ebf" containerName="dnsmasq-dns" Oct 09 13:46:07 crc kubenswrapper[4762]: E1009 13:46:07.547374 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a0d76f37-0b45-48f1-bbc1-c70ab359ee4f" containerName="mariadb-database-create" Oct 09 13:46:07 crc kubenswrapper[4762]: I1009 13:46:07.547380 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="a0d76f37-0b45-48f1-bbc1-c70ab359ee4f" containerName="mariadb-database-create" Oct 09 13:46:07 crc kubenswrapper[4762]: E1009 13:46:07.547395 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee34933d-92fc-4723-b232-481e48b27ebf" containerName="init" Oct 09 13:46:07 crc kubenswrapper[4762]: I1009 13:46:07.547404 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee34933d-92fc-4723-b232-481e48b27ebf" containerName="init" Oct 09 13:46:07 crc kubenswrapper[4762]: E1009 13:46:07.547412 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="11408afa-4f7f-46b2-b499-91ab1e966c35" containerName="mariadb-account-create" Oct 09 13:46:07 crc kubenswrapper[4762]: I1009 13:46:07.547419 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="11408afa-4f7f-46b2-b499-91ab1e966c35" containerName="mariadb-account-create" Oct 09 13:46:07 crc kubenswrapper[4762]: E1009 13:46:07.547444 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fda4b3e7-20b8-4d16-8ccd-d0e4ce4c2f92" containerName="mariadb-account-create" Oct 09 13:46:07 crc kubenswrapper[4762]: I1009 13:46:07.547451 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="fda4b3e7-20b8-4d16-8ccd-d0e4ce4c2f92" containerName="mariadb-account-create" Oct 09 13:46:07 crc kubenswrapper[4762]: I1009 13:46:07.547602 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="de566a3c-21e4-4b4d-b555-2b040ea2b719" containerName="mariadb-database-create" Oct 09 13:46:07 crc kubenswrapper[4762]: I1009 13:46:07.547617 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="ee34933d-92fc-4723-b232-481e48b27ebf" containerName="dnsmasq-dns" Oct 09 13:46:07 crc kubenswrapper[4762]: I1009 13:46:07.547646 4762 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="deef8d3d-5ef0-44fd-94db-aa27c317b167" containerName="mariadb-database-create" Oct 09 13:46:07 crc kubenswrapper[4762]: I1009 13:46:07.547658 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="11408afa-4f7f-46b2-b499-91ab1e966c35" containerName="mariadb-account-create" Oct 09 13:46:07 crc kubenswrapper[4762]: I1009 13:46:07.547669 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="fda4b3e7-20b8-4d16-8ccd-d0e4ce4c2f92" containerName="mariadb-account-create" Oct 09 13:46:07 crc kubenswrapper[4762]: I1009 13:46:07.547680 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="a0d76f37-0b45-48f1-bbc1-c70ab359ee4f" containerName="mariadb-database-create" Oct 09 13:46:07 crc kubenswrapper[4762]: I1009 13:46:07.548361 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-4cm5b" Oct 09 13:46:07 crc kubenswrapper[4762]: I1009 13:46:07.551937 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Oct 09 13:46:07 crc kubenswrapper[4762]: I1009 13:46:07.552376 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-zwg65" Oct 09 13:46:07 crc kubenswrapper[4762]: I1009 13:46:07.552525 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Oct 09 13:46:07 crc kubenswrapper[4762]: I1009 13:46:07.556493 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Oct 09 13:46:07 crc kubenswrapper[4762]: I1009 13:46:07.561622 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-4cm5b"] Oct 09 13:46:07 crc kubenswrapper[4762]: I1009 13:46:07.708809 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e616fe11-8347-4520-b8e3-8aec90aac784-combined-ca-bundle\") pod \"keystone-db-sync-4cm5b\" (UID: \"e616fe11-8347-4520-b8e3-8aec90aac784\") " pod="openstack/keystone-db-sync-4cm5b" Oct 09 13:46:07 crc kubenswrapper[4762]: I1009 13:46:07.708966 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e616fe11-8347-4520-b8e3-8aec90aac784-config-data\") pod \"keystone-db-sync-4cm5b\" (UID: \"e616fe11-8347-4520-b8e3-8aec90aac784\") " pod="openstack/keystone-db-sync-4cm5b" Oct 09 13:46:07 crc kubenswrapper[4762]: I1009 13:46:07.708990 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lbtbq\" (UniqueName: \"kubernetes.io/projected/e616fe11-8347-4520-b8e3-8aec90aac784-kube-api-access-lbtbq\") pod \"keystone-db-sync-4cm5b\" (UID: \"e616fe11-8347-4520-b8e3-8aec90aac784\") " pod="openstack/keystone-db-sync-4cm5b" Oct 09 13:46:07 crc kubenswrapper[4762]: I1009 13:46:07.810998 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e616fe11-8347-4520-b8e3-8aec90aac784-combined-ca-bundle\") pod \"keystone-db-sync-4cm5b\" (UID: \"e616fe11-8347-4520-b8e3-8aec90aac784\") " pod="openstack/keystone-db-sync-4cm5b" Oct 09 13:46:07 crc kubenswrapper[4762]: I1009 13:46:07.811374 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/e616fe11-8347-4520-b8e3-8aec90aac784-config-data\") pod \"keystone-db-sync-4cm5b\" (UID: \"e616fe11-8347-4520-b8e3-8aec90aac784\") " pod="openstack/keystone-db-sync-4cm5b" Oct 09 13:46:07 crc kubenswrapper[4762]: I1009 13:46:07.811451 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lbtbq\" (UniqueName: \"kubernetes.io/projected/e616fe11-8347-4520-b8e3-8aec90aac784-kube-api-access-lbtbq\") pod \"keystone-db-sync-4cm5b\" (UID: \"e616fe11-8347-4520-b8e3-8aec90aac784\") " pod="openstack/keystone-db-sync-4cm5b" Oct 09 13:46:07 crc kubenswrapper[4762]: I1009 13:46:07.815268 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e616fe11-8347-4520-b8e3-8aec90aac784-config-data\") pod \"keystone-db-sync-4cm5b\" (UID: \"e616fe11-8347-4520-b8e3-8aec90aac784\") " pod="openstack/keystone-db-sync-4cm5b" Oct 09 13:46:07 crc kubenswrapper[4762]: I1009 13:46:07.816072 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e616fe11-8347-4520-b8e3-8aec90aac784-combined-ca-bundle\") pod \"keystone-db-sync-4cm5b\" (UID: \"e616fe11-8347-4520-b8e3-8aec90aac784\") " pod="openstack/keystone-db-sync-4cm5b" Oct 09 13:46:07 crc kubenswrapper[4762]: I1009 13:46:07.830339 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lbtbq\" (UniqueName: \"kubernetes.io/projected/e616fe11-8347-4520-b8e3-8aec90aac784-kube-api-access-lbtbq\") pod \"keystone-db-sync-4cm5b\" (UID: \"e616fe11-8347-4520-b8e3-8aec90aac784\") " pod="openstack/keystone-db-sync-4cm5b" Oct 09 13:46:07 crc kubenswrapper[4762]: I1009 13:46:07.865786 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-4cm5b" Oct 09 13:46:08 crc kubenswrapper[4762]: I1009 13:46:08.312563 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-4cm5b"] Oct 09 13:46:08 crc kubenswrapper[4762]: I1009 13:46:08.355874 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-4cm5b" event={"ID":"e616fe11-8347-4520-b8e3-8aec90aac784","Type":"ContainerStarted","Data":"760636bccc0520db1ed7c922405cd57f273e35e61f3a3cf7246e0db65ebacd75"} Oct 09 13:46:11 crc kubenswrapper[4762]: I1009 13:46:11.969253 4762 patch_prober.go:28] interesting pod/machine-config-daemon-5v6hv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 13:46:11 crc kubenswrapper[4762]: I1009 13:46:11.969806 4762 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 13:46:13 crc kubenswrapper[4762]: I1009 13:46:13.408282 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-4cm5b" event={"ID":"e616fe11-8347-4520-b8e3-8aec90aac784","Type":"ContainerStarted","Data":"108eadbdff693fa49f902a1ef6d6e21c2f2ad61f0c58b328f8cfb670edd35d7c"} Oct 09 13:46:13 crc kubenswrapper[4762]: I1009 13:46:13.434340 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-4cm5b" podStartSLOduration=2.048521621 podStartE2EDuration="6.434312978s" podCreationTimestamp="2025-10-09 13:46:07 +0000 UTC" firstStartedPulling="2025-10-09 13:46:08.31575511 +0000 UTC m=+1244.089546149" lastFinishedPulling="2025-10-09 13:46:12.701546447 +0000 UTC m=+1248.475337506" observedRunningTime="2025-10-09 13:46:13.428727093 +0000 UTC m=+1249.202518142" watchObservedRunningTime="2025-10-09 13:46:13.434312978 +0000 UTC m=+1249.208104047" Oct 09 13:46:16 crc kubenswrapper[4762]: I1009 13:46:16.433581 4762 generic.go:334] "Generic (PLEG): container finished" podID="8cf35063-94f0-4dd9-9282-060a7d5360cc" containerID="7f1165287ed36d82cdee80b95268e900e24a818c960dcdc79f1d39b4329d1267" exitCode=0 Oct 09 13:46:16 crc kubenswrapper[4762]: I1009 13:46:16.433682 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-hl7q6" event={"ID":"8cf35063-94f0-4dd9-9282-060a7d5360cc","Type":"ContainerDied","Data":"7f1165287ed36d82cdee80b95268e900e24a818c960dcdc79f1d39b4329d1267"} Oct 09 13:46:17 crc kubenswrapper[4762]: I1009 13:46:17.811532 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-hl7q6" Oct 09 13:46:17 crc kubenswrapper[4762]: I1009 13:46:17.970654 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bvjxd\" (UniqueName: \"kubernetes.io/projected/8cf35063-94f0-4dd9-9282-060a7d5360cc-kube-api-access-bvjxd\") pod \"8cf35063-94f0-4dd9-9282-060a7d5360cc\" (UID: \"8cf35063-94f0-4dd9-9282-060a7d5360cc\") " Oct 09 13:46:17 crc kubenswrapper[4762]: I1009 13:46:17.970704 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8cf35063-94f0-4dd9-9282-060a7d5360cc-config-data\") pod \"8cf35063-94f0-4dd9-9282-060a7d5360cc\" (UID: \"8cf35063-94f0-4dd9-9282-060a7d5360cc\") " Oct 09 13:46:17 crc kubenswrapper[4762]: I1009 13:46:17.970764 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/8cf35063-94f0-4dd9-9282-060a7d5360cc-db-sync-config-data\") pod \"8cf35063-94f0-4dd9-9282-060a7d5360cc\" (UID: \"8cf35063-94f0-4dd9-9282-060a7d5360cc\") " Oct 09 13:46:17 crc kubenswrapper[4762]: I1009 13:46:17.970849 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8cf35063-94f0-4dd9-9282-060a7d5360cc-combined-ca-bundle\") pod \"8cf35063-94f0-4dd9-9282-060a7d5360cc\" (UID: \"8cf35063-94f0-4dd9-9282-060a7d5360cc\") " Oct 09 13:46:17 crc kubenswrapper[4762]: I1009 13:46:17.977651 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cf35063-94f0-4dd9-9282-060a7d5360cc-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "8cf35063-94f0-4dd9-9282-060a7d5360cc" (UID: "8cf35063-94f0-4dd9-9282-060a7d5360cc"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:46:17 crc kubenswrapper[4762]: I1009 13:46:17.978295 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cf35063-94f0-4dd9-9282-060a7d5360cc-kube-api-access-bvjxd" (OuterVolumeSpecName: "kube-api-access-bvjxd") pod "8cf35063-94f0-4dd9-9282-060a7d5360cc" (UID: "8cf35063-94f0-4dd9-9282-060a7d5360cc"). InnerVolumeSpecName "kube-api-access-bvjxd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:46:17 crc kubenswrapper[4762]: I1009 13:46:17.997189 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cf35063-94f0-4dd9-9282-060a7d5360cc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8cf35063-94f0-4dd9-9282-060a7d5360cc" (UID: "8cf35063-94f0-4dd9-9282-060a7d5360cc"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:46:18 crc kubenswrapper[4762]: I1009 13:46:18.030369 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cf35063-94f0-4dd9-9282-060a7d5360cc-config-data" (OuterVolumeSpecName: "config-data") pod "8cf35063-94f0-4dd9-9282-060a7d5360cc" (UID: "8cf35063-94f0-4dd9-9282-060a7d5360cc"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:46:18 crc kubenswrapper[4762]: I1009 13:46:18.072983 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8cf35063-94f0-4dd9-9282-060a7d5360cc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 13:46:18 crc kubenswrapper[4762]: I1009 13:46:18.073024 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bvjxd\" (UniqueName: \"kubernetes.io/projected/8cf35063-94f0-4dd9-9282-060a7d5360cc-kube-api-access-bvjxd\") on node \"crc\" DevicePath \"\"" Oct 09 13:46:18 crc kubenswrapper[4762]: I1009 13:46:18.073045 4762 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8cf35063-94f0-4dd9-9282-060a7d5360cc-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 13:46:18 crc kubenswrapper[4762]: I1009 13:46:18.073057 4762 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/8cf35063-94f0-4dd9-9282-060a7d5360cc-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 13:46:18 crc kubenswrapper[4762]: I1009 13:46:18.453787 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-hl7q6" event={"ID":"8cf35063-94f0-4dd9-9282-060a7d5360cc","Type":"ContainerDied","Data":"b12bfd42be7ad8c8354dd3efb6fc9a53216f744ab311bb679ad20cf1fef8b4a7"} Oct 09 13:46:18 crc kubenswrapper[4762]: I1009 13:46:18.453826 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b12bfd42be7ad8c8354dd3efb6fc9a53216f744ab311bb679ad20cf1fef8b4a7" Oct 09 13:46:18 crc kubenswrapper[4762]: I1009 13:46:18.453847 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-hl7q6" Oct 09 13:46:18 crc kubenswrapper[4762]: I1009 13:46:18.765849 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-1fff-account-create-vhfsm"] Oct 09 13:46:18 crc kubenswrapper[4762]: E1009 13:46:18.766446 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8cf35063-94f0-4dd9-9282-060a7d5360cc" containerName="glance-db-sync" Oct 09 13:46:18 crc kubenswrapper[4762]: I1009 13:46:18.766512 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="8cf35063-94f0-4dd9-9282-060a7d5360cc" containerName="glance-db-sync" Oct 09 13:46:18 crc kubenswrapper[4762]: I1009 13:46:18.766760 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="8cf35063-94f0-4dd9-9282-060a7d5360cc" containerName="glance-db-sync" Oct 09 13:46:18 crc kubenswrapper[4762]: I1009 13:46:18.767338 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-1fff-account-create-vhfsm" Oct 09 13:46:18 crc kubenswrapper[4762]: I1009 13:46:18.769991 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Oct 09 13:46:18 crc kubenswrapper[4762]: I1009 13:46:18.778408 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-1fff-account-create-vhfsm"] Oct 09 13:46:18 crc kubenswrapper[4762]: I1009 13:46:18.823241 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-272a-account-create-qwhwd"] Oct 09 13:46:18 crc kubenswrapper[4762]: I1009 13:46:18.829741 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-272a-account-create-qwhwd" Oct 09 13:46:18 crc kubenswrapper[4762]: I1009 13:46:18.832061 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Oct 09 13:46:18 crc kubenswrapper[4762]: I1009 13:46:18.836368 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-272a-account-create-qwhwd"] Oct 09 13:46:18 crc kubenswrapper[4762]: I1009 13:46:18.885751 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4nmlj\" (UniqueName: \"kubernetes.io/projected/b026cfa5-4949-45d8-9d57-ec509cc84936-kube-api-access-4nmlj\") pod \"cinder-1fff-account-create-vhfsm\" (UID: \"b026cfa5-4949-45d8-9d57-ec509cc84936\") " pod="openstack/cinder-1fff-account-create-vhfsm" Oct 09 13:46:18 crc kubenswrapper[4762]: I1009 13:46:18.891885 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-895cf5cf-khqhn"] Oct 09 13:46:18 crc kubenswrapper[4762]: I1009 13:46:18.893955 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-895cf5cf-khqhn" Oct 09 13:46:18 crc kubenswrapper[4762]: I1009 13:46:18.908172 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-895cf5cf-khqhn"] Oct 09 13:46:18 crc kubenswrapper[4762]: I1009 13:46:18.989247 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4af12e84-db98-4019-a826-0fe154662f7e-dns-svc\") pod \"dnsmasq-dns-895cf5cf-khqhn\" (UID: \"4af12e84-db98-4019-a826-0fe154662f7e\") " pod="openstack/dnsmasq-dns-895cf5cf-khqhn" Oct 09 13:46:18 crc kubenswrapper[4762]: I1009 13:46:18.989336 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4af12e84-db98-4019-a826-0fe154662f7e-ovsdbserver-nb\") pod \"dnsmasq-dns-895cf5cf-khqhn\" (UID: \"4af12e84-db98-4019-a826-0fe154662f7e\") " pod="openstack/dnsmasq-dns-895cf5cf-khqhn" Oct 09 13:46:18 crc kubenswrapper[4762]: I1009 13:46:18.989444 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4af12e84-db98-4019-a826-0fe154662f7e-ovsdbserver-sb\") pod \"dnsmasq-dns-895cf5cf-khqhn\" (UID: \"4af12e84-db98-4019-a826-0fe154662f7e\") " pod="openstack/dnsmasq-dns-895cf5cf-khqhn" Oct 09 13:46:18 crc kubenswrapper[4762]: I1009 13:46:18.989466 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4af12e84-db98-4019-a826-0fe154662f7e-dns-swift-storage-0\") pod \"dnsmasq-dns-895cf5cf-khqhn\" (UID: \"4af12e84-db98-4019-a826-0fe154662f7e\") " pod="openstack/dnsmasq-dns-895cf5cf-khqhn" Oct 09 13:46:18 crc kubenswrapper[4762]: I1009 13:46:18.989514 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4af12e84-db98-4019-a826-0fe154662f7e-config\") pod \"dnsmasq-dns-895cf5cf-khqhn\" (UID: \"4af12e84-db98-4019-a826-0fe154662f7e\") " pod="openstack/dnsmasq-dns-895cf5cf-khqhn" Oct 09 13:46:18 crc kubenswrapper[4762]: I1009 13:46:18.989623 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x6ckk\" (UniqueName: 
\"kubernetes.io/projected/4af12e84-db98-4019-a826-0fe154662f7e-kube-api-access-x6ckk\") pod \"dnsmasq-dns-895cf5cf-khqhn\" (UID: \"4af12e84-db98-4019-a826-0fe154662f7e\") " pod="openstack/dnsmasq-dns-895cf5cf-khqhn" Oct 09 13:46:18 crc kubenswrapper[4762]: I1009 13:46:18.989727 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4nmlj\" (UniqueName: \"kubernetes.io/projected/b026cfa5-4949-45d8-9d57-ec509cc84936-kube-api-access-4nmlj\") pod \"cinder-1fff-account-create-vhfsm\" (UID: \"b026cfa5-4949-45d8-9d57-ec509cc84936\") " pod="openstack/cinder-1fff-account-create-vhfsm" Oct 09 13:46:18 crc kubenswrapper[4762]: I1009 13:46:18.989864 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-glgxl\" (UniqueName: \"kubernetes.io/projected/f1656cb3-3fd7-49fd-8434-101406b98d60-kube-api-access-glgxl\") pod \"barbican-272a-account-create-qwhwd\" (UID: \"f1656cb3-3fd7-49fd-8434-101406b98d60\") " pod="openstack/barbican-272a-account-create-qwhwd" Oct 09 13:46:19 crc kubenswrapper[4762]: I1009 13:46:19.012567 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4nmlj\" (UniqueName: \"kubernetes.io/projected/b026cfa5-4949-45d8-9d57-ec509cc84936-kube-api-access-4nmlj\") pod \"cinder-1fff-account-create-vhfsm\" (UID: \"b026cfa5-4949-45d8-9d57-ec509cc84936\") " pod="openstack/cinder-1fff-account-create-vhfsm" Oct 09 13:46:19 crc kubenswrapper[4762]: I1009 13:46:19.091035 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-1fff-account-create-vhfsm" Oct 09 13:46:19 crc kubenswrapper[4762]: I1009 13:46:19.091402 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4af12e84-db98-4019-a826-0fe154662f7e-ovsdbserver-nb\") pod \"dnsmasq-dns-895cf5cf-khqhn\" (UID: \"4af12e84-db98-4019-a826-0fe154662f7e\") " pod="openstack/dnsmasq-dns-895cf5cf-khqhn" Oct 09 13:46:19 crc kubenswrapper[4762]: I1009 13:46:19.091473 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4af12e84-db98-4019-a826-0fe154662f7e-ovsdbserver-sb\") pod \"dnsmasq-dns-895cf5cf-khqhn\" (UID: \"4af12e84-db98-4019-a826-0fe154662f7e\") " pod="openstack/dnsmasq-dns-895cf5cf-khqhn" Oct 09 13:46:19 crc kubenswrapper[4762]: I1009 13:46:19.091493 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4af12e84-db98-4019-a826-0fe154662f7e-dns-swift-storage-0\") pod \"dnsmasq-dns-895cf5cf-khqhn\" (UID: \"4af12e84-db98-4019-a826-0fe154662f7e\") " pod="openstack/dnsmasq-dns-895cf5cf-khqhn" Oct 09 13:46:19 crc kubenswrapper[4762]: I1009 13:46:19.091511 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4af12e84-db98-4019-a826-0fe154662f7e-config\") pod \"dnsmasq-dns-895cf5cf-khqhn\" (UID: \"4af12e84-db98-4019-a826-0fe154662f7e\") " pod="openstack/dnsmasq-dns-895cf5cf-khqhn" Oct 09 13:46:19 crc kubenswrapper[4762]: I1009 13:46:19.091533 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x6ckk\" (UniqueName: \"kubernetes.io/projected/4af12e84-db98-4019-a826-0fe154662f7e-kube-api-access-x6ckk\") pod \"dnsmasq-dns-895cf5cf-khqhn\" (UID: \"4af12e84-db98-4019-a826-0fe154662f7e\") " 
pod="openstack/dnsmasq-dns-895cf5cf-khqhn" Oct 09 13:46:19 crc kubenswrapper[4762]: I1009 13:46:19.091596 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-glgxl\" (UniqueName: \"kubernetes.io/projected/f1656cb3-3fd7-49fd-8434-101406b98d60-kube-api-access-glgxl\") pod \"barbican-272a-account-create-qwhwd\" (UID: \"f1656cb3-3fd7-49fd-8434-101406b98d60\") " pod="openstack/barbican-272a-account-create-qwhwd" Oct 09 13:46:19 crc kubenswrapper[4762]: I1009 13:46:19.091656 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4af12e84-db98-4019-a826-0fe154662f7e-dns-svc\") pod \"dnsmasq-dns-895cf5cf-khqhn\" (UID: \"4af12e84-db98-4019-a826-0fe154662f7e\") " pod="openstack/dnsmasq-dns-895cf5cf-khqhn" Oct 09 13:46:19 crc kubenswrapper[4762]: I1009 13:46:19.092583 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4af12e84-db98-4019-a826-0fe154662f7e-dns-svc\") pod \"dnsmasq-dns-895cf5cf-khqhn\" (UID: \"4af12e84-db98-4019-a826-0fe154662f7e\") " pod="openstack/dnsmasq-dns-895cf5cf-khqhn" Oct 09 13:46:19 crc kubenswrapper[4762]: I1009 13:46:19.092679 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4af12e84-db98-4019-a826-0fe154662f7e-dns-swift-storage-0\") pod \"dnsmasq-dns-895cf5cf-khqhn\" (UID: \"4af12e84-db98-4019-a826-0fe154662f7e\") " pod="openstack/dnsmasq-dns-895cf5cf-khqhn" Oct 09 13:46:19 crc kubenswrapper[4762]: I1009 13:46:19.092784 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4af12e84-db98-4019-a826-0fe154662f7e-ovsdbserver-sb\") pod \"dnsmasq-dns-895cf5cf-khqhn\" (UID: \"4af12e84-db98-4019-a826-0fe154662f7e\") " pod="openstack/dnsmasq-dns-895cf5cf-khqhn" Oct 09 13:46:19 crc kubenswrapper[4762]: I1009 13:46:19.092942 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4af12e84-db98-4019-a826-0fe154662f7e-config\") pod \"dnsmasq-dns-895cf5cf-khqhn\" (UID: \"4af12e84-db98-4019-a826-0fe154662f7e\") " pod="openstack/dnsmasq-dns-895cf5cf-khqhn" Oct 09 13:46:19 crc kubenswrapper[4762]: I1009 13:46:19.093000 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4af12e84-db98-4019-a826-0fe154662f7e-ovsdbserver-nb\") pod \"dnsmasq-dns-895cf5cf-khqhn\" (UID: \"4af12e84-db98-4019-a826-0fe154662f7e\") " pod="openstack/dnsmasq-dns-895cf5cf-khqhn" Oct 09 13:46:19 crc kubenswrapper[4762]: I1009 13:46:19.113727 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-glgxl\" (UniqueName: \"kubernetes.io/projected/f1656cb3-3fd7-49fd-8434-101406b98d60-kube-api-access-glgxl\") pod \"barbican-272a-account-create-qwhwd\" (UID: \"f1656cb3-3fd7-49fd-8434-101406b98d60\") " pod="openstack/barbican-272a-account-create-qwhwd" Oct 09 13:46:19 crc kubenswrapper[4762]: I1009 13:46:19.114520 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x6ckk\" (UniqueName: \"kubernetes.io/projected/4af12e84-db98-4019-a826-0fe154662f7e-kube-api-access-x6ckk\") pod \"dnsmasq-dns-895cf5cf-khqhn\" (UID: \"4af12e84-db98-4019-a826-0fe154662f7e\") " pod="openstack/dnsmasq-dns-895cf5cf-khqhn" Oct 09 13:46:19 crc kubenswrapper[4762]: I1009 13:46:19.141988 
4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-b30a-account-create-ctfdm"] Oct 09 13:46:19 crc kubenswrapper[4762]: I1009 13:46:19.143495 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-b30a-account-create-ctfdm" Oct 09 13:46:19 crc kubenswrapper[4762]: I1009 13:46:19.148556 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Oct 09 13:46:19 crc kubenswrapper[4762]: I1009 13:46:19.149513 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-b30a-account-create-ctfdm"] Oct 09 13:46:19 crc kubenswrapper[4762]: I1009 13:46:19.155005 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-272a-account-create-qwhwd" Oct 09 13:46:19 crc kubenswrapper[4762]: I1009 13:46:19.216896 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-895cf5cf-khqhn" Oct 09 13:46:19 crc kubenswrapper[4762]: I1009 13:46:19.305585 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ld42f\" (UniqueName: \"kubernetes.io/projected/15ee54a3-2cd6-4203-a472-9274c1ea9018-kube-api-access-ld42f\") pod \"neutron-b30a-account-create-ctfdm\" (UID: \"15ee54a3-2cd6-4203-a472-9274c1ea9018\") " pod="openstack/neutron-b30a-account-create-ctfdm" Oct 09 13:46:19 crc kubenswrapper[4762]: I1009 13:46:19.407594 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ld42f\" (UniqueName: \"kubernetes.io/projected/15ee54a3-2cd6-4203-a472-9274c1ea9018-kube-api-access-ld42f\") pod \"neutron-b30a-account-create-ctfdm\" (UID: \"15ee54a3-2cd6-4203-a472-9274c1ea9018\") " pod="openstack/neutron-b30a-account-create-ctfdm" Oct 09 13:46:19 crc kubenswrapper[4762]: I1009 13:46:19.427274 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ld42f\" (UniqueName: \"kubernetes.io/projected/15ee54a3-2cd6-4203-a472-9274c1ea9018-kube-api-access-ld42f\") pod \"neutron-b30a-account-create-ctfdm\" (UID: \"15ee54a3-2cd6-4203-a472-9274c1ea9018\") " pod="openstack/neutron-b30a-account-create-ctfdm" Oct 09 13:46:19 crc kubenswrapper[4762]: I1009 13:46:19.463743 4762 generic.go:334] "Generic (PLEG): container finished" podID="e616fe11-8347-4520-b8e3-8aec90aac784" containerID="108eadbdff693fa49f902a1ef6d6e21c2f2ad61f0c58b328f8cfb670edd35d7c" exitCode=0 Oct 09 13:46:19 crc kubenswrapper[4762]: I1009 13:46:19.463790 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-4cm5b" event={"ID":"e616fe11-8347-4520-b8e3-8aec90aac784","Type":"ContainerDied","Data":"108eadbdff693fa49f902a1ef6d6e21c2f2ad61f0c58b328f8cfb670edd35d7c"} Oct 09 13:46:19 crc kubenswrapper[4762]: I1009 13:46:19.598030 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-b30a-account-create-ctfdm" Oct 09 13:46:19 crc kubenswrapper[4762]: I1009 13:46:19.611608 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-1fff-account-create-vhfsm"] Oct 09 13:46:19 crc kubenswrapper[4762]: I1009 13:46:19.637721 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-895cf5cf-khqhn"] Oct 09 13:46:19 crc kubenswrapper[4762]: W1009 13:46:19.700864 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4af12e84_db98_4019_a826_0fe154662f7e.slice/crio-959a66039b730cf4e172aad86d5f157d15d5bf1f99cf2602cf068c5194ddff27 WatchSource:0}: Error finding container 959a66039b730cf4e172aad86d5f157d15d5bf1f99cf2602cf068c5194ddff27: Status 404 returned error can't find the container with id 959a66039b730cf4e172aad86d5f157d15d5bf1f99cf2602cf068c5194ddff27 Oct 09 13:46:19 crc kubenswrapper[4762]: I1009 13:46:19.754811 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-272a-account-create-qwhwd"] Oct 09 13:46:20 crc kubenswrapper[4762]: I1009 13:46:20.186622 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-b30a-account-create-ctfdm"] Oct 09 13:46:20 crc kubenswrapper[4762]: W1009 13:46:20.232902 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod15ee54a3_2cd6_4203_a472_9274c1ea9018.slice/crio-9347520a58fad9ae03c29f590b3b2da763d7488c78381b9618691f6dc434b507 WatchSource:0}: Error finding container 9347520a58fad9ae03c29f590b3b2da763d7488c78381b9618691f6dc434b507: Status 404 returned error can't find the container with id 9347520a58fad9ae03c29f590b3b2da763d7488c78381b9618691f6dc434b507 Oct 09 13:46:20 crc kubenswrapper[4762]: I1009 13:46:20.476454 4762 generic.go:334] "Generic (PLEG): container finished" podID="f1656cb3-3fd7-49fd-8434-101406b98d60" containerID="2e385bd14e203e4ad5bed79e91a917e47f6669de00d6888a65bc2ff11aa9deea" exitCode=0 Oct 09 13:46:20 crc kubenswrapper[4762]: I1009 13:46:20.476562 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-272a-account-create-qwhwd" event={"ID":"f1656cb3-3fd7-49fd-8434-101406b98d60","Type":"ContainerDied","Data":"2e385bd14e203e4ad5bed79e91a917e47f6669de00d6888a65bc2ff11aa9deea"} Oct 09 13:46:20 crc kubenswrapper[4762]: I1009 13:46:20.476686 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-272a-account-create-qwhwd" event={"ID":"f1656cb3-3fd7-49fd-8434-101406b98d60","Type":"ContainerStarted","Data":"5ddf5dadcec8229cf32b0dbd1c2111af7dac9c1822d076b9892a2070cedaf6e2"} Oct 09 13:46:20 crc kubenswrapper[4762]: I1009 13:46:20.478886 4762 generic.go:334] "Generic (PLEG): container finished" podID="4af12e84-db98-4019-a826-0fe154662f7e" containerID="66509cea160b32963283708f73344553a0ab99bfc5647d2cbf29b10309fb1e25" exitCode=0 Oct 09 13:46:20 crc kubenswrapper[4762]: I1009 13:46:20.478963 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-895cf5cf-khqhn" event={"ID":"4af12e84-db98-4019-a826-0fe154662f7e","Type":"ContainerDied","Data":"66509cea160b32963283708f73344553a0ab99bfc5647d2cbf29b10309fb1e25"} Oct 09 13:46:20 crc kubenswrapper[4762]: I1009 13:46:20.478995 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-895cf5cf-khqhn" 
event={"ID":"4af12e84-db98-4019-a826-0fe154662f7e","Type":"ContainerStarted","Data":"959a66039b730cf4e172aad86d5f157d15d5bf1f99cf2602cf068c5194ddff27"} Oct 09 13:46:20 crc kubenswrapper[4762]: I1009 13:46:20.482026 4762 generic.go:334] "Generic (PLEG): container finished" podID="b026cfa5-4949-45d8-9d57-ec509cc84936" containerID="7c4c8ceaa0d333127b5a811c5ab2eab36a0ca07212c0bb9ecbf25c53b6126818" exitCode=0 Oct 09 13:46:20 crc kubenswrapper[4762]: I1009 13:46:20.482127 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-1fff-account-create-vhfsm" event={"ID":"b026cfa5-4949-45d8-9d57-ec509cc84936","Type":"ContainerDied","Data":"7c4c8ceaa0d333127b5a811c5ab2eab36a0ca07212c0bb9ecbf25c53b6126818"} Oct 09 13:46:20 crc kubenswrapper[4762]: I1009 13:46:20.482190 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-1fff-account-create-vhfsm" event={"ID":"b026cfa5-4949-45d8-9d57-ec509cc84936","Type":"ContainerStarted","Data":"fe8fddadf1ddf88606a3ae9fa6eda480dc50cc84fefb0b8bd491d02acd7d18a6"} Oct 09 13:46:20 crc kubenswrapper[4762]: I1009 13:46:20.483495 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-b30a-account-create-ctfdm" event={"ID":"15ee54a3-2cd6-4203-a472-9274c1ea9018","Type":"ContainerStarted","Data":"9347520a58fad9ae03c29f590b3b2da763d7488c78381b9618691f6dc434b507"} Oct 09 13:46:20 crc kubenswrapper[4762]: I1009 13:46:20.833739 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-4cm5b" Oct 09 13:46:20 crc kubenswrapper[4762]: I1009 13:46:20.943128 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e616fe11-8347-4520-b8e3-8aec90aac784-combined-ca-bundle\") pod \"e616fe11-8347-4520-b8e3-8aec90aac784\" (UID: \"e616fe11-8347-4520-b8e3-8aec90aac784\") " Oct 09 13:46:20 crc kubenswrapper[4762]: I1009 13:46:20.943283 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lbtbq\" (UniqueName: \"kubernetes.io/projected/e616fe11-8347-4520-b8e3-8aec90aac784-kube-api-access-lbtbq\") pod \"e616fe11-8347-4520-b8e3-8aec90aac784\" (UID: \"e616fe11-8347-4520-b8e3-8aec90aac784\") " Oct 09 13:46:20 crc kubenswrapper[4762]: I1009 13:46:20.943335 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e616fe11-8347-4520-b8e3-8aec90aac784-config-data\") pod \"e616fe11-8347-4520-b8e3-8aec90aac784\" (UID: \"e616fe11-8347-4520-b8e3-8aec90aac784\") " Oct 09 13:46:20 crc kubenswrapper[4762]: I1009 13:46:20.948705 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e616fe11-8347-4520-b8e3-8aec90aac784-kube-api-access-lbtbq" (OuterVolumeSpecName: "kube-api-access-lbtbq") pod "e616fe11-8347-4520-b8e3-8aec90aac784" (UID: "e616fe11-8347-4520-b8e3-8aec90aac784"). InnerVolumeSpecName "kube-api-access-lbtbq". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:46:20 crc kubenswrapper[4762]: I1009 13:46:20.981796 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e616fe11-8347-4520-b8e3-8aec90aac784-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e616fe11-8347-4520-b8e3-8aec90aac784" (UID: "e616fe11-8347-4520-b8e3-8aec90aac784"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:46:20 crc kubenswrapper[4762]: I1009 13:46:20.986980 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e616fe11-8347-4520-b8e3-8aec90aac784-config-data" (OuterVolumeSpecName: "config-data") pod "e616fe11-8347-4520-b8e3-8aec90aac784" (UID: "e616fe11-8347-4520-b8e3-8aec90aac784"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:46:21 crc kubenswrapper[4762]: I1009 13:46:21.045099 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e616fe11-8347-4520-b8e3-8aec90aac784-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 13:46:21 crc kubenswrapper[4762]: I1009 13:46:21.045386 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lbtbq\" (UniqueName: \"kubernetes.io/projected/e616fe11-8347-4520-b8e3-8aec90aac784-kube-api-access-lbtbq\") on node \"crc\" DevicePath \"\"" Oct 09 13:46:21 crc kubenswrapper[4762]: I1009 13:46:21.045467 4762 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e616fe11-8347-4520-b8e3-8aec90aac784-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 13:46:21 crc kubenswrapper[4762]: I1009 13:46:21.493667 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-4cm5b" event={"ID":"e616fe11-8347-4520-b8e3-8aec90aac784","Type":"ContainerDied","Data":"760636bccc0520db1ed7c922405cd57f273e35e61f3a3cf7246e0db65ebacd75"} Oct 09 13:46:21 crc kubenswrapper[4762]: I1009 13:46:21.494747 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="760636bccc0520db1ed7c922405cd57f273e35e61f3a3cf7246e0db65ebacd75" Oct 09 13:46:21 crc kubenswrapper[4762]: I1009 13:46:21.494853 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-4cm5b" Oct 09 13:46:21 crc kubenswrapper[4762]: I1009 13:46:21.517522 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-895cf5cf-khqhn" event={"ID":"4af12e84-db98-4019-a826-0fe154662f7e","Type":"ContainerStarted","Data":"7e15d289baeff1ce19d7b3ca580245b7561af443b7f6aa7373c69ce247ca09ae"} Oct 09 13:46:21 crc kubenswrapper[4762]: I1009 13:46:21.518493 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-895cf5cf-khqhn" Oct 09 13:46:21 crc kubenswrapper[4762]: I1009 13:46:21.520668 4762 generic.go:334] "Generic (PLEG): container finished" podID="15ee54a3-2cd6-4203-a472-9274c1ea9018" containerID="1b7bfb3ca70dcf4718ff5b80adf5e2a0e948797af3bfb684e635ead97d672951" exitCode=0 Oct 09 13:46:21 crc kubenswrapper[4762]: I1009 13:46:21.520819 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-b30a-account-create-ctfdm" event={"ID":"15ee54a3-2cd6-4203-a472-9274c1ea9018","Type":"ContainerDied","Data":"1b7bfb3ca70dcf4718ff5b80adf5e2a0e948797af3bfb684e635ead97d672951"} Oct 09 13:46:21 crc kubenswrapper[4762]: I1009 13:46:21.565828 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-895cf5cf-khqhn" podStartSLOduration=3.565805099 podStartE2EDuration="3.565805099s" podCreationTimestamp="2025-10-09 13:46:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:46:21.552790232 +0000 UTC m=+1257.326581271" watchObservedRunningTime="2025-10-09 13:46:21.565805099 +0000 UTC m=+1257.339596138" Oct 09 13:46:21 crc kubenswrapper[4762]: I1009 13:46:21.763487 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-895cf5cf-khqhn"] Oct 09 13:46:21 crc kubenswrapper[4762]: I1009 13:46:21.800894 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-pfvgh"] Oct 09 13:46:21 crc kubenswrapper[4762]: E1009 13:46:21.801449 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e616fe11-8347-4520-b8e3-8aec90aac784" containerName="keystone-db-sync" Oct 09 13:46:21 crc kubenswrapper[4762]: I1009 13:46:21.801475 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="e616fe11-8347-4520-b8e3-8aec90aac784" containerName="keystone-db-sync" Oct 09 13:46:21 crc kubenswrapper[4762]: I1009 13:46:21.801709 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="e616fe11-8347-4520-b8e3-8aec90aac784" containerName="keystone-db-sync" Oct 09 13:46:21 crc kubenswrapper[4762]: I1009 13:46:21.802387 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-pfvgh" Oct 09 13:46:21 crc kubenswrapper[4762]: I1009 13:46:21.806143 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Oct 09 13:46:21 crc kubenswrapper[4762]: I1009 13:46:21.807456 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Oct 09 13:46:21 crc kubenswrapper[4762]: I1009 13:46:21.807857 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Oct 09 13:46:21 crc kubenswrapper[4762]: I1009 13:46:21.809615 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-zwg65" Oct 09 13:46:21 crc kubenswrapper[4762]: I1009 13:46:21.825245 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6c9c9f998c-2f5b9"] Oct 09 13:46:21 crc kubenswrapper[4762]: I1009 13:46:21.826686 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6c9c9f998c-2f5b9" Oct 09 13:46:21 crc kubenswrapper[4762]: I1009 13:46:21.837037 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6c9c9f998c-2f5b9"] Oct 09 13:46:21 crc kubenswrapper[4762]: I1009 13:46:21.887225 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-pfvgh"] Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.008789 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/84218980-f997-4443-8dec-0bb2761f0527-scripts\") pod \"keystone-bootstrap-pfvgh\" (UID: \"84218980-f997-4443-8dec-0bb2761f0527\") " pod="openstack/keystone-bootstrap-pfvgh" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.033589 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/84218980-f997-4443-8dec-0bb2761f0527-config-data\") pod \"keystone-bootstrap-pfvgh\" (UID: \"84218980-f997-4443-8dec-0bb2761f0527\") " pod="openstack/keystone-bootstrap-pfvgh" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.033858 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/04e4465b-7d00-47ab-ac48-1468b7821d95-dns-swift-storage-0\") pod \"dnsmasq-dns-6c9c9f998c-2f5b9\" (UID: \"04e4465b-7d00-47ab-ac48-1468b7821d95\") " pod="openstack/dnsmasq-dns-6c9c9f998c-2f5b9" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.033940 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/04e4465b-7d00-47ab-ac48-1468b7821d95-ovsdbserver-nb\") pod \"dnsmasq-dns-6c9c9f998c-2f5b9\" (UID: \"04e4465b-7d00-47ab-ac48-1468b7821d95\") " pod="openstack/dnsmasq-dns-6c9c9f998c-2f5b9" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.034051 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/84218980-f997-4443-8dec-0bb2761f0527-credential-keys\") pod \"keystone-bootstrap-pfvgh\" (UID: \"84218980-f997-4443-8dec-0bb2761f0527\") " pod="openstack/keystone-bootstrap-pfvgh" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.034262 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"fernet-keys\" (UniqueName: \"kubernetes.io/secret/84218980-f997-4443-8dec-0bb2761f0527-fernet-keys\") pod \"keystone-bootstrap-pfvgh\" (UID: \"84218980-f997-4443-8dec-0bb2761f0527\") " pod="openstack/keystone-bootstrap-pfvgh" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.034336 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dzt7d\" (UniqueName: \"kubernetes.io/projected/04e4465b-7d00-47ab-ac48-1468b7821d95-kube-api-access-dzt7d\") pod \"dnsmasq-dns-6c9c9f998c-2f5b9\" (UID: \"04e4465b-7d00-47ab-ac48-1468b7821d95\") " pod="openstack/dnsmasq-dns-6c9c9f998c-2f5b9" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.034528 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/04e4465b-7d00-47ab-ac48-1468b7821d95-dns-svc\") pod \"dnsmasq-dns-6c9c9f998c-2f5b9\" (UID: \"04e4465b-7d00-47ab-ac48-1468b7821d95\") " pod="openstack/dnsmasq-dns-6c9c9f998c-2f5b9" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.034684 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/84218980-f997-4443-8dec-0bb2761f0527-combined-ca-bundle\") pod \"keystone-bootstrap-pfvgh\" (UID: \"84218980-f997-4443-8dec-0bb2761f0527\") " pod="openstack/keystone-bootstrap-pfvgh" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.034784 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/04e4465b-7d00-47ab-ac48-1468b7821d95-config\") pod \"dnsmasq-dns-6c9c9f998c-2f5b9\" (UID: \"04e4465b-7d00-47ab-ac48-1468b7821d95\") " pod="openstack/dnsmasq-dns-6c9c9f998c-2f5b9" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.034900 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lcppv\" (UniqueName: \"kubernetes.io/projected/84218980-f997-4443-8dec-0bb2761f0527-kube-api-access-lcppv\") pod \"keystone-bootstrap-pfvgh\" (UID: \"84218980-f997-4443-8dec-0bb2761f0527\") " pod="openstack/keystone-bootstrap-pfvgh" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.034966 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/04e4465b-7d00-47ab-ac48-1468b7821d95-ovsdbserver-sb\") pod \"dnsmasq-dns-6c9c9f998c-2f5b9\" (UID: \"04e4465b-7d00-47ab-ac48-1468b7821d95\") " pod="openstack/dnsmasq-dns-6c9c9f998c-2f5b9" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.057236 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-272a-account-create-qwhwd" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.136884 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-glgxl\" (UniqueName: \"kubernetes.io/projected/f1656cb3-3fd7-49fd-8434-101406b98d60-kube-api-access-glgxl\") pod \"f1656cb3-3fd7-49fd-8434-101406b98d60\" (UID: \"f1656cb3-3fd7-49fd-8434-101406b98d60\") " Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.137366 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/84218980-f997-4443-8dec-0bb2761f0527-combined-ca-bundle\") pod \"keystone-bootstrap-pfvgh\" (UID: \"84218980-f997-4443-8dec-0bb2761f0527\") " pod="openstack/keystone-bootstrap-pfvgh" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.137413 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/04e4465b-7d00-47ab-ac48-1468b7821d95-config\") pod \"dnsmasq-dns-6c9c9f998c-2f5b9\" (UID: \"04e4465b-7d00-47ab-ac48-1468b7821d95\") " pod="openstack/dnsmasq-dns-6c9c9f998c-2f5b9" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.137462 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lcppv\" (UniqueName: \"kubernetes.io/projected/84218980-f997-4443-8dec-0bb2761f0527-kube-api-access-lcppv\") pod \"keystone-bootstrap-pfvgh\" (UID: \"84218980-f997-4443-8dec-0bb2761f0527\") " pod="openstack/keystone-bootstrap-pfvgh" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.137487 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/04e4465b-7d00-47ab-ac48-1468b7821d95-ovsdbserver-sb\") pod \"dnsmasq-dns-6c9c9f998c-2f5b9\" (UID: \"04e4465b-7d00-47ab-ac48-1468b7821d95\") " pod="openstack/dnsmasq-dns-6c9c9f998c-2f5b9" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.137536 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/84218980-f997-4443-8dec-0bb2761f0527-scripts\") pod \"keystone-bootstrap-pfvgh\" (UID: \"84218980-f997-4443-8dec-0bb2761f0527\") " pod="openstack/keystone-bootstrap-pfvgh" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.137562 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/84218980-f997-4443-8dec-0bb2761f0527-config-data\") pod \"keystone-bootstrap-pfvgh\" (UID: \"84218980-f997-4443-8dec-0bb2761f0527\") " pod="openstack/keystone-bootstrap-pfvgh" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.137587 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/04e4465b-7d00-47ab-ac48-1468b7821d95-dns-swift-storage-0\") pod \"dnsmasq-dns-6c9c9f998c-2f5b9\" (UID: \"04e4465b-7d00-47ab-ac48-1468b7821d95\") " pod="openstack/dnsmasq-dns-6c9c9f998c-2f5b9" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.137608 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/04e4465b-7d00-47ab-ac48-1468b7821d95-ovsdbserver-nb\") pod \"dnsmasq-dns-6c9c9f998c-2f5b9\" (UID: \"04e4465b-7d00-47ab-ac48-1468b7821d95\") " pod="openstack/dnsmasq-dns-6c9c9f998c-2f5b9" Oct 09 13:46:22 crc kubenswrapper[4762]: 
I1009 13:46:22.137829 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/84218980-f997-4443-8dec-0bb2761f0527-credential-keys\") pod \"keystone-bootstrap-pfvgh\" (UID: \"84218980-f997-4443-8dec-0bb2761f0527\") " pod="openstack/keystone-bootstrap-pfvgh" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.137918 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/84218980-f997-4443-8dec-0bb2761f0527-fernet-keys\") pod \"keystone-bootstrap-pfvgh\" (UID: \"84218980-f997-4443-8dec-0bb2761f0527\") " pod="openstack/keystone-bootstrap-pfvgh" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.137956 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dzt7d\" (UniqueName: \"kubernetes.io/projected/04e4465b-7d00-47ab-ac48-1468b7821d95-kube-api-access-dzt7d\") pod \"dnsmasq-dns-6c9c9f998c-2f5b9\" (UID: \"04e4465b-7d00-47ab-ac48-1468b7821d95\") " pod="openstack/dnsmasq-dns-6c9c9f998c-2f5b9" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.137995 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/04e4465b-7d00-47ab-ac48-1468b7821d95-dns-svc\") pod \"dnsmasq-dns-6c9c9f998c-2f5b9\" (UID: \"04e4465b-7d00-47ab-ac48-1468b7821d95\") " pod="openstack/dnsmasq-dns-6c9c9f998c-2f5b9" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.139215 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/04e4465b-7d00-47ab-ac48-1468b7821d95-dns-svc\") pod \"dnsmasq-dns-6c9c9f998c-2f5b9\" (UID: \"04e4465b-7d00-47ab-ac48-1468b7821d95\") " pod="openstack/dnsmasq-dns-6c9c9f998c-2f5b9" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.143736 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/04e4465b-7d00-47ab-ac48-1468b7821d95-ovsdbserver-sb\") pod \"dnsmasq-dns-6c9c9f998c-2f5b9\" (UID: \"04e4465b-7d00-47ab-ac48-1468b7821d95\") " pod="openstack/dnsmasq-dns-6c9c9f998c-2f5b9" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.146295 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6c9c9f998c-2f5b9"] Oct 09 13:46:22 crc kubenswrapper[4762]: E1009 13:46:22.147438 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[config dns-swift-storage-0 kube-api-access-dzt7d ovsdbserver-nb], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/dnsmasq-dns-6c9c9f998c-2f5b9" podUID="04e4465b-7d00-47ab-ac48-1468b7821d95" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.153752 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-vxzqs"] Oct 09 13:46:22 crc kubenswrapper[4762]: E1009 13:46:22.154155 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f1656cb3-3fd7-49fd-8434-101406b98d60" containerName="mariadb-account-create" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.154168 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="f1656cb3-3fd7-49fd-8434-101406b98d60" containerName="mariadb-account-create" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.154320 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="f1656cb3-3fd7-49fd-8434-101406b98d60" containerName="mariadb-account-create" Oct 
09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.156934 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-vxzqs" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.159085 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/84218980-f997-4443-8dec-0bb2761f0527-combined-ca-bundle\") pod \"keystone-bootstrap-pfvgh\" (UID: \"84218980-f997-4443-8dec-0bb2761f0527\") " pod="openstack/keystone-bootstrap-pfvgh" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.159307 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f1656cb3-3fd7-49fd-8434-101406b98d60-kube-api-access-glgxl" (OuterVolumeSpecName: "kube-api-access-glgxl") pod "f1656cb3-3fd7-49fd-8434-101406b98d60" (UID: "f1656cb3-3fd7-49fd-8434-101406b98d60"). InnerVolumeSpecName "kube-api-access-glgxl". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.159310 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/04e4465b-7d00-47ab-ac48-1468b7821d95-dns-swift-storage-0\") pod \"dnsmasq-dns-6c9c9f998c-2f5b9\" (UID: \"04e4465b-7d00-47ab-ac48-1468b7821d95\") " pod="openstack/dnsmasq-dns-6c9c9f998c-2f5b9" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.159987 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/04e4465b-7d00-47ab-ac48-1468b7821d95-config\") pod \"dnsmasq-dns-6c9c9f998c-2f5b9\" (UID: \"04e4465b-7d00-47ab-ac48-1468b7821d95\") " pod="openstack/dnsmasq-dns-6c9c9f998c-2f5b9" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.163081 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.163334 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-p2q6s" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.163445 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.176338 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/84218980-f997-4443-8dec-0bb2761f0527-scripts\") pod \"keystone-bootstrap-pfvgh\" (UID: \"84218980-f997-4443-8dec-0bb2761f0527\") " pod="openstack/keystone-bootstrap-pfvgh" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.177184 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lcppv\" (UniqueName: \"kubernetes.io/projected/84218980-f997-4443-8dec-0bb2761f0527-kube-api-access-lcppv\") pod \"keystone-bootstrap-pfvgh\" (UID: \"84218980-f997-4443-8dec-0bb2761f0527\") " pod="openstack/keystone-bootstrap-pfvgh" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.177466 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/84218980-f997-4443-8dec-0bb2761f0527-credential-keys\") pod \"keystone-bootstrap-pfvgh\" (UID: \"84218980-f997-4443-8dec-0bb2761f0527\") " pod="openstack/keystone-bootstrap-pfvgh" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.177966 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 09 
13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.184746 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/04e4465b-7d00-47ab-ac48-1468b7821d95-ovsdbserver-nb\") pod \"dnsmasq-dns-6c9c9f998c-2f5b9\" (UID: \"04e4465b-7d00-47ab-ac48-1468b7821d95\") " pod="openstack/dnsmasq-dns-6c9c9f998c-2f5b9" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.187621 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/84218980-f997-4443-8dec-0bb2761f0527-fernet-keys\") pod \"keystone-bootstrap-pfvgh\" (UID: \"84218980-f997-4443-8dec-0bb2761f0527\") " pod="openstack/keystone-bootstrap-pfvgh" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.199326 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dzt7d\" (UniqueName: \"kubernetes.io/projected/04e4465b-7d00-47ab-ac48-1468b7821d95-kube-api-access-dzt7d\") pod \"dnsmasq-dns-6c9c9f998c-2f5b9\" (UID: \"04e4465b-7d00-47ab-ac48-1468b7821d95\") " pod="openstack/dnsmasq-dns-6c9c9f998c-2f5b9" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.214526 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-vxzqs"] Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.214575 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-57c957c4ff-xhvzb"] Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.215549 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.216223 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57c957c4ff-xhvzb" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.218860 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/84218980-f997-4443-8dec-0bb2761f0527-config-data\") pod \"keystone-bootstrap-pfvgh\" (UID: \"84218980-f997-4443-8dec-0bb2761f0527\") " pod="openstack/keystone-bootstrap-pfvgh" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.218977 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.224523 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.224792 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.234437 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57c957c4ff-xhvzb"] Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.242779 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f8ef7222-071d-403b-8356-7645662e1226-config-data\") pod \"placement-db-sync-vxzqs\" (UID: \"f8ef7222-071d-403b-8356-7645662e1226\") " pod="openstack/placement-db-sync-vxzqs" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.242895 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f8ef7222-071d-403b-8356-7645662e1226-combined-ca-bundle\") pod \"placement-db-sync-vxzqs\" (UID: 
\"f8ef7222-071d-403b-8356-7645662e1226\") " pod="openstack/placement-db-sync-vxzqs" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.242978 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f8ef7222-071d-403b-8356-7645662e1226-scripts\") pod \"placement-db-sync-vxzqs\" (UID: \"f8ef7222-071d-403b-8356-7645662e1226\") " pod="openstack/placement-db-sync-vxzqs" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.247416 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f8ef7222-071d-403b-8356-7645662e1226-logs\") pod \"placement-db-sync-vxzqs\" (UID: \"f8ef7222-071d-403b-8356-7645662e1226\") " pod="openstack/placement-db-sync-vxzqs" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.247488 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zzx7q\" (UniqueName: \"kubernetes.io/projected/f8ef7222-071d-403b-8356-7645662e1226-kube-api-access-zzx7q\") pod \"placement-db-sync-vxzqs\" (UID: \"f8ef7222-071d-403b-8356-7645662e1226\") " pod="openstack/placement-db-sync-vxzqs" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.247704 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-glgxl\" (UniqueName: \"kubernetes.io/projected/f1656cb3-3fd7-49fd-8434-101406b98d60-kube-api-access-glgxl\") on node \"crc\" DevicePath \"\"" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.342272 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-1fff-account-create-vhfsm" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.350708 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3d330351-1e4e-497c-a765-028b31e5b570-dns-swift-storage-0\") pod \"dnsmasq-dns-57c957c4ff-xhvzb\" (UID: \"3d330351-1e4e-497c-a765-028b31e5b570\") " pod="openstack/dnsmasq-dns-57c957c4ff-xhvzb" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.350829 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6-scripts\") pod \"ceilometer-0\" (UID: \"5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6\") " pod="openstack/ceilometer-0" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.350852 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6-run-httpd\") pod \"ceilometer-0\" (UID: \"5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6\") " pod="openstack/ceilometer-0" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.350888 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3d330351-1e4e-497c-a765-028b31e5b570-dns-svc\") pod \"dnsmasq-dns-57c957c4ff-xhvzb\" (UID: \"3d330351-1e4e-497c-a765-028b31e5b570\") " pod="openstack/dnsmasq-dns-57c957c4ff-xhvzb" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.350912 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hh2n9\" (UniqueName: 
\"kubernetes.io/projected/5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6-kube-api-access-hh2n9\") pod \"ceilometer-0\" (UID: \"5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6\") " pod="openstack/ceilometer-0" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.350934 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f8ef7222-071d-403b-8356-7645662e1226-logs\") pod \"placement-db-sync-vxzqs\" (UID: \"f8ef7222-071d-403b-8356-7645662e1226\") " pod="openstack/placement-db-sync-vxzqs" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.350972 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j46sn\" (UniqueName: \"kubernetes.io/projected/3d330351-1e4e-497c-a765-028b31e5b570-kube-api-access-j46sn\") pod \"dnsmasq-dns-57c957c4ff-xhvzb\" (UID: \"3d330351-1e4e-497c-a765-028b31e5b570\") " pod="openstack/dnsmasq-dns-57c957c4ff-xhvzb" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.350993 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zzx7q\" (UniqueName: \"kubernetes.io/projected/f8ef7222-071d-403b-8356-7645662e1226-kube-api-access-zzx7q\") pod \"placement-db-sync-vxzqs\" (UID: \"f8ef7222-071d-403b-8356-7645662e1226\") " pod="openstack/placement-db-sync-vxzqs" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.351011 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6\") " pod="openstack/ceilometer-0" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.351056 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6-log-httpd\") pod \"ceilometer-0\" (UID: \"5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6\") " pod="openstack/ceilometer-0" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.351093 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f8ef7222-071d-403b-8356-7645662e1226-config-data\") pod \"placement-db-sync-vxzqs\" (UID: \"f8ef7222-071d-403b-8356-7645662e1226\") " pod="openstack/placement-db-sync-vxzqs" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.351142 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f8ef7222-071d-403b-8356-7645662e1226-combined-ca-bundle\") pod \"placement-db-sync-vxzqs\" (UID: \"f8ef7222-071d-403b-8356-7645662e1226\") " pod="openstack/placement-db-sync-vxzqs" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.351165 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f8ef7222-071d-403b-8356-7645662e1226-scripts\") pod \"placement-db-sync-vxzqs\" (UID: \"f8ef7222-071d-403b-8356-7645662e1226\") " pod="openstack/placement-db-sync-vxzqs" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.351241 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6-config-data\") pod \"ceilometer-0\" (UID: 
\"5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6\") " pod="openstack/ceilometer-0" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.351315 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3d330351-1e4e-497c-a765-028b31e5b570-config\") pod \"dnsmasq-dns-57c957c4ff-xhvzb\" (UID: \"3d330351-1e4e-497c-a765-028b31e5b570\") " pod="openstack/dnsmasq-dns-57c957c4ff-xhvzb" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.351403 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3d330351-1e4e-497c-a765-028b31e5b570-ovsdbserver-sb\") pod \"dnsmasq-dns-57c957c4ff-xhvzb\" (UID: \"3d330351-1e4e-497c-a765-028b31e5b570\") " pod="openstack/dnsmasq-dns-57c957c4ff-xhvzb" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.351433 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6\") " pod="openstack/ceilometer-0" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.351462 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3d330351-1e4e-497c-a765-028b31e5b570-ovsdbserver-nb\") pod \"dnsmasq-dns-57c957c4ff-xhvzb\" (UID: \"3d330351-1e4e-497c-a765-028b31e5b570\") " pod="openstack/dnsmasq-dns-57c957c4ff-xhvzb" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.352046 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f8ef7222-071d-403b-8356-7645662e1226-logs\") pod \"placement-db-sync-vxzqs\" (UID: \"f8ef7222-071d-403b-8356-7645662e1226\") " pod="openstack/placement-db-sync-vxzqs" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.356673 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f8ef7222-071d-403b-8356-7645662e1226-scripts\") pod \"placement-db-sync-vxzqs\" (UID: \"f8ef7222-071d-403b-8356-7645662e1226\") " pod="openstack/placement-db-sync-vxzqs" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.371479 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f8ef7222-071d-403b-8356-7645662e1226-combined-ca-bundle\") pod \"placement-db-sync-vxzqs\" (UID: \"f8ef7222-071d-403b-8356-7645662e1226\") " pod="openstack/placement-db-sync-vxzqs" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.372628 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f8ef7222-071d-403b-8356-7645662e1226-config-data\") pod \"placement-db-sync-vxzqs\" (UID: \"f8ef7222-071d-403b-8356-7645662e1226\") " pod="openstack/placement-db-sync-vxzqs" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.384125 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zzx7q\" (UniqueName: \"kubernetes.io/projected/f8ef7222-071d-403b-8356-7645662e1226-kube-api-access-zzx7q\") pod \"placement-db-sync-vxzqs\" (UID: \"f8ef7222-071d-403b-8356-7645662e1226\") " pod="openstack/placement-db-sync-vxzqs" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.453174 
4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4nmlj\" (UniqueName: \"kubernetes.io/projected/b026cfa5-4949-45d8-9d57-ec509cc84936-kube-api-access-4nmlj\") pod \"b026cfa5-4949-45d8-9d57-ec509cc84936\" (UID: \"b026cfa5-4949-45d8-9d57-ec509cc84936\") " Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.453478 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3d330351-1e4e-497c-a765-028b31e5b570-ovsdbserver-sb\") pod \"dnsmasq-dns-57c957c4ff-xhvzb\" (UID: \"3d330351-1e4e-497c-a765-028b31e5b570\") " pod="openstack/dnsmasq-dns-57c957c4ff-xhvzb" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.453521 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6\") " pod="openstack/ceilometer-0" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.453549 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3d330351-1e4e-497c-a765-028b31e5b570-ovsdbserver-nb\") pod \"dnsmasq-dns-57c957c4ff-xhvzb\" (UID: \"3d330351-1e4e-497c-a765-028b31e5b570\") " pod="openstack/dnsmasq-dns-57c957c4ff-xhvzb" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.453594 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3d330351-1e4e-497c-a765-028b31e5b570-dns-swift-storage-0\") pod \"dnsmasq-dns-57c957c4ff-xhvzb\" (UID: \"3d330351-1e4e-497c-a765-028b31e5b570\") " pod="openstack/dnsmasq-dns-57c957c4ff-xhvzb" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.453664 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6-scripts\") pod \"ceilometer-0\" (UID: \"5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6\") " pod="openstack/ceilometer-0" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.453701 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6-run-httpd\") pod \"ceilometer-0\" (UID: \"5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6\") " pod="openstack/ceilometer-0" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.453726 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3d330351-1e4e-497c-a765-028b31e5b570-dns-svc\") pod \"dnsmasq-dns-57c957c4ff-xhvzb\" (UID: \"3d330351-1e4e-497c-a765-028b31e5b570\") " pod="openstack/dnsmasq-dns-57c957c4ff-xhvzb" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.453761 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hh2n9\" (UniqueName: \"kubernetes.io/projected/5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6-kube-api-access-hh2n9\") pod \"ceilometer-0\" (UID: \"5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6\") " pod="openstack/ceilometer-0" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.453790 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j46sn\" (UniqueName: \"kubernetes.io/projected/3d330351-1e4e-497c-a765-028b31e5b570-kube-api-access-j46sn\") pod 
\"dnsmasq-dns-57c957c4ff-xhvzb\" (UID: \"3d330351-1e4e-497c-a765-028b31e5b570\") " pod="openstack/dnsmasq-dns-57c957c4ff-xhvzb" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.453842 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6\") " pod="openstack/ceilometer-0" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.453872 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6-log-httpd\") pod \"ceilometer-0\" (UID: \"5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6\") " pod="openstack/ceilometer-0" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.454008 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6-config-data\") pod \"ceilometer-0\" (UID: \"5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6\") " pod="openstack/ceilometer-0" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.454058 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3d330351-1e4e-497c-a765-028b31e5b570-config\") pod \"dnsmasq-dns-57c957c4ff-xhvzb\" (UID: \"3d330351-1e4e-497c-a765-028b31e5b570\") " pod="openstack/dnsmasq-dns-57c957c4ff-xhvzb" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.454300 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6-run-httpd\") pod \"ceilometer-0\" (UID: \"5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6\") " pod="openstack/ceilometer-0" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.454704 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3d330351-1e4e-497c-a765-028b31e5b570-dns-swift-storage-0\") pod \"dnsmasq-dns-57c957c4ff-xhvzb\" (UID: \"3d330351-1e4e-497c-a765-028b31e5b570\") " pod="openstack/dnsmasq-dns-57c957c4ff-xhvzb" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.455317 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-pfvgh" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.456085 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3d330351-1e4e-497c-a765-028b31e5b570-ovsdbserver-sb\") pod \"dnsmasq-dns-57c957c4ff-xhvzb\" (UID: \"3d330351-1e4e-497c-a765-028b31e5b570\") " pod="openstack/dnsmasq-dns-57c957c4ff-xhvzb" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.456317 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3d330351-1e4e-497c-a765-028b31e5b570-config\") pod \"dnsmasq-dns-57c957c4ff-xhvzb\" (UID: \"3d330351-1e4e-497c-a765-028b31e5b570\") " pod="openstack/dnsmasq-dns-57c957c4ff-xhvzb" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.456382 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6-log-httpd\") pod \"ceilometer-0\" (UID: \"5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6\") " pod="openstack/ceilometer-0" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.456427 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3d330351-1e4e-497c-a765-028b31e5b570-dns-svc\") pod \"dnsmasq-dns-57c957c4ff-xhvzb\" (UID: \"3d330351-1e4e-497c-a765-028b31e5b570\") " pod="openstack/dnsmasq-dns-57c957c4ff-xhvzb" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.456543 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3d330351-1e4e-497c-a765-028b31e5b570-ovsdbserver-nb\") pod \"dnsmasq-dns-57c957c4ff-xhvzb\" (UID: \"3d330351-1e4e-497c-a765-028b31e5b570\") " pod="openstack/dnsmasq-dns-57c957c4ff-xhvzb" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.459822 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b026cfa5-4949-45d8-9d57-ec509cc84936-kube-api-access-4nmlj" (OuterVolumeSpecName: "kube-api-access-4nmlj") pod "b026cfa5-4949-45d8-9d57-ec509cc84936" (UID: "b026cfa5-4949-45d8-9d57-ec509cc84936"). InnerVolumeSpecName "kube-api-access-4nmlj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.466056 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6\") " pod="openstack/ceilometer-0" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.467585 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6-scripts\") pod \"ceilometer-0\" (UID: \"5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6\") " pod="openstack/ceilometer-0" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.468565 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6\") " pod="openstack/ceilometer-0" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.473626 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6-config-data\") pod \"ceilometer-0\" (UID: \"5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6\") " pod="openstack/ceilometer-0" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.477397 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hh2n9\" (UniqueName: \"kubernetes.io/projected/5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6-kube-api-access-hh2n9\") pod \"ceilometer-0\" (UID: \"5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6\") " pod="openstack/ceilometer-0" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.478313 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j46sn\" (UniqueName: \"kubernetes.io/projected/3d330351-1e4e-497c-a765-028b31e5b570-kube-api-access-j46sn\") pod \"dnsmasq-dns-57c957c4ff-xhvzb\" (UID: \"3d330351-1e4e-497c-a765-028b31e5b570\") " pod="openstack/dnsmasq-dns-57c957c4ff-xhvzb" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.538430 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-1fff-account-create-vhfsm" event={"ID":"b026cfa5-4949-45d8-9d57-ec509cc84936","Type":"ContainerDied","Data":"fe8fddadf1ddf88606a3ae9fa6eda480dc50cc84fefb0b8bd491d02acd7d18a6"} Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.538490 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fe8fddadf1ddf88606a3ae9fa6eda480dc50cc84fefb0b8bd491d02acd7d18a6" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.538485 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-1fff-account-create-vhfsm" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.543779 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-272a-account-create-qwhwd" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.544078 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6c9c9f998c-2f5b9" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.544064 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-272a-account-create-qwhwd" event={"ID":"f1656cb3-3fd7-49fd-8434-101406b98d60","Type":"ContainerDied","Data":"5ddf5dadcec8229cf32b0dbd1c2111af7dac9c1822d076b9892a2070cedaf6e2"} Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.544754 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5ddf5dadcec8229cf32b0dbd1c2111af7dac9c1822d076b9892a2070cedaf6e2" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.555690 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4nmlj\" (UniqueName: \"kubernetes.io/projected/b026cfa5-4949-45d8-9d57-ec509cc84936-kube-api-access-4nmlj\") on node \"crc\" DevicePath \"\"" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.558375 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6c9c9f998c-2f5b9" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.618395 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-vxzqs" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.656581 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/04e4465b-7d00-47ab-ac48-1468b7821d95-config\") pod \"04e4465b-7d00-47ab-ac48-1468b7821d95\" (UID: \"04e4465b-7d00-47ab-ac48-1468b7821d95\") " Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.656658 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dzt7d\" (UniqueName: \"kubernetes.io/projected/04e4465b-7d00-47ab-ac48-1468b7821d95-kube-api-access-dzt7d\") pod \"04e4465b-7d00-47ab-ac48-1468b7821d95\" (UID: \"04e4465b-7d00-47ab-ac48-1468b7821d95\") " Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.656783 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/04e4465b-7d00-47ab-ac48-1468b7821d95-ovsdbserver-nb\") pod \"04e4465b-7d00-47ab-ac48-1468b7821d95\" (UID: \"04e4465b-7d00-47ab-ac48-1468b7821d95\") " Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.656807 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/04e4465b-7d00-47ab-ac48-1468b7821d95-dns-svc\") pod \"04e4465b-7d00-47ab-ac48-1468b7821d95\" (UID: \"04e4465b-7d00-47ab-ac48-1468b7821d95\") " Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.656836 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/04e4465b-7d00-47ab-ac48-1468b7821d95-ovsdbserver-sb\") pod \"04e4465b-7d00-47ab-ac48-1468b7821d95\" (UID: \"04e4465b-7d00-47ab-ac48-1468b7821d95\") " Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.656890 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/04e4465b-7d00-47ab-ac48-1468b7821d95-dns-swift-storage-0\") pod \"04e4465b-7d00-47ab-ac48-1468b7821d95\" (UID: \"04e4465b-7d00-47ab-ac48-1468b7821d95\") " Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.657027 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/configmap/04e4465b-7d00-47ab-ac48-1468b7821d95-config" (OuterVolumeSpecName: "config") pod "04e4465b-7d00-47ab-ac48-1468b7821d95" (UID: "04e4465b-7d00-47ab-ac48-1468b7821d95"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.657269 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/04e4465b-7d00-47ab-ac48-1468b7821d95-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "04e4465b-7d00-47ab-ac48-1468b7821d95" (UID: "04e4465b-7d00-47ab-ac48-1468b7821d95"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.657372 4762 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/04e4465b-7d00-47ab-ac48-1468b7821d95-config\") on node \"crc\" DevicePath \"\"" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.657396 4762 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/04e4465b-7d00-47ab-ac48-1468b7821d95-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.658650 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/04e4465b-7d00-47ab-ac48-1468b7821d95-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "04e4465b-7d00-47ab-ac48-1468b7821d95" (UID: "04e4465b-7d00-47ab-ac48-1468b7821d95"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.658719 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/04e4465b-7d00-47ab-ac48-1468b7821d95-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "04e4465b-7d00-47ab-ac48-1468b7821d95" (UID: "04e4465b-7d00-47ab-ac48-1468b7821d95"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.658726 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/04e4465b-7d00-47ab-ac48-1468b7821d95-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "04e4465b-7d00-47ab-ac48-1468b7821d95" (UID: "04e4465b-7d00-47ab-ac48-1468b7821d95"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.665181 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/04e4465b-7d00-47ab-ac48-1468b7821d95-kube-api-access-dzt7d" (OuterVolumeSpecName: "kube-api-access-dzt7d") pod "04e4465b-7d00-47ab-ac48-1468b7821d95" (UID: "04e4465b-7d00-47ab-ac48-1468b7821d95"). InnerVolumeSpecName "kube-api-access-dzt7d". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.673671 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.748478 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-57c957c4ff-xhvzb"
Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.759525 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dzt7d\" (UniqueName: \"kubernetes.io/projected/04e4465b-7d00-47ab-ac48-1468b7821d95-kube-api-access-dzt7d\") on node \"crc\" DevicePath \"\""
Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.759894 4762 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/04e4465b-7d00-47ab-ac48-1468b7821d95-dns-svc\") on node \"crc\" DevicePath \"\""
Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.759905 4762 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/04e4465b-7d00-47ab-ac48-1468b7821d95-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.759914 4762 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/04e4465b-7d00-47ab-ac48-1468b7821d95-dns-swift-storage-0\") on node \"crc\" DevicePath \"\""
Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.924358 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"]
Oct 09 13:46:22 crc kubenswrapper[4762]: E1009 13:46:22.924780 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b026cfa5-4949-45d8-9d57-ec509cc84936" containerName="mariadb-account-create"
Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.924801 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="b026cfa5-4949-45d8-9d57-ec509cc84936" containerName="mariadb-account-create"
Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.924966 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="b026cfa5-4949-45d8-9d57-ec509cc84936" containerName="mariadb-account-create"
Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.925971 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.928723 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts"
Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.928874 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data"
Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.929224 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-tsjl8"
Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.943787 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"]
Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.963942 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b59f3d3a-1320-45ea-9922-1e5c47636f98-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"b59f3d3a-1320-45ea-9922-1e5c47636f98\") " pod="openstack/glance-default-external-api-0"
Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.964023 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"b59f3d3a-1320-45ea-9922-1e5c47636f98\") " pod="openstack/glance-default-external-api-0"
Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.964051 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b59f3d3a-1320-45ea-9922-1e5c47636f98-logs\") pod \"glance-default-external-api-0\" (UID: \"b59f3d3a-1320-45ea-9922-1e5c47636f98\") " pod="openstack/glance-default-external-api-0"
Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.964105 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b59f3d3a-1320-45ea-9922-1e5c47636f98-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"b59f3d3a-1320-45ea-9922-1e5c47636f98\") " pod="openstack/glance-default-external-api-0"
Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.964133 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sndpg\" (UniqueName: \"kubernetes.io/projected/b59f3d3a-1320-45ea-9922-1e5c47636f98-kube-api-access-sndpg\") pod \"glance-default-external-api-0\" (UID: \"b59f3d3a-1320-45ea-9922-1e5c47636f98\") " pod="openstack/glance-default-external-api-0"
Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.964155 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b59f3d3a-1320-45ea-9922-1e5c47636f98-config-data\") pod \"glance-default-external-api-0\" (UID: \"b59f3d3a-1320-45ea-9922-1e5c47636f98\") " pod="openstack/glance-default-external-api-0"
Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.964188 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b59f3d3a-1320-45ea-9922-1e5c47636f98-scripts\") pod \"glance-default-external-api-0\" (UID: \"b59f3d3a-1320-45ea-9922-1e5c47636f98\") " pod="openstack/glance-default-external-api-0"
Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.966896 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-b30a-account-create-ctfdm"
Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.995860 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-pfvgh"]
Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.998728 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"]
Oct 09 13:46:22 crc kubenswrapper[4762]: E1009 13:46:22.999330 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="15ee54a3-2cd6-4203-a472-9274c1ea9018" containerName="mariadb-account-create"
Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.999438 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="15ee54a3-2cd6-4203-a472-9274c1ea9018" containerName="mariadb-account-create"
Oct 09 13:46:22 crc kubenswrapper[4762]: I1009 13:46:22.999757 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="15ee54a3-2cd6-4203-a472-9274c1ea9018" containerName="mariadb-account-create"
Oct 09 13:46:23 crc kubenswrapper[4762]: I1009 13:46:23.002011 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Oct 09 13:46:23 crc kubenswrapper[4762]: I1009 13:46:23.007016 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data"
Oct 09 13:46:23 crc kubenswrapper[4762]: I1009 13:46:23.039586 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"]
Oct 09 13:46:23 crc kubenswrapper[4762]: I1009 13:46:23.073789 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"b59f3d3a-1320-45ea-9922-1e5c47636f98\") " pod="openstack/glance-default-external-api-0"
Oct 09 13:46:23 crc kubenswrapper[4762]: I1009 13:46:23.073857 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b59f3d3a-1320-45ea-9922-1e5c47636f98-logs\") pod \"glance-default-external-api-0\" (UID: \"b59f3d3a-1320-45ea-9922-1e5c47636f98\") " pod="openstack/glance-default-external-api-0"
Oct 09 13:46:23 crc kubenswrapper[4762]: I1009 13:46:23.074167 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g55md\" (UniqueName: \"kubernetes.io/projected/550dce7e-f8f7-4df2-b338-028e9726c591-kube-api-access-g55md\") pod \"glance-default-internal-api-0\" (UID: \"550dce7e-f8f7-4df2-b338-028e9726c591\") " pod="openstack/glance-default-internal-api-0"
Oct 09 13:46:23 crc kubenswrapper[4762]: I1009 13:46:23.074212 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/550dce7e-f8f7-4df2-b338-028e9726c591-logs\") pod \"glance-default-internal-api-0\" (UID: \"550dce7e-f8f7-4df2-b338-028e9726c591\") " pod="openstack/glance-default-internal-api-0"
Oct 09 13:46:23 crc kubenswrapper[4762]: I1009 13:46:23.074248 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b59f3d3a-1320-45ea-9922-1e5c47636f98-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"b59f3d3a-1320-45ea-9922-1e5c47636f98\") " pod="openstack/glance-default-external-api-0"
Oct 09 13:46:23 crc kubenswrapper[4762]: I1009 13:46:23.074270 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sndpg\" (UniqueName: \"kubernetes.io/projected/b59f3d3a-1320-45ea-9922-1e5c47636f98-kube-api-access-sndpg\") pod \"glance-default-external-api-0\" (UID: \"b59f3d3a-1320-45ea-9922-1e5c47636f98\") " pod="openstack/glance-default-external-api-0"
Oct 09 13:46:23 crc kubenswrapper[4762]: I1009 13:46:23.074301 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b59f3d3a-1320-45ea-9922-1e5c47636f98-config-data\") pod \"glance-default-external-api-0\" (UID: \"b59f3d3a-1320-45ea-9922-1e5c47636f98\") " pod="openstack/glance-default-external-api-0"
Oct 09 13:46:23 crc kubenswrapper[4762]: I1009 13:46:23.074335 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b59f3d3a-1320-45ea-9922-1e5c47636f98-scripts\") pod \"glance-default-external-api-0\" (UID: \"b59f3d3a-1320-45ea-9922-1e5c47636f98\") " pod="openstack/glance-default-external-api-0"
Oct 09 13:46:23 crc kubenswrapper[4762]: I1009 13:46:23.074365 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/550dce7e-f8f7-4df2-b338-028e9726c591-scripts\") pod \"glance-default-internal-api-0\" (UID: \"550dce7e-f8f7-4df2-b338-028e9726c591\") " pod="openstack/glance-default-internal-api-0"
Oct 09 13:46:23 crc kubenswrapper[4762]: I1009 13:46:23.074403 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"550dce7e-f8f7-4df2-b338-028e9726c591\") " pod="openstack/glance-default-internal-api-0"
Oct 09 13:46:23 crc kubenswrapper[4762]: I1009 13:46:23.074454 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/550dce7e-f8f7-4df2-b338-028e9726c591-config-data\") pod \"glance-default-internal-api-0\" (UID: \"550dce7e-f8f7-4df2-b338-028e9726c591\") " pod="openstack/glance-default-internal-api-0"
Oct 09 13:46:23 crc kubenswrapper[4762]: I1009 13:46:23.074511 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/550dce7e-f8f7-4df2-b338-028e9726c591-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"550dce7e-f8f7-4df2-b338-028e9726c591\") " pod="openstack/glance-default-internal-api-0"
Oct 09 13:46:23 crc kubenswrapper[4762]: I1009 13:46:23.074581 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/550dce7e-f8f7-4df2-b338-028e9726c591-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"550dce7e-f8f7-4df2-b338-028e9726c591\") " pod="openstack/glance-default-internal-api-0"
Oct 09 13:46:23 crc kubenswrapper[4762]: I1009 13:46:23.074628 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b59f3d3a-1320-45ea-9922-1e5c47636f98-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"b59f3d3a-1320-45ea-9922-1e5c47636f98\") " pod="openstack/glance-default-external-api-0"
Oct 09 13:46:23 crc kubenswrapper[4762]: I1009 13:46:23.076150 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b59f3d3a-1320-45ea-9922-1e5c47636f98-logs\") pod \"glance-default-external-api-0\" (UID: \"b59f3d3a-1320-45ea-9922-1e5c47636f98\") " pod="openstack/glance-default-external-api-0"
Oct 09 13:46:23 crc kubenswrapper[4762]: I1009 13:46:23.076360 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b59f3d3a-1320-45ea-9922-1e5c47636f98-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"b59f3d3a-1320-45ea-9922-1e5c47636f98\") " pod="openstack/glance-default-external-api-0"
Oct 09 13:46:23 crc kubenswrapper[4762]: I1009 13:46:23.078137 4762 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"b59f3d3a-1320-45ea-9922-1e5c47636f98\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/glance-default-external-api-0"
Oct 09 13:46:23 crc kubenswrapper[4762]: I1009 13:46:23.081142 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b59f3d3a-1320-45ea-9922-1e5c47636f98-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"b59f3d3a-1320-45ea-9922-1e5c47636f98\") " pod="openstack/glance-default-external-api-0"
Oct 09 13:46:23 crc kubenswrapper[4762]: I1009 13:46:23.084569 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b59f3d3a-1320-45ea-9922-1e5c47636f98-config-data\") pod \"glance-default-external-api-0\" (UID: \"b59f3d3a-1320-45ea-9922-1e5c47636f98\") " pod="openstack/glance-default-external-api-0"
Oct 09 13:46:23 crc kubenswrapper[4762]: I1009 13:46:23.094525 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b59f3d3a-1320-45ea-9922-1e5c47636f98-scripts\") pod \"glance-default-external-api-0\" (UID: \"b59f3d3a-1320-45ea-9922-1e5c47636f98\") " pod="openstack/glance-default-external-api-0"
Oct 09 13:46:23 crc kubenswrapper[4762]: I1009 13:46:23.104947 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sndpg\" (UniqueName: \"kubernetes.io/projected/b59f3d3a-1320-45ea-9922-1e5c47636f98-kube-api-access-sndpg\") pod \"glance-default-external-api-0\" (UID: \"b59f3d3a-1320-45ea-9922-1e5c47636f98\") " pod="openstack/glance-default-external-api-0"
Oct 09 13:46:23 crc kubenswrapper[4762]: I1009 13:46:23.132022 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"b59f3d3a-1320-45ea-9922-1e5c47636f98\") " pod="openstack/glance-default-external-api-0"
Oct 09 13:46:23 crc kubenswrapper[4762]: I1009 13:46:23.190480 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ld42f\" (UniqueName: \"kubernetes.io/projected/15ee54a3-2cd6-4203-a472-9274c1ea9018-kube-api-access-ld42f\") pod \"15ee54a3-2cd6-4203-a472-9274c1ea9018\" (UID: \"15ee54a3-2cd6-4203-a472-9274c1ea9018\") "
Oct 09 13:46:23 crc kubenswrapper[4762]: I1009 13:46:23.191052 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g55md\" (UniqueName: \"kubernetes.io/projected/550dce7e-f8f7-4df2-b338-028e9726c591-kube-api-access-g55md\") pod \"glance-default-internal-api-0\" (UID: \"550dce7e-f8f7-4df2-b338-028e9726c591\") " pod="openstack/glance-default-internal-api-0"
Oct 09 13:46:23 crc kubenswrapper[4762]: I1009 13:46:23.191129 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/550dce7e-f8f7-4df2-b338-028e9726c591-logs\") pod \"glance-default-internal-api-0\" (UID: \"550dce7e-f8f7-4df2-b338-028e9726c591\") " pod="openstack/glance-default-internal-api-0"
Oct 09 13:46:23 crc kubenswrapper[4762]: I1009 13:46:23.191224 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/550dce7e-f8f7-4df2-b338-028e9726c591-scripts\") pod \"glance-default-internal-api-0\" (UID: \"550dce7e-f8f7-4df2-b338-028e9726c591\") " pod="openstack/glance-default-internal-api-0"
Oct 09 13:46:23 crc kubenswrapper[4762]: I1009 13:46:23.191324 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"550dce7e-f8f7-4df2-b338-028e9726c591\") " pod="openstack/glance-default-internal-api-0"
Oct 09 13:46:23 crc kubenswrapper[4762]: I1009 13:46:23.191403 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/550dce7e-f8f7-4df2-b338-028e9726c591-config-data\") pod \"glance-default-internal-api-0\" (UID: \"550dce7e-f8f7-4df2-b338-028e9726c591\") " pod="openstack/glance-default-internal-api-0"
Oct 09 13:46:23 crc kubenswrapper[4762]: I1009 13:46:23.191431 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/550dce7e-f8f7-4df2-b338-028e9726c591-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"550dce7e-f8f7-4df2-b338-028e9726c591\") " pod="openstack/glance-default-internal-api-0"
Oct 09 13:46:23 crc kubenswrapper[4762]: I1009 13:46:23.191545 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/550dce7e-f8f7-4df2-b338-028e9726c591-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"550dce7e-f8f7-4df2-b338-028e9726c591\") " pod="openstack/glance-default-internal-api-0"
Oct 09 13:46:23 crc kubenswrapper[4762]: I1009 13:46:23.196173 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-vxzqs"]
Oct 09 13:46:23 crc kubenswrapper[4762]: I1009 13:46:23.207433 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/15ee54a3-2cd6-4203-a472-9274c1ea9018-kube-api-access-ld42f" (OuterVolumeSpecName: "kube-api-access-ld42f") pod "15ee54a3-2cd6-4203-a472-9274c1ea9018" (UID: "15ee54a3-2cd6-4203-a472-9274c1ea9018"). InnerVolumeSpecName "kube-api-access-ld42f". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 09 13:46:23 crc kubenswrapper[4762]: I1009 13:46:23.209878 4762 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"550dce7e-f8f7-4df2-b338-028e9726c591\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/glance-default-internal-api-0"
Oct 09 13:46:23 crc kubenswrapper[4762]: I1009 13:46:23.221887 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/550dce7e-f8f7-4df2-b338-028e9726c591-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"550dce7e-f8f7-4df2-b338-028e9726c591\") " pod="openstack/glance-default-internal-api-0"
Oct 09 13:46:23 crc kubenswrapper[4762]: I1009 13:46:23.222287 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/550dce7e-f8f7-4df2-b338-028e9726c591-scripts\") pod \"glance-default-internal-api-0\" (UID: \"550dce7e-f8f7-4df2-b338-028e9726c591\") " pod="openstack/glance-default-internal-api-0"
Oct 09 13:46:23 crc kubenswrapper[4762]: I1009 13:46:23.255501 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Oct 09 13:46:23 crc kubenswrapper[4762]: I1009 13:46:23.269677 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"550dce7e-f8f7-4df2-b338-028e9726c591\") " pod="openstack/glance-default-internal-api-0"
Oct 09 13:46:23 crc kubenswrapper[4762]: I1009 13:46:23.273360 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/550dce7e-f8f7-4df2-b338-028e9726c591-config-data\") pod \"glance-default-internal-api-0\" (UID: \"550dce7e-f8f7-4df2-b338-028e9726c591\") " pod="openstack/glance-default-internal-api-0"
Oct 09 13:46:23 crc kubenswrapper[4762]: I1009 13:46:23.273622 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/550dce7e-f8f7-4df2-b338-028e9726c591-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"550dce7e-f8f7-4df2-b338-028e9726c591\") " pod="openstack/glance-default-internal-api-0"
Oct 09 13:46:23 crc kubenswrapper[4762]: I1009 13:46:23.274248 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/550dce7e-f8f7-4df2-b338-028e9726c591-logs\") pod \"glance-default-internal-api-0\" (UID: \"550dce7e-f8f7-4df2-b338-028e9726c591\") " pod="openstack/glance-default-internal-api-0"
Oct 09 13:46:23 crc kubenswrapper[4762]: I1009 13:46:23.279456 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g55md\" (UniqueName: \"kubernetes.io/projected/550dce7e-f8f7-4df2-b338-028e9726c591-kube-api-access-g55md\") pod \"glance-default-internal-api-0\" (UID: \"550dce7e-f8f7-4df2-b338-028e9726c591\") " pod="openstack/glance-default-internal-api-0"
Oct 09 13:46:23 crc kubenswrapper[4762]: I1009 13:46:23.294351 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ld42f\" (UniqueName: \"kubernetes.io/projected/15ee54a3-2cd6-4203-a472-9274c1ea9018-kube-api-access-ld42f\") on node \"crc\" DevicePath \"\""
Oct 09 13:46:23 crc kubenswrapper[4762]: I1009 13:46:23.326432 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Oct 09 13:46:23 crc kubenswrapper[4762]: I1009 13:46:23.336061 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Oct 09 13:46:23 crc kubenswrapper[4762]: I1009 13:46:23.416341 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57c957c4ff-xhvzb"]
Oct 09 13:46:23 crc kubenswrapper[4762]: I1009 13:46:23.565005 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-b30a-account-create-ctfdm" event={"ID":"15ee54a3-2cd6-4203-a472-9274c1ea9018","Type":"ContainerDied","Data":"9347520a58fad9ae03c29f590b3b2da763d7488c78381b9618691f6dc434b507"}
Oct 09 13:46:23 crc kubenswrapper[4762]: I1009 13:46:23.565419 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9347520a58fad9ae03c29f590b3b2da763d7488c78381b9618691f6dc434b507"
Oct 09 13:46:23 crc kubenswrapper[4762]: I1009 13:46:23.565057 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-b30a-account-create-ctfdm"
Oct 09 13:46:23 crc kubenswrapper[4762]: I1009 13:46:23.568197 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57c957c4ff-xhvzb" event={"ID":"3d330351-1e4e-497c-a765-028b31e5b570","Type":"ContainerStarted","Data":"c6df809bfd1275712425b2b0f21773b14ea0105dcb0947818b9c91c2f4ddcd16"}
Oct 09 13:46:23 crc kubenswrapper[4762]: I1009 13:46:23.569488 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6","Type":"ContainerStarted","Data":"b91cc6b46d023edfe3e07ce395acf254713551cfb8ea375978638666cdb76e58"}
Oct 09 13:46:23 crc kubenswrapper[4762]: I1009 13:46:23.570912 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-pfvgh" event={"ID":"84218980-f997-4443-8dec-0bb2761f0527","Type":"ContainerStarted","Data":"8a918541d8920edbd8173b4d0c767ebe627df931d17a239c47eee8c9c8ba420f"}
Oct 09 13:46:23 crc kubenswrapper[4762]: I1009 13:46:23.572202 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-895cf5cf-khqhn" podUID="4af12e84-db98-4019-a826-0fe154662f7e" containerName="dnsmasq-dns" containerID="cri-o://7e15d289baeff1ce19d7b3ca580245b7561af443b7f6aa7373c69ce247ca09ae" gracePeriod=10
Oct 09 13:46:23 crc kubenswrapper[4762]: I1009 13:46:23.572350 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6c9c9f998c-2f5b9"
Oct 09 13:46:23 crc kubenswrapper[4762]: I1009 13:46:23.572365 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-vxzqs" event={"ID":"f8ef7222-071d-403b-8356-7645662e1226","Type":"ContainerStarted","Data":"44f31119989e50a82a1056b4405f0b9f5e91c2d3c7e0c5a434173e38e6d6c641"}
Oct 09 13:46:23 crc kubenswrapper[4762]: I1009 13:46:23.649517 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6c9c9f998c-2f5b9"]
Oct 09 13:46:23 crc kubenswrapper[4762]: I1009 13:46:23.655961 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6c9c9f998c-2f5b9"]
Oct 09 13:46:23 crc kubenswrapper[4762]: I1009 13:46:23.934517 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"]
Oct 09 13:46:24 crc kubenswrapper[4762]: I1009 13:46:24.040408 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-glh4c"]
Oct 09 13:46:24 crc kubenswrapper[4762]: I1009 13:46:24.041661 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-glh4c"
Oct 09 13:46:24 crc kubenswrapper[4762]: I1009 13:46:24.043902 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data"
Oct 09 13:46:24 crc kubenswrapper[4762]: I1009 13:46:24.044148 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts"
Oct 09 13:46:24 crc kubenswrapper[4762]: I1009 13:46:24.044286 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-89vpf"
Oct 09 13:46:24 crc kubenswrapper[4762]: I1009 13:46:24.051611 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-glh4c"]
Oct 09 13:46:24 crc kubenswrapper[4762]: I1009 13:46:24.107917 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-895cf5cf-khqhn"
Oct 09 13:46:24 crc kubenswrapper[4762]: I1009 13:46:24.182554 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"]
Oct 09 13:46:24 crc kubenswrapper[4762]: I1009 13:46:24.211280 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x6ckk\" (UniqueName: \"kubernetes.io/projected/4af12e84-db98-4019-a826-0fe154662f7e-kube-api-access-x6ckk\") pod \"4af12e84-db98-4019-a826-0fe154662f7e\" (UID: \"4af12e84-db98-4019-a826-0fe154662f7e\") "
Oct 09 13:46:24 crc kubenswrapper[4762]: I1009 13:46:24.213139 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4af12e84-db98-4019-a826-0fe154662f7e-dns-svc\") pod \"4af12e84-db98-4019-a826-0fe154662f7e\" (UID: \"4af12e84-db98-4019-a826-0fe154662f7e\") "
Oct 09 13:46:24 crc kubenswrapper[4762]: I1009 13:46:24.213198 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4af12e84-db98-4019-a826-0fe154662f7e-ovsdbserver-nb\") pod \"4af12e84-db98-4019-a826-0fe154662f7e\" (UID: \"4af12e84-db98-4019-a826-0fe154662f7e\") "
Oct 09 13:46:24 crc kubenswrapper[4762]: I1009 13:46:24.213283 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4af12e84-db98-4019-a826-0fe154662f7e-config\") pod \"4af12e84-db98-4019-a826-0fe154662f7e\" (UID: \"4af12e84-db98-4019-a826-0fe154662f7e\") "
Oct 09 13:46:24 crc kubenswrapper[4762]: I1009 13:46:24.213358 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4af12e84-db98-4019-a826-0fe154662f7e-dns-swift-storage-0\") pod \"4af12e84-db98-4019-a826-0fe154662f7e\" (UID: \"4af12e84-db98-4019-a826-0fe154662f7e\") "
Oct 09 13:46:24 crc kubenswrapper[4762]: I1009 13:46:24.213397 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4af12e84-db98-4019-a826-0fe154662f7e-ovsdbserver-sb\") pod \"4af12e84-db98-4019-a826-0fe154662f7e\" (UID: \"4af12e84-db98-4019-a826-0fe154662f7e\") "
Oct 09 13:46:24 crc kubenswrapper[4762]: I1009 13:46:24.214581 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/5bba279a-35cf-4a4e-8632-2098cad8fa08-etc-machine-id\") pod \"cinder-db-sync-glh4c\" (UID: \"5bba279a-35cf-4a4e-8632-2098cad8fa08\") " pod="openstack/cinder-db-sync-glh4c"
Oct 09 13:46:24 crc kubenswrapper[4762]: I1009 13:46:24.215128 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zp64z\" (UniqueName: \"kubernetes.io/projected/5bba279a-35cf-4a4e-8632-2098cad8fa08-kube-api-access-zp64z\") pod \"cinder-db-sync-glh4c\" (UID: \"5bba279a-35cf-4a4e-8632-2098cad8fa08\") " pod="openstack/cinder-db-sync-glh4c"
Oct 09 13:46:24 crc kubenswrapper[4762]: I1009 13:46:24.215193 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5bba279a-35cf-4a4e-8632-2098cad8fa08-combined-ca-bundle\") pod \"cinder-db-sync-glh4c\" (UID: \"5bba279a-35cf-4a4e-8632-2098cad8fa08\") " pod="openstack/cinder-db-sync-glh4c"
Oct 09 13:46:24 crc kubenswrapper[4762]: I1009 13:46:24.215225 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5bba279a-35cf-4a4e-8632-2098cad8fa08-scripts\") pod \"cinder-db-sync-glh4c\" (UID: \"5bba279a-35cf-4a4e-8632-2098cad8fa08\") " pod="openstack/cinder-db-sync-glh4c"
Oct 09 13:46:24 crc kubenswrapper[4762]: I1009 13:46:24.215472 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/5bba279a-35cf-4a4e-8632-2098cad8fa08-db-sync-config-data\") pod \"cinder-db-sync-glh4c\" (UID: \"5bba279a-35cf-4a4e-8632-2098cad8fa08\") " pod="openstack/cinder-db-sync-glh4c"
Oct 09 13:46:24 crc kubenswrapper[4762]: I1009 13:46:24.215509 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5bba279a-35cf-4a4e-8632-2098cad8fa08-config-data\") pod \"cinder-db-sync-glh4c\" (UID: \"5bba279a-35cf-4a4e-8632-2098cad8fa08\") " pod="openstack/cinder-db-sync-glh4c"
Oct 09 13:46:24 crc kubenswrapper[4762]: I1009 13:46:24.227043 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4af12e84-db98-4019-a826-0fe154662f7e-kube-api-access-x6ckk" (OuterVolumeSpecName: "kube-api-access-x6ckk") pod "4af12e84-db98-4019-a826-0fe154662f7e" (UID: "4af12e84-db98-4019-a826-0fe154662f7e"). InnerVolumeSpecName "kube-api-access-x6ckk". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 09 13:46:24 crc kubenswrapper[4762]: I1009 13:46:24.316648 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/5bba279a-35cf-4a4e-8632-2098cad8fa08-etc-machine-id\") pod \"cinder-db-sync-glh4c\" (UID: \"5bba279a-35cf-4a4e-8632-2098cad8fa08\") " pod="openstack/cinder-db-sync-glh4c"
Oct 09 13:46:24 crc kubenswrapper[4762]: I1009 13:46:24.316713 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zp64z\" (UniqueName: \"kubernetes.io/projected/5bba279a-35cf-4a4e-8632-2098cad8fa08-kube-api-access-zp64z\") pod \"cinder-db-sync-glh4c\" (UID: \"5bba279a-35cf-4a4e-8632-2098cad8fa08\") " pod="openstack/cinder-db-sync-glh4c"
Oct 09 13:46:24 crc kubenswrapper[4762]: I1009 13:46:24.316812 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/5bba279a-35cf-4a4e-8632-2098cad8fa08-etc-machine-id\") pod \"cinder-db-sync-glh4c\" (UID: \"5bba279a-35cf-4a4e-8632-2098cad8fa08\") " pod="openstack/cinder-db-sync-glh4c"
Oct 09 13:46:24 crc kubenswrapper[4762]: I1009 13:46:24.316884 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5bba279a-35cf-4a4e-8632-2098cad8fa08-combined-ca-bundle\") pod \"cinder-db-sync-glh4c\" (UID: \"5bba279a-35cf-4a4e-8632-2098cad8fa08\") " pod="openstack/cinder-db-sync-glh4c"
Oct 09 13:46:24 crc kubenswrapper[4762]: I1009 13:46:24.316927 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5bba279a-35cf-4a4e-8632-2098cad8fa08-scripts\") pod \"cinder-db-sync-glh4c\" (UID: \"5bba279a-35cf-4a4e-8632-2098cad8fa08\") " pod="openstack/cinder-db-sync-glh4c"
Oct 09 13:46:24 crc kubenswrapper[4762]: I1009 13:46:24.317007 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/5bba279a-35cf-4a4e-8632-2098cad8fa08-db-sync-config-data\") pod \"cinder-db-sync-glh4c\" (UID: \"5bba279a-35cf-4a4e-8632-2098cad8fa08\") " pod="openstack/cinder-db-sync-glh4c"
Oct 09 13:46:24 crc kubenswrapper[4762]: I1009 13:46:24.317027 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5bba279a-35cf-4a4e-8632-2098cad8fa08-config-data\") pod \"cinder-db-sync-glh4c\" (UID: \"5bba279a-35cf-4a4e-8632-2098cad8fa08\") " pod="openstack/cinder-db-sync-glh4c"
Oct 09 13:46:24 crc kubenswrapper[4762]: I1009 13:46:24.317614 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x6ckk\" (UniqueName: \"kubernetes.io/projected/4af12e84-db98-4019-a826-0fe154662f7e-kube-api-access-x6ckk\") on node \"crc\" DevicePath \"\""
Oct 09 13:46:24 crc kubenswrapper[4762]: I1009 13:46:24.334124 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5bba279a-35cf-4a4e-8632-2098cad8fa08-combined-ca-bundle\") pod \"cinder-db-sync-glh4c\" (UID: \"5bba279a-35cf-4a4e-8632-2098cad8fa08\") " pod="openstack/cinder-db-sync-glh4c"
Oct 09 13:46:24 crc kubenswrapper[4762]: I1009 13:46:24.337773 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/5bba279a-35cf-4a4e-8632-2098cad8fa08-db-sync-config-data\") pod \"cinder-db-sync-glh4c\" (UID: \"5bba279a-35cf-4a4e-8632-2098cad8fa08\") " pod="openstack/cinder-db-sync-glh4c"
Oct 09 13:46:24 crc kubenswrapper[4762]: I1009 13:46:24.345412 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-4tqkk"]
Oct 09 13:46:24 crc kubenswrapper[4762]: E1009 13:46:24.345923 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4af12e84-db98-4019-a826-0fe154662f7e" containerName="dnsmasq-dns"
Oct 09 13:46:24 crc kubenswrapper[4762]: I1009 13:46:24.345943 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="4af12e84-db98-4019-a826-0fe154662f7e" containerName="dnsmasq-dns"
Oct 09 13:46:24 crc kubenswrapper[4762]: E1009 13:46:24.345971 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4af12e84-db98-4019-a826-0fe154662f7e" containerName="init"
Oct 09 13:46:24 crc kubenswrapper[4762]: I1009 13:46:24.345978 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="4af12e84-db98-4019-a826-0fe154662f7e" containerName="init"
Oct 09 13:46:24 crc kubenswrapper[4762]: I1009 13:46:24.346128 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="4af12e84-db98-4019-a826-0fe154662f7e" containerName="dnsmasq-dns"
Oct 09 13:46:24 crc kubenswrapper[4762]: I1009 13:46:24.346774 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-4tqkk"
Oct 09 13:46:24 crc kubenswrapper[4762]: I1009 13:46:24.352890 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data"
Oct 09 13:46:24 crc kubenswrapper[4762]: I1009 13:46:24.353161 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-bzkm2"
Oct 09 13:46:24 crc kubenswrapper[4762]: I1009 13:46:24.379781 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-4tqkk"]
Oct 09 13:46:24 crc kubenswrapper[4762]: I1009 13:46:24.387289 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4af12e84-db98-4019-a826-0fe154662f7e-config" (OuterVolumeSpecName: "config") pod "4af12e84-db98-4019-a826-0fe154662f7e" (UID: "4af12e84-db98-4019-a826-0fe154662f7e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 09 13:46:24 crc kubenswrapper[4762]: I1009 13:46:24.415086 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5bba279a-35cf-4a4e-8632-2098cad8fa08-scripts\") pod \"cinder-db-sync-glh4c\" (UID: \"5bba279a-35cf-4a4e-8632-2098cad8fa08\") " pod="openstack/cinder-db-sync-glh4c"
Oct 09 13:46:24 crc kubenswrapper[4762]: I1009 13:46:24.430554 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zp64z\" (UniqueName: \"kubernetes.io/projected/5bba279a-35cf-4a4e-8632-2098cad8fa08-kube-api-access-zp64z\") pod \"cinder-db-sync-glh4c\" (UID: \"5bba279a-35cf-4a4e-8632-2098cad8fa08\") " pod="openstack/cinder-db-sync-glh4c"
Oct 09 13:46:24 crc kubenswrapper[4762]: I1009 13:46:24.442954 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5bba279a-35cf-4a4e-8632-2098cad8fa08-config-data\") pod \"cinder-db-sync-glh4c\" (UID: \"5bba279a-35cf-4a4e-8632-2098cad8fa08\") " pod="openstack/cinder-db-sync-glh4c"
Oct 09 13:46:24 crc kubenswrapper[4762]: I1009 13:46:24.450835 4762 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4af12e84-db98-4019-a826-0fe154662f7e-config\") on node \"crc\" DevicePath \"\""
Oct 09 13:46:24 crc kubenswrapper[4762]: I1009 13:46:24.477570 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4af12e84-db98-4019-a826-0fe154662f7e-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "4af12e84-db98-4019-a826-0fe154662f7e" (UID: "4af12e84-db98-4019-a826-0fe154662f7e"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 09 13:46:24 crc kubenswrapper[4762]: I1009 13:46:24.554709 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4af12e84-db98-4019-a826-0fe154662f7e-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "4af12e84-db98-4019-a826-0fe154662f7e" (UID: "4af12e84-db98-4019-a826-0fe154662f7e"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 09 13:46:24 crc kubenswrapper[4762]: I1009 13:46:24.557506 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e1f82ca-fcd6-4a8f-ae63-a5d0f9c5917d-combined-ca-bundle\") pod \"barbican-db-sync-4tqkk\" (UID: \"5e1f82ca-fcd6-4a8f-ae63-a5d0f9c5917d\") " pod="openstack/barbican-db-sync-4tqkk"
Oct 09 13:46:24 crc kubenswrapper[4762]: I1009 13:46:24.557562 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/5e1f82ca-fcd6-4a8f-ae63-a5d0f9c5917d-db-sync-config-data\") pod \"barbican-db-sync-4tqkk\" (UID: \"5e1f82ca-fcd6-4a8f-ae63-a5d0f9c5917d\") " pod="openstack/barbican-db-sync-4tqkk"
Oct 09 13:46:24 crc kubenswrapper[4762]: I1009 13:46:24.557613 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hnkqp\" (UniqueName: \"kubernetes.io/projected/5e1f82ca-fcd6-4a8f-ae63-a5d0f9c5917d-kube-api-access-hnkqp\") pod \"barbican-db-sync-4tqkk\" (UID: \"5e1f82ca-fcd6-4a8f-ae63-a5d0f9c5917d\") " pod="openstack/barbican-db-sync-4tqkk"
Oct 09 13:46:24 crc kubenswrapper[4762]: I1009 13:46:24.557827 4762 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4af12e84-db98-4019-a826-0fe154662f7e-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Oct 09 13:46:24 crc kubenswrapper[4762]: I1009 13:46:24.557844 4762 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4af12e84-db98-4019-a826-0fe154662f7e-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Oct 09 13:46:24 crc kubenswrapper[4762]: I1009 13:46:24.561107 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-m6k5r"]
Oct 09 13:46:24 crc kubenswrapper[4762]: I1009 13:46:24.564436 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-m6k5r"
Oct 09 13:46:24 crc kubenswrapper[4762]: I1009 13:46:24.569787 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-m6k5r"]
Oct 09 13:46:24 crc kubenswrapper[4762]: I1009 13:46:24.587609 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-hlnc6"
Oct 09 13:46:24 crc kubenswrapper[4762]: I1009 13:46:24.587987 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config"
Oct 09 13:46:24 crc kubenswrapper[4762]: I1009 13:46:24.588225 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config"
Oct 09 13:46:24 crc kubenswrapper[4762]: I1009 13:46:24.593194 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4af12e84-db98-4019-a826-0fe154662f7e-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "4af12e84-db98-4019-a826-0fe154662f7e" (UID: "4af12e84-db98-4019-a826-0fe154662f7e"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 09 13:46:24 crc kubenswrapper[4762]: I1009 13:46:24.612339 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b59f3d3a-1320-45ea-9922-1e5c47636f98","Type":"ContainerStarted","Data":"7b6fe981c9f4489d1addd630f0e4db607e9f5b730125b35294bae94f039a63eb"}
Oct 09 13:46:24 crc kubenswrapper[4762]: I1009 13:46:24.634704 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4af12e84-db98-4019-a826-0fe154662f7e-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "4af12e84-db98-4019-a826-0fe154662f7e" (UID: "4af12e84-db98-4019-a826-0fe154662f7e"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 09 13:46:24 crc kubenswrapper[4762]: I1009 13:46:24.651330 4762 generic.go:334] "Generic (PLEG): container finished" podID="3d330351-1e4e-497c-a765-028b31e5b570" containerID="5bec5512233bb7fefded6edab85945d57deb9974dd2dc7c5887d4c3c86987bd8" exitCode=0
Oct 09 13:46:24 crc kubenswrapper[4762]: I1009 13:46:24.651435 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57c957c4ff-xhvzb" event={"ID":"3d330351-1e4e-497c-a765-028b31e5b570","Type":"ContainerDied","Data":"5bec5512233bb7fefded6edab85945d57deb9974dd2dc7c5887d4c3c86987bd8"}
Oct 09 13:46:24 crc kubenswrapper[4762]: I1009 13:46:24.656010 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-pfvgh" event={"ID":"84218980-f997-4443-8dec-0bb2761f0527","Type":"ContainerStarted","Data":"92ccbd0ec3ae6d70023bd3a2966dd8b1cf46d84acf99d75b2e1a9366e032a0b9"}
Oct 09 13:46:24 crc kubenswrapper[4762]: I1009 13:46:24.658711 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e1f82ca-fcd6-4a8f-ae63-a5d0f9c5917d-combined-ca-bundle\") pod \"barbican-db-sync-4tqkk\" (UID: \"5e1f82ca-fcd6-4a8f-ae63-a5d0f9c5917d\") " pod="openstack/barbican-db-sync-4tqkk"
Oct 09 13:46:24 crc kubenswrapper[4762]: I1009 13:46:24.658755 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/5e1f82ca-fcd6-4a8f-ae63-a5d0f9c5917d-db-sync-config-data\") pod \"barbican-db-sync-4tqkk\" (UID: \"5e1f82ca-fcd6-4a8f-ae63-a5d0f9c5917d\") " pod="openstack/barbican-db-sync-4tqkk"
Oct 09 13:46:24 crc kubenswrapper[4762]: I1009 13:46:24.658791 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dmpkz\" (UniqueName: \"kubernetes.io/projected/f2acc43c-4777-4ee7-8cd9-1314007c0be8-kube-api-access-dmpkz\") pod \"neutron-db-sync-m6k5r\" (UID: \"f2acc43c-4777-4ee7-8cd9-1314007c0be8\") " pod="openstack/neutron-db-sync-m6k5r"
Oct 09 13:46:24 crc kubenswrapper[4762]: I1009 13:46:24.658820 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hnkqp\" (UniqueName: \"kubernetes.io/projected/5e1f82ca-fcd6-4a8f-ae63-a5d0f9c5917d-kube-api-access-hnkqp\") pod \"barbican-db-sync-4tqkk\" (UID: \"5e1f82ca-fcd6-4a8f-ae63-a5d0f9c5917d\") " pod="openstack/barbican-db-sync-4tqkk"
Oct 09 13:46:24 crc kubenswrapper[4762]: I1009 13:46:24.658847 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2acc43c-4777-4ee7-8cd9-1314007c0be8-combined-ca-bundle\") pod \"neutron-db-sync-m6k5r\" (UID: \"f2acc43c-4777-4ee7-8cd9-1314007c0be8\") " pod="openstack/neutron-db-sync-m6k5r"
Oct 09 13:46:24 crc kubenswrapper[4762]: I1009 13:46:24.658880 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/f2acc43c-4777-4ee7-8cd9-1314007c0be8-config\") pod \"neutron-db-sync-m6k5r\" (UID: \"f2acc43c-4777-4ee7-8cd9-1314007c0be8\") " pod="openstack/neutron-db-sync-m6k5r"
Oct 09 13:46:24 crc kubenswrapper[4762]: I1009 13:46:24.660145 4762 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4af12e84-db98-4019-a826-0fe154662f7e-dns-svc\") on node \"crc\" DevicePath \"\""
Oct 09 13:46:24 crc kubenswrapper[4762]: I1009 13:46:24.661968 4762 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4af12e84-db98-4019-a826-0fe154662f7e-dns-swift-storage-0\") on node \"crc\" DevicePath \"\""
Oct 09 13:46:24 crc kubenswrapper[4762]: I1009 13:46:24.674500 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e1f82ca-fcd6-4a8f-ae63-a5d0f9c5917d-combined-ca-bundle\") pod \"barbican-db-sync-4tqkk\" (UID: \"5e1f82ca-fcd6-4a8f-ae63-a5d0f9c5917d\") " pod="openstack/barbican-db-sync-4tqkk"
Oct 09 13:46:24 crc kubenswrapper[4762]: I1009 13:46:24.674504 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/5e1f82ca-fcd6-4a8f-ae63-a5d0f9c5917d-db-sync-config-data\") pod \"barbican-db-sync-4tqkk\" (UID: \"5e1f82ca-fcd6-4a8f-ae63-a5d0f9c5917d\") " pod="openstack/barbican-db-sync-4tqkk"
Oct 09 13:46:24 crc kubenswrapper[4762]: I1009 13:46:24.683756 4762 generic.go:334] "Generic (PLEG): container finished" podID="4af12e84-db98-4019-a826-0fe154662f7e" containerID="7e15d289baeff1ce19d7b3ca580245b7561af443b7f6aa7373c69ce247ca09ae" exitCode=0
Oct 09 13:46:24 crc kubenswrapper[4762]: I1009 13:46:24.683822 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-895cf5cf-khqhn"
Oct 09 13:46:24 crc kubenswrapper[4762]: I1009 13:46:24.683875 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-895cf5cf-khqhn" event={"ID":"4af12e84-db98-4019-a826-0fe154662f7e","Type":"ContainerDied","Data":"7e15d289baeff1ce19d7b3ca580245b7561af443b7f6aa7373c69ce247ca09ae"}
Oct 09 13:46:24 crc kubenswrapper[4762]: I1009 13:46:24.683910 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-895cf5cf-khqhn" event={"ID":"4af12e84-db98-4019-a826-0fe154662f7e","Type":"ContainerDied","Data":"959a66039b730cf4e172aad86d5f157d15d5bf1f99cf2602cf068c5194ddff27"}
Oct 09 13:46:24 crc kubenswrapper[4762]: I1009 13:46:24.683933 4762 scope.go:117] "RemoveContainer" containerID="7e15d289baeff1ce19d7b3ca580245b7561af443b7f6aa7373c69ce247ca09ae"
Oct 09 13:46:24 crc kubenswrapper[4762]: I1009 13:46:24.688004 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hnkqp\" (UniqueName: \"kubernetes.io/projected/5e1f82ca-fcd6-4a8f-ae63-a5d0f9c5917d-kube-api-access-hnkqp\") pod \"barbican-db-sync-4tqkk\" (UID: \"5e1f82ca-fcd6-4a8f-ae63-a5d0f9c5917d\") " pod="openstack/barbican-db-sync-4tqkk"
Oct 09 13:46:24 crc kubenswrapper[4762]: I1009 13:46:24.696703 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-glh4c"
Oct 09 13:46:24 crc kubenswrapper[4762]: I1009 13:46:24.708283 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"550dce7e-f8f7-4df2-b338-028e9726c591","Type":"ContainerStarted","Data":"6e26a3e2de2a0beed8e9431ac797befd9a52c2676a1cda6989c98ea6e2a2f77c"}
Oct 09 13:46:24 crc kubenswrapper[4762]: I1009 13:46:24.714115 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-pfvgh" podStartSLOduration=3.714093602 podStartE2EDuration="3.714093602s" podCreationTimestamp="2025-10-09 13:46:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:46:24.708464016 +0000 UTC m=+1260.482255075" watchObservedRunningTime="2025-10-09 13:46:24.714093602 +0000 UTC m=+1260.487884641"
Oct 09 13:46:24 crc kubenswrapper[4762]: I1009 13:46:24.736689 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-895cf5cf-khqhn"]
Oct 09 13:46:24 crc kubenswrapper[4762]: I1009 13:46:24.746232 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-895cf5cf-khqhn"]
Oct 09 13:46:24 crc kubenswrapper[4762]: I1009 13:46:24.763476 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2acc43c-4777-4ee7-8cd9-1314007c0be8-combined-ca-bundle\") pod \"neutron-db-sync-m6k5r\" (UID: \"f2acc43c-4777-4ee7-8cd9-1314007c0be8\") " pod="openstack/neutron-db-sync-m6k5r"
Oct 09 13:46:24 crc kubenswrapper[4762]: I1009 13:46:24.764245 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/f2acc43c-4777-4ee7-8cd9-1314007c0be8-config\") pod \"neutron-db-sync-m6k5r\" (UID: \"f2acc43c-4777-4ee7-8cd9-1314007c0be8\") " pod="openstack/neutron-db-sync-m6k5r"
Oct 09 13:46:24 crc kubenswrapper[4762]: I1009 13:46:24.766659 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dmpkz\" (UniqueName: \"kubernetes.io/projected/f2acc43c-4777-4ee7-8cd9-1314007c0be8-kube-api-access-dmpkz\") pod \"neutron-db-sync-m6k5r\" (UID: \"f2acc43c-4777-4ee7-8cd9-1314007c0be8\") " pod="openstack/neutron-db-sync-m6k5r"
Oct 09 13:46:24 crc kubenswrapper[4762]: I1009 13:46:24.769056 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/f2acc43c-4777-4ee7-8cd9-1314007c0be8-config\") pod \"neutron-db-sync-m6k5r\" (UID: \"f2acc43c-4777-4ee7-8cd9-1314007c0be8\") " pod="openstack/neutron-db-sync-m6k5r"
Oct 09 13:46:24 crc kubenswrapper[4762]: I1009 13:46:24.769994 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2acc43c-4777-4ee7-8cd9-1314007c0be8-combined-ca-bundle\") pod \"neutron-db-sync-m6k5r\" (UID: \"f2acc43c-4777-4ee7-8cd9-1314007c0be8\") " pod="openstack/neutron-db-sync-m6k5r"
Oct 09 13:46:24 crc kubenswrapper[4762]: I1009 13:46:24.774757 4762 scope.go:117] "RemoveContainer" containerID="66509cea160b32963283708f73344553a0ab99bfc5647d2cbf29b10309fb1e25"
Oct 09 13:46:24 crc kubenswrapper[4762]: I1009 13:46:24.785699 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dmpkz\" (UniqueName: \"kubernetes.io/projected/f2acc43c-4777-4ee7-8cd9-1314007c0be8-kube-api-access-dmpkz\") pod \"neutron-db-sync-m6k5r\" (UID: \"f2acc43c-4777-4ee7-8cd9-1314007c0be8\") " pod="openstack/neutron-db-sync-m6k5r"
Oct 09 13:46:24 crc kubenswrapper[4762]: I1009 13:46:24.807564 4762 scope.go:117] "RemoveContainer" containerID="7e15d289baeff1ce19d7b3ca580245b7561af443b7f6aa7373c69ce247ca09ae"
Oct 09 13:46:24 crc kubenswrapper[4762]: E1009 13:46:24.808135 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7e15d289baeff1ce19d7b3ca580245b7561af443b7f6aa7373c69ce247ca09ae\": container with ID starting with 7e15d289baeff1ce19d7b3ca580245b7561af443b7f6aa7373c69ce247ca09ae not found: ID does not exist" containerID="7e15d289baeff1ce19d7b3ca580245b7561af443b7f6aa7373c69ce247ca09ae"
Oct 09 13:46:24 crc kubenswrapper[4762]: I1009 13:46:24.808184 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7e15d289baeff1ce19d7b3ca580245b7561af443b7f6aa7373c69ce247ca09ae"} err="failed to get container status \"7e15d289baeff1ce19d7b3ca580245b7561af443b7f6aa7373c69ce247ca09ae\": rpc error: code = NotFound desc = could not find container \"7e15d289baeff1ce19d7b3ca580245b7561af443b7f6aa7373c69ce247ca09ae\": container with ID starting with 7e15d289baeff1ce19d7b3ca580245b7561af443b7f6aa7373c69ce247ca09ae not found: ID does not exist"
Oct 09 13:46:24 crc kubenswrapper[4762]: I1009 13:46:24.808206 4762 scope.go:117] "RemoveContainer" containerID="66509cea160b32963283708f73344553a0ab99bfc5647d2cbf29b10309fb1e25"
Oct 09 13:46:24 crc kubenswrapper[4762]: E1009 13:46:24.808681 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"66509cea160b32963283708f73344553a0ab99bfc5647d2cbf29b10309fb1e25\": container with ID starting with 66509cea160b32963283708f73344553a0ab99bfc5647d2cbf29b10309fb1e25 not found: ID does not exist" containerID="66509cea160b32963283708f73344553a0ab99bfc5647d2cbf29b10309fb1e25"
Oct 09 13:46:24 crc kubenswrapper[4762]: I1009 13:46:24.808724 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"66509cea160b32963283708f73344553a0ab99bfc5647d2cbf29b10309fb1e25"} err="failed to get container status \"66509cea160b32963283708f73344553a0ab99bfc5647d2cbf29b10309fb1e25\": rpc error: code = NotFound desc = could not find container \"66509cea160b32963283708f73344553a0ab99bfc5647d2cbf29b10309fb1e25\": container with ID starting with 66509cea160b32963283708f73344553a0ab99bfc5647d2cbf29b10309fb1e25 not found: ID does not exist"
Oct 09 13:46:24 crc kubenswrapper[4762]: I1009 13:46:24.809041 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-4tqkk"
Oct 09 13:46:24 crc kubenswrapper[4762]: I1009 13:46:24.964803 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-hlnc6"
Oct 09 13:46:24 crc kubenswrapper[4762]: I1009 13:46:24.972046 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-m6k5r"
Oct 09 13:46:24 crc kubenswrapper[4762]: I1009 13:46:24.999337 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="04e4465b-7d00-47ab-ac48-1468b7821d95" path="/var/lib/kubelet/pods/04e4465b-7d00-47ab-ac48-1468b7821d95/volumes"
Oct 09 13:46:24 crc kubenswrapper[4762]: I1009 13:46:24.999919 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4af12e84-db98-4019-a826-0fe154662f7e" path="/var/lib/kubelet/pods/4af12e84-db98-4019-a826-0fe154662f7e/volumes"
Oct 09 13:46:25 crc kubenswrapper[4762]: I1009 13:46:25.441622 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-glh4c"]
Oct 09 13:46:25 crc kubenswrapper[4762]: I1009 13:46:25.494476 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-4tqkk"]
Oct 09 13:46:25 crc kubenswrapper[4762]: I1009 13:46:25.532742 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"]
Oct 09 13:46:25 crc kubenswrapper[4762]: I1009 13:46:25.644466 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"]
Oct 09 13:46:25 crc kubenswrapper[4762]: I1009 13:46:25.654403 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Oct 09 13:46:25 crc kubenswrapper[4762]: I1009 13:46:25.704871 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-m6k5r"]
Oct 09 13:46:25 crc kubenswrapper[4762]: W1009 13:46:25.727938 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf2acc43c_4777_4ee7_8cd9_1314007c0be8.slice/crio-eb32a99e30d333b578a501b328a023d551608638c35e9a991eec5edaf3841aa6 WatchSource:0}: Error finding container eb32a99e30d333b578a501b328a023d551608638c35e9a991eec5edaf3841aa6: Status 404 returned error can't find the container with id eb32a99e30d333b578a501b328a023d551608638c35e9a991eec5edaf3841aa6
Oct 09 13:46:25 crc kubenswrapper[4762]: I1009 13:46:25.730904 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-4tqkk" event={"ID":"5e1f82ca-fcd6-4a8f-ae63-a5d0f9c5917d","Type":"ContainerStarted","Data":"5bdd9cd488a5fc94592c34e66d22a26decf414dac7995cfc3075b1329c42d861"}
Oct 09 13:46:25 crc kubenswrapper[4762]: I1009 13:46:25.754741 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-glh4c" event={"ID":"5bba279a-35cf-4a4e-8632-2098cad8fa08","Type":"ContainerStarted","Data":"95ce3e0c8bd79e6f7d208870a253409f666f6c3cf158abbd7610baf5e9be47bc"}
Oct 09 13:46:25 crc kubenswrapper[4762]: I1009 13:46:25.757065 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b59f3d3a-1320-45ea-9922-1e5c47636f98","Type":"ContainerStarted","Data":"cb8d35c43d0a57f326915229974cdffdc516b284b6b88c5498470997e7f01c6b"}
Oct 09 13:46:25 crc kubenswrapper[4762]: I1009 13:46:25.780278 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57c957c4ff-xhvzb" event={"ID":"3d330351-1e4e-497c-a765-028b31e5b570","Type":"ContainerStarted","Data":"47711bb0d3cd74fc5f368a4255a6bbe6f5cd820a881368bd09d8039fe16b962a"}
Oct 09 13:46:25 crc kubenswrapper[4762]: I1009 13:46:25.783157 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-57c957c4ff-xhvzb"
Oct 09 13:46:25 crc kubenswrapper[4762]: I1009 13:46:25.803444 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"550dce7e-f8f7-4df2-b338-028e9726c591","Type":"ContainerStarted","Data":"6d2711bd3f242761497bd8dbfc1b5ec70c15e1a87585b9a758a70f517a0dc495"}
Oct 09 13:46:25 crc kubenswrapper[4762]: I1009 13:46:25.820762 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-57c957c4ff-xhvzb" podStartSLOduration=3.8207460920000003 podStartE2EDuration="3.820746092s" podCreationTimestamp="2025-10-09 13:46:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:46:25.804089948 +0000 UTC m=+1261.577880987" watchObservedRunningTime="2025-10-09 13:46:25.820746092 +0000 UTC m=+1261.594537131"
Oct 09 13:46:26 crc kubenswrapper[4762]: I1009 13:46:26.841904 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"550dce7e-f8f7-4df2-b338-028e9726c591","Type":"ContainerStarted","Data":"26b44f2242d726eb606a4d05feba76c7c495e21b4ac967c6bac547fb3737e766"}
Oct 09 13:46:26 crc kubenswrapper[4762]: I1009 13:46:26.842288 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="550dce7e-f8f7-4df2-b338-028e9726c591" containerName="glance-httpd" containerID="cri-o://26b44f2242d726eb606a4d05feba76c7c495e21b4ac967c6bac547fb3737e766" gracePeriod=30
Oct 09 13:46:26 crc kubenswrapper[4762]: I1009 13:46:26.842200 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="550dce7e-f8f7-4df2-b338-028e9726c591" containerName="glance-log" containerID="cri-o://6d2711bd3f242761497bd8dbfc1b5ec70c15e1a87585b9a758a70f517a0dc495" gracePeriod=30
Oct 09 13:46:26 crc kubenswrapper[4762]: I1009 13:46:26.846960 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b59f3d3a-1320-45ea-9922-1e5c47636f98","Type":"ContainerStarted","Data":"fd4ba9a220a47e2b1e6c41657c714c847f2fbe05a5a726c8e6e82c8552f73e90"}
Oct 09 13:46:26 crc kubenswrapper[4762]: I1009 13:46:26.847027 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="b59f3d3a-1320-45ea-9922-1e5c47636f98" containerName="glance-httpd" containerID="cri-o://fd4ba9a220a47e2b1e6c41657c714c847f2fbe05a5a726c8e6e82c8552f73e90" gracePeriod=30
Oct 09 13:46:26 crc kubenswrapper[4762]: I1009 13:46:26.846995 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="b59f3d3a-1320-45ea-9922-1e5c47636f98" containerName="glance-log" containerID="cri-o://cb8d35c43d0a57f326915229974cdffdc516b284b6b88c5498470997e7f01c6b" gracePeriod=30
Oct 09 13:46:26 crc kubenswrapper[4762]: I1009 13:46:26.850092 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-m6k5r" event={"ID":"f2acc43c-4777-4ee7-8cd9-1314007c0be8","Type":"ContainerStarted","Data":"7083d406a3f63fd2886bc29f3375460879083ff10e1ca9d1fd6c743eeab66078"}
Oct 09 13:46:26 crc kubenswrapper[4762]: I1009 13:46:26.850142 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-m6k5r" event={"ID":"f2acc43c-4777-4ee7-8cd9-1314007c0be8","Type":"ContainerStarted","Data":"eb32a99e30d333b578a501b328a023d551608638c35e9a991eec5edaf3841aa6"}
Oct 09 13:46:26 crc kubenswrapper[4762]: I1009 13:46:26.870440 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=5.870416479 podStartE2EDuration="5.870416479s" podCreationTimestamp="2025-10-09 13:46:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:46:26.869069006 +0000 UTC m=+1262.642860045" watchObservedRunningTime="2025-10-09 13:46:26.870416479 +0000 UTC m=+1262.644207518"
Oct 09 13:46:26 crc kubenswrapper[4762]: I1009 13:46:26.888118 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-m6k5r" podStartSLOduration=2.888096408 podStartE2EDuration="2.888096408s" podCreationTimestamp="2025-10-09 13:46:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:46:26.886665123 +0000 UTC m=+1262.660456172" watchObservedRunningTime="2025-10-09 13:46:26.888096408 +0000 UTC m=+1262.661887447"
Oct 09 13:46:26 crc kubenswrapper[4762]: I1009 13:46:26.962566 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=5.962539303 podStartE2EDuration="5.962539303s" podCreationTimestamp="2025-10-09 13:46:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:46:26.91499257 +0000 UTC m=+1262.688783609" watchObservedRunningTime="2025-10-09 13:46:26.962539303 +0000 UTC m=+1262.736330342"
Oct 09 13:46:27 crc kubenswrapper[4762]: I1009 13:46:27.867263 4762 generic.go:334] "Generic (PLEG): container finished" podID="b59f3d3a-1320-45ea-9922-1e5c47636f98" containerID="fd4ba9a220a47e2b1e6c41657c714c847f2fbe05a5a726c8e6e82c8552f73e90" exitCode=0
Oct 09 13:46:27 crc kubenswrapper[4762]: I1009 13:46:27.867764 4762 generic.go:334] "Generic (PLEG): container finished" podID="b59f3d3a-1320-45ea-9922-1e5c47636f98" containerID="cb8d35c43d0a57f326915229974cdffdc516b284b6b88c5498470997e7f01c6b" exitCode=143
Oct 09 13:46:27 crc kubenswrapper[4762]: I1009 13:46:27.867478 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b59f3d3a-1320-45ea-9922-1e5c47636f98","Type":"ContainerDied","Data":"fd4ba9a220a47e2b1e6c41657c714c847f2fbe05a5a726c8e6e82c8552f73e90"}
Oct 09 13:46:27 crc kubenswrapper[4762]: I1009 13:46:27.867918 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b59f3d3a-1320-45ea-9922-1e5c47636f98","Type":"ContainerDied","Data":"cb8d35c43d0a57f326915229974cdffdc516b284b6b88c5498470997e7f01c6b"}
Oct 09 13:46:27 crc kubenswrapper[4762]: I1009 13:46:27.873038 4762 generic.go:334] "Generic (PLEG): container finished" podID="550dce7e-f8f7-4df2-b338-028e9726c591" containerID="26b44f2242d726eb606a4d05feba76c7c495e21b4ac967c6bac547fb3737e766" exitCode=0
Oct 09 13:46:27 crc kubenswrapper[4762]: I1009 13:46:27.873064 4762 generic.go:334] "Generic (PLEG): container finished" podID="550dce7e-f8f7-4df2-b338-028e9726c591" containerID="6d2711bd3f242761497bd8dbfc1b5ec70c15e1a87585b9a758a70f517a0dc495" exitCode=143
Oct 09 13:46:27 crc kubenswrapper[4762]: I1009 13:46:27.873093 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"550dce7e-f8f7-4df2-b338-028e9726c591","Type":"ContainerDied","Data":"26b44f2242d726eb606a4d05feba76c7c495e21b4ac967c6bac547fb3737e766"}
Oct 09 13:46:27 crc kubenswrapper[4762]: I1009 13:46:27.873136 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"550dce7e-f8f7-4df2-b338-028e9726c591","Type":"ContainerDied","Data":"6d2711bd3f242761497bd8dbfc1b5ec70c15e1a87585b9a758a70f517a0dc495"}
Oct 09 13:46:28 crc kubenswrapper[4762]: I1009 13:46:28.893088 4762 generic.go:334] "Generic (PLEG): container finished" podID="84218980-f997-4443-8dec-0bb2761f0527" containerID="92ccbd0ec3ae6d70023bd3a2966dd8b1cf46d84acf99d75b2e1a9366e032a0b9" exitCode=0
Oct 09 13:46:28 crc kubenswrapper[4762]: I1009 13:46:28.893141 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-pfvgh" event={"ID":"84218980-f997-4443-8dec-0bb2761f0527","Type":"ContainerDied","Data":"92ccbd0ec3ae6d70023bd3a2966dd8b1cf46d84acf99d75b2e1a9366e032a0b9"}
Oct 09 13:46:32 crc kubenswrapper[4762]: I1009 13:46:32.751176 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-57c957c4ff-xhvzb"
Oct 09 13:46:32 crc kubenswrapper[4762]: I1009 13:46:32.822807 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6d5b6d6b67-6qflh"]
Oct 09 13:46:32 crc kubenswrapper[4762]: I1009 13:46:32.823431 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6d5b6d6b67-6qflh" podUID="15ffec36-732e-48f3-b5b8-52038bc8da8a" containerName="dnsmasq-dns" containerID="cri-o://1a1d7f56dafc809021ef97bf0a3dd8ebb7abbcae63d2e822e02f7c3c37e43677" gracePeriod=10
Oct 09 13:46:33 crc kubenswrapper[4762]: I1009 13:46:33.941318 4762 generic.go:334] "Generic (PLEG): container finished" podID="15ffec36-732e-48f3-b5b8-52038bc8da8a" containerID="1a1d7f56dafc809021ef97bf0a3dd8ebb7abbcae63d2e822e02f7c3c37e43677" exitCode=0
Oct 09 13:46:33 crc kubenswrapper[4762]: I1009 13:46:33.941376 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6d5b6d6b67-6qflh" event={"ID":"15ffec36-732e-48f3-b5b8-52038bc8da8a","Type":"ContainerDied","Data":"1a1d7f56dafc809021ef97bf0a3dd8ebb7abbcae63d2e822e02f7c3c37e43677"}
Oct 09 13:46:34 crc kubenswrapper[4762]: I1009 13:46:34.740913 4762 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-6d5b6d6b67-6qflh" podUID="15ffec36-732e-48f3-b5b8-52038bc8da8a" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.125:5353: connect: connection refused"
Oct 09 13:46:39 crc kubenswrapper[4762]: I1009 13:46:39.740579 4762 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-6d5b6d6b67-6qflh" podUID="15ffec36-732e-48f3-b5b8-52038bc8da8a" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.125:5353: connect: connection refused"
Oct 09 13:46:40 crc kubenswrapper[4762]: E1009 13:46:40.348534 4762 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-placement-api:current-podified"
Oct 09 13:46:40 crc kubenswrapper[4762]: E1009 13:46:40.349161 4762 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:placement-db-sync,Image:quay.io/podified-antelope-centos9/openstack-placement-api:current-podified,Command:[/bin/bash],Args:[-c
/usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:true,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/placement,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:false,MountPath:/var/lib/openstack/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:placement-dbsync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-zzx7q,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42482,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-db-sync-vxzqs_openstack(f8ef7222-071d-403b-8356-7645662e1226): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 09 13:46:40 crc kubenswrapper[4762]: E1009 13:46:40.350526 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"placement-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/placement-db-sync-vxzqs" podUID="f8ef7222-071d-403b-8356-7645662e1226" Oct 09 13:46:40 crc kubenswrapper[4762]: I1009 13:46:40.389251 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-pfvgh" Oct 09 13:46:40 crc kubenswrapper[4762]: I1009 13:46:40.403076 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 09 13:46:40 crc kubenswrapper[4762]: I1009 13:46:40.412788 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 09 13:46:40 crc kubenswrapper[4762]: I1009 13:46:40.499742 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/84218980-f997-4443-8dec-0bb2761f0527-credential-keys\") pod \"84218980-f997-4443-8dec-0bb2761f0527\" (UID: \"84218980-f997-4443-8dec-0bb2761f0527\") " Oct 09 13:46:40 crc kubenswrapper[4762]: I1009 13:46:40.499794 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b59f3d3a-1320-45ea-9922-1e5c47636f98-logs\") pod \"b59f3d3a-1320-45ea-9922-1e5c47636f98\" (UID: \"b59f3d3a-1320-45ea-9922-1e5c47636f98\") " Oct 09 13:46:40 crc kubenswrapper[4762]: I1009 13:46:40.499831 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/550dce7e-f8f7-4df2-b338-028e9726c591-scripts\") pod \"550dce7e-f8f7-4df2-b338-028e9726c591\" (UID: \"550dce7e-f8f7-4df2-b338-028e9726c591\") " Oct 09 13:46:40 crc kubenswrapper[4762]: I1009 13:46:40.499858 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/550dce7e-f8f7-4df2-b338-028e9726c591-logs\") pod \"550dce7e-f8f7-4df2-b338-028e9726c591\" (UID: \"550dce7e-f8f7-4df2-b338-028e9726c591\") " Oct 09 13:46:40 crc kubenswrapper[4762]: I1009 13:46:40.499896 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b59f3d3a-1320-45ea-9922-1e5c47636f98-combined-ca-bundle\") pod \"b59f3d3a-1320-45ea-9922-1e5c47636f98\" (UID: \"b59f3d3a-1320-45ea-9922-1e5c47636f98\") " Oct 09 13:46:40 crc kubenswrapper[4762]: I1009 13:46:40.499972 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b59f3d3a-1320-45ea-9922-1e5c47636f98-httpd-run\") pod \"b59f3d3a-1320-45ea-9922-1e5c47636f98\" (UID: \"b59f3d3a-1320-45ea-9922-1e5c47636f98\") " Oct 09 13:46:40 crc kubenswrapper[4762]: I1009 13:46:40.499997 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/550dce7e-f8f7-4df2-b338-028e9726c591-combined-ca-bundle\") pod \"550dce7e-f8f7-4df2-b338-028e9726c591\" (UID: \"550dce7e-f8f7-4df2-b338-028e9726c591\") " Oct 09 13:46:40 crc kubenswrapper[4762]: I1009 13:46:40.500026 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b59f3d3a-1320-45ea-9922-1e5c47636f98-config-data\") pod \"b59f3d3a-1320-45ea-9922-1e5c47636f98\" (UID: \"b59f3d3a-1320-45ea-9922-1e5c47636f98\") " Oct 09 13:46:40 crc kubenswrapper[4762]: I1009 13:46:40.500052 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b59f3d3a-1320-45ea-9922-1e5c47636f98-scripts\") pod \"b59f3d3a-1320-45ea-9922-1e5c47636f98\" (UID: \"b59f3d3a-1320-45ea-9922-1e5c47636f98\") " Oct 09 13:46:40 crc kubenswrapper[4762]: I1009 13:46:40.500090 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/84218980-f997-4443-8dec-0bb2761f0527-config-data\") pod \"84218980-f997-4443-8dec-0bb2761f0527\" (UID: \"84218980-f997-4443-8dec-0bb2761f0527\") " Oct 09 13:46:40 crc 
kubenswrapper[4762]: I1009 13:46:40.500127 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"550dce7e-f8f7-4df2-b338-028e9726c591\" (UID: \"550dce7e-f8f7-4df2-b338-028e9726c591\") " Oct 09 13:46:40 crc kubenswrapper[4762]: I1009 13:46:40.500444 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b59f3d3a-1320-45ea-9922-1e5c47636f98-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "b59f3d3a-1320-45ea-9922-1e5c47636f98" (UID: "b59f3d3a-1320-45ea-9922-1e5c47636f98"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 13:46:40 crc kubenswrapper[4762]: I1009 13:46:40.500497 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b59f3d3a-1320-45ea-9922-1e5c47636f98-logs" (OuterVolumeSpecName: "logs") pod "b59f3d3a-1320-45ea-9922-1e5c47636f98" (UID: "b59f3d3a-1320-45ea-9922-1e5c47636f98"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 13:46:40 crc kubenswrapper[4762]: I1009 13:46:40.500682 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/550dce7e-f8f7-4df2-b338-028e9726c591-logs" (OuterVolumeSpecName: "logs") pod "550dce7e-f8f7-4df2-b338-028e9726c591" (UID: "550dce7e-f8f7-4df2-b338-028e9726c591"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 13:46:40 crc kubenswrapper[4762]: I1009 13:46:40.501043 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g55md\" (UniqueName: \"kubernetes.io/projected/550dce7e-f8f7-4df2-b338-028e9726c591-kube-api-access-g55md\") pod \"550dce7e-f8f7-4df2-b338-028e9726c591\" (UID: \"550dce7e-f8f7-4df2-b338-028e9726c591\") " Oct 09 13:46:40 crc kubenswrapper[4762]: I1009 13:46:40.501074 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/84218980-f997-4443-8dec-0bb2761f0527-fernet-keys\") pod \"84218980-f997-4443-8dec-0bb2761f0527\" (UID: \"84218980-f997-4443-8dec-0bb2761f0527\") " Oct 09 13:46:40 crc kubenswrapper[4762]: I1009 13:46:40.501102 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"b59f3d3a-1320-45ea-9922-1e5c47636f98\" (UID: \"b59f3d3a-1320-45ea-9922-1e5c47636f98\") " Oct 09 13:46:40 crc kubenswrapper[4762]: I1009 13:46:40.501174 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/550dce7e-f8f7-4df2-b338-028e9726c591-httpd-run\") pod \"550dce7e-f8f7-4df2-b338-028e9726c591\" (UID: \"550dce7e-f8f7-4df2-b338-028e9726c591\") " Oct 09 13:46:40 crc kubenswrapper[4762]: I1009 13:46:40.501210 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lcppv\" (UniqueName: \"kubernetes.io/projected/84218980-f997-4443-8dec-0bb2761f0527-kube-api-access-lcppv\") pod \"84218980-f997-4443-8dec-0bb2761f0527\" (UID: \"84218980-f997-4443-8dec-0bb2761f0527\") " Oct 09 13:46:40 crc kubenswrapper[4762]: I1009 13:46:40.501260 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/550dce7e-f8f7-4df2-b338-028e9726c591-config-data\") pod 
\"550dce7e-f8f7-4df2-b338-028e9726c591\" (UID: \"550dce7e-f8f7-4df2-b338-028e9726c591\") " Oct 09 13:46:40 crc kubenswrapper[4762]: I1009 13:46:40.501286 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/84218980-f997-4443-8dec-0bb2761f0527-combined-ca-bundle\") pod \"84218980-f997-4443-8dec-0bb2761f0527\" (UID: \"84218980-f997-4443-8dec-0bb2761f0527\") " Oct 09 13:46:40 crc kubenswrapper[4762]: I1009 13:46:40.501312 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/84218980-f997-4443-8dec-0bb2761f0527-scripts\") pod \"84218980-f997-4443-8dec-0bb2761f0527\" (UID: \"84218980-f997-4443-8dec-0bb2761f0527\") " Oct 09 13:46:40 crc kubenswrapper[4762]: I1009 13:46:40.501335 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sndpg\" (UniqueName: \"kubernetes.io/projected/b59f3d3a-1320-45ea-9922-1e5c47636f98-kube-api-access-sndpg\") pod \"b59f3d3a-1320-45ea-9922-1e5c47636f98\" (UID: \"b59f3d3a-1320-45ea-9922-1e5c47636f98\") " Oct 09 13:46:40 crc kubenswrapper[4762]: I1009 13:46:40.501800 4762 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b59f3d3a-1320-45ea-9922-1e5c47636f98-logs\") on node \"crc\" DevicePath \"\"" Oct 09 13:46:40 crc kubenswrapper[4762]: I1009 13:46:40.501816 4762 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/550dce7e-f8f7-4df2-b338-028e9726c591-logs\") on node \"crc\" DevicePath \"\"" Oct 09 13:46:40 crc kubenswrapper[4762]: I1009 13:46:40.503105 4762 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b59f3d3a-1320-45ea-9922-1e5c47636f98-httpd-run\") on node \"crc\" DevicePath \"\"" Oct 09 13:46:40 crc kubenswrapper[4762]: I1009 13:46:40.502942 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/550dce7e-f8f7-4df2-b338-028e9726c591-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "550dce7e-f8f7-4df2-b338-028e9726c591" (UID: "550dce7e-f8f7-4df2-b338-028e9726c591"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 13:46:40 crc kubenswrapper[4762]: I1009 13:46:40.505710 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/84218980-f997-4443-8dec-0bb2761f0527-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "84218980-f997-4443-8dec-0bb2761f0527" (UID: "84218980-f997-4443-8dec-0bb2761f0527"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:46:40 crc kubenswrapper[4762]: I1009 13:46:40.507324 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage07-crc" (OuterVolumeSpecName: "glance") pod "b59f3d3a-1320-45ea-9922-1e5c47636f98" (UID: "b59f3d3a-1320-45ea-9922-1e5c47636f98"). InnerVolumeSpecName "local-storage07-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 09 13:46:40 crc kubenswrapper[4762]: I1009 13:46:40.507498 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/550dce7e-f8f7-4df2-b338-028e9726c591-scripts" (OuterVolumeSpecName: "scripts") pod "550dce7e-f8f7-4df2-b338-028e9726c591" (UID: "550dce7e-f8f7-4df2-b338-028e9726c591"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:46:40 crc kubenswrapper[4762]: I1009 13:46:40.507525 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage11-crc" (OuterVolumeSpecName: "glance") pod "550dce7e-f8f7-4df2-b338-028e9726c591" (UID: "550dce7e-f8f7-4df2-b338-028e9726c591"). InnerVolumeSpecName "local-storage11-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 09 13:46:40 crc kubenswrapper[4762]: I1009 13:46:40.507536 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/84218980-f997-4443-8dec-0bb2761f0527-kube-api-access-lcppv" (OuterVolumeSpecName: "kube-api-access-lcppv") pod "84218980-f997-4443-8dec-0bb2761f0527" (UID: "84218980-f997-4443-8dec-0bb2761f0527"). InnerVolumeSpecName "kube-api-access-lcppv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:46:40 crc kubenswrapper[4762]: I1009 13:46:40.508098 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/84218980-f997-4443-8dec-0bb2761f0527-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "84218980-f997-4443-8dec-0bb2761f0527" (UID: "84218980-f997-4443-8dec-0bb2761f0527"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:46:40 crc kubenswrapper[4762]: I1009 13:46:40.511750 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/84218980-f997-4443-8dec-0bb2761f0527-scripts" (OuterVolumeSpecName: "scripts") pod "84218980-f997-4443-8dec-0bb2761f0527" (UID: "84218980-f997-4443-8dec-0bb2761f0527"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:46:40 crc kubenswrapper[4762]: I1009 13:46:40.511949 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b59f3d3a-1320-45ea-9922-1e5c47636f98-kube-api-access-sndpg" (OuterVolumeSpecName: "kube-api-access-sndpg") pod "b59f3d3a-1320-45ea-9922-1e5c47636f98" (UID: "b59f3d3a-1320-45ea-9922-1e5c47636f98"). InnerVolumeSpecName "kube-api-access-sndpg". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:46:40 crc kubenswrapper[4762]: I1009 13:46:40.512049 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b59f3d3a-1320-45ea-9922-1e5c47636f98-scripts" (OuterVolumeSpecName: "scripts") pod "b59f3d3a-1320-45ea-9922-1e5c47636f98" (UID: "b59f3d3a-1320-45ea-9922-1e5c47636f98"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:46:40 crc kubenswrapper[4762]: I1009 13:46:40.533241 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/550dce7e-f8f7-4df2-b338-028e9726c591-kube-api-access-g55md" (OuterVolumeSpecName: "kube-api-access-g55md") pod "550dce7e-f8f7-4df2-b338-028e9726c591" (UID: "550dce7e-f8f7-4df2-b338-028e9726c591"). InnerVolumeSpecName "kube-api-access-g55md". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:46:40 crc kubenswrapper[4762]: I1009 13:46:40.539915 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/84218980-f997-4443-8dec-0bb2761f0527-config-data" (OuterVolumeSpecName: "config-data") pod "84218980-f997-4443-8dec-0bb2761f0527" (UID: "84218980-f997-4443-8dec-0bb2761f0527"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:46:40 crc kubenswrapper[4762]: I1009 13:46:40.545237 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b59f3d3a-1320-45ea-9922-1e5c47636f98-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b59f3d3a-1320-45ea-9922-1e5c47636f98" (UID: "b59f3d3a-1320-45ea-9922-1e5c47636f98"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:46:40 crc kubenswrapper[4762]: I1009 13:46:40.553067 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/550dce7e-f8f7-4df2-b338-028e9726c591-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "550dce7e-f8f7-4df2-b338-028e9726c591" (UID: "550dce7e-f8f7-4df2-b338-028e9726c591"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:46:40 crc kubenswrapper[4762]: I1009 13:46:40.557857 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/84218980-f997-4443-8dec-0bb2761f0527-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "84218980-f997-4443-8dec-0bb2761f0527" (UID: "84218980-f997-4443-8dec-0bb2761f0527"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:46:40 crc kubenswrapper[4762]: I1009 13:46:40.571942 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/550dce7e-f8f7-4df2-b338-028e9726c591-config-data" (OuterVolumeSpecName: "config-data") pod "550dce7e-f8f7-4df2-b338-028e9726c591" (UID: "550dce7e-f8f7-4df2-b338-028e9726c591"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:46:40 crc kubenswrapper[4762]: I1009 13:46:40.586710 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b59f3d3a-1320-45ea-9922-1e5c47636f98-config-data" (OuterVolumeSpecName: "config-data") pod "b59f3d3a-1320-45ea-9922-1e5c47636f98" (UID: "b59f3d3a-1320-45ea-9922-1e5c47636f98"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:46:40 crc kubenswrapper[4762]: I1009 13:46:40.605463 4762 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/84218980-f997-4443-8dec-0bb2761f0527-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 13:46:40 crc kubenswrapper[4762]: I1009 13:46:40.606809 4762 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" " Oct 09 13:46:40 crc kubenswrapper[4762]: I1009 13:46:40.606885 4762 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/84218980-f997-4443-8dec-0bb2761f0527-fernet-keys\") on node \"crc\" DevicePath \"\"" Oct 09 13:46:40 crc kubenswrapper[4762]: I1009 13:46:40.606903 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g55md\" (UniqueName: \"kubernetes.io/projected/550dce7e-f8f7-4df2-b338-028e9726c591-kube-api-access-g55md\") on node \"crc\" DevicePath \"\"" Oct 09 13:46:40 crc kubenswrapper[4762]: I1009 13:46:40.606944 4762 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" " Oct 09 13:46:40 crc kubenswrapper[4762]: I1009 13:46:40.606958 4762 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/550dce7e-f8f7-4df2-b338-028e9726c591-httpd-run\") on node \"crc\" DevicePath \"\"" Oct 09 13:46:40 crc kubenswrapper[4762]: I1009 13:46:40.606969 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lcppv\" (UniqueName: \"kubernetes.io/projected/84218980-f997-4443-8dec-0bb2761f0527-kube-api-access-lcppv\") on node \"crc\" DevicePath \"\"" Oct 09 13:46:40 crc kubenswrapper[4762]: I1009 13:46:40.606979 4762 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/550dce7e-f8f7-4df2-b338-028e9726c591-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 13:46:40 crc kubenswrapper[4762]: I1009 13:46:40.606989 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/84218980-f997-4443-8dec-0bb2761f0527-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 13:46:40 crc kubenswrapper[4762]: I1009 13:46:40.607000 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sndpg\" (UniqueName: \"kubernetes.io/projected/b59f3d3a-1320-45ea-9922-1e5c47636f98-kube-api-access-sndpg\") on node \"crc\" DevicePath \"\"" Oct 09 13:46:40 crc kubenswrapper[4762]: I1009 13:46:40.607010 4762 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/84218980-f997-4443-8dec-0bb2761f0527-scripts\") on node \"crc\" DevicePath \"\"" Oct 09 13:46:40 crc kubenswrapper[4762]: I1009 13:46:40.607020 4762 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/84218980-f997-4443-8dec-0bb2761f0527-credential-keys\") on node \"crc\" DevicePath \"\"" Oct 09 13:46:40 crc kubenswrapper[4762]: I1009 13:46:40.607029 4762 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/550dce7e-f8f7-4df2-b338-028e9726c591-scripts\") on node \"crc\" DevicePath \"\"" Oct 09 13:46:40 crc kubenswrapper[4762]: I1009 13:46:40.607039 4762 
reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b59f3d3a-1320-45ea-9922-1e5c47636f98-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 13:46:40 crc kubenswrapper[4762]: I1009 13:46:40.607049 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/550dce7e-f8f7-4df2-b338-028e9726c591-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 13:46:40 crc kubenswrapper[4762]: I1009 13:46:40.607059 4762 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b59f3d3a-1320-45ea-9922-1e5c47636f98-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 13:46:40 crc kubenswrapper[4762]: I1009 13:46:40.607068 4762 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b59f3d3a-1320-45ea-9922-1e5c47636f98-scripts\") on node \"crc\" DevicePath \"\"" Oct 09 13:46:40 crc kubenswrapper[4762]: I1009 13:46:40.627301 4762 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage07-crc" (UniqueName: "kubernetes.io/local-volume/local-storage07-crc") on node "crc" Oct 09 13:46:40 crc kubenswrapper[4762]: I1009 13:46:40.635393 4762 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage11-crc" (UniqueName: "kubernetes.io/local-volume/local-storage11-crc") on node "crc" Oct 09 13:46:40 crc kubenswrapper[4762]: I1009 13:46:40.708941 4762 reconciler_common.go:293] "Volume detached for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" DevicePath \"\"" Oct 09 13:46:40 crc kubenswrapper[4762]: I1009 13:46:40.708985 4762 reconciler_common.go:293] "Volume detached for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" DevicePath \"\"" Oct 09 13:46:40 crc kubenswrapper[4762]: E1009 13:46:40.867060 4762 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified" Oct 09 13:46:40 crc kubenswrapper[4762]: E1009 13:46:40.867199 4762 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:barbican-db-sync,Image:quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified,Command:[/bin/bash],Args:[-c barbican-manage db 
upgrade],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/barbican/barbican.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-hnkqp,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42403,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42403,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod barbican-db-sync-4tqkk_openstack(5e1f82ca-fcd6-4a8f-ae63-a5d0f9c5917d): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 09 13:46:40 crc kubenswrapper[4762]: E1009 13:46:40.869311 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/barbican-db-sync-4tqkk" podUID="5e1f82ca-fcd6-4a8f-ae63-a5d0f9c5917d" Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.016182 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-pfvgh" event={"ID":"84218980-f997-4443-8dec-0bb2761f0527","Type":"ContainerDied","Data":"8a918541d8920edbd8173b4d0c767ebe627df931d17a239c47eee8c9c8ba420f"} Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.016238 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8a918541d8920edbd8173b4d0c767ebe627df931d17a239c47eee8c9c8ba420f" Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.016325 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-pfvgh" Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.019301 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"550dce7e-f8f7-4df2-b338-028e9726c591","Type":"ContainerDied","Data":"6e26a3e2de2a0beed8e9431ac797befd9a52c2676a1cda6989c98ea6e2a2f77c"} Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.019328 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.019416 4762 scope.go:117] "RemoveContainer" containerID="26b44f2242d726eb606a4d05feba76c7c495e21b4ac967c6bac547fb3737e766" Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.023728 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b59f3d3a-1320-45ea-9922-1e5c47636f98","Type":"ContainerDied","Data":"7b6fe981c9f4489d1addd630f0e4db607e9f5b730125b35294bae94f039a63eb"} Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.023824 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 09 13:46:41 crc kubenswrapper[4762]: E1009 13:46:41.027310 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified\\\"\"" pod="openstack/barbican-db-sync-4tqkk" podUID="5e1f82ca-fcd6-4a8f-ae63-a5d0f9c5917d" Oct 09 13:46:41 crc kubenswrapper[4762]: E1009 13:46:41.027742 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"placement-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-placement-api:current-podified\\\"\"" pod="openstack/placement-db-sync-vxzqs" podUID="f8ef7222-071d-403b-8356-7645662e1226" Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.069145 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.082931 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.111539 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 09 13:46:41 crc kubenswrapper[4762]: E1009 13:46:41.112270 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b59f3d3a-1320-45ea-9922-1e5c47636f98" containerName="glance-httpd" Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.112288 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="b59f3d3a-1320-45ea-9922-1e5c47636f98" containerName="glance-httpd" Oct 09 13:46:41 crc kubenswrapper[4762]: E1009 13:46:41.112311 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b59f3d3a-1320-45ea-9922-1e5c47636f98" containerName="glance-log" Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.112321 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="b59f3d3a-1320-45ea-9922-1e5c47636f98" containerName="glance-log" Oct 09 13:46:41 crc kubenswrapper[4762]: E1009 13:46:41.112359 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84218980-f997-4443-8dec-0bb2761f0527" containerName="keystone-bootstrap" Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.112369 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="84218980-f997-4443-8dec-0bb2761f0527" containerName="keystone-bootstrap" Oct 09 13:46:41 crc kubenswrapper[4762]: E1009 13:46:41.112384 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="550dce7e-f8f7-4df2-b338-028e9726c591" containerName="glance-httpd" Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.112392 4762 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="550dce7e-f8f7-4df2-b338-028e9726c591" containerName="glance-httpd" Oct 09 13:46:41 crc kubenswrapper[4762]: E1009 13:46:41.112410 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="550dce7e-f8f7-4df2-b338-028e9726c591" containerName="glance-log" Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.112417 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="550dce7e-f8f7-4df2-b338-028e9726c591" containerName="glance-log" Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.112618 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="550dce7e-f8f7-4df2-b338-028e9726c591" containerName="glance-httpd" Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.112635 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="550dce7e-f8f7-4df2-b338-028e9726c591" containerName="glance-log" Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.112663 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="84218980-f997-4443-8dec-0bb2761f0527" containerName="keystone-bootstrap" Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.112684 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="b59f3d3a-1320-45ea-9922-1e5c47636f98" containerName="glance-httpd" Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.112698 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="b59f3d3a-1320-45ea-9922-1e5c47636f98" containerName="glance-log" Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.113911 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.124449 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.124926 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.125524 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.126046 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-tsjl8" Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.128902 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.139070 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.153373 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.164153 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.166607 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.169039 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.169219 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.175977 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.221386 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/32c327f2-9986-410e-972f-4913811fb1d8-scripts\") pod \"glance-default-internal-api-0\" (UID: \"32c327f2-9986-410e-972f-4913811fb1d8\") " pod="openstack/glance-default-internal-api-0" Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.221489 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/32c327f2-9986-410e-972f-4913811fb1d8-logs\") pod \"glance-default-internal-api-0\" (UID: \"32c327f2-9986-410e-972f-4913811fb1d8\") " pod="openstack/glance-default-internal-api-0" Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.221530 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32c327f2-9986-410e-972f-4913811fb1d8-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"32c327f2-9986-410e-972f-4913811fb1d8\") " pod="openstack/glance-default-internal-api-0" Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.221564 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z5x77\" (UniqueName: \"kubernetes.io/projected/32c327f2-9986-410e-972f-4913811fb1d8-kube-api-access-z5x77\") pod \"glance-default-internal-api-0\" (UID: \"32c327f2-9986-410e-972f-4913811fb1d8\") " pod="openstack/glance-default-internal-api-0" Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.221616 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/32c327f2-9986-410e-972f-4913811fb1d8-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"32c327f2-9986-410e-972f-4913811fb1d8\") " pod="openstack/glance-default-internal-api-0" Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.221728 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/32c327f2-9986-410e-972f-4913811fb1d8-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"32c327f2-9986-410e-972f-4913811fb1d8\") " pod="openstack/glance-default-internal-api-0" Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.221794 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"32c327f2-9986-410e-972f-4913811fb1d8\") " pod="openstack/glance-default-internal-api-0" Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.221855 4762 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/32c327f2-9986-410e-972f-4913811fb1d8-config-data\") pod \"glance-default-internal-api-0\" (UID: \"32c327f2-9986-410e-972f-4913811fb1d8\") " pod="openstack/glance-default-internal-api-0" Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.323271 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/32c327f2-9986-410e-972f-4913811fb1d8-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"32c327f2-9986-410e-972f-4913811fb1d8\") " pod="openstack/glance-default-internal-api-0" Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.323326 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/32c327f2-9986-410e-972f-4913811fb1d8-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"32c327f2-9986-410e-972f-4913811fb1d8\") " pod="openstack/glance-default-internal-api-0" Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.323366 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"32c327f2-9986-410e-972f-4913811fb1d8\") " pod="openstack/glance-default-internal-api-0" Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.323634 4762 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"32c327f2-9986-410e-972f-4913811fb1d8\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/glance-default-internal-api-0" Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.324185 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/32c327f2-9986-410e-972f-4913811fb1d8-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"32c327f2-9986-410e-972f-4913811fb1d8\") " pod="openstack/glance-default-internal-api-0" Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.325512 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4b8ec4f0-adb7-41f8-8552-f23a5dfca100-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"4b8ec4f0-adb7-41f8-8552-f23a5dfca100\") " pod="openstack/glance-default-external-api-0" Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.325776 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/32c327f2-9986-410e-972f-4913811fb1d8-config-data\") pod \"glance-default-internal-api-0\" (UID: \"32c327f2-9986-410e-972f-4913811fb1d8\") " pod="openstack/glance-default-internal-api-0" Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.325911 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xcrfz\" (UniqueName: \"kubernetes.io/projected/4b8ec4f0-adb7-41f8-8552-f23a5dfca100-kube-api-access-xcrfz\") pod \"glance-default-external-api-0\" (UID: \"4b8ec4f0-adb7-41f8-8552-f23a5dfca100\") " pod="openstack/glance-default-external-api-0" Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.325996 4762 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b8ec4f0-adb7-41f8-8552-f23a5dfca100-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"4b8ec4f0-adb7-41f8-8552-f23a5dfca100\") " pod="openstack/glance-default-external-api-0" Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.326037 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4b8ec4f0-adb7-41f8-8552-f23a5dfca100-config-data\") pod \"glance-default-external-api-0\" (UID: \"4b8ec4f0-adb7-41f8-8552-f23a5dfca100\") " pod="openstack/glance-default-external-api-0" Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.326070 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/32c327f2-9986-410e-972f-4913811fb1d8-scripts\") pod \"glance-default-internal-api-0\" (UID: \"32c327f2-9986-410e-972f-4913811fb1d8\") " pod="openstack/glance-default-internal-api-0" Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.326146 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/32c327f2-9986-410e-972f-4913811fb1d8-logs\") pod \"glance-default-internal-api-0\" (UID: \"32c327f2-9986-410e-972f-4913811fb1d8\") " pod="openstack/glance-default-internal-api-0" Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.326188 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4b8ec4f0-adb7-41f8-8552-f23a5dfca100-logs\") pod \"glance-default-external-api-0\" (UID: \"4b8ec4f0-adb7-41f8-8552-f23a5dfca100\") " pod="openstack/glance-default-external-api-0" Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.326208 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"4b8ec4f0-adb7-41f8-8552-f23a5dfca100\") " pod="openstack/glance-default-external-api-0" Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.326229 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32c327f2-9986-410e-972f-4913811fb1d8-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"32c327f2-9986-410e-972f-4913811fb1d8\") " pod="openstack/glance-default-internal-api-0" Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.326272 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z5x77\" (UniqueName: \"kubernetes.io/projected/32c327f2-9986-410e-972f-4913811fb1d8-kube-api-access-z5x77\") pod \"glance-default-internal-api-0\" (UID: \"32c327f2-9986-410e-972f-4913811fb1d8\") " pod="openstack/glance-default-internal-api-0" Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.326308 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4b8ec4f0-adb7-41f8-8552-f23a5dfca100-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"4b8ec4f0-adb7-41f8-8552-f23a5dfca100\") " pod="openstack/glance-default-external-api-0" Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.326328 4762 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4b8ec4f0-adb7-41f8-8552-f23a5dfca100-scripts\") pod \"glance-default-external-api-0\" (UID: \"4b8ec4f0-adb7-41f8-8552-f23a5dfca100\") " pod="openstack/glance-default-external-api-0" Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.326493 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/32c327f2-9986-410e-972f-4913811fb1d8-logs\") pod \"glance-default-internal-api-0\" (UID: \"32c327f2-9986-410e-972f-4913811fb1d8\") " pod="openstack/glance-default-internal-api-0" Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.331546 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/32c327f2-9986-410e-972f-4913811fb1d8-scripts\") pod \"glance-default-internal-api-0\" (UID: \"32c327f2-9986-410e-972f-4913811fb1d8\") " pod="openstack/glance-default-internal-api-0" Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.331786 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/32c327f2-9986-410e-972f-4913811fb1d8-config-data\") pod \"glance-default-internal-api-0\" (UID: \"32c327f2-9986-410e-972f-4913811fb1d8\") " pod="openstack/glance-default-internal-api-0" Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.335360 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32c327f2-9986-410e-972f-4913811fb1d8-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"32c327f2-9986-410e-972f-4913811fb1d8\") " pod="openstack/glance-default-internal-api-0" Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.344176 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/32c327f2-9986-410e-972f-4913811fb1d8-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"32c327f2-9986-410e-972f-4913811fb1d8\") " pod="openstack/glance-default-internal-api-0" Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.345735 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z5x77\" (UniqueName: \"kubernetes.io/projected/32c327f2-9986-410e-972f-4913811fb1d8-kube-api-access-z5x77\") pod \"glance-default-internal-api-0\" (UID: \"32c327f2-9986-410e-972f-4913811fb1d8\") " pod="openstack/glance-default-internal-api-0" Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.359956 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"32c327f2-9986-410e-972f-4913811fb1d8\") " pod="openstack/glance-default-internal-api-0" Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.427727 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4b8ec4f0-adb7-41f8-8552-f23a5dfca100-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"4b8ec4f0-adb7-41f8-8552-f23a5dfca100\") " pod="openstack/glance-default-external-api-0" Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.428090 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/4b8ec4f0-adb7-41f8-8552-f23a5dfca100-scripts\") pod \"glance-default-external-api-0\" (UID: \"4b8ec4f0-adb7-41f8-8552-f23a5dfca100\") " pod="openstack/glance-default-external-api-0" Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.428156 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4b8ec4f0-adb7-41f8-8552-f23a5dfca100-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"4b8ec4f0-adb7-41f8-8552-f23a5dfca100\") " pod="openstack/glance-default-external-api-0" Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.428255 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xcrfz\" (UniqueName: \"kubernetes.io/projected/4b8ec4f0-adb7-41f8-8552-f23a5dfca100-kube-api-access-xcrfz\") pod \"glance-default-external-api-0\" (UID: \"4b8ec4f0-adb7-41f8-8552-f23a5dfca100\") " pod="openstack/glance-default-external-api-0" Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.428292 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b8ec4f0-adb7-41f8-8552-f23a5dfca100-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"4b8ec4f0-adb7-41f8-8552-f23a5dfca100\") " pod="openstack/glance-default-external-api-0" Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.428327 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4b8ec4f0-adb7-41f8-8552-f23a5dfca100-config-data\") pod \"glance-default-external-api-0\" (UID: \"4b8ec4f0-adb7-41f8-8552-f23a5dfca100\") " pod="openstack/glance-default-external-api-0" Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.428390 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4b8ec4f0-adb7-41f8-8552-f23a5dfca100-logs\") pod \"glance-default-external-api-0\" (UID: \"4b8ec4f0-adb7-41f8-8552-f23a5dfca100\") " pod="openstack/glance-default-external-api-0" Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.428418 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"4b8ec4f0-adb7-41f8-8552-f23a5dfca100\") " pod="openstack/glance-default-external-api-0" Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.428566 4762 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"4b8ec4f0-adb7-41f8-8552-f23a5dfca100\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/glance-default-external-api-0" Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.428898 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4b8ec4f0-adb7-41f8-8552-f23a5dfca100-logs\") pod \"glance-default-external-api-0\" (UID: \"4b8ec4f0-adb7-41f8-8552-f23a5dfca100\") " pod="openstack/glance-default-external-api-0" Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.431523 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4b8ec4f0-adb7-41f8-8552-f23a5dfca100-httpd-run\") pod \"glance-default-external-api-0\" 
(UID: \"4b8ec4f0-adb7-41f8-8552-f23a5dfca100\") " pod="openstack/glance-default-external-api-0" Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.432989 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4b8ec4f0-adb7-41f8-8552-f23a5dfca100-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"4b8ec4f0-adb7-41f8-8552-f23a5dfca100\") " pod="openstack/glance-default-external-api-0" Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.433091 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4b8ec4f0-adb7-41f8-8552-f23a5dfca100-config-data\") pod \"glance-default-external-api-0\" (UID: \"4b8ec4f0-adb7-41f8-8552-f23a5dfca100\") " pod="openstack/glance-default-external-api-0" Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.433763 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b8ec4f0-adb7-41f8-8552-f23a5dfca100-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"4b8ec4f0-adb7-41f8-8552-f23a5dfca100\") " pod="openstack/glance-default-external-api-0" Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.435119 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4b8ec4f0-adb7-41f8-8552-f23a5dfca100-scripts\") pod \"glance-default-external-api-0\" (UID: \"4b8ec4f0-adb7-41f8-8552-f23a5dfca100\") " pod="openstack/glance-default-external-api-0" Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.447304 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.457189 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xcrfz\" (UniqueName: \"kubernetes.io/projected/4b8ec4f0-adb7-41f8-8552-f23a5dfca100-kube-api-access-xcrfz\") pod \"glance-default-external-api-0\" (UID: \"4b8ec4f0-adb7-41f8-8552-f23a5dfca100\") " pod="openstack/glance-default-external-api-0" Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.467238 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"4b8ec4f0-adb7-41f8-8552-f23a5dfca100\") " pod="openstack/glance-default-external-api-0" Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.488881 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.519181 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-pfvgh"] Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.534195 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-pfvgh"] Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.608919 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-cgmfg"] Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.609953 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-cgmfg" Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.611709 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.611898 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-zwg65" Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.612261 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.613355 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.620935 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-cgmfg"] Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.735538 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9fca521-9caa-4bfe-8cbc-f2b0467b2a50-combined-ca-bundle\") pod \"keystone-bootstrap-cgmfg\" (UID: \"e9fca521-9caa-4bfe-8cbc-f2b0467b2a50\") " pod="openstack/keystone-bootstrap-cgmfg" Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.735760 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wcxzb\" (UniqueName: \"kubernetes.io/projected/e9fca521-9caa-4bfe-8cbc-f2b0467b2a50-kube-api-access-wcxzb\") pod \"keystone-bootstrap-cgmfg\" (UID: \"e9fca521-9caa-4bfe-8cbc-f2b0467b2a50\") " pod="openstack/keystone-bootstrap-cgmfg" Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.735841 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e9fca521-9caa-4bfe-8cbc-f2b0467b2a50-scripts\") pod \"keystone-bootstrap-cgmfg\" (UID: \"e9fca521-9caa-4bfe-8cbc-f2b0467b2a50\") " pod="openstack/keystone-bootstrap-cgmfg" Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.735892 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/e9fca521-9caa-4bfe-8cbc-f2b0467b2a50-credential-keys\") pod \"keystone-bootstrap-cgmfg\" (UID: \"e9fca521-9caa-4bfe-8cbc-f2b0467b2a50\") " pod="openstack/keystone-bootstrap-cgmfg" Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.736000 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e9fca521-9caa-4bfe-8cbc-f2b0467b2a50-config-data\") pod \"keystone-bootstrap-cgmfg\" (UID: \"e9fca521-9caa-4bfe-8cbc-f2b0467b2a50\") " pod="openstack/keystone-bootstrap-cgmfg" Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.736068 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/e9fca521-9caa-4bfe-8cbc-f2b0467b2a50-fernet-keys\") pod \"keystone-bootstrap-cgmfg\" (UID: \"e9fca521-9caa-4bfe-8cbc-f2b0467b2a50\") " pod="openstack/keystone-bootstrap-cgmfg" Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.838118 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9fca521-9caa-4bfe-8cbc-f2b0467b2a50-combined-ca-bundle\") pod 
\"keystone-bootstrap-cgmfg\" (UID: \"e9fca521-9caa-4bfe-8cbc-f2b0467b2a50\") " pod="openstack/keystone-bootstrap-cgmfg" Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.838241 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wcxzb\" (UniqueName: \"kubernetes.io/projected/e9fca521-9caa-4bfe-8cbc-f2b0467b2a50-kube-api-access-wcxzb\") pod \"keystone-bootstrap-cgmfg\" (UID: \"e9fca521-9caa-4bfe-8cbc-f2b0467b2a50\") " pod="openstack/keystone-bootstrap-cgmfg" Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.838307 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e9fca521-9caa-4bfe-8cbc-f2b0467b2a50-scripts\") pod \"keystone-bootstrap-cgmfg\" (UID: \"e9fca521-9caa-4bfe-8cbc-f2b0467b2a50\") " pod="openstack/keystone-bootstrap-cgmfg" Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.838347 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/e9fca521-9caa-4bfe-8cbc-f2b0467b2a50-credential-keys\") pod \"keystone-bootstrap-cgmfg\" (UID: \"e9fca521-9caa-4bfe-8cbc-f2b0467b2a50\") " pod="openstack/keystone-bootstrap-cgmfg" Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.838405 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e9fca521-9caa-4bfe-8cbc-f2b0467b2a50-config-data\") pod \"keystone-bootstrap-cgmfg\" (UID: \"e9fca521-9caa-4bfe-8cbc-f2b0467b2a50\") " pod="openstack/keystone-bootstrap-cgmfg" Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.838447 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/e9fca521-9caa-4bfe-8cbc-f2b0467b2a50-fernet-keys\") pod \"keystone-bootstrap-cgmfg\" (UID: \"e9fca521-9caa-4bfe-8cbc-f2b0467b2a50\") " pod="openstack/keystone-bootstrap-cgmfg" Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.843030 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/e9fca521-9caa-4bfe-8cbc-f2b0467b2a50-fernet-keys\") pod \"keystone-bootstrap-cgmfg\" (UID: \"e9fca521-9caa-4bfe-8cbc-f2b0467b2a50\") " pod="openstack/keystone-bootstrap-cgmfg" Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.844231 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/e9fca521-9caa-4bfe-8cbc-f2b0467b2a50-credential-keys\") pod \"keystone-bootstrap-cgmfg\" (UID: \"e9fca521-9caa-4bfe-8cbc-f2b0467b2a50\") " pod="openstack/keystone-bootstrap-cgmfg" Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.848302 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e9fca521-9caa-4bfe-8cbc-f2b0467b2a50-scripts\") pod \"keystone-bootstrap-cgmfg\" (UID: \"e9fca521-9caa-4bfe-8cbc-f2b0467b2a50\") " pod="openstack/keystone-bootstrap-cgmfg" Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.856322 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9fca521-9caa-4bfe-8cbc-f2b0467b2a50-combined-ca-bundle\") pod \"keystone-bootstrap-cgmfg\" (UID: \"e9fca521-9caa-4bfe-8cbc-f2b0467b2a50\") " pod="openstack/keystone-bootstrap-cgmfg" Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.857720 4762 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wcxzb\" (UniqueName: \"kubernetes.io/projected/e9fca521-9caa-4bfe-8cbc-f2b0467b2a50-kube-api-access-wcxzb\") pod \"keystone-bootstrap-cgmfg\" (UID: \"e9fca521-9caa-4bfe-8cbc-f2b0467b2a50\") " pod="openstack/keystone-bootstrap-cgmfg"
Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.859107 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e9fca521-9caa-4bfe-8cbc-f2b0467b2a50-config-data\") pod \"keystone-bootstrap-cgmfg\" (UID: \"e9fca521-9caa-4bfe-8cbc-f2b0467b2a50\") " pod="openstack/keystone-bootstrap-cgmfg"
Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.928321 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-cgmfg"
Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.969005 4762 patch_prober.go:28] interesting pod/machine-config-daemon-5v6hv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.969054 4762 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.969110 4762 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv"
Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.969600 4762 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"b2aad5d4c295d0a00a0ffcfb5183a47f48def84cfba6c0072cb314e437157ce7"} pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Oct 09 13:46:41 crc kubenswrapper[4762]: I1009 13:46:41.969663 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" containerID="cri-o://b2aad5d4c295d0a00a0ffcfb5183a47f48def84cfba6c0072cb314e437157ce7" gracePeriod=600
Oct 09 13:46:42 crc kubenswrapper[4762]: E1009 13:46:42.272567 4762 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod366049a3_acf6_488c_9f93_4557528d6d14.slice/crio-conmon-b2aad5d4c295d0a00a0ffcfb5183a47f48def84cfba6c0072cb314e437157ce7.scope\": RecentStats: unable to find data in memory cache]"
Oct 09 13:46:42 crc kubenswrapper[4762]: I1009 13:46:42.977793 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="550dce7e-f8f7-4df2-b338-028e9726c591" path="/var/lib/kubelet/pods/550dce7e-f8f7-4df2-b338-028e9726c591/volumes"
Oct 09 13:46:42 crc kubenswrapper[4762]: I1009 13:46:42.979113 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="84218980-f997-4443-8dec-0bb2761f0527" path="/var/lib/kubelet/pods/84218980-f997-4443-8dec-0bb2761f0527/volumes"
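
The machine-config-daemon sequence above is the kubelet's standard liveness-kill path: the prober's HTTP GET against http://127.0.0.1:8798/health is refused, the probe flips to unhealthy, and the container is killed with its 600s grace period before being restarted (the matching ContainerStarted event appears further down, at 13:46:52). A minimal triage sketch, assuming shell access on the node, that repeats the same GET the prober issues (Python; the URL is copied verbatim from the records above) to distinguish 'nothing listening' from a merely slow handler:

import urllib.request, urllib.error

# Endpoint copied verbatim from the prober records above (hypothetical
# triage step, run on the node itself).
URL = "http://127.0.0.1:8798/health"

try:
    with urllib.request.urlopen(URL, timeout=5) as resp:
        print("status:", resp.status)  # any response means something is listening
except urllib.error.URLError as e:
    # "connection refused" here matches the log's dial error: nothing is
    # bound to 8798, as opposed to a handler that is just slow to answer.
    print("probe-equivalent failure:", e.reason)
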
path="/var/lib/kubelet/pods/84218980-f997-4443-8dec-0bb2761f0527/volumes" Oct 09 13:46:42 crc kubenswrapper[4762]: I1009 13:46:42.979702 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b59f3d3a-1320-45ea-9922-1e5c47636f98" path="/var/lib/kubelet/pods/b59f3d3a-1320-45ea-9922-1e5c47636f98/volumes" Oct 09 13:46:43 crc kubenswrapper[4762]: I1009 13:46:43.050744 4762 generic.go:334] "Generic (PLEG): container finished" podID="366049a3-acf6-488c-9f93-4557528d6d14" containerID="b2aad5d4c295d0a00a0ffcfb5183a47f48def84cfba6c0072cb314e437157ce7" exitCode=0 Oct 09 13:46:43 crc kubenswrapper[4762]: I1009 13:46:43.050817 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" event={"ID":"366049a3-acf6-488c-9f93-4557528d6d14","Type":"ContainerDied","Data":"b2aad5d4c295d0a00a0ffcfb5183a47f48def84cfba6c0072cb314e437157ce7"} Oct 09 13:46:49 crc kubenswrapper[4762]: I1009 13:46:49.741380 4762 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-6d5b6d6b67-6qflh" podUID="15ffec36-732e-48f3-b5b8-52038bc8da8a" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.125:5353: i/o timeout" Oct 09 13:46:49 crc kubenswrapper[4762]: I1009 13:46:49.742184 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6d5b6d6b67-6qflh" Oct 09 13:46:50 crc kubenswrapper[4762]: I1009 13:46:50.150690 4762 scope.go:117] "RemoveContainer" containerID="6d2711bd3f242761497bd8dbfc1b5ec70c15e1a87585b9a758a70f517a0dc495" Oct 09 13:46:50 crc kubenswrapper[4762]: I1009 13:46:50.220473 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6d5b6d6b67-6qflh" Oct 09 13:46:50 crc kubenswrapper[4762]: I1009 13:46:50.282214 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/15ffec36-732e-48f3-b5b8-52038bc8da8a-ovsdbserver-nb\") pod \"15ffec36-732e-48f3-b5b8-52038bc8da8a\" (UID: \"15ffec36-732e-48f3-b5b8-52038bc8da8a\") " Oct 09 13:46:50 crc kubenswrapper[4762]: I1009 13:46:50.282268 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/15ffec36-732e-48f3-b5b8-52038bc8da8a-dns-swift-storage-0\") pod \"15ffec36-732e-48f3-b5b8-52038bc8da8a\" (UID: \"15ffec36-732e-48f3-b5b8-52038bc8da8a\") " Oct 09 13:46:50 crc kubenswrapper[4762]: I1009 13:46:50.282375 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/15ffec36-732e-48f3-b5b8-52038bc8da8a-dns-svc\") pod \"15ffec36-732e-48f3-b5b8-52038bc8da8a\" (UID: \"15ffec36-732e-48f3-b5b8-52038bc8da8a\") " Oct 09 13:46:50 crc kubenswrapper[4762]: I1009 13:46:50.282407 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/15ffec36-732e-48f3-b5b8-52038bc8da8a-ovsdbserver-sb\") pod \"15ffec36-732e-48f3-b5b8-52038bc8da8a\" (UID: \"15ffec36-732e-48f3-b5b8-52038bc8da8a\") " Oct 09 13:46:50 crc kubenswrapper[4762]: I1009 13:46:50.282477 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/15ffec36-732e-48f3-b5b8-52038bc8da8a-config\") pod \"15ffec36-732e-48f3-b5b8-52038bc8da8a\" (UID: \"15ffec36-732e-48f3-b5b8-52038bc8da8a\") " Oct 09 13:46:50 crc 
kubenswrapper[4762]: I1009 13:46:50.282580 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7j6bf\" (UniqueName: \"kubernetes.io/projected/15ffec36-732e-48f3-b5b8-52038bc8da8a-kube-api-access-7j6bf\") pod \"15ffec36-732e-48f3-b5b8-52038bc8da8a\" (UID: \"15ffec36-732e-48f3-b5b8-52038bc8da8a\") " Oct 09 13:46:50 crc kubenswrapper[4762]: I1009 13:46:50.306852 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/15ffec36-732e-48f3-b5b8-52038bc8da8a-kube-api-access-7j6bf" (OuterVolumeSpecName: "kube-api-access-7j6bf") pod "15ffec36-732e-48f3-b5b8-52038bc8da8a" (UID: "15ffec36-732e-48f3-b5b8-52038bc8da8a"). InnerVolumeSpecName "kube-api-access-7j6bf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:46:50 crc kubenswrapper[4762]: I1009 13:46:50.333835 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/15ffec36-732e-48f3-b5b8-52038bc8da8a-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "15ffec36-732e-48f3-b5b8-52038bc8da8a" (UID: "15ffec36-732e-48f3-b5b8-52038bc8da8a"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:46:50 crc kubenswrapper[4762]: I1009 13:46:50.343687 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/15ffec36-732e-48f3-b5b8-52038bc8da8a-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "15ffec36-732e-48f3-b5b8-52038bc8da8a" (UID: "15ffec36-732e-48f3-b5b8-52038bc8da8a"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:46:50 crc kubenswrapper[4762]: I1009 13:46:50.346573 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/15ffec36-732e-48f3-b5b8-52038bc8da8a-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "15ffec36-732e-48f3-b5b8-52038bc8da8a" (UID: "15ffec36-732e-48f3-b5b8-52038bc8da8a"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:46:50 crc kubenswrapper[4762]: I1009 13:46:50.350955 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/15ffec36-732e-48f3-b5b8-52038bc8da8a-config" (OuterVolumeSpecName: "config") pod "15ffec36-732e-48f3-b5b8-52038bc8da8a" (UID: "15ffec36-732e-48f3-b5b8-52038bc8da8a"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:46:50 crc kubenswrapper[4762]: I1009 13:46:50.362099 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/15ffec36-732e-48f3-b5b8-52038bc8da8a-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "15ffec36-732e-48f3-b5b8-52038bc8da8a" (UID: "15ffec36-732e-48f3-b5b8-52038bc8da8a"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:46:50 crc kubenswrapper[4762]: I1009 13:46:50.384362 4762 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/15ffec36-732e-48f3-b5b8-52038bc8da8a-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 09 13:46:50 crc kubenswrapper[4762]: I1009 13:46:50.384403 4762 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/15ffec36-732e-48f3-b5b8-52038bc8da8a-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 09 13:46:50 crc kubenswrapper[4762]: I1009 13:46:50.384417 4762 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/15ffec36-732e-48f3-b5b8-52038bc8da8a-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 09 13:46:50 crc kubenswrapper[4762]: I1009 13:46:50.384428 4762 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/15ffec36-732e-48f3-b5b8-52038bc8da8a-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 09 13:46:50 crc kubenswrapper[4762]: I1009 13:46:50.384439 4762 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/15ffec36-732e-48f3-b5b8-52038bc8da8a-config\") on node \"crc\" DevicePath \"\"" Oct 09 13:46:50 crc kubenswrapper[4762]: I1009 13:46:50.384449 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7j6bf\" (UniqueName: \"kubernetes.io/projected/15ffec36-732e-48f3-b5b8-52038bc8da8a-kube-api-access-7j6bf\") on node \"crc\" DevicePath \"\"" Oct 09 13:46:51 crc kubenswrapper[4762]: I1009 13:46:51.119573 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6d5b6d6b67-6qflh" event={"ID":"15ffec36-732e-48f3-b5b8-52038bc8da8a","Type":"ContainerDied","Data":"61f4b21a80281fa2ba2d959b8494f14efaf9863927899db1a224295f0cfcfdb2"} Oct 09 13:46:51 crc kubenswrapper[4762]: I1009 13:46:51.119765 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6d5b6d6b67-6qflh" Oct 09 13:46:51 crc kubenswrapper[4762]: I1009 13:46:51.140486 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6d5b6d6b67-6qflh"] Oct 09 13:46:51 crc kubenswrapper[4762]: I1009 13:46:51.147851 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6d5b6d6b67-6qflh"] Oct 09 13:46:51 crc kubenswrapper[4762]: E1009 13:46:51.579187 4762 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified" Oct 09 13:46:51 crc kubenswrapper[4762]: E1009 13:46:51.579395 4762 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cinder-db-sync,Image:quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-machine-id,ReadOnly:true,MountPath:/etc/machine-id,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/merged,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/cinder/cinder.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-zp64z,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-db-sync-glh4c_openstack(5bba279a-35cf-4a4e-8632-2098cad8fa08): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 09 13:46:51 crc kubenswrapper[4762]: E1009 13:46:51.581009 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with 
ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cinder-db-sync-glh4c" podUID="5bba279a-35cf-4a4e-8632-2098cad8fa08" Oct 09 13:46:51 crc kubenswrapper[4762]: I1009 13:46:51.586992 4762 scope.go:117] "RemoveContainer" containerID="fd4ba9a220a47e2b1e6c41657c714c847f2fbe05a5a726c8e6e82c8552f73e90" Oct 09 13:46:51 crc kubenswrapper[4762]: I1009 13:46:51.769138 4762 scope.go:117] "RemoveContainer" containerID="cb8d35c43d0a57f326915229974cdffdc516b284b6b88c5498470997e7f01c6b" Oct 09 13:46:51 crc kubenswrapper[4762]: I1009 13:46:51.806045 4762 scope.go:117] "RemoveContainer" containerID="d57a89488c0d1b6d5f453b504f52722aa68ac67c28d2410055ce9ab4d7c5ecc7" Oct 09 13:46:51 crc kubenswrapper[4762]: I1009 13:46:51.837432 4762 scope.go:117] "RemoveContainer" containerID="1a1d7f56dafc809021ef97bf0a3dd8ebb7abbcae63d2e822e02f7c3c37e43677" Oct 09 13:46:51 crc kubenswrapper[4762]: I1009 13:46:51.858694 4762 scope.go:117] "RemoveContainer" containerID="5c6c11f71c1b131886d6d9889dab31b0d63ead94f5e9e79f72323cf2e6d95dd1" Oct 09 13:46:52 crc kubenswrapper[4762]: I1009 13:46:52.106168 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-cgmfg"] Oct 09 13:46:52 crc kubenswrapper[4762]: I1009 13:46:52.141320 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6","Type":"ContainerStarted","Data":"c9ffef1e5361c8db03e232f2563a25595647571940c40e43a0beb3520cdce0ce"} Oct 09 13:46:52 crc kubenswrapper[4762]: I1009 13:46:52.142892 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 09 13:46:52 crc kubenswrapper[4762]: I1009 13:46:52.143152 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-cgmfg" event={"ID":"e9fca521-9caa-4bfe-8cbc-f2b0467b2a50","Type":"ContainerStarted","Data":"14b92ed215d244987fae5c845deeb1e598bf062ed6cc9e8e6c38b9e58f901fa0"} Oct 09 13:46:52 crc kubenswrapper[4762]: W1009 13:46:52.145942 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4b8ec4f0_adb7_41f8_8552_f23a5dfca100.slice/crio-74f2ca44dc16b904dd3391648182e0bc22ee69e28356595de3f675b706d33d9f WatchSource:0}: Error finding container 74f2ca44dc16b904dd3391648182e0bc22ee69e28356595de3f675b706d33d9f: Status 404 returned error can't find the container with id 74f2ca44dc16b904dd3391648182e0bc22ee69e28356595de3f675b706d33d9f Oct 09 13:46:52 crc kubenswrapper[4762]: I1009 13:46:52.154769 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" event={"ID":"366049a3-acf6-488c-9f93-4557528d6d14","Type":"ContainerStarted","Data":"d81bf816f9df0a4608111c018f22c6c88355ce8d892f536686af61e013f264fc"} Oct 09 13:46:52 crc kubenswrapper[4762]: E1009 13:46:52.160945 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified\\\"\"" pod="openstack/cinder-db-sync-glh4c" podUID="5bba279a-35cf-4a4e-8632-2098cad8fa08" Oct 09 13:46:52 crc kubenswrapper[4762]: I1009 13:46:52.215036 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 09 13:46:52 crc kubenswrapper[4762]: W1009 13:46:52.233055 4762 manager.go:1169] Failed to process watch event 
{EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod32c327f2_9986_410e_972f_4913811fb1d8.slice/crio-b1640c29a8e318609d2ba4aaca20d4844f5e228bbb80d9c3110f56c114c554f5 WatchSource:0}: Error finding container b1640c29a8e318609d2ba4aaca20d4844f5e228bbb80d9c3110f56c114c554f5: Status 404 returned error can't find the container with id b1640c29a8e318609d2ba4aaca20d4844f5e228bbb80d9c3110f56c114c554f5 Oct 09 13:46:52 crc kubenswrapper[4762]: I1009 13:46:52.978565 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="15ffec36-732e-48f3-b5b8-52038bc8da8a" path="/var/lib/kubelet/pods/15ffec36-732e-48f3-b5b8-52038bc8da8a/volumes" Oct 09 13:46:53 crc kubenswrapper[4762]: I1009 13:46:53.170618 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"32c327f2-9986-410e-972f-4913811fb1d8","Type":"ContainerStarted","Data":"00d36f7c2beef733a43b4d1caf4716e4c4c2c7fdd8d236f178fa3e8ccfc978d5"} Oct 09 13:46:53 crc kubenswrapper[4762]: I1009 13:46:53.170967 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"32c327f2-9986-410e-972f-4913811fb1d8","Type":"ContainerStarted","Data":"b1640c29a8e318609d2ba4aaca20d4844f5e228bbb80d9c3110f56c114c554f5"} Oct 09 13:46:53 crc kubenswrapper[4762]: I1009 13:46:53.174015 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"4b8ec4f0-adb7-41f8-8552-f23a5dfca100","Type":"ContainerStarted","Data":"3ee2ecd42d3541592dfa60b557278e6f9269df0e7fe3828bf670134b10634c48"} Oct 09 13:46:53 crc kubenswrapper[4762]: I1009 13:46:53.174042 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"4b8ec4f0-adb7-41f8-8552-f23a5dfca100","Type":"ContainerStarted","Data":"74f2ca44dc16b904dd3391648182e0bc22ee69e28356595de3f675b706d33d9f"} Oct 09 13:46:53 crc kubenswrapper[4762]: I1009 13:46:53.177891 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-cgmfg" event={"ID":"e9fca521-9caa-4bfe-8cbc-f2b0467b2a50","Type":"ContainerStarted","Data":"02ccd6d047a5024493f679929acd250cf01b71cda01eefb6f39d2c12d1c6caab"} Oct 09 13:46:53 crc kubenswrapper[4762]: I1009 13:46:53.196360 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-cgmfg" podStartSLOduration=12.19634325 podStartE2EDuration="12.19634325s" podCreationTimestamp="2025-10-09 13:46:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:46:53.194945226 +0000 UTC m=+1288.968736275" watchObservedRunningTime="2025-10-09 13:46:53.19634325 +0000 UTC m=+1288.970134279" Oct 09 13:46:54 crc kubenswrapper[4762]: I1009 13:46:54.198344 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6","Type":"ContainerStarted","Data":"040bf2f9501a5d36c48126d463edc324f6d5c53ac41fd303719ba75cc019e86c"} Oct 09 13:46:54 crc kubenswrapper[4762]: I1009 13:46:54.201700 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"32c327f2-9986-410e-972f-4913811fb1d8","Type":"ContainerStarted","Data":"d4a0bba0c2ec293c5641e19b646eb1580a7b1bbcfd4e0c9265398b15bcaca662"} Oct 09 13:46:54 crc kubenswrapper[4762]: I1009 13:46:54.216608 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/glance-default-external-api-0" event={"ID":"4b8ec4f0-adb7-41f8-8552-f23a5dfca100","Type":"ContainerStarted","Data":"398df6e246619b4b62d1ebd1853ebe221dc9d0e416c32b6e225df3d6fc27772e"} Oct 09 13:46:54 crc kubenswrapper[4762]: I1009 13:46:54.231153 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=13.231119766 podStartE2EDuration="13.231119766s" podCreationTimestamp="2025-10-09 13:46:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:46:54.226258208 +0000 UTC m=+1290.000049257" watchObservedRunningTime="2025-10-09 13:46:54.231119766 +0000 UTC m=+1290.004910805" Oct 09 13:46:54 crc kubenswrapper[4762]: I1009 13:46:54.257622 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=13.257601169 podStartE2EDuration="13.257601169s" podCreationTimestamp="2025-10-09 13:46:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:46:54.248558929 +0000 UTC m=+1290.022349988" watchObservedRunningTime="2025-10-09 13:46:54.257601169 +0000 UTC m=+1290.031392208" Oct 09 13:46:54 crc kubenswrapper[4762]: I1009 13:46:54.742417 4762 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-6d5b6d6b67-6qflh" podUID="15ffec36-732e-48f3-b5b8-52038bc8da8a" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.125:5353: i/o timeout" Oct 09 13:46:56 crc kubenswrapper[4762]: I1009 13:46:56.237020 4762 generic.go:334] "Generic (PLEG): container finished" podID="e9fca521-9caa-4bfe-8cbc-f2b0467b2a50" containerID="02ccd6d047a5024493f679929acd250cf01b71cda01eefb6f39d2c12d1c6caab" exitCode=0 Oct 09 13:46:56 crc kubenswrapper[4762]: I1009 13:46:56.237386 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-cgmfg" event={"ID":"e9fca521-9caa-4bfe-8cbc-f2b0467b2a50","Type":"ContainerDied","Data":"02ccd6d047a5024493f679929acd250cf01b71cda01eefb6f39d2c12d1c6caab"} Oct 09 13:46:57 crc kubenswrapper[4762]: I1009 13:46:57.766358 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-cgmfg" Oct 09 13:46:57 crc kubenswrapper[4762]: I1009 13:46:57.930524 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e9fca521-9caa-4bfe-8cbc-f2b0467b2a50-config-data\") pod \"e9fca521-9caa-4bfe-8cbc-f2b0467b2a50\" (UID: \"e9fca521-9caa-4bfe-8cbc-f2b0467b2a50\") " Oct 09 13:46:57 crc kubenswrapper[4762]: I1009 13:46:57.930662 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9fca521-9caa-4bfe-8cbc-f2b0467b2a50-combined-ca-bundle\") pod \"e9fca521-9caa-4bfe-8cbc-f2b0467b2a50\" (UID: \"e9fca521-9caa-4bfe-8cbc-f2b0467b2a50\") " Oct 09 13:46:57 crc kubenswrapper[4762]: I1009 13:46:57.930731 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/e9fca521-9caa-4bfe-8cbc-f2b0467b2a50-credential-keys\") pod \"e9fca521-9caa-4bfe-8cbc-f2b0467b2a50\" (UID: \"e9fca521-9caa-4bfe-8cbc-f2b0467b2a50\") " Oct 09 13:46:57 crc kubenswrapper[4762]: I1009 13:46:57.930780 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e9fca521-9caa-4bfe-8cbc-f2b0467b2a50-scripts\") pod \"e9fca521-9caa-4bfe-8cbc-f2b0467b2a50\" (UID: \"e9fca521-9caa-4bfe-8cbc-f2b0467b2a50\") " Oct 09 13:46:57 crc kubenswrapper[4762]: I1009 13:46:57.930937 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/e9fca521-9caa-4bfe-8cbc-f2b0467b2a50-fernet-keys\") pod \"e9fca521-9caa-4bfe-8cbc-f2b0467b2a50\" (UID: \"e9fca521-9caa-4bfe-8cbc-f2b0467b2a50\") " Oct 09 13:46:57 crc kubenswrapper[4762]: I1009 13:46:57.930984 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wcxzb\" (UniqueName: \"kubernetes.io/projected/e9fca521-9caa-4bfe-8cbc-f2b0467b2a50-kube-api-access-wcxzb\") pod \"e9fca521-9caa-4bfe-8cbc-f2b0467b2a50\" (UID: \"e9fca521-9caa-4bfe-8cbc-f2b0467b2a50\") " Oct 09 13:46:57 crc kubenswrapper[4762]: I1009 13:46:57.935757 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e9fca521-9caa-4bfe-8cbc-f2b0467b2a50-kube-api-access-wcxzb" (OuterVolumeSpecName: "kube-api-access-wcxzb") pod "e9fca521-9caa-4bfe-8cbc-f2b0467b2a50" (UID: "e9fca521-9caa-4bfe-8cbc-f2b0467b2a50"). InnerVolumeSpecName "kube-api-access-wcxzb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:46:57 crc kubenswrapper[4762]: I1009 13:46:57.937004 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e9fca521-9caa-4bfe-8cbc-f2b0467b2a50-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "e9fca521-9caa-4bfe-8cbc-f2b0467b2a50" (UID: "e9fca521-9caa-4bfe-8cbc-f2b0467b2a50"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:46:57 crc kubenswrapper[4762]: I1009 13:46:57.937046 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e9fca521-9caa-4bfe-8cbc-f2b0467b2a50-scripts" (OuterVolumeSpecName: "scripts") pod "e9fca521-9caa-4bfe-8cbc-f2b0467b2a50" (UID: "e9fca521-9caa-4bfe-8cbc-f2b0467b2a50"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:46:57 crc kubenswrapper[4762]: I1009 13:46:57.938787 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e9fca521-9caa-4bfe-8cbc-f2b0467b2a50-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "e9fca521-9caa-4bfe-8cbc-f2b0467b2a50" (UID: "e9fca521-9caa-4bfe-8cbc-f2b0467b2a50"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:46:57 crc kubenswrapper[4762]: I1009 13:46:57.963340 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e9fca521-9caa-4bfe-8cbc-f2b0467b2a50-config-data" (OuterVolumeSpecName: "config-data") pod "e9fca521-9caa-4bfe-8cbc-f2b0467b2a50" (UID: "e9fca521-9caa-4bfe-8cbc-f2b0467b2a50"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:46:57 crc kubenswrapper[4762]: I1009 13:46:57.968655 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e9fca521-9caa-4bfe-8cbc-f2b0467b2a50-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e9fca521-9caa-4bfe-8cbc-f2b0467b2a50" (UID: "e9fca521-9caa-4bfe-8cbc-f2b0467b2a50"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:46:58 crc kubenswrapper[4762]: I1009 13:46:58.033347 4762 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/e9fca521-9caa-4bfe-8cbc-f2b0467b2a50-credential-keys\") on node \"crc\" DevicePath \"\"" Oct 09 13:46:58 crc kubenswrapper[4762]: I1009 13:46:58.033696 4762 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e9fca521-9caa-4bfe-8cbc-f2b0467b2a50-scripts\") on node \"crc\" DevicePath \"\"" Oct 09 13:46:58 crc kubenswrapper[4762]: I1009 13:46:58.033705 4762 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/e9fca521-9caa-4bfe-8cbc-f2b0467b2a50-fernet-keys\") on node \"crc\" DevicePath \"\"" Oct 09 13:46:58 crc kubenswrapper[4762]: I1009 13:46:58.033713 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wcxzb\" (UniqueName: \"kubernetes.io/projected/e9fca521-9caa-4bfe-8cbc-f2b0467b2a50-kube-api-access-wcxzb\") on node \"crc\" DevicePath \"\"" Oct 09 13:46:58 crc kubenswrapper[4762]: I1009 13:46:58.033725 4762 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e9fca521-9caa-4bfe-8cbc-f2b0467b2a50-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 13:46:58 crc kubenswrapper[4762]: I1009 13:46:58.033734 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9fca521-9caa-4bfe-8cbc-f2b0467b2a50-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 13:46:58 crc kubenswrapper[4762]: I1009 13:46:58.296983 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6","Type":"ContainerStarted","Data":"ab9ab23121b6564ff3e9a6228bd71c66f0043464c5fbaef06851bd437bf5873e"} Oct 09 13:46:58 crc kubenswrapper[4762]: I1009 13:46:58.300886 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-cgmfg" event={"ID":"e9fca521-9caa-4bfe-8cbc-f2b0467b2a50","Type":"ContainerDied","Data":"14b92ed215d244987fae5c845deeb1e598bf062ed6cc9e8e6c38b9e58f901fa0"} 
Oct 09 13:46:58 crc kubenswrapper[4762]: I1009 13:46:58.300926 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="14b92ed215d244987fae5c845deeb1e598bf062ed6cc9e8e6c38b9e58f901fa0"
Oct 09 13:46:58 crc kubenswrapper[4762]: I1009 13:46:58.301070 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-cgmfg"
Oct 09 13:46:58 crc kubenswrapper[4762]: I1009 13:46:58.303024 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-4tqkk" event={"ID":"5e1f82ca-fcd6-4a8f-ae63-a5d0f9c5917d","Type":"ContainerStarted","Data":"effb344f1dc334fbf893689c9197673d5eca5fb09afba6dd151e0ccd544b21f5"}
Oct 09 13:46:58 crc kubenswrapper[4762]: I1009 13:46:58.309667 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-vxzqs" event={"ID":"f8ef7222-071d-403b-8356-7645662e1226","Type":"ContainerStarted","Data":"a841791cf90fa74f319eca5395a3688fe6dc37b0b882cf9492bb39552d6056fe"}
Oct 09 13:46:58 crc kubenswrapper[4762]: I1009 13:46:58.350366 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-98dbd4bdf-stq5g"]
Oct 09 13:46:58 crc kubenswrapper[4762]: E1009 13:46:58.350851 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="15ffec36-732e-48f3-b5b8-52038bc8da8a" containerName="init"
Oct 09 13:46:58 crc kubenswrapper[4762]: I1009 13:46:58.350873 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="15ffec36-732e-48f3-b5b8-52038bc8da8a" containerName="init"
Oct 09 13:46:58 crc kubenswrapper[4762]: E1009 13:46:58.350893 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e9fca521-9caa-4bfe-8cbc-f2b0467b2a50" containerName="keystone-bootstrap"
Oct 09 13:46:58 crc kubenswrapper[4762]: I1009 13:46:58.350904 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="e9fca521-9caa-4bfe-8cbc-f2b0467b2a50" containerName="keystone-bootstrap"
Oct 09 13:46:58 crc kubenswrapper[4762]: E1009 13:46:58.350987 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="15ffec36-732e-48f3-b5b8-52038bc8da8a" containerName="dnsmasq-dns"
Oct 09 13:46:58 crc kubenswrapper[4762]: I1009 13:46:58.350997 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="15ffec36-732e-48f3-b5b8-52038bc8da8a" containerName="dnsmasq-dns"
Oct 09 13:46:58 crc kubenswrapper[4762]: I1009 13:46:58.350976 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-4tqkk" podStartSLOduration=2.1353200709999998 podStartE2EDuration="34.350916102s" podCreationTimestamp="2025-10-09 13:46:24 +0000 UTC" firstStartedPulling="2025-10-09 13:46:25.641130985 +0000 UTC m=+1261.414922024" lastFinishedPulling="2025-10-09 13:46:57.856727016 +0000 UTC m=+1293.630518055" observedRunningTime="2025-10-09 13:46:58.334722089 +0000 UTC m=+1294.108513138" watchObservedRunningTime="2025-10-09 13:46:58.350916102 +0000 UTC m=+1294.124707141"
Oct 09 13:46:58 crc kubenswrapper[4762]: I1009 13:46:58.351205 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="15ffec36-732e-48f3-b5b8-52038bc8da8a" containerName="dnsmasq-dns"
Oct 09 13:46:58 crc kubenswrapper[4762]: I1009 13:46:58.351236 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="e9fca521-9caa-4bfe-8cbc-f2b0467b2a50" containerName="keystone-bootstrap"
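
The pod_startup_latency_tracker record above for barbican-db-sync-4tqkk (and the matching placement-db-sync-vxzqs record a few lines below) already separates podStartSLOduration from podStartE2EDuration; the gap between them (roughly 32s here) lines up with the firstStartedPulling/lastFinishedPulling window, since the startup SLO metric excludes image pulling. A short extraction sketch (Python; field names taken verbatim from these records):

import re, sys

# Field names match the pod_startup_latency_tracker records in this log.
REC = re.compile(r'pod="(?P<pod>[^"]+)" podStartSLOduration=(?P<slo>[\d.]+) '
                 r'podStartE2EDuration="(?P<e2e>[^"]+)"')

for line in sys.stdin:  # e.g. fed this kubelet.log
    m = REC.search(line)
    if m:
        # SLO duration excludes image pulls, E2E includes them, so the
        # difference approximates time spent pulling (about 32s for the
        # barbican record above, about 35s for placement).
        print(f'{m.group("pod")}: slo={m.group("slo")}s e2e={m.group("e2e")}')
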
Need to start a new one" pod="openstack/keystone-98dbd4bdf-stq5g" Oct 09 13:46:58 crc kubenswrapper[4762]: I1009 13:46:58.358003 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc" Oct 09 13:46:58 crc kubenswrapper[4762]: I1009 13:46:58.358195 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc" Oct 09 13:46:58 crc kubenswrapper[4762]: I1009 13:46:58.358332 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Oct 09 13:46:58 crc kubenswrapper[4762]: I1009 13:46:58.358495 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-zwg65" Oct 09 13:46:58 crc kubenswrapper[4762]: I1009 13:46:58.358665 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Oct 09 13:46:58 crc kubenswrapper[4762]: I1009 13:46:58.363620 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Oct 09 13:46:58 crc kubenswrapper[4762]: I1009 13:46:58.378381 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-98dbd4bdf-stq5g"] Oct 09 13:46:58 crc kubenswrapper[4762]: I1009 13:46:58.386931 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-vxzqs" podStartSLOduration=1.750535294 podStartE2EDuration="36.386909465s" podCreationTimestamp="2025-10-09 13:46:22 +0000 UTC" firstStartedPulling="2025-10-09 13:46:23.217779353 +0000 UTC m=+1258.991570392" lastFinishedPulling="2025-10-09 13:46:57.854153514 +0000 UTC m=+1293.627944563" observedRunningTime="2025-10-09 13:46:58.357557793 +0000 UTC m=+1294.131348832" watchObservedRunningTime="2025-10-09 13:46:58.386909465 +0000 UTC m=+1294.160700504" Oct 09 13:46:58 crc kubenswrapper[4762]: I1009 13:46:58.545182 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/06ff304d-2fbe-412d-8eeb-098ff74fc7a6-public-tls-certs\") pod \"keystone-98dbd4bdf-stq5g\" (UID: \"06ff304d-2fbe-412d-8eeb-098ff74fc7a6\") " pod="openstack/keystone-98dbd4bdf-stq5g" Oct 09 13:46:58 crc kubenswrapper[4762]: I1009 13:46:58.545274 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/06ff304d-2fbe-412d-8eeb-098ff74fc7a6-scripts\") pod \"keystone-98dbd4bdf-stq5g\" (UID: \"06ff304d-2fbe-412d-8eeb-098ff74fc7a6\") " pod="openstack/keystone-98dbd4bdf-stq5g" Oct 09 13:46:58 crc kubenswrapper[4762]: I1009 13:46:58.545304 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gvc2p\" (UniqueName: \"kubernetes.io/projected/06ff304d-2fbe-412d-8eeb-098ff74fc7a6-kube-api-access-gvc2p\") pod \"keystone-98dbd4bdf-stq5g\" (UID: \"06ff304d-2fbe-412d-8eeb-098ff74fc7a6\") " pod="openstack/keystone-98dbd4bdf-stq5g" Oct 09 13:46:58 crc kubenswrapper[4762]: I1009 13:46:58.545358 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/06ff304d-2fbe-412d-8eeb-098ff74fc7a6-combined-ca-bundle\") pod \"keystone-98dbd4bdf-stq5g\" (UID: \"06ff304d-2fbe-412d-8eeb-098ff74fc7a6\") " pod="openstack/keystone-98dbd4bdf-stq5g" Oct 09 13:46:58 crc kubenswrapper[4762]: I1009 13:46:58.545401 4762 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/06ff304d-2fbe-412d-8eeb-098ff74fc7a6-credential-keys\") pod \"keystone-98dbd4bdf-stq5g\" (UID: \"06ff304d-2fbe-412d-8eeb-098ff74fc7a6\") " pod="openstack/keystone-98dbd4bdf-stq5g" Oct 09 13:46:58 crc kubenswrapper[4762]: I1009 13:46:58.545502 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/06ff304d-2fbe-412d-8eeb-098ff74fc7a6-internal-tls-certs\") pod \"keystone-98dbd4bdf-stq5g\" (UID: \"06ff304d-2fbe-412d-8eeb-098ff74fc7a6\") " pod="openstack/keystone-98dbd4bdf-stq5g" Oct 09 13:46:58 crc kubenswrapper[4762]: I1009 13:46:58.545555 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/06ff304d-2fbe-412d-8eeb-098ff74fc7a6-config-data\") pod \"keystone-98dbd4bdf-stq5g\" (UID: \"06ff304d-2fbe-412d-8eeb-098ff74fc7a6\") " pod="openstack/keystone-98dbd4bdf-stq5g" Oct 09 13:46:58 crc kubenswrapper[4762]: I1009 13:46:58.545595 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/06ff304d-2fbe-412d-8eeb-098ff74fc7a6-fernet-keys\") pod \"keystone-98dbd4bdf-stq5g\" (UID: \"06ff304d-2fbe-412d-8eeb-098ff74fc7a6\") " pod="openstack/keystone-98dbd4bdf-stq5g" Oct 09 13:46:58 crc kubenswrapper[4762]: I1009 13:46:58.646848 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/06ff304d-2fbe-412d-8eeb-098ff74fc7a6-credential-keys\") pod \"keystone-98dbd4bdf-stq5g\" (UID: \"06ff304d-2fbe-412d-8eeb-098ff74fc7a6\") " pod="openstack/keystone-98dbd4bdf-stq5g" Oct 09 13:46:58 crc kubenswrapper[4762]: I1009 13:46:58.646975 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/06ff304d-2fbe-412d-8eeb-098ff74fc7a6-internal-tls-certs\") pod \"keystone-98dbd4bdf-stq5g\" (UID: \"06ff304d-2fbe-412d-8eeb-098ff74fc7a6\") " pod="openstack/keystone-98dbd4bdf-stq5g" Oct 09 13:46:58 crc kubenswrapper[4762]: I1009 13:46:58.647017 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/06ff304d-2fbe-412d-8eeb-098ff74fc7a6-config-data\") pod \"keystone-98dbd4bdf-stq5g\" (UID: \"06ff304d-2fbe-412d-8eeb-098ff74fc7a6\") " pod="openstack/keystone-98dbd4bdf-stq5g" Oct 09 13:46:58 crc kubenswrapper[4762]: I1009 13:46:58.647059 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/06ff304d-2fbe-412d-8eeb-098ff74fc7a6-fernet-keys\") pod \"keystone-98dbd4bdf-stq5g\" (UID: \"06ff304d-2fbe-412d-8eeb-098ff74fc7a6\") " pod="openstack/keystone-98dbd4bdf-stq5g" Oct 09 13:46:58 crc kubenswrapper[4762]: I1009 13:46:58.647113 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/06ff304d-2fbe-412d-8eeb-098ff74fc7a6-public-tls-certs\") pod \"keystone-98dbd4bdf-stq5g\" (UID: \"06ff304d-2fbe-412d-8eeb-098ff74fc7a6\") " pod="openstack/keystone-98dbd4bdf-stq5g" Oct 09 13:46:58 crc kubenswrapper[4762]: I1009 13:46:58.647136 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/06ff304d-2fbe-412d-8eeb-098ff74fc7a6-scripts\") pod \"keystone-98dbd4bdf-stq5g\" (UID: \"06ff304d-2fbe-412d-8eeb-098ff74fc7a6\") " pod="openstack/keystone-98dbd4bdf-stq5g" Oct 09 13:46:58 crc kubenswrapper[4762]: I1009 13:46:58.647166 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gvc2p\" (UniqueName: \"kubernetes.io/projected/06ff304d-2fbe-412d-8eeb-098ff74fc7a6-kube-api-access-gvc2p\") pod \"keystone-98dbd4bdf-stq5g\" (UID: \"06ff304d-2fbe-412d-8eeb-098ff74fc7a6\") " pod="openstack/keystone-98dbd4bdf-stq5g" Oct 09 13:46:58 crc kubenswrapper[4762]: I1009 13:46:58.647182 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/06ff304d-2fbe-412d-8eeb-098ff74fc7a6-combined-ca-bundle\") pod \"keystone-98dbd4bdf-stq5g\" (UID: \"06ff304d-2fbe-412d-8eeb-098ff74fc7a6\") " pod="openstack/keystone-98dbd4bdf-stq5g" Oct 09 13:46:58 crc kubenswrapper[4762]: I1009 13:46:58.652555 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/06ff304d-2fbe-412d-8eeb-098ff74fc7a6-public-tls-certs\") pod \"keystone-98dbd4bdf-stq5g\" (UID: \"06ff304d-2fbe-412d-8eeb-098ff74fc7a6\") " pod="openstack/keystone-98dbd4bdf-stq5g" Oct 09 13:46:58 crc kubenswrapper[4762]: I1009 13:46:58.652811 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/06ff304d-2fbe-412d-8eeb-098ff74fc7a6-fernet-keys\") pod \"keystone-98dbd4bdf-stq5g\" (UID: \"06ff304d-2fbe-412d-8eeb-098ff74fc7a6\") " pod="openstack/keystone-98dbd4bdf-stq5g" Oct 09 13:46:58 crc kubenswrapper[4762]: I1009 13:46:58.653077 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/06ff304d-2fbe-412d-8eeb-098ff74fc7a6-scripts\") pod \"keystone-98dbd4bdf-stq5g\" (UID: \"06ff304d-2fbe-412d-8eeb-098ff74fc7a6\") " pod="openstack/keystone-98dbd4bdf-stq5g" Oct 09 13:46:58 crc kubenswrapper[4762]: I1009 13:46:58.653983 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/06ff304d-2fbe-412d-8eeb-098ff74fc7a6-config-data\") pod \"keystone-98dbd4bdf-stq5g\" (UID: \"06ff304d-2fbe-412d-8eeb-098ff74fc7a6\") " pod="openstack/keystone-98dbd4bdf-stq5g" Oct 09 13:46:58 crc kubenswrapper[4762]: I1009 13:46:58.654329 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/06ff304d-2fbe-412d-8eeb-098ff74fc7a6-combined-ca-bundle\") pod \"keystone-98dbd4bdf-stq5g\" (UID: \"06ff304d-2fbe-412d-8eeb-098ff74fc7a6\") " pod="openstack/keystone-98dbd4bdf-stq5g" Oct 09 13:46:58 crc kubenswrapper[4762]: I1009 13:46:58.655860 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/06ff304d-2fbe-412d-8eeb-098ff74fc7a6-internal-tls-certs\") pod \"keystone-98dbd4bdf-stq5g\" (UID: \"06ff304d-2fbe-412d-8eeb-098ff74fc7a6\") " pod="openstack/keystone-98dbd4bdf-stq5g" Oct 09 13:46:58 crc kubenswrapper[4762]: I1009 13:46:58.660073 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/06ff304d-2fbe-412d-8eeb-098ff74fc7a6-credential-keys\") pod \"keystone-98dbd4bdf-stq5g\" (UID: \"06ff304d-2fbe-412d-8eeb-098ff74fc7a6\") " 
pod="openstack/keystone-98dbd4bdf-stq5g" Oct 09 13:46:58 crc kubenswrapper[4762]: I1009 13:46:58.671240 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gvc2p\" (UniqueName: \"kubernetes.io/projected/06ff304d-2fbe-412d-8eeb-098ff74fc7a6-kube-api-access-gvc2p\") pod \"keystone-98dbd4bdf-stq5g\" (UID: \"06ff304d-2fbe-412d-8eeb-098ff74fc7a6\") " pod="openstack/keystone-98dbd4bdf-stq5g" Oct 09 13:46:58 crc kubenswrapper[4762]: I1009 13:46:58.688930 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-98dbd4bdf-stq5g" Oct 09 13:46:59 crc kubenswrapper[4762]: I1009 13:46:59.133321 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-98dbd4bdf-stq5g"] Oct 09 13:46:59 crc kubenswrapper[4762]: W1009 13:46:59.135544 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod06ff304d_2fbe_412d_8eeb_098ff74fc7a6.slice/crio-7c4fbbcedf637a27a556df8edb57f00e0ac787ed08f5e5132d65b318e3e435d0 WatchSource:0}: Error finding container 7c4fbbcedf637a27a556df8edb57f00e0ac787ed08f5e5132d65b318e3e435d0: Status 404 returned error can't find the container with id 7c4fbbcedf637a27a556df8edb57f00e0ac787ed08f5e5132d65b318e3e435d0 Oct 09 13:46:59 crc kubenswrapper[4762]: I1009 13:46:59.318915 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-98dbd4bdf-stq5g" event={"ID":"06ff304d-2fbe-412d-8eeb-098ff74fc7a6","Type":"ContainerStarted","Data":"7c4fbbcedf637a27a556df8edb57f00e0ac787ed08f5e5132d65b318e3e435d0"} Oct 09 13:47:00 crc kubenswrapper[4762]: I1009 13:47:00.332275 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-98dbd4bdf-stq5g" event={"ID":"06ff304d-2fbe-412d-8eeb-098ff74fc7a6","Type":"ContainerStarted","Data":"041eae0fb2897099972d7b4151296f3413a47cd494bf2d693195499557b5f03d"} Oct 09 13:47:00 crc kubenswrapper[4762]: I1009 13:47:00.332613 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-98dbd4bdf-stq5g" Oct 09 13:47:00 crc kubenswrapper[4762]: I1009 13:47:00.358828 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-98dbd4bdf-stq5g" podStartSLOduration=2.358805096 podStartE2EDuration="2.358805096s" podCreationTimestamp="2025-10-09 13:46:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:47:00.350427858 +0000 UTC m=+1296.124218897" watchObservedRunningTime="2025-10-09 13:47:00.358805096 +0000 UTC m=+1296.132596135" Oct 09 13:47:01 crc kubenswrapper[4762]: I1009 13:47:01.447802 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Oct 09 13:47:01 crc kubenswrapper[4762]: I1009 13:47:01.448212 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Oct 09 13:47:01 crc kubenswrapper[4762]: I1009 13:47:01.483177 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Oct 09 13:47:01 crc kubenswrapper[4762]: I1009 13:47:01.490076 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Oct 09 13:47:01 crc kubenswrapper[4762]: I1009 13:47:01.491706 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openstack/glance-default-external-api-0" Oct 09 13:47:01 crc kubenswrapper[4762]: I1009 13:47:01.496165 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Oct 09 13:47:01 crc kubenswrapper[4762]: I1009 13:47:01.528119 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Oct 09 13:47:01 crc kubenswrapper[4762]: I1009 13:47:01.560619 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Oct 09 13:47:02 crc kubenswrapper[4762]: I1009 13:47:02.352118 4762 generic.go:334] "Generic (PLEG): container finished" podID="f8ef7222-071d-403b-8356-7645662e1226" containerID="a841791cf90fa74f319eca5395a3688fe6dc37b0b882cf9492bb39552d6056fe" exitCode=0 Oct 09 13:47:02 crc kubenswrapper[4762]: I1009 13:47:02.352193 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-vxzqs" event={"ID":"f8ef7222-071d-403b-8356-7645662e1226","Type":"ContainerDied","Data":"a841791cf90fa74f319eca5395a3688fe6dc37b0b882cf9492bb39552d6056fe"} Oct 09 13:47:02 crc kubenswrapper[4762]: I1009 13:47:02.353252 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Oct 09 13:47:02 crc kubenswrapper[4762]: I1009 13:47:02.353287 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Oct 09 13:47:02 crc kubenswrapper[4762]: I1009 13:47:02.353298 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Oct 09 13:47:02 crc kubenswrapper[4762]: I1009 13:47:02.353307 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Oct 09 13:47:03 crc kubenswrapper[4762]: I1009 13:47:03.367220 4762 generic.go:334] "Generic (PLEG): container finished" podID="f2acc43c-4777-4ee7-8cd9-1314007c0be8" containerID="7083d406a3f63fd2886bc29f3375460879083ff10e1ca9d1fd6c743eeab66078" exitCode=0 Oct 09 13:47:03 crc kubenswrapper[4762]: I1009 13:47:03.367297 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-m6k5r" event={"ID":"f2acc43c-4777-4ee7-8cd9-1314007c0be8","Type":"ContainerDied","Data":"7083d406a3f63fd2886bc29f3375460879083ff10e1ca9d1fd6c743eeab66078"} Oct 09 13:47:03 crc kubenswrapper[4762]: I1009 13:47:03.369674 4762 generic.go:334] "Generic (PLEG): container finished" podID="5e1f82ca-fcd6-4a8f-ae63-a5d0f9c5917d" containerID="effb344f1dc334fbf893689c9197673d5eca5fb09afba6dd151e0ccd544b21f5" exitCode=0 Oct 09 13:47:03 crc kubenswrapper[4762]: I1009 13:47:03.369845 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-4tqkk" event={"ID":"5e1f82ca-fcd6-4a8f-ae63-a5d0f9c5917d","Type":"ContainerDied","Data":"effb344f1dc334fbf893689c9197673d5eca5fb09afba6dd151e0ccd544b21f5"} Oct 09 13:47:04 crc kubenswrapper[4762]: I1009 13:47:04.317257 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Oct 09 13:47:04 crc kubenswrapper[4762]: I1009 13:47:04.334507 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Oct 09 13:47:04 crc kubenswrapper[4762]: I1009 13:47:04.367789 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Oct 09 
13:47:04 crc kubenswrapper[4762]: I1009 13:47:04.368377 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Oct 09 13:47:07 crc kubenswrapper[4762]: I1009 13:47:07.705539 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-m6k5r" Oct 09 13:47:07 crc kubenswrapper[4762]: I1009 13:47:07.816458 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2acc43c-4777-4ee7-8cd9-1314007c0be8-combined-ca-bundle\") pod \"f2acc43c-4777-4ee7-8cd9-1314007c0be8\" (UID: \"f2acc43c-4777-4ee7-8cd9-1314007c0be8\") " Oct 09 13:47:07 crc kubenswrapper[4762]: I1009 13:47:07.816538 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/f2acc43c-4777-4ee7-8cd9-1314007c0be8-config\") pod \"f2acc43c-4777-4ee7-8cd9-1314007c0be8\" (UID: \"f2acc43c-4777-4ee7-8cd9-1314007c0be8\") " Oct 09 13:47:07 crc kubenswrapper[4762]: I1009 13:47:07.816743 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dmpkz\" (UniqueName: \"kubernetes.io/projected/f2acc43c-4777-4ee7-8cd9-1314007c0be8-kube-api-access-dmpkz\") pod \"f2acc43c-4777-4ee7-8cd9-1314007c0be8\" (UID: \"f2acc43c-4777-4ee7-8cd9-1314007c0be8\") " Oct 09 13:47:07 crc kubenswrapper[4762]: I1009 13:47:07.822954 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f2acc43c-4777-4ee7-8cd9-1314007c0be8-kube-api-access-dmpkz" (OuterVolumeSpecName: "kube-api-access-dmpkz") pod "f2acc43c-4777-4ee7-8cd9-1314007c0be8" (UID: "f2acc43c-4777-4ee7-8cd9-1314007c0be8"). InnerVolumeSpecName "kube-api-access-dmpkz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:47:07 crc kubenswrapper[4762]: I1009 13:47:07.845991 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f2acc43c-4777-4ee7-8cd9-1314007c0be8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f2acc43c-4777-4ee7-8cd9-1314007c0be8" (UID: "f2acc43c-4777-4ee7-8cd9-1314007c0be8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:47:07 crc kubenswrapper[4762]: I1009 13:47:07.859921 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f2acc43c-4777-4ee7-8cd9-1314007c0be8-config" (OuterVolumeSpecName: "config") pod "f2acc43c-4777-4ee7-8cd9-1314007c0be8" (UID: "f2acc43c-4777-4ee7-8cd9-1314007c0be8"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:47:07 crc kubenswrapper[4762]: I1009 13:47:07.918888 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dmpkz\" (UniqueName: \"kubernetes.io/projected/f2acc43c-4777-4ee7-8cd9-1314007c0be8-kube-api-access-dmpkz\") on node \"crc\" DevicePath \"\"" Oct 09 13:47:07 crc kubenswrapper[4762]: I1009 13:47:07.918931 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2acc43c-4777-4ee7-8cd9-1314007c0be8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 13:47:07 crc kubenswrapper[4762]: I1009 13:47:07.918942 4762 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/f2acc43c-4777-4ee7-8cd9-1314007c0be8-config\") on node \"crc\" DevicePath \"\"" Oct 09 13:47:08 crc kubenswrapper[4762]: I1009 13:47:08.420525 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-m6k5r" event={"ID":"f2acc43c-4777-4ee7-8cd9-1314007c0be8","Type":"ContainerDied","Data":"eb32a99e30d333b578a501b328a023d551608638c35e9a991eec5edaf3841aa6"} Oct 09 13:47:08 crc kubenswrapper[4762]: I1009 13:47:08.420563 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="eb32a99e30d333b578a501b328a023d551608638c35e9a991eec5edaf3841aa6" Oct 09 13:47:08 crc kubenswrapper[4762]: I1009 13:47:08.420587 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-m6k5r" Oct 09 13:47:09 crc kubenswrapper[4762]: I1009 13:47:09.019829 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5ccc5c4795-zzgz2"] Oct 09 13:47:09 crc kubenswrapper[4762]: E1009 13:47:09.031096 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f2acc43c-4777-4ee7-8cd9-1314007c0be8" containerName="neutron-db-sync" Oct 09 13:47:09 crc kubenswrapper[4762]: I1009 13:47:09.031141 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="f2acc43c-4777-4ee7-8cd9-1314007c0be8" containerName="neutron-db-sync" Oct 09 13:47:09 crc kubenswrapper[4762]: I1009 13:47:09.031875 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="f2acc43c-4777-4ee7-8cd9-1314007c0be8" containerName="neutron-db-sync" Oct 09 13:47:09 crc kubenswrapper[4762]: I1009 13:47:09.033616 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5ccc5c4795-zzgz2" Oct 09 13:47:09 crc kubenswrapper[4762]: I1009 13:47:09.093950 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5ccc5c4795-zzgz2"] Oct 09 13:47:09 crc kubenswrapper[4762]: I1009 13:47:09.149831 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-d57f8f89d-6fc59"] Oct 09 13:47:09 crc kubenswrapper[4762]: I1009 13:47:09.151187 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-d57f8f89d-6fc59" Oct 09 13:47:09 crc kubenswrapper[4762]: I1009 13:47:09.151390 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/81f68bfd-0fb6-4e99-8712-7d10bed183d8-dns-svc\") pod \"dnsmasq-dns-5ccc5c4795-zzgz2\" (UID: \"81f68bfd-0fb6-4e99-8712-7d10bed183d8\") " pod="openstack/dnsmasq-dns-5ccc5c4795-zzgz2" Oct 09 13:47:09 crc kubenswrapper[4762]: I1009 13:47:09.151512 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/81f68bfd-0fb6-4e99-8712-7d10bed183d8-dns-swift-storage-0\") pod \"dnsmasq-dns-5ccc5c4795-zzgz2\" (UID: \"81f68bfd-0fb6-4e99-8712-7d10bed183d8\") " pod="openstack/dnsmasq-dns-5ccc5c4795-zzgz2" Oct 09 13:47:09 crc kubenswrapper[4762]: I1009 13:47:09.151618 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/81f68bfd-0fb6-4e99-8712-7d10bed183d8-config\") pod \"dnsmasq-dns-5ccc5c4795-zzgz2\" (UID: \"81f68bfd-0fb6-4e99-8712-7d10bed183d8\") " pod="openstack/dnsmasq-dns-5ccc5c4795-zzgz2" Oct 09 13:47:09 crc kubenswrapper[4762]: I1009 13:47:09.151814 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/81f68bfd-0fb6-4e99-8712-7d10bed183d8-ovsdbserver-nb\") pod \"dnsmasq-dns-5ccc5c4795-zzgz2\" (UID: \"81f68bfd-0fb6-4e99-8712-7d10bed183d8\") " pod="openstack/dnsmasq-dns-5ccc5c4795-zzgz2" Oct 09 13:47:09 crc kubenswrapper[4762]: I1009 13:47:09.151953 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4d42s\" (UniqueName: \"kubernetes.io/projected/81f68bfd-0fb6-4e99-8712-7d10bed183d8-kube-api-access-4d42s\") pod \"dnsmasq-dns-5ccc5c4795-zzgz2\" (UID: \"81f68bfd-0fb6-4e99-8712-7d10bed183d8\") " pod="openstack/dnsmasq-dns-5ccc5c4795-zzgz2" Oct 09 13:47:09 crc kubenswrapper[4762]: I1009 13:47:09.152054 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/81f68bfd-0fb6-4e99-8712-7d10bed183d8-ovsdbserver-sb\") pod \"dnsmasq-dns-5ccc5c4795-zzgz2\" (UID: \"81f68bfd-0fb6-4e99-8712-7d10bed183d8\") " pod="openstack/dnsmasq-dns-5ccc5c4795-zzgz2" Oct 09 13:47:09 crc kubenswrapper[4762]: I1009 13:47:09.155230 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-hlnc6" Oct 09 13:47:09 crc kubenswrapper[4762]: I1009 13:47:09.155458 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs" Oct 09 13:47:09 crc kubenswrapper[4762]: I1009 13:47:09.168014 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Oct 09 13:47:09 crc kubenswrapper[4762]: I1009 13:47:09.168597 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Oct 09 13:47:09 crc kubenswrapper[4762]: I1009 13:47:09.193319 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-d57f8f89d-6fc59"] Oct 09 13:47:09 crc kubenswrapper[4762]: I1009 13:47:09.257807 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: 
\"kubernetes.io/configmap/81f68bfd-0fb6-4e99-8712-7d10bed183d8-dns-swift-storage-0\") pod \"dnsmasq-dns-5ccc5c4795-zzgz2\" (UID: \"81f68bfd-0fb6-4e99-8712-7d10bed183d8\") " pod="openstack/dnsmasq-dns-5ccc5c4795-zzgz2" Oct 09 13:47:09 crc kubenswrapper[4762]: I1009 13:47:09.257882 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/60dea6d8-cdd4-4cf1-bcac-eb6babcfaac6-config\") pod \"neutron-d57f8f89d-6fc59\" (UID: \"60dea6d8-cdd4-4cf1-bcac-eb6babcfaac6\") " pod="openstack/neutron-d57f8f89d-6fc59" Oct 09 13:47:09 crc kubenswrapper[4762]: I1009 13:47:09.257908 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5xvbd\" (UniqueName: \"kubernetes.io/projected/60dea6d8-cdd4-4cf1-bcac-eb6babcfaac6-kube-api-access-5xvbd\") pod \"neutron-d57f8f89d-6fc59\" (UID: \"60dea6d8-cdd4-4cf1-bcac-eb6babcfaac6\") " pod="openstack/neutron-d57f8f89d-6fc59" Oct 09 13:47:09 crc kubenswrapper[4762]: I1009 13:47:09.257939 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/60dea6d8-cdd4-4cf1-bcac-eb6babcfaac6-httpd-config\") pod \"neutron-d57f8f89d-6fc59\" (UID: \"60dea6d8-cdd4-4cf1-bcac-eb6babcfaac6\") " pod="openstack/neutron-d57f8f89d-6fc59" Oct 09 13:47:09 crc kubenswrapper[4762]: I1009 13:47:09.258023 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/81f68bfd-0fb6-4e99-8712-7d10bed183d8-config\") pod \"dnsmasq-dns-5ccc5c4795-zzgz2\" (UID: \"81f68bfd-0fb6-4e99-8712-7d10bed183d8\") " pod="openstack/dnsmasq-dns-5ccc5c4795-zzgz2" Oct 09 13:47:09 crc kubenswrapper[4762]: I1009 13:47:09.259069 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/81f68bfd-0fb6-4e99-8712-7d10bed183d8-config\") pod \"dnsmasq-dns-5ccc5c4795-zzgz2\" (UID: \"81f68bfd-0fb6-4e99-8712-7d10bed183d8\") " pod="openstack/dnsmasq-dns-5ccc5c4795-zzgz2" Oct 09 13:47:09 crc kubenswrapper[4762]: I1009 13:47:09.259331 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/60dea6d8-cdd4-4cf1-bcac-eb6babcfaac6-ovndb-tls-certs\") pod \"neutron-d57f8f89d-6fc59\" (UID: \"60dea6d8-cdd4-4cf1-bcac-eb6babcfaac6\") " pod="openstack/neutron-d57f8f89d-6fc59" Oct 09 13:47:09 crc kubenswrapper[4762]: I1009 13:47:09.259373 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/60dea6d8-cdd4-4cf1-bcac-eb6babcfaac6-combined-ca-bundle\") pod \"neutron-d57f8f89d-6fc59\" (UID: \"60dea6d8-cdd4-4cf1-bcac-eb6babcfaac6\") " pod="openstack/neutron-d57f8f89d-6fc59" Oct 09 13:47:09 crc kubenswrapper[4762]: I1009 13:47:09.259429 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/81f68bfd-0fb6-4e99-8712-7d10bed183d8-ovsdbserver-nb\") pod \"dnsmasq-dns-5ccc5c4795-zzgz2\" (UID: \"81f68bfd-0fb6-4e99-8712-7d10bed183d8\") " pod="openstack/dnsmasq-dns-5ccc5c4795-zzgz2" Oct 09 13:47:09 crc kubenswrapper[4762]: I1009 13:47:09.259519 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4d42s\" (UniqueName: 
\"kubernetes.io/projected/81f68bfd-0fb6-4e99-8712-7d10bed183d8-kube-api-access-4d42s\") pod \"dnsmasq-dns-5ccc5c4795-zzgz2\" (UID: \"81f68bfd-0fb6-4e99-8712-7d10bed183d8\") " pod="openstack/dnsmasq-dns-5ccc5c4795-zzgz2" Oct 09 13:47:09 crc kubenswrapper[4762]: I1009 13:47:09.259549 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/81f68bfd-0fb6-4e99-8712-7d10bed183d8-ovsdbserver-sb\") pod \"dnsmasq-dns-5ccc5c4795-zzgz2\" (UID: \"81f68bfd-0fb6-4e99-8712-7d10bed183d8\") " pod="openstack/dnsmasq-dns-5ccc5c4795-zzgz2" Oct 09 13:47:09 crc kubenswrapper[4762]: I1009 13:47:09.259626 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/81f68bfd-0fb6-4e99-8712-7d10bed183d8-dns-svc\") pod \"dnsmasq-dns-5ccc5c4795-zzgz2\" (UID: \"81f68bfd-0fb6-4e99-8712-7d10bed183d8\") " pod="openstack/dnsmasq-dns-5ccc5c4795-zzgz2" Oct 09 13:47:09 crc kubenswrapper[4762]: I1009 13:47:09.261928 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/81f68bfd-0fb6-4e99-8712-7d10bed183d8-dns-swift-storage-0\") pod \"dnsmasq-dns-5ccc5c4795-zzgz2\" (UID: \"81f68bfd-0fb6-4e99-8712-7d10bed183d8\") " pod="openstack/dnsmasq-dns-5ccc5c4795-zzgz2" Oct 09 13:47:09 crc kubenswrapper[4762]: I1009 13:47:09.262463 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/81f68bfd-0fb6-4e99-8712-7d10bed183d8-ovsdbserver-sb\") pod \"dnsmasq-dns-5ccc5c4795-zzgz2\" (UID: \"81f68bfd-0fb6-4e99-8712-7d10bed183d8\") " pod="openstack/dnsmasq-dns-5ccc5c4795-zzgz2" Oct 09 13:47:09 crc kubenswrapper[4762]: I1009 13:47:09.262518 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/81f68bfd-0fb6-4e99-8712-7d10bed183d8-ovsdbserver-nb\") pod \"dnsmasq-dns-5ccc5c4795-zzgz2\" (UID: \"81f68bfd-0fb6-4e99-8712-7d10bed183d8\") " pod="openstack/dnsmasq-dns-5ccc5c4795-zzgz2" Oct 09 13:47:09 crc kubenswrapper[4762]: I1009 13:47:09.274575 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/81f68bfd-0fb6-4e99-8712-7d10bed183d8-dns-svc\") pod \"dnsmasq-dns-5ccc5c4795-zzgz2\" (UID: \"81f68bfd-0fb6-4e99-8712-7d10bed183d8\") " pod="openstack/dnsmasq-dns-5ccc5c4795-zzgz2" Oct 09 13:47:09 crc kubenswrapper[4762]: I1009 13:47:09.307552 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4d42s\" (UniqueName: \"kubernetes.io/projected/81f68bfd-0fb6-4e99-8712-7d10bed183d8-kube-api-access-4d42s\") pod \"dnsmasq-dns-5ccc5c4795-zzgz2\" (UID: \"81f68bfd-0fb6-4e99-8712-7d10bed183d8\") " pod="openstack/dnsmasq-dns-5ccc5c4795-zzgz2" Oct 09 13:47:09 crc kubenswrapper[4762]: I1009 13:47:09.364015 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/60dea6d8-cdd4-4cf1-bcac-eb6babcfaac6-ovndb-tls-certs\") pod \"neutron-d57f8f89d-6fc59\" (UID: \"60dea6d8-cdd4-4cf1-bcac-eb6babcfaac6\") " pod="openstack/neutron-d57f8f89d-6fc59" Oct 09 13:47:09 crc kubenswrapper[4762]: I1009 13:47:09.364076 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/60dea6d8-cdd4-4cf1-bcac-eb6babcfaac6-combined-ca-bundle\") pod 
\"neutron-d57f8f89d-6fc59\" (UID: \"60dea6d8-cdd4-4cf1-bcac-eb6babcfaac6\") " pod="openstack/neutron-d57f8f89d-6fc59" Oct 09 13:47:09 crc kubenswrapper[4762]: I1009 13:47:09.364197 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/60dea6d8-cdd4-4cf1-bcac-eb6babcfaac6-config\") pod \"neutron-d57f8f89d-6fc59\" (UID: \"60dea6d8-cdd4-4cf1-bcac-eb6babcfaac6\") " pod="openstack/neutron-d57f8f89d-6fc59" Oct 09 13:47:09 crc kubenswrapper[4762]: I1009 13:47:09.364219 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5xvbd\" (UniqueName: \"kubernetes.io/projected/60dea6d8-cdd4-4cf1-bcac-eb6babcfaac6-kube-api-access-5xvbd\") pod \"neutron-d57f8f89d-6fc59\" (UID: \"60dea6d8-cdd4-4cf1-bcac-eb6babcfaac6\") " pod="openstack/neutron-d57f8f89d-6fc59" Oct 09 13:47:09 crc kubenswrapper[4762]: I1009 13:47:09.364240 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/60dea6d8-cdd4-4cf1-bcac-eb6babcfaac6-httpd-config\") pod \"neutron-d57f8f89d-6fc59\" (UID: \"60dea6d8-cdd4-4cf1-bcac-eb6babcfaac6\") " pod="openstack/neutron-d57f8f89d-6fc59" Oct 09 13:47:09 crc kubenswrapper[4762]: I1009 13:47:09.372242 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/60dea6d8-cdd4-4cf1-bcac-eb6babcfaac6-combined-ca-bundle\") pod \"neutron-d57f8f89d-6fc59\" (UID: \"60dea6d8-cdd4-4cf1-bcac-eb6babcfaac6\") " pod="openstack/neutron-d57f8f89d-6fc59" Oct 09 13:47:09 crc kubenswrapper[4762]: I1009 13:47:09.374161 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/60dea6d8-cdd4-4cf1-bcac-eb6babcfaac6-httpd-config\") pod \"neutron-d57f8f89d-6fc59\" (UID: \"60dea6d8-cdd4-4cf1-bcac-eb6babcfaac6\") " pod="openstack/neutron-d57f8f89d-6fc59" Oct 09 13:47:09 crc kubenswrapper[4762]: I1009 13:47:09.375792 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/60dea6d8-cdd4-4cf1-bcac-eb6babcfaac6-ovndb-tls-certs\") pod \"neutron-d57f8f89d-6fc59\" (UID: \"60dea6d8-cdd4-4cf1-bcac-eb6babcfaac6\") " pod="openstack/neutron-d57f8f89d-6fc59" Oct 09 13:47:09 crc kubenswrapper[4762]: I1009 13:47:09.378704 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/60dea6d8-cdd4-4cf1-bcac-eb6babcfaac6-config\") pod \"neutron-d57f8f89d-6fc59\" (UID: \"60dea6d8-cdd4-4cf1-bcac-eb6babcfaac6\") " pod="openstack/neutron-d57f8f89d-6fc59" Oct 09 13:47:09 crc kubenswrapper[4762]: I1009 13:47:09.386237 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5ccc5c4795-zzgz2" Oct 09 13:47:09 crc kubenswrapper[4762]: I1009 13:47:09.392901 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5xvbd\" (UniqueName: \"kubernetes.io/projected/60dea6d8-cdd4-4cf1-bcac-eb6babcfaac6-kube-api-access-5xvbd\") pod \"neutron-d57f8f89d-6fc59\" (UID: \"60dea6d8-cdd4-4cf1-bcac-eb6babcfaac6\") " pod="openstack/neutron-d57f8f89d-6fc59" Oct 09 13:47:09 crc kubenswrapper[4762]: I1009 13:47:09.488608 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-d57f8f89d-6fc59" Oct 09 13:47:09 crc kubenswrapper[4762]: I1009 13:47:09.979011 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-4tqkk" Oct 09 13:47:09 crc kubenswrapper[4762]: I1009 13:47:09.994522 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-vxzqs" Oct 09 13:47:10 crc kubenswrapper[4762]: I1009 13:47:10.077804 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f8ef7222-071d-403b-8356-7645662e1226-logs\") pod \"f8ef7222-071d-403b-8356-7645662e1226\" (UID: \"f8ef7222-071d-403b-8356-7645662e1226\") " Oct 09 13:47:10 crc kubenswrapper[4762]: I1009 13:47:10.077947 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f8ef7222-071d-403b-8356-7645662e1226-combined-ca-bundle\") pod \"f8ef7222-071d-403b-8356-7645662e1226\" (UID: \"f8ef7222-071d-403b-8356-7645662e1226\") " Oct 09 13:47:10 crc kubenswrapper[4762]: I1009 13:47:10.078050 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e1f82ca-fcd6-4a8f-ae63-a5d0f9c5917d-combined-ca-bundle\") pod \"5e1f82ca-fcd6-4a8f-ae63-a5d0f9c5917d\" (UID: \"5e1f82ca-fcd6-4a8f-ae63-a5d0f9c5917d\") " Oct 09 13:47:10 crc kubenswrapper[4762]: I1009 13:47:10.078103 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f8ef7222-071d-403b-8356-7645662e1226-config-data\") pod \"f8ef7222-071d-403b-8356-7645662e1226\" (UID: \"f8ef7222-071d-403b-8356-7645662e1226\") " Oct 09 13:47:10 crc kubenswrapper[4762]: I1009 13:47:10.078129 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f8ef7222-071d-403b-8356-7645662e1226-scripts\") pod \"f8ef7222-071d-403b-8356-7645662e1226\" (UID: \"f8ef7222-071d-403b-8356-7645662e1226\") " Oct 09 13:47:10 crc kubenswrapper[4762]: I1009 13:47:10.078155 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hnkqp\" (UniqueName: \"kubernetes.io/projected/5e1f82ca-fcd6-4a8f-ae63-a5d0f9c5917d-kube-api-access-hnkqp\") pod \"5e1f82ca-fcd6-4a8f-ae63-a5d0f9c5917d\" (UID: \"5e1f82ca-fcd6-4a8f-ae63-a5d0f9c5917d\") " Oct 09 13:47:10 crc kubenswrapper[4762]: I1009 13:47:10.078176 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/5e1f82ca-fcd6-4a8f-ae63-a5d0f9c5917d-db-sync-config-data\") pod \"5e1f82ca-fcd6-4a8f-ae63-a5d0f9c5917d\" (UID: \"5e1f82ca-fcd6-4a8f-ae63-a5d0f9c5917d\") " Oct 09 13:47:10 crc kubenswrapper[4762]: I1009 13:47:10.078236 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zzx7q\" (UniqueName: \"kubernetes.io/projected/f8ef7222-071d-403b-8356-7645662e1226-kube-api-access-zzx7q\") pod \"f8ef7222-071d-403b-8356-7645662e1226\" (UID: \"f8ef7222-071d-403b-8356-7645662e1226\") " Oct 09 13:47:10 crc kubenswrapper[4762]: I1009 13:47:10.078260 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f8ef7222-071d-403b-8356-7645662e1226-logs" (OuterVolumeSpecName: "logs") pod "f8ef7222-071d-403b-8356-7645662e1226" (UID: 
"f8ef7222-071d-403b-8356-7645662e1226"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 13:47:10 crc kubenswrapper[4762]: I1009 13:47:10.078713 4762 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f8ef7222-071d-403b-8356-7645662e1226-logs\") on node \"crc\" DevicePath \"\"" Oct 09 13:47:10 crc kubenswrapper[4762]: I1009 13:47:10.082751 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f8ef7222-071d-403b-8356-7645662e1226-kube-api-access-zzx7q" (OuterVolumeSpecName: "kube-api-access-zzx7q") pod "f8ef7222-071d-403b-8356-7645662e1226" (UID: "f8ef7222-071d-403b-8356-7645662e1226"). InnerVolumeSpecName "kube-api-access-zzx7q". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:47:10 crc kubenswrapper[4762]: I1009 13:47:10.083923 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5e1f82ca-fcd6-4a8f-ae63-a5d0f9c5917d-kube-api-access-hnkqp" (OuterVolumeSpecName: "kube-api-access-hnkqp") pod "5e1f82ca-fcd6-4a8f-ae63-a5d0f9c5917d" (UID: "5e1f82ca-fcd6-4a8f-ae63-a5d0f9c5917d"). InnerVolumeSpecName "kube-api-access-hnkqp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:47:10 crc kubenswrapper[4762]: I1009 13:47:10.085285 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5e1f82ca-fcd6-4a8f-ae63-a5d0f9c5917d-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "5e1f82ca-fcd6-4a8f-ae63-a5d0f9c5917d" (UID: "5e1f82ca-fcd6-4a8f-ae63-a5d0f9c5917d"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:47:10 crc kubenswrapper[4762]: I1009 13:47:10.090809 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f8ef7222-071d-403b-8356-7645662e1226-scripts" (OuterVolumeSpecName: "scripts") pod "f8ef7222-071d-403b-8356-7645662e1226" (UID: "f8ef7222-071d-403b-8356-7645662e1226"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:47:10 crc kubenswrapper[4762]: I1009 13:47:10.124664 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f8ef7222-071d-403b-8356-7645662e1226-config-data" (OuterVolumeSpecName: "config-data") pod "f8ef7222-071d-403b-8356-7645662e1226" (UID: "f8ef7222-071d-403b-8356-7645662e1226"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:47:10 crc kubenswrapper[4762]: I1009 13:47:10.135055 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5e1f82ca-fcd6-4a8f-ae63-a5d0f9c5917d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5e1f82ca-fcd6-4a8f-ae63-a5d0f9c5917d" (UID: "5e1f82ca-fcd6-4a8f-ae63-a5d0f9c5917d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:47:10 crc kubenswrapper[4762]: I1009 13:47:10.139932 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f8ef7222-071d-403b-8356-7645662e1226-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f8ef7222-071d-403b-8356-7645662e1226" (UID: "f8ef7222-071d-403b-8356-7645662e1226"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:47:10 crc kubenswrapper[4762]: I1009 13:47:10.184220 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zzx7q\" (UniqueName: \"kubernetes.io/projected/f8ef7222-071d-403b-8356-7645662e1226-kube-api-access-zzx7q\") on node \"crc\" DevicePath \"\"" Oct 09 13:47:10 crc kubenswrapper[4762]: I1009 13:47:10.184248 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f8ef7222-071d-403b-8356-7645662e1226-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 13:47:10 crc kubenswrapper[4762]: I1009 13:47:10.184258 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e1f82ca-fcd6-4a8f-ae63-a5d0f9c5917d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 13:47:10 crc kubenswrapper[4762]: I1009 13:47:10.184269 4762 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f8ef7222-071d-403b-8356-7645662e1226-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 13:47:10 crc kubenswrapper[4762]: I1009 13:47:10.184278 4762 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f8ef7222-071d-403b-8356-7645662e1226-scripts\") on node \"crc\" DevicePath \"\"" Oct 09 13:47:10 crc kubenswrapper[4762]: I1009 13:47:10.184287 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hnkqp\" (UniqueName: \"kubernetes.io/projected/5e1f82ca-fcd6-4a8f-ae63-a5d0f9c5917d-kube-api-access-hnkqp\") on node \"crc\" DevicePath \"\"" Oct 09 13:47:10 crc kubenswrapper[4762]: I1009 13:47:10.184295 4762 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/5e1f82ca-fcd6-4a8f-ae63-a5d0f9c5917d-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 13:47:10 crc kubenswrapper[4762]: I1009 13:47:10.450899 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-4tqkk" event={"ID":"5e1f82ca-fcd6-4a8f-ae63-a5d0f9c5917d","Type":"ContainerDied","Data":"5bdd9cd488a5fc94592c34e66d22a26decf414dac7995cfc3075b1329c42d861"} Oct 09 13:47:10 crc kubenswrapper[4762]: I1009 13:47:10.451179 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5bdd9cd488a5fc94592c34e66d22a26decf414dac7995cfc3075b1329c42d861" Oct 09 13:47:10 crc kubenswrapper[4762]: I1009 13:47:10.451235 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-4tqkk" Oct 09 13:47:10 crc kubenswrapper[4762]: I1009 13:47:10.458028 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-vxzqs" event={"ID":"f8ef7222-071d-403b-8356-7645662e1226","Type":"ContainerDied","Data":"44f31119989e50a82a1056b4405f0b9f5e91c2d3c7e0c5a434173e38e6d6c641"} Oct 09 13:47:10 crc kubenswrapper[4762]: I1009 13:47:10.458065 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="44f31119989e50a82a1056b4405f0b9f5e91c2d3c7e0c5a434173e38e6d6c641" Oct 09 13:47:10 crc kubenswrapper[4762]: I1009 13:47:10.458130 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-vxzqs" Oct 09 13:47:10 crc kubenswrapper[4762]: I1009 13:47:10.461448 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6","Type":"ContainerStarted","Data":"296e212d0f41c1d6cbf0a66b19cea0496f58e8131068fd90d5f477aea27fba80"} Oct 09 13:47:10 crc kubenswrapper[4762]: I1009 13:47:10.461657 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6" containerName="ceilometer-central-agent" containerID="cri-o://c9ffef1e5361c8db03e232f2563a25595647571940c40e43a0beb3520cdce0ce" gracePeriod=30 Oct 09 13:47:10 crc kubenswrapper[4762]: I1009 13:47:10.461762 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 09 13:47:10 crc kubenswrapper[4762]: I1009 13:47:10.462114 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6" containerName="proxy-httpd" containerID="cri-o://296e212d0f41c1d6cbf0a66b19cea0496f58e8131068fd90d5f477aea27fba80" gracePeriod=30 Oct 09 13:47:10 crc kubenswrapper[4762]: I1009 13:47:10.462185 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6" containerName="sg-core" containerID="cri-o://ab9ab23121b6564ff3e9a6228bd71c66f0043464c5fbaef06851bd437bf5873e" gracePeriod=30 Oct 09 13:47:10 crc kubenswrapper[4762]: I1009 13:47:10.462235 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6" containerName="ceilometer-notification-agent" containerID="cri-o://040bf2f9501a5d36c48126d463edc324f6d5c53ac41fd303719ba75cc019e86c" gracePeriod=30 Oct 09 13:47:10 crc kubenswrapper[4762]: I1009 13:47:10.498204 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.890278904 podStartE2EDuration="48.498189318s" podCreationTimestamp="2025-10-09 13:46:22 +0000 UTC" firstStartedPulling="2025-10-09 13:46:23.460969611 +0000 UTC m=+1259.234760660" lastFinishedPulling="2025-10-09 13:47:10.068880035 +0000 UTC m=+1305.842671074" observedRunningTime="2025-10-09 13:47:10.496953988 +0000 UTC m=+1306.270745027" watchObservedRunningTime="2025-10-09 13:47:10.498189318 +0000 UTC m=+1306.271980357" Oct 09 13:47:10 crc kubenswrapper[4762]: I1009 13:47:10.519244 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5ccc5c4795-zzgz2"] Oct 09 13:47:10 crc kubenswrapper[4762]: I1009 13:47:10.592360 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-d57f8f89d-6fc59"] Oct 09 13:47:10 crc kubenswrapper[4762]: W1009 13:47:10.607568 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod60dea6d8_cdd4_4cf1_bcac_eb6babcfaac6.slice/crio-3624264e2bae1f9bd265fd4e638c225f20d7825d82dbc58f581bb669f679a5f6 WatchSource:0}: Error finding container 3624264e2bae1f9bd265fd4e638c225f20d7825d82dbc58f581bb669f679a5f6: Status 404 returned error can't find the container with id 3624264e2bae1f9bd265fd4e638c225f20d7825d82dbc58f581bb669f679a5f6 Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.192949 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.222796 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-7ff674b5d8-5r7qh"] Oct 09 13:47:11 crc kubenswrapper[4762]: E1009 13:47:11.224799 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6" containerName="sg-core" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.224823 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6" containerName="sg-core" Oct 09 13:47:11 crc kubenswrapper[4762]: E1009 13:47:11.224845 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6" containerName="ceilometer-central-agent" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.224852 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6" containerName="ceilometer-central-agent" Oct 09 13:47:11 crc kubenswrapper[4762]: E1009 13:47:11.224878 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5e1f82ca-fcd6-4a8f-ae63-a5d0f9c5917d" containerName="barbican-db-sync" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.224887 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="5e1f82ca-fcd6-4a8f-ae63-a5d0f9c5917d" containerName="barbican-db-sync" Oct 09 13:47:11 crc kubenswrapper[4762]: E1009 13:47:11.224905 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f8ef7222-071d-403b-8356-7645662e1226" containerName="placement-db-sync" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.224913 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="f8ef7222-071d-403b-8356-7645662e1226" containerName="placement-db-sync" Oct 09 13:47:11 crc kubenswrapper[4762]: E1009 13:47:11.224926 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6" containerName="ceilometer-notification-agent" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.224933 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6" containerName="ceilometer-notification-agent" Oct 09 13:47:11 crc kubenswrapper[4762]: E1009 13:47:11.224950 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6" containerName="proxy-httpd" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.224957 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6" containerName="proxy-httpd" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.225174 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6" containerName="ceilometer-central-agent" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.225200 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6" containerName="ceilometer-notification-agent" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.225219 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="5e1f82ca-fcd6-4a8f-ae63-a5d0f9c5917d" containerName="barbican-db-sync" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.225229 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6" containerName="proxy-httpd" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.225247 4762 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6" containerName="sg-core" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.225254 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="f8ef7222-071d-403b-8356-7645662e1226" containerName="placement-db-sync" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.226331 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-7ff674b5d8-5r7qh" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.228464 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-p2q6s" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.228716 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-internal-svc" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.228798 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.232326 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-7ff674b5d8-5r7qh"] Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.242848 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.256000 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-public-svc" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.286234 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-68c6fd668c-fdnbq"] Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.291154 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-worker-68c6fd668c-fdnbq" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.298992 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.299240 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-bzkm2" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.310852 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.316462 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6-config-data\") pod \"5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6\" (UID: \"5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6\") " Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.316512 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6-log-httpd\") pod \"5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6\" (UID: \"5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6\") " Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.316549 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hh2n9\" (UniqueName: \"kubernetes.io/projected/5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6-kube-api-access-hh2n9\") pod \"5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6\" (UID: \"5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6\") " Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.316654 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6-run-httpd\") pod \"5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6\" (UID: \"5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6\") " Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.316768 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6-combined-ca-bundle\") pod \"5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6\" (UID: \"5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6\") " Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.316839 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6-scripts\") pod \"5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6\" (UID: \"5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6\") " Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.316868 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6-sg-core-conf-yaml\") pod \"5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6\" (UID: \"5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6\") " Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.317551 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ac020a5d-4e49-4f85-bc3c-13769c5f418a-scripts\") pod \"placement-7ff674b5d8-5r7qh\" (UID: \"ac020a5d-4e49-4f85-bc3c-13769c5f418a\") " pod="openstack/placement-7ff674b5d8-5r7qh" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.317635 4762 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ac020a5d-4e49-4f85-bc3c-13769c5f418a-internal-tls-certs\") pod \"placement-7ff674b5d8-5r7qh\" (UID: \"ac020a5d-4e49-4f85-bc3c-13769c5f418a\") " pod="openstack/placement-7ff674b5d8-5r7qh" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.317736 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ac020a5d-4e49-4f85-bc3c-13769c5f418a-logs\") pod \"placement-7ff674b5d8-5r7qh\" (UID: \"ac020a5d-4e49-4f85-bc3c-13769c5f418a\") " pod="openstack/placement-7ff674b5d8-5r7qh" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.317776 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ac020a5d-4e49-4f85-bc3c-13769c5f418a-combined-ca-bundle\") pod \"placement-7ff674b5d8-5r7qh\" (UID: \"ac020a5d-4e49-4f85-bc3c-13769c5f418a\") " pod="openstack/placement-7ff674b5d8-5r7qh" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.317833 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ac020a5d-4e49-4f85-bc3c-13769c5f418a-public-tls-certs\") pod \"placement-7ff674b5d8-5r7qh\" (UID: \"ac020a5d-4e49-4f85-bc3c-13769c5f418a\") " pod="openstack/placement-7ff674b5d8-5r7qh" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.317899 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xfgbx\" (UniqueName: \"kubernetes.io/projected/ac020a5d-4e49-4f85-bc3c-13769c5f418a-kube-api-access-xfgbx\") pod \"placement-7ff674b5d8-5r7qh\" (UID: \"ac020a5d-4e49-4f85-bc3c-13769c5f418a\") " pod="openstack/placement-7ff674b5d8-5r7qh" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.317952 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ac020a5d-4e49-4f85-bc3c-13769c5f418a-config-data\") pod \"placement-7ff674b5d8-5r7qh\" (UID: \"ac020a5d-4e49-4f85-bc3c-13769c5f418a\") " pod="openstack/placement-7ff674b5d8-5r7qh" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.320675 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6" (UID: "5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.326749 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6" (UID: "5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.336851 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-68c6fd668c-fdnbq"] Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.369668 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6-scripts" (OuterVolumeSpecName: "scripts") pod "5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6" (UID: "5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.385918 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6-kube-api-access-hh2n9" (OuterVolumeSpecName: "kube-api-access-hh2n9") pod "5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6" (UID: "5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6"). InnerVolumeSpecName "kube-api-access-hh2n9". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.394059 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-86957c694-98m78"] Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.395577 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-86957c694-98m78" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.399064 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.423956 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-86957c694-98m78"] Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.424928 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xfgbx\" (UniqueName: \"kubernetes.io/projected/ac020a5d-4e49-4f85-bc3c-13769c5f418a-kube-api-access-xfgbx\") pod \"placement-7ff674b5d8-5r7qh\" (UID: \"ac020a5d-4e49-4f85-bc3c-13769c5f418a\") " pod="openstack/placement-7ff674b5d8-5r7qh" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.424997 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ac020a5d-4e49-4f85-bc3c-13769c5f418a-config-data\") pod \"placement-7ff674b5d8-5r7qh\" (UID: \"ac020a5d-4e49-4f85-bc3c-13769c5f418a\") " pod="openstack/placement-7ff674b5d8-5r7qh" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.425039 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6zjkw\" (UniqueName: \"kubernetes.io/projected/cebc2aaf-d953-4acb-a8cd-31119e6cd7fd-kube-api-access-6zjkw\") pod \"barbican-worker-68c6fd668c-fdnbq\" (UID: \"cebc2aaf-d953-4acb-a8cd-31119e6cd7fd\") " pod="openstack/barbican-worker-68c6fd668c-fdnbq" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.425060 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/cebc2aaf-d953-4acb-a8cd-31119e6cd7fd-config-data-custom\") pod \"barbican-worker-68c6fd668c-fdnbq\" (UID: \"cebc2aaf-d953-4acb-a8cd-31119e6cd7fd\") " pod="openstack/barbican-worker-68c6fd668c-fdnbq" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.425079 4762 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cebc2aaf-d953-4acb-a8cd-31119e6cd7fd-logs\") pod \"barbican-worker-68c6fd668c-fdnbq\" (UID: \"cebc2aaf-d953-4acb-a8cd-31119e6cd7fd\") " pod="openstack/barbican-worker-68c6fd668c-fdnbq" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.425094 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cebc2aaf-d953-4acb-a8cd-31119e6cd7fd-config-data\") pod \"barbican-worker-68c6fd668c-fdnbq\" (UID: \"cebc2aaf-d953-4acb-a8cd-31119e6cd7fd\") " pod="openstack/barbican-worker-68c6fd668c-fdnbq" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.425116 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ac020a5d-4e49-4f85-bc3c-13769c5f418a-scripts\") pod \"placement-7ff674b5d8-5r7qh\" (UID: \"ac020a5d-4e49-4f85-bc3c-13769c5f418a\") " pod="openstack/placement-7ff674b5d8-5r7qh" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.425161 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ac020a5d-4e49-4f85-bc3c-13769c5f418a-internal-tls-certs\") pod \"placement-7ff674b5d8-5r7qh\" (UID: \"ac020a5d-4e49-4f85-bc3c-13769c5f418a\") " pod="openstack/placement-7ff674b5d8-5r7qh" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.425188 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ac020a5d-4e49-4f85-bc3c-13769c5f418a-logs\") pod \"placement-7ff674b5d8-5r7qh\" (UID: \"ac020a5d-4e49-4f85-bc3c-13769c5f418a\") " pod="openstack/placement-7ff674b5d8-5r7qh" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.425211 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ac020a5d-4e49-4f85-bc3c-13769c5f418a-combined-ca-bundle\") pod \"placement-7ff674b5d8-5r7qh\" (UID: \"ac020a5d-4e49-4f85-bc3c-13769c5f418a\") " pod="openstack/placement-7ff674b5d8-5r7qh" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.425240 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cebc2aaf-d953-4acb-a8cd-31119e6cd7fd-combined-ca-bundle\") pod \"barbican-worker-68c6fd668c-fdnbq\" (UID: \"cebc2aaf-d953-4acb-a8cd-31119e6cd7fd\") " pod="openstack/barbican-worker-68c6fd668c-fdnbq" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.425267 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ac020a5d-4e49-4f85-bc3c-13769c5f418a-public-tls-certs\") pod \"placement-7ff674b5d8-5r7qh\" (UID: \"ac020a5d-4e49-4f85-bc3c-13769c5f418a\") " pod="openstack/placement-7ff674b5d8-5r7qh" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.425313 4762 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.425324 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hh2n9\" (UniqueName: 
\"kubernetes.io/projected/5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6-kube-api-access-hh2n9\") on node \"crc\" DevicePath \"\"" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.425335 4762 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.425345 4762 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6-scripts\") on node \"crc\" DevicePath \"\"" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.426804 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ac020a5d-4e49-4f85-bc3c-13769c5f418a-logs\") pod \"placement-7ff674b5d8-5r7qh\" (UID: \"ac020a5d-4e49-4f85-bc3c-13769c5f418a\") " pod="openstack/placement-7ff674b5d8-5r7qh" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.435321 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ac020a5d-4e49-4f85-bc3c-13769c5f418a-scripts\") pod \"placement-7ff674b5d8-5r7qh\" (UID: \"ac020a5d-4e49-4f85-bc3c-13769c5f418a\") " pod="openstack/placement-7ff674b5d8-5r7qh" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.435702 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ac020a5d-4e49-4f85-bc3c-13769c5f418a-public-tls-certs\") pod \"placement-7ff674b5d8-5r7qh\" (UID: \"ac020a5d-4e49-4f85-bc3c-13769c5f418a\") " pod="openstack/placement-7ff674b5d8-5r7qh" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.435908 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ac020a5d-4e49-4f85-bc3c-13769c5f418a-config-data\") pod \"placement-7ff674b5d8-5r7qh\" (UID: \"ac020a5d-4e49-4f85-bc3c-13769c5f418a\") " pod="openstack/placement-7ff674b5d8-5r7qh" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.438334 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ac020a5d-4e49-4f85-bc3c-13769c5f418a-combined-ca-bundle\") pod \"placement-7ff674b5d8-5r7qh\" (UID: \"ac020a5d-4e49-4f85-bc3c-13769c5f418a\") " pod="openstack/placement-7ff674b5d8-5r7qh" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.438400 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5ccc5c4795-zzgz2"] Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.439163 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ac020a5d-4e49-4f85-bc3c-13769c5f418a-internal-tls-certs\") pod \"placement-7ff674b5d8-5r7qh\" (UID: \"ac020a5d-4e49-4f85-bc3c-13769c5f418a\") " pod="openstack/placement-7ff674b5d8-5r7qh" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.460798 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6" (UID: "5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.477167 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xfgbx\" (UniqueName: \"kubernetes.io/projected/ac020a5d-4e49-4f85-bc3c-13769c5f418a-kube-api-access-xfgbx\") pod \"placement-7ff674b5d8-5r7qh\" (UID: \"ac020a5d-4e49-4f85-bc3c-13769c5f418a\") " pod="openstack/placement-7ff674b5d8-5r7qh" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.493543 4762 generic.go:334] "Generic (PLEG): container finished" podID="81f68bfd-0fb6-4e99-8712-7d10bed183d8" containerID="8ff6e3662b1e64afa49a5d1fd95fcbf337f6d6c5697ed090eaa2a76cad7fcd65" exitCode=0 Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.493610 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5ccc5c4795-zzgz2" event={"ID":"81f68bfd-0fb6-4e99-8712-7d10bed183d8","Type":"ContainerDied","Data":"8ff6e3662b1e64afa49a5d1fd95fcbf337f6d6c5697ed090eaa2a76cad7fcd65"} Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.493653 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5ccc5c4795-zzgz2" event={"ID":"81f68bfd-0fb6-4e99-8712-7d10bed183d8","Type":"ContainerStarted","Data":"b36617330f2f1a62cd9943a98ad1e4e2c27ded1d544dfb0c4a5cb4fa18ff7e5f"} Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.499237 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-d57f8f89d-6fc59" event={"ID":"60dea6d8-cdd4-4cf1-bcac-eb6babcfaac6","Type":"ContainerStarted","Data":"3622a8256ddb6de752f1030721eca2f21adbf00ed0b2d1c1143da9c780959d68"} Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.499309 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-d57f8f89d-6fc59" event={"ID":"60dea6d8-cdd4-4cf1-bcac-eb6babcfaac6","Type":"ContainerStarted","Data":"4ef0705bb5b5f705730dcbf274c818e7737e519b60eb4c19dd124e106257e058"} Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.499319 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-d57f8f89d-6fc59" event={"ID":"60dea6d8-cdd4-4cf1-bcac-eb6babcfaac6","Type":"ContainerStarted","Data":"3624264e2bae1f9bd265fd4e638c225f20d7825d82dbc58f581bb669f679a5f6"} Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.499345 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-d57f8f89d-6fc59" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.523973 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-6fb867b946-xgc2n"] Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.527281 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9f119cc4-119d-4535-8abd-fe380d546595-config-data\") pod \"barbican-keystone-listener-86957c694-98m78\" (UID: \"9f119cc4-119d-4535-8abd-fe380d546595\") " pod="openstack/barbican-keystone-listener-86957c694-98m78" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.527334 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f119cc4-119d-4535-8abd-fe380d546595-combined-ca-bundle\") pod \"barbican-keystone-listener-86957c694-98m78\" (UID: \"9f119cc4-119d-4535-8abd-fe380d546595\") " pod="openstack/barbican-keystone-listener-86957c694-98m78" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.527364 4762 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cebc2aaf-d953-4acb-a8cd-31119e6cd7fd-combined-ca-bundle\") pod \"barbican-worker-68c6fd668c-fdnbq\" (UID: \"cebc2aaf-d953-4acb-a8cd-31119e6cd7fd\") " pod="openstack/barbican-worker-68c6fd668c-fdnbq" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.527442 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6zjkw\" (UniqueName: \"kubernetes.io/projected/cebc2aaf-d953-4acb-a8cd-31119e6cd7fd-kube-api-access-6zjkw\") pod \"barbican-worker-68c6fd668c-fdnbq\" (UID: \"cebc2aaf-d953-4acb-a8cd-31119e6cd7fd\") " pod="openstack/barbican-worker-68c6fd668c-fdnbq" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.527461 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/cebc2aaf-d953-4acb-a8cd-31119e6cd7fd-config-data-custom\") pod \"barbican-worker-68c6fd668c-fdnbq\" (UID: \"cebc2aaf-d953-4acb-a8cd-31119e6cd7fd\") " pod="openstack/barbican-worker-68c6fd668c-fdnbq" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.527482 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cebc2aaf-d953-4acb-a8cd-31119e6cd7fd-logs\") pod \"barbican-worker-68c6fd668c-fdnbq\" (UID: \"cebc2aaf-d953-4acb-a8cd-31119e6cd7fd\") " pod="openstack/barbican-worker-68c6fd668c-fdnbq" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.527498 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cebc2aaf-d953-4acb-a8cd-31119e6cd7fd-config-data\") pod \"barbican-worker-68c6fd668c-fdnbq\" (UID: \"cebc2aaf-d953-4acb-a8cd-31119e6cd7fd\") " pod="openstack/barbican-worker-68c6fd668c-fdnbq" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.527518 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gzdxw\" (UniqueName: \"kubernetes.io/projected/9f119cc4-119d-4535-8abd-fe380d546595-kube-api-access-gzdxw\") pod \"barbican-keystone-listener-86957c694-98m78\" (UID: \"9f119cc4-119d-4535-8abd-fe380d546595\") " pod="openstack/barbican-keystone-listener-86957c694-98m78" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.527540 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9f119cc4-119d-4535-8abd-fe380d546595-config-data-custom\") pod \"barbican-keystone-listener-86957c694-98m78\" (UID: \"9f119cc4-119d-4535-8abd-fe380d546595\") " pod="openstack/barbican-keystone-listener-86957c694-98m78" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.527560 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9f119cc4-119d-4535-8abd-fe380d546595-logs\") pod \"barbican-keystone-listener-86957c694-98m78\" (UID: \"9f119cc4-119d-4535-8abd-fe380d546595\") " pod="openstack/barbican-keystone-listener-86957c694-98m78" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.527618 4762 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.528157 4762 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cebc2aaf-d953-4acb-a8cd-31119e6cd7fd-logs\") pod \"barbican-worker-68c6fd668c-fdnbq\" (UID: \"cebc2aaf-d953-4acb-a8cd-31119e6cd7fd\") " pod="openstack/barbican-worker-68c6fd668c-fdnbq" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.536184 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cebc2aaf-d953-4acb-a8cd-31119e6cd7fd-combined-ca-bundle\") pod \"barbican-worker-68c6fd668c-fdnbq\" (UID: \"cebc2aaf-d953-4acb-a8cd-31119e6cd7fd\") " pod="openstack/barbican-worker-68c6fd668c-fdnbq" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.536214 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-glh4c" event={"ID":"5bba279a-35cf-4a4e-8632-2098cad8fa08","Type":"ContainerStarted","Data":"206fcc19fcfff363b71d18cc9c4ed2f9703f84ea849ddaea3eea31afd977e51e"} Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.536350 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-6fb867b946-xgc2n" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.570260 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/cebc2aaf-d953-4acb-a8cd-31119e6cd7fd-config-data-custom\") pod \"barbican-worker-68c6fd668c-fdnbq\" (UID: \"cebc2aaf-d953-4acb-a8cd-31119e6cd7fd\") " pod="openstack/barbican-worker-68c6fd668c-fdnbq" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.571249 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-7ff674b5d8-5r7qh" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.573265 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cebc2aaf-d953-4acb-a8cd-31119e6cd7fd-config-data\") pod \"barbican-worker-68c6fd668c-fdnbq\" (UID: \"cebc2aaf-d953-4acb-a8cd-31119e6cd7fd\") " pod="openstack/barbican-worker-68c6fd668c-fdnbq" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.585736 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.593889 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6zjkw\" (UniqueName: \"kubernetes.io/projected/cebc2aaf-d953-4acb-a8cd-31119e6cd7fd-kube-api-access-6zjkw\") pod \"barbican-worker-68c6fd668c-fdnbq\" (UID: \"cebc2aaf-d953-4acb-a8cd-31119e6cd7fd\") " pod="openstack/barbican-worker-68c6fd668c-fdnbq" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.594439 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-688c87cc99-pbdfg"] Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.602143 4762 generic.go:334] "Generic (PLEG): container finished" podID="5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6" containerID="296e212d0f41c1d6cbf0a66b19cea0496f58e8131068fd90d5f477aea27fba80" exitCode=0 Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.602174 4762 generic.go:334] "Generic (PLEG): container finished" podID="5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6" containerID="ab9ab23121b6564ff3e9a6228bd71c66f0043464c5fbaef06851bd437bf5873e" exitCode=2 Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.602184 4762 generic.go:334] "Generic (PLEG): container finished" podID="5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6" 
containerID="040bf2f9501a5d36c48126d463edc324f6d5c53ac41fd303719ba75cc019e86c" exitCode=0 Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.602193 4762 generic.go:334] "Generic (PLEG): container finished" podID="5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6" containerID="c9ffef1e5361c8db03e232f2563a25595647571940c40e43a0beb3520cdce0ce" exitCode=0 Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.602270 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.608842 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6","Type":"ContainerDied","Data":"296e212d0f41c1d6cbf0a66b19cea0496f58e8131068fd90d5f477aea27fba80"} Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.608972 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6","Type":"ContainerDied","Data":"ab9ab23121b6564ff3e9a6228bd71c66f0043464c5fbaef06851bd437bf5873e"} Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.609060 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6","Type":"ContainerDied","Data":"040bf2f9501a5d36c48126d463edc324f6d5c53ac41fd303719ba75cc019e86c"} Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.609144 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6","Type":"ContainerDied","Data":"c9ffef1e5361c8db03e232f2563a25595647571940c40e43a0beb3520cdce0ce"} Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.609229 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6","Type":"ContainerDied","Data":"b91cc6b46d023edfe3e07ce395acf254713551cfb8ea375978638666cdb76e58"} Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.609164 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-688c87cc99-pbdfg" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.609182 4762 scope.go:117] "RemoveContainer" containerID="296e212d0f41c1d6cbf0a66b19cea0496f58e8131068fd90d5f477aea27fba80" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.618973 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-worker-68c6fd668c-fdnbq" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.640025 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gzdxw\" (UniqueName: \"kubernetes.io/projected/9f119cc4-119d-4535-8abd-fe380d546595-kube-api-access-gzdxw\") pod \"barbican-keystone-listener-86957c694-98m78\" (UID: \"9f119cc4-119d-4535-8abd-fe380d546595\") " pod="openstack/barbican-keystone-listener-86957c694-98m78" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.640083 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9f119cc4-119d-4535-8abd-fe380d546595-config-data-custom\") pod \"barbican-keystone-listener-86957c694-98m78\" (UID: \"9f119cc4-119d-4535-8abd-fe380d546595\") " pod="openstack/barbican-keystone-listener-86957c694-98m78" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.640116 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mwz29\" (UniqueName: \"kubernetes.io/projected/f4eb4eb3-a79e-4537-9401-37f948255f3b-kube-api-access-mwz29\") pod \"barbican-api-6fb867b946-xgc2n\" (UID: \"f4eb4eb3-a79e-4537-9401-37f948255f3b\") " pod="openstack/barbican-api-6fb867b946-xgc2n" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.640157 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9f119cc4-119d-4535-8abd-fe380d546595-logs\") pod \"barbican-keystone-listener-86957c694-98m78\" (UID: \"9f119cc4-119d-4535-8abd-fe380d546595\") " pod="openstack/barbican-keystone-listener-86957c694-98m78" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.640180 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f4eb4eb3-a79e-4537-9401-37f948255f3b-config-data-custom\") pod \"barbican-api-6fb867b946-xgc2n\" (UID: \"f4eb4eb3-a79e-4537-9401-37f948255f3b\") " pod="openstack/barbican-api-6fb867b946-xgc2n" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.640273 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4eb4eb3-a79e-4537-9401-37f948255f3b-combined-ca-bundle\") pod \"barbican-api-6fb867b946-xgc2n\" (UID: \"f4eb4eb3-a79e-4537-9401-37f948255f3b\") " pod="openstack/barbican-api-6fb867b946-xgc2n" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.640331 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f4eb4eb3-a79e-4537-9401-37f948255f3b-config-data\") pod \"barbican-api-6fb867b946-xgc2n\" (UID: \"f4eb4eb3-a79e-4537-9401-37f948255f3b\") " pod="openstack/barbican-api-6fb867b946-xgc2n" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.640393 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9f119cc4-119d-4535-8abd-fe380d546595-config-data\") pod \"barbican-keystone-listener-86957c694-98m78\" (UID: \"9f119cc4-119d-4535-8abd-fe380d546595\") " pod="openstack/barbican-keystone-listener-86957c694-98m78" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.640439 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" 
(UniqueName: \"kubernetes.io/secret/9f119cc4-119d-4535-8abd-fe380d546595-combined-ca-bundle\") pod \"barbican-keystone-listener-86957c694-98m78\" (UID: \"9f119cc4-119d-4535-8abd-fe380d546595\") " pod="openstack/barbican-keystone-listener-86957c694-98m78" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.640561 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f4eb4eb3-a79e-4537-9401-37f948255f3b-logs\") pod \"barbican-api-6fb867b946-xgc2n\" (UID: \"f4eb4eb3-a79e-4537-9401-37f948255f3b\") " pod="openstack/barbican-api-6fb867b946-xgc2n" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.656955 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-6fb867b946-xgc2n"] Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.666769 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9f119cc4-119d-4535-8abd-fe380d546595-logs\") pod \"barbican-keystone-listener-86957c694-98m78\" (UID: \"9f119cc4-119d-4535-8abd-fe380d546595\") " pod="openstack/barbican-keystone-listener-86957c694-98m78" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.667112 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9f119cc4-119d-4535-8abd-fe380d546595-config-data-custom\") pod \"barbican-keystone-listener-86957c694-98m78\" (UID: \"9f119cc4-119d-4535-8abd-fe380d546595\") " pod="openstack/barbican-keystone-listener-86957c694-98m78" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.681043 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f119cc4-119d-4535-8abd-fe380d546595-combined-ca-bundle\") pod \"barbican-keystone-listener-86957c694-98m78\" (UID: \"9f119cc4-119d-4535-8abd-fe380d546595\") " pod="openstack/barbican-keystone-listener-86957c694-98m78" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.682535 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9f119cc4-119d-4535-8abd-fe380d546595-config-data\") pod \"barbican-keystone-listener-86957c694-98m78\" (UID: \"9f119cc4-119d-4535-8abd-fe380d546595\") " pod="openstack/barbican-keystone-listener-86957c694-98m78" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.685176 4762 scope.go:117] "RemoveContainer" containerID="ab9ab23121b6564ff3e9a6228bd71c66f0043464c5fbaef06851bd437bf5873e" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.685820 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6" (UID: "5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.698495 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gzdxw\" (UniqueName: \"kubernetes.io/projected/9f119cc4-119d-4535-8abd-fe380d546595-kube-api-access-gzdxw\") pod \"barbican-keystone-listener-86957c694-98m78\" (UID: \"9f119cc4-119d-4535-8abd-fe380d546595\") " pod="openstack/barbican-keystone-listener-86957c694-98m78" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.705044 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6-config-data" (OuterVolumeSpecName: "config-data") pod "5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6" (UID: "5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.747200 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-688c87cc99-pbdfg"] Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.748164 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1f1fc0dd-7a28-4a27-853e-714add235884-dns-svc\") pod \"dnsmasq-dns-688c87cc99-pbdfg\" (UID: \"1f1fc0dd-7a28-4a27-853e-714add235884\") " pod="openstack/dnsmasq-dns-688c87cc99-pbdfg" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.748234 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1f1fc0dd-7a28-4a27-853e-714add235884-dns-swift-storage-0\") pod \"dnsmasq-dns-688c87cc99-pbdfg\" (UID: \"1f1fc0dd-7a28-4a27-853e-714add235884\") " pod="openstack/dnsmasq-dns-688c87cc99-pbdfg" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.748273 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mwz29\" (UniqueName: \"kubernetes.io/projected/f4eb4eb3-a79e-4537-9401-37f948255f3b-kube-api-access-mwz29\") pod \"barbican-api-6fb867b946-xgc2n\" (UID: \"f4eb4eb3-a79e-4537-9401-37f948255f3b\") " pod="openstack/barbican-api-6fb867b946-xgc2n" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.748289 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1f1fc0dd-7a28-4a27-853e-714add235884-config\") pod \"dnsmasq-dns-688c87cc99-pbdfg\" (UID: \"1f1fc0dd-7a28-4a27-853e-714add235884\") " pod="openstack/dnsmasq-dns-688c87cc99-pbdfg" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.748307 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f4eb4eb3-a79e-4537-9401-37f948255f3b-config-data-custom\") pod \"barbican-api-6fb867b946-xgc2n\" (UID: \"f4eb4eb3-a79e-4537-9401-37f948255f3b\") " pod="openstack/barbican-api-6fb867b946-xgc2n" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.748342 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4eb4eb3-a79e-4537-9401-37f948255f3b-combined-ca-bundle\") pod \"barbican-api-6fb867b946-xgc2n\" (UID: \"f4eb4eb3-a79e-4537-9401-37f948255f3b\") " pod="openstack/barbican-api-6fb867b946-xgc2n" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.748372 4762 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1f1fc0dd-7a28-4a27-853e-714add235884-ovsdbserver-sb\") pod \"dnsmasq-dns-688c87cc99-pbdfg\" (UID: \"1f1fc0dd-7a28-4a27-853e-714add235884\") " pod="openstack/dnsmasq-dns-688c87cc99-pbdfg" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.748389 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f4eb4eb3-a79e-4537-9401-37f948255f3b-config-data\") pod \"barbican-api-6fb867b946-xgc2n\" (UID: \"f4eb4eb3-a79e-4537-9401-37f948255f3b\") " pod="openstack/barbican-api-6fb867b946-xgc2n" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.748409 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s4dsc\" (UniqueName: \"kubernetes.io/projected/1f1fc0dd-7a28-4a27-853e-714add235884-kube-api-access-s4dsc\") pod \"dnsmasq-dns-688c87cc99-pbdfg\" (UID: \"1f1fc0dd-7a28-4a27-853e-714add235884\") " pod="openstack/dnsmasq-dns-688c87cc99-pbdfg" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.748475 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1f1fc0dd-7a28-4a27-853e-714add235884-ovsdbserver-nb\") pod \"dnsmasq-dns-688c87cc99-pbdfg\" (UID: \"1f1fc0dd-7a28-4a27-853e-714add235884\") " pod="openstack/dnsmasq-dns-688c87cc99-pbdfg" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.748501 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f4eb4eb3-a79e-4537-9401-37f948255f3b-logs\") pod \"barbican-api-6fb867b946-xgc2n\" (UID: \"f4eb4eb3-a79e-4537-9401-37f948255f3b\") " pod="openstack/barbican-api-6fb867b946-xgc2n" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.748542 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.748553 4762 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.748820 4762 scope.go:117] "RemoveContainer" containerID="040bf2f9501a5d36c48126d463edc324f6d5c53ac41fd303719ba75cc019e86c" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.748897 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f4eb4eb3-a79e-4537-9401-37f948255f3b-logs\") pod \"barbican-api-6fb867b946-xgc2n\" (UID: \"f4eb4eb3-a79e-4537-9401-37f948255f3b\") " pod="openstack/barbican-api-6fb867b946-xgc2n" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.755586 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4eb4eb3-a79e-4537-9401-37f948255f3b-combined-ca-bundle\") pod \"barbican-api-6fb867b946-xgc2n\" (UID: \"f4eb4eb3-a79e-4537-9401-37f948255f3b\") " pod="openstack/barbican-api-6fb867b946-xgc2n" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.762069 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/f4eb4eb3-a79e-4537-9401-37f948255f3b-config-data\") pod \"barbican-api-6fb867b946-xgc2n\" (UID: \"f4eb4eb3-a79e-4537-9401-37f948255f3b\") " pod="openstack/barbican-api-6fb867b946-xgc2n" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.773227 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f4eb4eb3-a79e-4537-9401-37f948255f3b-config-data-custom\") pod \"barbican-api-6fb867b946-xgc2n\" (UID: \"f4eb4eb3-a79e-4537-9401-37f948255f3b\") " pod="openstack/barbican-api-6fb867b946-xgc2n" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.808456 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mwz29\" (UniqueName: \"kubernetes.io/projected/f4eb4eb3-a79e-4537-9401-37f948255f3b-kube-api-access-mwz29\") pod \"barbican-api-6fb867b946-xgc2n\" (UID: \"f4eb4eb3-a79e-4537-9401-37f948255f3b\") " pod="openstack/barbican-api-6fb867b946-xgc2n" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.816874 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-d57f8f89d-6fc59" podStartSLOduration=2.816851989 podStartE2EDuration="2.816851989s" podCreationTimestamp="2025-10-09 13:47:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:47:11.592980749 +0000 UTC m=+1307.366771798" watchObservedRunningTime="2025-10-09 13:47:11.816851989 +0000 UTC m=+1307.590643028" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.851534 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1f1fc0dd-7a28-4a27-853e-714add235884-ovsdbserver-nb\") pod \"dnsmasq-dns-688c87cc99-pbdfg\" (UID: \"1f1fc0dd-7a28-4a27-853e-714add235884\") " pod="openstack/dnsmasq-dns-688c87cc99-pbdfg" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.851623 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1f1fc0dd-7a28-4a27-853e-714add235884-dns-svc\") pod \"dnsmasq-dns-688c87cc99-pbdfg\" (UID: \"1f1fc0dd-7a28-4a27-853e-714add235884\") " pod="openstack/dnsmasq-dns-688c87cc99-pbdfg" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.851676 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1f1fc0dd-7a28-4a27-853e-714add235884-dns-swift-storage-0\") pod \"dnsmasq-dns-688c87cc99-pbdfg\" (UID: \"1f1fc0dd-7a28-4a27-853e-714add235884\") " pod="openstack/dnsmasq-dns-688c87cc99-pbdfg" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.851700 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1f1fc0dd-7a28-4a27-853e-714add235884-config\") pod \"dnsmasq-dns-688c87cc99-pbdfg\" (UID: \"1f1fc0dd-7a28-4a27-853e-714add235884\") " pod="openstack/dnsmasq-dns-688c87cc99-pbdfg" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.851746 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1f1fc0dd-7a28-4a27-853e-714add235884-ovsdbserver-sb\") pod \"dnsmasq-dns-688c87cc99-pbdfg\" (UID: \"1f1fc0dd-7a28-4a27-853e-714add235884\") " pod="openstack/dnsmasq-dns-688c87cc99-pbdfg" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.851769 4762 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s4dsc\" (UniqueName: \"kubernetes.io/projected/1f1fc0dd-7a28-4a27-853e-714add235884-kube-api-access-s4dsc\") pod \"dnsmasq-dns-688c87cc99-pbdfg\" (UID: \"1f1fc0dd-7a28-4a27-853e-714add235884\") " pod="openstack/dnsmasq-dns-688c87cc99-pbdfg" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.853736 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1f1fc0dd-7a28-4a27-853e-714add235884-ovsdbserver-nb\") pod \"dnsmasq-dns-688c87cc99-pbdfg\" (UID: \"1f1fc0dd-7a28-4a27-853e-714add235884\") " pod="openstack/dnsmasq-dns-688c87cc99-pbdfg" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.854996 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1f1fc0dd-7a28-4a27-853e-714add235884-dns-swift-storage-0\") pod \"dnsmasq-dns-688c87cc99-pbdfg\" (UID: \"1f1fc0dd-7a28-4a27-853e-714add235884\") " pod="openstack/dnsmasq-dns-688c87cc99-pbdfg" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.864935 4762 scope.go:117] "RemoveContainer" containerID="c9ffef1e5361c8db03e232f2563a25595647571940c40e43a0beb3520cdce0ce" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.865712 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1f1fc0dd-7a28-4a27-853e-714add235884-config\") pod \"dnsmasq-dns-688c87cc99-pbdfg\" (UID: \"1f1fc0dd-7a28-4a27-853e-714add235884\") " pod="openstack/dnsmasq-dns-688c87cc99-pbdfg" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.866172 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1f1fc0dd-7a28-4a27-853e-714add235884-ovsdbserver-sb\") pod \"dnsmasq-dns-688c87cc99-pbdfg\" (UID: \"1f1fc0dd-7a28-4a27-853e-714add235884\") " pod="openstack/dnsmasq-dns-688c87cc99-pbdfg" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.871064 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1f1fc0dd-7a28-4a27-853e-714add235884-dns-svc\") pod \"dnsmasq-dns-688c87cc99-pbdfg\" (UID: \"1f1fc0dd-7a28-4a27-853e-714add235884\") " pod="openstack/dnsmasq-dns-688c87cc99-pbdfg" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.874193 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-glh4c" podStartSLOduration=3.235351318 podStartE2EDuration="47.87416704s" podCreationTimestamp="2025-10-09 13:46:24 +0000 UTC" firstStartedPulling="2025-10-09 13:46:25.419184873 +0000 UTC m=+1261.192975922" lastFinishedPulling="2025-10-09 13:47:10.058000605 +0000 UTC m=+1305.831791644" observedRunningTime="2025-10-09 13:47:11.638810775 +0000 UTC m=+1307.412601824" watchObservedRunningTime="2025-10-09 13:47:11.87416704 +0000 UTC m=+1307.647958079" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.892889 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s4dsc\" (UniqueName: \"kubernetes.io/projected/1f1fc0dd-7a28-4a27-853e-714add235884-kube-api-access-s4dsc\") pod \"dnsmasq-dns-688c87cc99-pbdfg\" (UID: \"1f1fc0dd-7a28-4a27-853e-714add235884\") " pod="openstack/dnsmasq-dns-688c87cc99-pbdfg" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.970847 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-keystone-listener-86957c694-98m78" Oct 09 13:47:11 crc kubenswrapper[4762]: I1009 13:47:11.990053 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-6fb867b946-xgc2n" Oct 09 13:47:12 crc kubenswrapper[4762]: I1009 13:47:12.035050 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-688c87cc99-pbdfg" Oct 09 13:47:12 crc kubenswrapper[4762]: I1009 13:47:12.035490 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 09 13:47:12 crc kubenswrapper[4762]: I1009 13:47:12.041009 4762 scope.go:117] "RemoveContainer" containerID="296e212d0f41c1d6cbf0a66b19cea0496f58e8131068fd90d5f477aea27fba80" Oct 09 13:47:12 crc kubenswrapper[4762]: E1009 13:47:12.051376 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"296e212d0f41c1d6cbf0a66b19cea0496f58e8131068fd90d5f477aea27fba80\": container with ID starting with 296e212d0f41c1d6cbf0a66b19cea0496f58e8131068fd90d5f477aea27fba80 not found: ID does not exist" containerID="296e212d0f41c1d6cbf0a66b19cea0496f58e8131068fd90d5f477aea27fba80" Oct 09 13:47:12 crc kubenswrapper[4762]: I1009 13:47:12.051438 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"296e212d0f41c1d6cbf0a66b19cea0496f58e8131068fd90d5f477aea27fba80"} err="failed to get container status \"296e212d0f41c1d6cbf0a66b19cea0496f58e8131068fd90d5f477aea27fba80\": rpc error: code = NotFound desc = could not find container \"296e212d0f41c1d6cbf0a66b19cea0496f58e8131068fd90d5f477aea27fba80\": container with ID starting with 296e212d0f41c1d6cbf0a66b19cea0496f58e8131068fd90d5f477aea27fba80 not found: ID does not exist" Oct 09 13:47:12 crc kubenswrapper[4762]: I1009 13:47:12.051470 4762 scope.go:117] "RemoveContainer" containerID="ab9ab23121b6564ff3e9a6228bd71c66f0043464c5fbaef06851bd437bf5873e" Oct 09 13:47:12 crc kubenswrapper[4762]: E1009 13:47:12.055203 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ab9ab23121b6564ff3e9a6228bd71c66f0043464c5fbaef06851bd437bf5873e\": container with ID starting with ab9ab23121b6564ff3e9a6228bd71c66f0043464c5fbaef06851bd437bf5873e not found: ID does not exist" containerID="ab9ab23121b6564ff3e9a6228bd71c66f0043464c5fbaef06851bd437bf5873e" Oct 09 13:47:12 crc kubenswrapper[4762]: I1009 13:47:12.055242 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ab9ab23121b6564ff3e9a6228bd71c66f0043464c5fbaef06851bd437bf5873e"} err="failed to get container status \"ab9ab23121b6564ff3e9a6228bd71c66f0043464c5fbaef06851bd437bf5873e\": rpc error: code = NotFound desc = could not find container \"ab9ab23121b6564ff3e9a6228bd71c66f0043464c5fbaef06851bd437bf5873e\": container with ID starting with ab9ab23121b6564ff3e9a6228bd71c66f0043464c5fbaef06851bd437bf5873e not found: ID does not exist" Oct 09 13:47:12 crc kubenswrapper[4762]: I1009 13:47:12.055269 4762 scope.go:117] "RemoveContainer" containerID="040bf2f9501a5d36c48126d463edc324f6d5c53ac41fd303719ba75cc019e86c" Oct 09 13:47:12 crc kubenswrapper[4762]: E1009 13:47:12.063629 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"040bf2f9501a5d36c48126d463edc324f6d5c53ac41fd303719ba75cc019e86c\": container with ID starting with 
040bf2f9501a5d36c48126d463edc324f6d5c53ac41fd303719ba75cc019e86c not found: ID does not exist" containerID="040bf2f9501a5d36c48126d463edc324f6d5c53ac41fd303719ba75cc019e86c" Oct 09 13:47:12 crc kubenswrapper[4762]: I1009 13:47:12.063693 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"040bf2f9501a5d36c48126d463edc324f6d5c53ac41fd303719ba75cc019e86c"} err="failed to get container status \"040bf2f9501a5d36c48126d463edc324f6d5c53ac41fd303719ba75cc019e86c\": rpc error: code = NotFound desc = could not find container \"040bf2f9501a5d36c48126d463edc324f6d5c53ac41fd303719ba75cc019e86c\": container with ID starting with 040bf2f9501a5d36c48126d463edc324f6d5c53ac41fd303719ba75cc019e86c not found: ID does not exist" Oct 09 13:47:12 crc kubenswrapper[4762]: I1009 13:47:12.063725 4762 scope.go:117] "RemoveContainer" containerID="c9ffef1e5361c8db03e232f2563a25595647571940c40e43a0beb3520cdce0ce" Oct 09 13:47:12 crc kubenswrapper[4762]: E1009 13:47:12.066871 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c9ffef1e5361c8db03e232f2563a25595647571940c40e43a0beb3520cdce0ce\": container with ID starting with c9ffef1e5361c8db03e232f2563a25595647571940c40e43a0beb3520cdce0ce not found: ID does not exist" containerID="c9ffef1e5361c8db03e232f2563a25595647571940c40e43a0beb3520cdce0ce" Oct 09 13:47:12 crc kubenswrapper[4762]: I1009 13:47:12.066913 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c9ffef1e5361c8db03e232f2563a25595647571940c40e43a0beb3520cdce0ce"} err="failed to get container status \"c9ffef1e5361c8db03e232f2563a25595647571940c40e43a0beb3520cdce0ce\": rpc error: code = NotFound desc = could not find container \"c9ffef1e5361c8db03e232f2563a25595647571940c40e43a0beb3520cdce0ce\": container with ID starting with c9ffef1e5361c8db03e232f2563a25595647571940c40e43a0beb3520cdce0ce not found: ID does not exist" Oct 09 13:47:12 crc kubenswrapper[4762]: I1009 13:47:12.066947 4762 scope.go:117] "RemoveContainer" containerID="296e212d0f41c1d6cbf0a66b19cea0496f58e8131068fd90d5f477aea27fba80" Oct 09 13:47:12 crc kubenswrapper[4762]: I1009 13:47:12.069752 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 09 13:47:12 crc kubenswrapper[4762]: I1009 13:47:12.071929 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"296e212d0f41c1d6cbf0a66b19cea0496f58e8131068fd90d5f477aea27fba80"} err="failed to get container status \"296e212d0f41c1d6cbf0a66b19cea0496f58e8131068fd90d5f477aea27fba80\": rpc error: code = NotFound desc = could not find container \"296e212d0f41c1d6cbf0a66b19cea0496f58e8131068fd90d5f477aea27fba80\": container with ID starting with 296e212d0f41c1d6cbf0a66b19cea0496f58e8131068fd90d5f477aea27fba80 not found: ID does not exist" Oct 09 13:47:12 crc kubenswrapper[4762]: I1009 13:47:12.071984 4762 scope.go:117] "RemoveContainer" containerID="ab9ab23121b6564ff3e9a6228bd71c66f0043464c5fbaef06851bd437bf5873e" Oct 09 13:47:12 crc kubenswrapper[4762]: I1009 13:47:12.076124 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ab9ab23121b6564ff3e9a6228bd71c66f0043464c5fbaef06851bd437bf5873e"} err="failed to get container status \"ab9ab23121b6564ff3e9a6228bd71c66f0043464c5fbaef06851bd437bf5873e\": rpc error: code = NotFound desc = could not find container 
\"ab9ab23121b6564ff3e9a6228bd71c66f0043464c5fbaef06851bd437bf5873e\": container with ID starting with ab9ab23121b6564ff3e9a6228bd71c66f0043464c5fbaef06851bd437bf5873e not found: ID does not exist" Oct 09 13:47:12 crc kubenswrapper[4762]: I1009 13:47:12.076160 4762 scope.go:117] "RemoveContainer" containerID="040bf2f9501a5d36c48126d463edc324f6d5c53ac41fd303719ba75cc019e86c" Oct 09 13:47:12 crc kubenswrapper[4762]: I1009 13:47:12.093349 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"040bf2f9501a5d36c48126d463edc324f6d5c53ac41fd303719ba75cc019e86c"} err="failed to get container status \"040bf2f9501a5d36c48126d463edc324f6d5c53ac41fd303719ba75cc019e86c\": rpc error: code = NotFound desc = could not find container \"040bf2f9501a5d36c48126d463edc324f6d5c53ac41fd303719ba75cc019e86c\": container with ID starting with 040bf2f9501a5d36c48126d463edc324f6d5c53ac41fd303719ba75cc019e86c not found: ID does not exist" Oct 09 13:47:12 crc kubenswrapper[4762]: I1009 13:47:12.093394 4762 scope.go:117] "RemoveContainer" containerID="c9ffef1e5361c8db03e232f2563a25595647571940c40e43a0beb3520cdce0ce" Oct 09 13:47:12 crc kubenswrapper[4762]: I1009 13:47:12.096898 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c9ffef1e5361c8db03e232f2563a25595647571940c40e43a0beb3520cdce0ce"} err="failed to get container status \"c9ffef1e5361c8db03e232f2563a25595647571940c40e43a0beb3520cdce0ce\": rpc error: code = NotFound desc = could not find container \"c9ffef1e5361c8db03e232f2563a25595647571940c40e43a0beb3520cdce0ce\": container with ID starting with c9ffef1e5361c8db03e232f2563a25595647571940c40e43a0beb3520cdce0ce not found: ID does not exist" Oct 09 13:47:12 crc kubenswrapper[4762]: I1009 13:47:12.096932 4762 scope.go:117] "RemoveContainer" containerID="296e212d0f41c1d6cbf0a66b19cea0496f58e8131068fd90d5f477aea27fba80" Oct 09 13:47:12 crc kubenswrapper[4762]: I1009 13:47:12.099816 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"296e212d0f41c1d6cbf0a66b19cea0496f58e8131068fd90d5f477aea27fba80"} err="failed to get container status \"296e212d0f41c1d6cbf0a66b19cea0496f58e8131068fd90d5f477aea27fba80\": rpc error: code = NotFound desc = could not find container \"296e212d0f41c1d6cbf0a66b19cea0496f58e8131068fd90d5f477aea27fba80\": container with ID starting with 296e212d0f41c1d6cbf0a66b19cea0496f58e8131068fd90d5f477aea27fba80 not found: ID does not exist" Oct 09 13:47:12 crc kubenswrapper[4762]: I1009 13:47:12.099851 4762 scope.go:117] "RemoveContainer" containerID="ab9ab23121b6564ff3e9a6228bd71c66f0043464c5fbaef06851bd437bf5873e" Oct 09 13:47:12 crc kubenswrapper[4762]: I1009 13:47:12.100976 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ab9ab23121b6564ff3e9a6228bd71c66f0043464c5fbaef06851bd437bf5873e"} err="failed to get container status \"ab9ab23121b6564ff3e9a6228bd71c66f0043464c5fbaef06851bd437bf5873e\": rpc error: code = NotFound desc = could not find container \"ab9ab23121b6564ff3e9a6228bd71c66f0043464c5fbaef06851bd437bf5873e\": container with ID starting with ab9ab23121b6564ff3e9a6228bd71c66f0043464c5fbaef06851bd437bf5873e not found: ID does not exist" Oct 09 13:47:12 crc kubenswrapper[4762]: I1009 13:47:12.101004 4762 scope.go:117] "RemoveContainer" containerID="040bf2f9501a5d36c48126d463edc324f6d5c53ac41fd303719ba75cc019e86c" Oct 09 13:47:12 crc kubenswrapper[4762]: I1009 13:47:12.102794 4762 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"040bf2f9501a5d36c48126d463edc324f6d5c53ac41fd303719ba75cc019e86c"} err="failed to get container status \"040bf2f9501a5d36c48126d463edc324f6d5c53ac41fd303719ba75cc019e86c\": rpc error: code = NotFound desc = could not find container \"040bf2f9501a5d36c48126d463edc324f6d5c53ac41fd303719ba75cc019e86c\": container with ID starting with 040bf2f9501a5d36c48126d463edc324f6d5c53ac41fd303719ba75cc019e86c not found: ID does not exist" Oct 09 13:47:12 crc kubenswrapper[4762]: I1009 13:47:12.102826 4762 scope.go:117] "RemoveContainer" containerID="c9ffef1e5361c8db03e232f2563a25595647571940c40e43a0beb3520cdce0ce" Oct 09 13:47:12 crc kubenswrapper[4762]: I1009 13:47:12.103945 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 09 13:47:12 crc kubenswrapper[4762]: I1009 13:47:12.104342 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c9ffef1e5361c8db03e232f2563a25595647571940c40e43a0beb3520cdce0ce"} err="failed to get container status \"c9ffef1e5361c8db03e232f2563a25595647571940c40e43a0beb3520cdce0ce\": rpc error: code = NotFound desc = could not find container \"c9ffef1e5361c8db03e232f2563a25595647571940c40e43a0beb3520cdce0ce\": container with ID starting with c9ffef1e5361c8db03e232f2563a25595647571940c40e43a0beb3520cdce0ce not found: ID does not exist" Oct 09 13:47:12 crc kubenswrapper[4762]: I1009 13:47:12.104367 4762 scope.go:117] "RemoveContainer" containerID="296e212d0f41c1d6cbf0a66b19cea0496f58e8131068fd90d5f477aea27fba80" Oct 09 13:47:12 crc kubenswrapper[4762]: I1009 13:47:12.106361 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"296e212d0f41c1d6cbf0a66b19cea0496f58e8131068fd90d5f477aea27fba80"} err="failed to get container status \"296e212d0f41c1d6cbf0a66b19cea0496f58e8131068fd90d5f477aea27fba80\": rpc error: code = NotFound desc = could not find container \"296e212d0f41c1d6cbf0a66b19cea0496f58e8131068fd90d5f477aea27fba80\": container with ID starting with 296e212d0f41c1d6cbf0a66b19cea0496f58e8131068fd90d5f477aea27fba80 not found: ID does not exist" Oct 09 13:47:12 crc kubenswrapper[4762]: I1009 13:47:12.106410 4762 scope.go:117] "RemoveContainer" containerID="ab9ab23121b6564ff3e9a6228bd71c66f0043464c5fbaef06851bd437bf5873e" Oct 09 13:47:12 crc kubenswrapper[4762]: I1009 13:47:12.106487 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 09 13:47:12 crc kubenswrapper[4762]: I1009 13:47:12.110117 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ab9ab23121b6564ff3e9a6228bd71c66f0043464c5fbaef06851bd437bf5873e"} err="failed to get container status \"ab9ab23121b6564ff3e9a6228bd71c66f0043464c5fbaef06851bd437bf5873e\": rpc error: code = NotFound desc = could not find container \"ab9ab23121b6564ff3e9a6228bd71c66f0043464c5fbaef06851bd437bf5873e\": container with ID starting with ab9ab23121b6564ff3e9a6228bd71c66f0043464c5fbaef06851bd437bf5873e not found: ID does not exist" Oct 09 13:47:12 crc kubenswrapper[4762]: I1009 13:47:12.110166 4762 scope.go:117] "RemoveContainer" containerID="040bf2f9501a5d36c48126d463edc324f6d5c53ac41fd303719ba75cc019e86c" Oct 09 13:47:12 crc kubenswrapper[4762]: I1009 13:47:12.115220 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 09 13:47:12 crc kubenswrapper[4762]: I1009 13:47:12.115504 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 09 13:47:12 crc kubenswrapper[4762]: I1009 13:47:12.121889 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"040bf2f9501a5d36c48126d463edc324f6d5c53ac41fd303719ba75cc019e86c"} err="failed to get container status \"040bf2f9501a5d36c48126d463edc324f6d5c53ac41fd303719ba75cc019e86c\": rpc error: code = NotFound desc = could not find container \"040bf2f9501a5d36c48126d463edc324f6d5c53ac41fd303719ba75cc019e86c\": container with ID starting with 040bf2f9501a5d36c48126d463edc324f6d5c53ac41fd303719ba75cc019e86c not found: ID does not exist" Oct 09 13:47:12 crc kubenswrapper[4762]: I1009 13:47:12.121933 4762 scope.go:117] "RemoveContainer" containerID="c9ffef1e5361c8db03e232f2563a25595647571940c40e43a0beb3520cdce0ce" Oct 09 13:47:12 crc kubenswrapper[4762]: I1009 13:47:12.122900 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c9ffef1e5361c8db03e232f2563a25595647571940c40e43a0beb3520cdce0ce"} err="failed to get container status \"c9ffef1e5361c8db03e232f2563a25595647571940c40e43a0beb3520cdce0ce\": rpc error: code = NotFound desc = could not find container \"c9ffef1e5361c8db03e232f2563a25595647571940c40e43a0beb3520cdce0ce\": container with ID starting with c9ffef1e5361c8db03e232f2563a25595647571940c40e43a0beb3520cdce0ce not found: ID does not exist" Oct 09 13:47:12 crc kubenswrapper[4762]: I1009 13:47:12.134364 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 09 13:47:12 crc kubenswrapper[4762]: I1009 13:47:12.157187 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bfb04dec-2e18-4fcb-a065-e859a3573ee4-scripts\") pod \"ceilometer-0\" (UID: \"bfb04dec-2e18-4fcb-a065-e859a3573ee4\") " pod="openstack/ceilometer-0" Oct 09 13:47:12 crc kubenswrapper[4762]: I1009 13:47:12.157278 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bfb04dec-2e18-4fcb-a065-e859a3573ee4-log-httpd\") pod \"ceilometer-0\" (UID: \"bfb04dec-2e18-4fcb-a065-e859a3573ee4\") " pod="openstack/ceilometer-0" Oct 09 13:47:12 crc kubenswrapper[4762]: I1009 13:47:12.157327 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bfb04dec-2e18-4fcb-a065-e859a3573ee4-run-httpd\") pod \"ceilometer-0\" (UID: \"bfb04dec-2e18-4fcb-a065-e859a3573ee4\") " pod="openstack/ceilometer-0" Oct 09 13:47:12 crc kubenswrapper[4762]: I1009 13:47:12.157357 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/bfb04dec-2e18-4fcb-a065-e859a3573ee4-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"bfb04dec-2e18-4fcb-a065-e859a3573ee4\") " pod="openstack/ceilometer-0" Oct 09 13:47:12 crc kubenswrapper[4762]: I1009 13:47:12.157445 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5wlst\" (UniqueName: \"kubernetes.io/projected/bfb04dec-2e18-4fcb-a065-e859a3573ee4-kube-api-access-5wlst\") pod \"ceilometer-0\" (UID: \"bfb04dec-2e18-4fcb-a065-e859a3573ee4\") " pod="openstack/ceilometer-0" Oct 09 13:47:12 crc kubenswrapper[4762]: I1009 13:47:12.157476 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bfb04dec-2e18-4fcb-a065-e859a3573ee4-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"bfb04dec-2e18-4fcb-a065-e859a3573ee4\") " pod="openstack/ceilometer-0" Oct 09 13:47:12 crc kubenswrapper[4762]: I1009 13:47:12.157508 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bfb04dec-2e18-4fcb-a065-e859a3573ee4-config-data\") pod \"ceilometer-0\" (UID: \"bfb04dec-2e18-4fcb-a065-e859a3573ee4\") " pod="openstack/ceilometer-0" Oct 09 13:47:12 crc kubenswrapper[4762]: I1009 13:47:12.258260 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bfb04dec-2e18-4fcb-a065-e859a3573ee4-log-httpd\") pod \"ceilometer-0\" (UID: \"bfb04dec-2e18-4fcb-a065-e859a3573ee4\") " pod="openstack/ceilometer-0" Oct 09 13:47:12 crc kubenswrapper[4762]: I1009 13:47:12.258617 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bfb04dec-2e18-4fcb-a065-e859a3573ee4-run-httpd\") pod \"ceilometer-0\" (UID: \"bfb04dec-2e18-4fcb-a065-e859a3573ee4\") " pod="openstack/ceilometer-0" Oct 09 13:47:12 crc kubenswrapper[4762]: I1009 13:47:12.258670 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/bfb04dec-2e18-4fcb-a065-e859a3573ee4-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"bfb04dec-2e18-4fcb-a065-e859a3573ee4\") " pod="openstack/ceilometer-0" Oct 09 13:47:12 crc kubenswrapper[4762]: I1009 13:47:12.258747 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5wlst\" (UniqueName: \"kubernetes.io/projected/bfb04dec-2e18-4fcb-a065-e859a3573ee4-kube-api-access-5wlst\") pod \"ceilometer-0\" (UID: \"bfb04dec-2e18-4fcb-a065-e859a3573ee4\") " pod="openstack/ceilometer-0" Oct 09 13:47:12 crc kubenswrapper[4762]: I1009 13:47:12.258778 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bfb04dec-2e18-4fcb-a065-e859a3573ee4-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"bfb04dec-2e18-4fcb-a065-e859a3573ee4\") " pod="openstack/ceilometer-0" Oct 09 13:47:12 crc kubenswrapper[4762]: 
I1009 13:47:12.258810 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bfb04dec-2e18-4fcb-a065-e859a3573ee4-config-data\") pod \"ceilometer-0\" (UID: \"bfb04dec-2e18-4fcb-a065-e859a3573ee4\") " pod="openstack/ceilometer-0" Oct 09 13:47:12 crc kubenswrapper[4762]: I1009 13:47:12.258857 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bfb04dec-2e18-4fcb-a065-e859a3573ee4-scripts\") pod \"ceilometer-0\" (UID: \"bfb04dec-2e18-4fcb-a065-e859a3573ee4\") " pod="openstack/ceilometer-0" Oct 09 13:47:12 crc kubenswrapper[4762]: I1009 13:47:12.266871 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bfb04dec-2e18-4fcb-a065-e859a3573ee4-run-httpd\") pod \"ceilometer-0\" (UID: \"bfb04dec-2e18-4fcb-a065-e859a3573ee4\") " pod="openstack/ceilometer-0" Oct 09 13:47:12 crc kubenswrapper[4762]: I1009 13:47:12.266955 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bfb04dec-2e18-4fcb-a065-e859a3573ee4-log-httpd\") pod \"ceilometer-0\" (UID: \"bfb04dec-2e18-4fcb-a065-e859a3573ee4\") " pod="openstack/ceilometer-0" Oct 09 13:47:12 crc kubenswrapper[4762]: I1009 13:47:12.275183 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bfb04dec-2e18-4fcb-a065-e859a3573ee4-scripts\") pod \"ceilometer-0\" (UID: \"bfb04dec-2e18-4fcb-a065-e859a3573ee4\") " pod="openstack/ceilometer-0" Oct 09 13:47:12 crc kubenswrapper[4762]: I1009 13:47:12.275843 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bfb04dec-2e18-4fcb-a065-e859a3573ee4-config-data\") pod \"ceilometer-0\" (UID: \"bfb04dec-2e18-4fcb-a065-e859a3573ee4\") " pod="openstack/ceilometer-0" Oct 09 13:47:12 crc kubenswrapper[4762]: I1009 13:47:12.278948 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/bfb04dec-2e18-4fcb-a065-e859a3573ee4-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"bfb04dec-2e18-4fcb-a065-e859a3573ee4\") " pod="openstack/ceilometer-0" Oct 09 13:47:12 crc kubenswrapper[4762]: I1009 13:47:12.288221 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5wlst\" (UniqueName: \"kubernetes.io/projected/bfb04dec-2e18-4fcb-a065-e859a3573ee4-kube-api-access-5wlst\") pod \"ceilometer-0\" (UID: \"bfb04dec-2e18-4fcb-a065-e859a3573ee4\") " pod="openstack/ceilometer-0" Oct 09 13:47:12 crc kubenswrapper[4762]: I1009 13:47:12.288762 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bfb04dec-2e18-4fcb-a065-e859a3573ee4-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"bfb04dec-2e18-4fcb-a065-e859a3573ee4\") " pod="openstack/ceilometer-0" Oct 09 13:47:12 crc kubenswrapper[4762]: E1009 13:47:12.288861 4762 log.go:32] "CreateContainer in sandbox from runtime service failed" err=< Oct 09 13:47:12 crc kubenswrapper[4762]: rpc error: code = Unknown desc = container create failed: mount `/var/lib/kubelet/pods/81f68bfd-0fb6-4e99-8712-7d10bed183d8/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory Oct 09 13:47:12 crc kubenswrapper[4762]: > 
podSandboxID="b36617330f2f1a62cd9943a98ad1e4e2c27ded1d544dfb0c4a5cb4fa18ff7e5f" Oct 09 13:47:12 crc kubenswrapper[4762]: E1009 13:47:12.288987 4762 kuberuntime_manager.go:1274] "Unhandled Error" err=< Oct 09 13:47:12 crc kubenswrapper[4762]: container &Container{Name:dnsmasq-dns,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n5dchc4h67h57hb6h78hcdh7dh96h697h5d6h5ddh58h68ch575h574h686h56h578h5d6h8bh699h7chchcbhdfhb9h5d8h88h554h57h564q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-swift-storage-0,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-swift-storage-0,SubPath:dns-swift-storage-0,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovsdbserver-nb,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/ovsdbserver-nb,SubPath:ovsdbserver-nb,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovsdbserver-sb,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/ovsdbserver-sb,SubPath:ovsdbserver-sb,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-4d42s,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 5353 },Host:,},GRPC:nil,},InitialDelaySeconds:3,TimeoutSeconds:5,PeriodSeconds:3,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 5353 },Host:,},GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-5ccc5c4795-zzgz2_openstack(81f68bfd-0fb6-4e99-8712-7d10bed183d8): CreateContainerError: container create failed: mount 
`/var/lib/kubelet/pods/81f68bfd-0fb6-4e99-8712-7d10bed183d8/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory Oct 09 13:47:12 crc kubenswrapper[4762]: > logger="UnhandledError" Oct 09 13:47:12 crc kubenswrapper[4762]: E1009 13:47:12.290388 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"dnsmasq-dns\" with CreateContainerError: \"container create failed: mount `/var/lib/kubelet/pods/81f68bfd-0fb6-4e99-8712-7d10bed183d8/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory\\n\"" pod="openstack/dnsmasq-dns-5ccc5c4795-zzgz2" podUID="81f68bfd-0fb6-4e99-8712-7d10bed183d8" Oct 09 13:47:12 crc kubenswrapper[4762]: I1009 13:47:12.423944 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-68c6fd668c-fdnbq"] Oct 09 13:47:12 crc kubenswrapper[4762]: I1009 13:47:12.496538 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-7ff674b5d8-5r7qh"] Oct 09 13:47:12 crc kubenswrapper[4762]: I1009 13:47:12.512548 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 09 13:47:12 crc kubenswrapper[4762]: I1009 13:47:12.641407 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-7ff674b5d8-5r7qh" event={"ID":"ac020a5d-4e49-4f85-bc3c-13769c5f418a","Type":"ContainerStarted","Data":"8f6976c493dcee8ad9dcacdd143d86848fe00e3693ad04efe0539c9fbe3ac74b"} Oct 09 13:47:12 crc kubenswrapper[4762]: I1009 13:47:12.662878 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-68c6fd668c-fdnbq" event={"ID":"cebc2aaf-d953-4acb-a8cd-31119e6cd7fd","Type":"ContainerStarted","Data":"3edca0b86a632b7f1eb6f8a0bd13452b2a3e5518d956598438033a3b51d59461"} Oct 09 13:47:12 crc kubenswrapper[4762]: I1009 13:47:12.890805 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-688c87cc99-pbdfg"] Oct 09 13:47:13 crc kubenswrapper[4762]: I1009 13:47:13.003823 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6" path="/var/lib/kubelet/pods/5ea30a61-2d91-4e14-b68c-fb70bd0c1ef6/volumes" Oct 09 13:47:13 crc kubenswrapper[4762]: I1009 13:47:13.029011 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-6fb867b946-xgc2n"] Oct 09 13:47:13 crc kubenswrapper[4762]: I1009 13:47:13.037477 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-86957c694-98m78"] Oct 09 13:47:13 crc kubenswrapper[4762]: W1009 13:47:13.056453 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf4eb4eb3_a79e_4537_9401_37f948255f3b.slice/crio-12985e866a9257b9c4f35126eb1bc5d81fc3d4e06d004290e0686df1519b296e WatchSource:0}: Error finding container 12985e866a9257b9c4f35126eb1bc5d81fc3d4e06d004290e0686df1519b296e: Status 404 returned error can't find the container with id 12985e866a9257b9c4f35126eb1bc5d81fc3d4e06d004290e0686df1519b296e Oct 09 13:47:13 crc kubenswrapper[4762]: I1009 13:47:13.233314 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5ccc5c4795-zzgz2" Oct 09 13:47:13 crc kubenswrapper[4762]: I1009 13:47:13.243346 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-c7fd4cf7c-phtkg"] Oct 09 13:47:13 crc kubenswrapper[4762]: E1009 13:47:13.244503 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81f68bfd-0fb6-4e99-8712-7d10bed183d8" containerName="init" Oct 09 13:47:13 crc kubenswrapper[4762]: I1009 13:47:13.244599 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="81f68bfd-0fb6-4e99-8712-7d10bed183d8" containerName="init" Oct 09 13:47:13 crc kubenswrapper[4762]: I1009 13:47:13.245148 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="81f68bfd-0fb6-4e99-8712-7d10bed183d8" containerName="init" Oct 09 13:47:13 crc kubenswrapper[4762]: I1009 13:47:13.249888 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-c7fd4cf7c-phtkg"] Oct 09 13:47:13 crc kubenswrapper[4762]: I1009 13:47:13.270488 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-c7fd4cf7c-phtkg" Oct 09 13:47:13 crc kubenswrapper[4762]: I1009 13:47:13.276235 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc" Oct 09 13:47:13 crc kubenswrapper[4762]: I1009 13:47:13.277401 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc" Oct 09 13:47:13 crc kubenswrapper[4762]: I1009 13:47:13.314496 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/81f68bfd-0fb6-4e99-8712-7d10bed183d8-dns-svc\") pod \"81f68bfd-0fb6-4e99-8712-7d10bed183d8\" (UID: \"81f68bfd-0fb6-4e99-8712-7d10bed183d8\") " Oct 09 13:47:13 crc kubenswrapper[4762]: I1009 13:47:13.314609 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/81f68bfd-0fb6-4e99-8712-7d10bed183d8-config\") pod \"81f68bfd-0fb6-4e99-8712-7d10bed183d8\" (UID: \"81f68bfd-0fb6-4e99-8712-7d10bed183d8\") " Oct 09 13:47:13 crc kubenswrapper[4762]: I1009 13:47:13.314719 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/81f68bfd-0fb6-4e99-8712-7d10bed183d8-ovsdbserver-sb\") pod \"81f68bfd-0fb6-4e99-8712-7d10bed183d8\" (UID: \"81f68bfd-0fb6-4e99-8712-7d10bed183d8\") " Oct 09 13:47:13 crc kubenswrapper[4762]: I1009 13:47:13.314737 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/81f68bfd-0fb6-4e99-8712-7d10bed183d8-ovsdbserver-nb\") pod \"81f68bfd-0fb6-4e99-8712-7d10bed183d8\" (UID: \"81f68bfd-0fb6-4e99-8712-7d10bed183d8\") " Oct 09 13:47:13 crc kubenswrapper[4762]: I1009 13:47:13.314756 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d42s\" (UniqueName: \"kubernetes.io/projected/81f68bfd-0fb6-4e99-8712-7d10bed183d8-kube-api-access-4d42s\") pod \"81f68bfd-0fb6-4e99-8712-7d10bed183d8\" (UID: \"81f68bfd-0fb6-4e99-8712-7d10bed183d8\") " Oct 09 13:47:13 crc kubenswrapper[4762]: I1009 13:47:13.314791 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/81f68bfd-0fb6-4e99-8712-7d10bed183d8-dns-swift-storage-0\") pod \"81f68bfd-0fb6-4e99-8712-7d10bed183d8\" (UID: 
\"81f68bfd-0fb6-4e99-8712-7d10bed183d8\") " Oct 09 13:47:13 crc kubenswrapper[4762]: I1009 13:47:13.315055 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/dd7f6272-8875-4adb-b8d3-1cfe2651f738-internal-tls-certs\") pod \"neutron-c7fd4cf7c-phtkg\" (UID: \"dd7f6272-8875-4adb-b8d3-1cfe2651f738\") " pod="openstack/neutron-c7fd4cf7c-phtkg" Oct 09 13:47:13 crc kubenswrapper[4762]: I1009 13:47:13.315095 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/dd7f6272-8875-4adb-b8d3-1cfe2651f738-public-tls-certs\") pod \"neutron-c7fd4cf7c-phtkg\" (UID: \"dd7f6272-8875-4adb-b8d3-1cfe2651f738\") " pod="openstack/neutron-c7fd4cf7c-phtkg" Oct 09 13:47:13 crc kubenswrapper[4762]: I1009 13:47:13.315130 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/dd7f6272-8875-4adb-b8d3-1cfe2651f738-config\") pod \"neutron-c7fd4cf7c-phtkg\" (UID: \"dd7f6272-8875-4adb-b8d3-1cfe2651f738\") " pod="openstack/neutron-c7fd4cf7c-phtkg" Oct 09 13:47:13 crc kubenswrapper[4762]: I1009 13:47:13.315156 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd7f6272-8875-4adb-b8d3-1cfe2651f738-combined-ca-bundle\") pod \"neutron-c7fd4cf7c-phtkg\" (UID: \"dd7f6272-8875-4adb-b8d3-1cfe2651f738\") " pod="openstack/neutron-c7fd4cf7c-phtkg" Oct 09 13:47:13 crc kubenswrapper[4762]: I1009 13:47:13.315189 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/dd7f6272-8875-4adb-b8d3-1cfe2651f738-ovndb-tls-certs\") pod \"neutron-c7fd4cf7c-phtkg\" (UID: \"dd7f6272-8875-4adb-b8d3-1cfe2651f738\") " pod="openstack/neutron-c7fd4cf7c-phtkg" Oct 09 13:47:13 crc kubenswrapper[4762]: I1009 13:47:13.315209 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hrvr9\" (UniqueName: \"kubernetes.io/projected/dd7f6272-8875-4adb-b8d3-1cfe2651f738-kube-api-access-hrvr9\") pod \"neutron-c7fd4cf7c-phtkg\" (UID: \"dd7f6272-8875-4adb-b8d3-1cfe2651f738\") " pod="openstack/neutron-c7fd4cf7c-phtkg" Oct 09 13:47:13 crc kubenswrapper[4762]: I1009 13:47:13.315279 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/dd7f6272-8875-4adb-b8d3-1cfe2651f738-httpd-config\") pod \"neutron-c7fd4cf7c-phtkg\" (UID: \"dd7f6272-8875-4adb-b8d3-1cfe2651f738\") " pod="openstack/neutron-c7fd4cf7c-phtkg" Oct 09 13:47:13 crc kubenswrapper[4762]: I1009 13:47:13.383466 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/81f68bfd-0fb6-4e99-8712-7d10bed183d8-kube-api-access-4d42s" (OuterVolumeSpecName: "kube-api-access-4d42s") pod "81f68bfd-0fb6-4e99-8712-7d10bed183d8" (UID: "81f68bfd-0fb6-4e99-8712-7d10bed183d8"). InnerVolumeSpecName "kube-api-access-4d42s". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:47:13 crc kubenswrapper[4762]: I1009 13:47:13.410830 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 09 13:47:13 crc kubenswrapper[4762]: I1009 13:47:13.416531 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/dd7f6272-8875-4adb-b8d3-1cfe2651f738-httpd-config\") pod \"neutron-c7fd4cf7c-phtkg\" (UID: \"dd7f6272-8875-4adb-b8d3-1cfe2651f738\") " pod="openstack/neutron-c7fd4cf7c-phtkg" Oct 09 13:47:13 crc kubenswrapper[4762]: I1009 13:47:13.416653 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/dd7f6272-8875-4adb-b8d3-1cfe2651f738-internal-tls-certs\") pod \"neutron-c7fd4cf7c-phtkg\" (UID: \"dd7f6272-8875-4adb-b8d3-1cfe2651f738\") " pod="openstack/neutron-c7fd4cf7c-phtkg" Oct 09 13:47:13 crc kubenswrapper[4762]: I1009 13:47:13.416696 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/dd7f6272-8875-4adb-b8d3-1cfe2651f738-public-tls-certs\") pod \"neutron-c7fd4cf7c-phtkg\" (UID: \"dd7f6272-8875-4adb-b8d3-1cfe2651f738\") " pod="openstack/neutron-c7fd4cf7c-phtkg" Oct 09 13:47:13 crc kubenswrapper[4762]: I1009 13:47:13.416746 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/dd7f6272-8875-4adb-b8d3-1cfe2651f738-config\") pod \"neutron-c7fd4cf7c-phtkg\" (UID: \"dd7f6272-8875-4adb-b8d3-1cfe2651f738\") " pod="openstack/neutron-c7fd4cf7c-phtkg" Oct 09 13:47:13 crc kubenswrapper[4762]: I1009 13:47:13.416778 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd7f6272-8875-4adb-b8d3-1cfe2651f738-combined-ca-bundle\") pod \"neutron-c7fd4cf7c-phtkg\" (UID: \"dd7f6272-8875-4adb-b8d3-1cfe2651f738\") " pod="openstack/neutron-c7fd4cf7c-phtkg" Oct 09 13:47:13 crc kubenswrapper[4762]: I1009 13:47:13.416820 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/dd7f6272-8875-4adb-b8d3-1cfe2651f738-ovndb-tls-certs\") pod \"neutron-c7fd4cf7c-phtkg\" (UID: \"dd7f6272-8875-4adb-b8d3-1cfe2651f738\") " pod="openstack/neutron-c7fd4cf7c-phtkg" Oct 09 13:47:13 crc kubenswrapper[4762]: I1009 13:47:13.416848 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hrvr9\" (UniqueName: \"kubernetes.io/projected/dd7f6272-8875-4adb-b8d3-1cfe2651f738-kube-api-access-hrvr9\") pod \"neutron-c7fd4cf7c-phtkg\" (UID: \"dd7f6272-8875-4adb-b8d3-1cfe2651f738\") " pod="openstack/neutron-c7fd4cf7c-phtkg" Oct 09 13:47:13 crc kubenswrapper[4762]: I1009 13:47:13.416946 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d42s\" (UniqueName: \"kubernetes.io/projected/81f68bfd-0fb6-4e99-8712-7d10bed183d8-kube-api-access-4d42s\") on node \"crc\" DevicePath \"\"" Oct 09 13:47:13 crc kubenswrapper[4762]: I1009 13:47:13.424626 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/dd7f6272-8875-4adb-b8d3-1cfe2651f738-config\") pod \"neutron-c7fd4cf7c-phtkg\" (UID: \"dd7f6272-8875-4adb-b8d3-1cfe2651f738\") " pod="openstack/neutron-c7fd4cf7c-phtkg" Oct 09 13:47:13 crc kubenswrapper[4762]: I1009 13:47:13.427112 
4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/dd7f6272-8875-4adb-b8d3-1cfe2651f738-httpd-config\") pod \"neutron-c7fd4cf7c-phtkg\" (UID: \"dd7f6272-8875-4adb-b8d3-1cfe2651f738\") " pod="openstack/neutron-c7fd4cf7c-phtkg" Oct 09 13:47:13 crc kubenswrapper[4762]: I1009 13:47:13.438497 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/dd7f6272-8875-4adb-b8d3-1cfe2651f738-public-tls-certs\") pod \"neutron-c7fd4cf7c-phtkg\" (UID: \"dd7f6272-8875-4adb-b8d3-1cfe2651f738\") " pod="openstack/neutron-c7fd4cf7c-phtkg" Oct 09 13:47:13 crc kubenswrapper[4762]: I1009 13:47:13.479697 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/dd7f6272-8875-4adb-b8d3-1cfe2651f738-internal-tls-certs\") pod \"neutron-c7fd4cf7c-phtkg\" (UID: \"dd7f6272-8875-4adb-b8d3-1cfe2651f738\") " pod="openstack/neutron-c7fd4cf7c-phtkg" Oct 09 13:47:13 crc kubenswrapper[4762]: I1009 13:47:13.490744 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd7f6272-8875-4adb-b8d3-1cfe2651f738-combined-ca-bundle\") pod \"neutron-c7fd4cf7c-phtkg\" (UID: \"dd7f6272-8875-4adb-b8d3-1cfe2651f738\") " pod="openstack/neutron-c7fd4cf7c-phtkg" Oct 09 13:47:13 crc kubenswrapper[4762]: I1009 13:47:13.490981 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hrvr9\" (UniqueName: \"kubernetes.io/projected/dd7f6272-8875-4adb-b8d3-1cfe2651f738-kube-api-access-hrvr9\") pod \"neutron-c7fd4cf7c-phtkg\" (UID: \"dd7f6272-8875-4adb-b8d3-1cfe2651f738\") " pod="openstack/neutron-c7fd4cf7c-phtkg" Oct 09 13:47:13 crc kubenswrapper[4762]: I1009 13:47:13.493277 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/dd7f6272-8875-4adb-b8d3-1cfe2651f738-ovndb-tls-certs\") pod \"neutron-c7fd4cf7c-phtkg\" (UID: \"dd7f6272-8875-4adb-b8d3-1cfe2651f738\") " pod="openstack/neutron-c7fd4cf7c-phtkg" Oct 09 13:47:13 crc kubenswrapper[4762]: I1009 13:47:13.522013 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/81f68bfd-0fb6-4e99-8712-7d10bed183d8-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "81f68bfd-0fb6-4e99-8712-7d10bed183d8" (UID: "81f68bfd-0fb6-4e99-8712-7d10bed183d8"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:47:13 crc kubenswrapper[4762]: I1009 13:47:13.611651 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/81f68bfd-0fb6-4e99-8712-7d10bed183d8-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "81f68bfd-0fb6-4e99-8712-7d10bed183d8" (UID: "81f68bfd-0fb6-4e99-8712-7d10bed183d8"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:47:13 crc kubenswrapper[4762]: I1009 13:47:13.614366 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-c7fd4cf7c-phtkg" Oct 09 13:47:13 crc kubenswrapper[4762]: I1009 13:47:13.628360 4762 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/81f68bfd-0fb6-4e99-8712-7d10bed183d8-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 09 13:47:13 crc kubenswrapper[4762]: I1009 13:47:13.628392 4762 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/81f68bfd-0fb6-4e99-8712-7d10bed183d8-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 09 13:47:13 crc kubenswrapper[4762]: I1009 13:47:13.656123 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/81f68bfd-0fb6-4e99-8712-7d10bed183d8-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "81f68bfd-0fb6-4e99-8712-7d10bed183d8" (UID: "81f68bfd-0fb6-4e99-8712-7d10bed183d8"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:47:13 crc kubenswrapper[4762]: I1009 13:47:13.658400 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/81f68bfd-0fb6-4e99-8712-7d10bed183d8-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "81f68bfd-0fb6-4e99-8712-7d10bed183d8" (UID: "81f68bfd-0fb6-4e99-8712-7d10bed183d8"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:47:13 crc kubenswrapper[4762]: I1009 13:47:13.715732 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-688c87cc99-pbdfg" event={"ID":"1f1fc0dd-7a28-4a27-853e-714add235884","Type":"ContainerStarted","Data":"eebda60b2bc08955b8447c9df8c9d6a20813d4f463c8adc8d75c1269ebc66f87"} Oct 09 13:47:13 crc kubenswrapper[4762]: I1009 13:47:13.715779 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-688c87cc99-pbdfg" event={"ID":"1f1fc0dd-7a28-4a27-853e-714add235884","Type":"ContainerStarted","Data":"018abaf844835c448d5d521260a9f283b0530f861ebae6bd9952ed1cca78e0d0"} Oct 09 13:47:13 crc kubenswrapper[4762]: I1009 13:47:13.729243 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6fb867b946-xgc2n" event={"ID":"f4eb4eb3-a79e-4537-9401-37f948255f3b","Type":"ContainerStarted","Data":"12985e866a9257b9c4f35126eb1bc5d81fc3d4e06d004290e0686df1519b296e"} Oct 09 13:47:13 crc kubenswrapper[4762]: I1009 13:47:13.729815 4762 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/81f68bfd-0fb6-4e99-8712-7d10bed183d8-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 09 13:47:13 crc kubenswrapper[4762]: I1009 13:47:13.730193 4762 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/81f68bfd-0fb6-4e99-8712-7d10bed183d8-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 09 13:47:13 crc kubenswrapper[4762]: I1009 13:47:13.736485 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/81f68bfd-0fb6-4e99-8712-7d10bed183d8-config" (OuterVolumeSpecName: "config") pod "81f68bfd-0fb6-4e99-8712-7d10bed183d8" (UID: "81f68bfd-0fb6-4e99-8712-7d10bed183d8"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:47:13 crc kubenswrapper[4762]: I1009 13:47:13.744712 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-7ff674b5d8-5r7qh" event={"ID":"ac020a5d-4e49-4f85-bc3c-13769c5f418a","Type":"ContainerStarted","Data":"23815f5a561063d232341e5c1bdd222c0042c43c875ca3fd331afec3554cb11f"} Oct 09 13:47:13 crc kubenswrapper[4762]: I1009 13:47:13.762666 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"bfb04dec-2e18-4fcb-a065-e859a3573ee4","Type":"ContainerStarted","Data":"4f55ff853efdbb8502fda62a7ac534409ccbb3edb1a2ac61f71cf54811321910"} Oct 09 13:47:13 crc kubenswrapper[4762]: I1009 13:47:13.764342 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-86957c694-98m78" event={"ID":"9f119cc4-119d-4535-8abd-fe380d546595","Type":"ContainerStarted","Data":"eb8fd27334157329a670ffea0786695b26391f6fedd7ed26fb45cec4f8badad1"} Oct 09 13:47:13 crc kubenswrapper[4762]: I1009 13:47:13.766471 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5ccc5c4795-zzgz2" event={"ID":"81f68bfd-0fb6-4e99-8712-7d10bed183d8","Type":"ContainerDied","Data":"b36617330f2f1a62cd9943a98ad1e4e2c27ded1d544dfb0c4a5cb4fa18ff7e5f"} Oct 09 13:47:13 crc kubenswrapper[4762]: I1009 13:47:13.766498 4762 scope.go:117] "RemoveContainer" containerID="8ff6e3662b1e64afa49a5d1fd95fcbf337f6d6c5697ed090eaa2a76cad7fcd65" Oct 09 13:47:13 crc kubenswrapper[4762]: I1009 13:47:13.766618 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5ccc5c4795-zzgz2" Oct 09 13:47:13 crc kubenswrapper[4762]: I1009 13:47:13.832203 4762 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/81f68bfd-0fb6-4e99-8712-7d10bed183d8-config\") on node \"crc\" DevicePath \"\"" Oct 09 13:47:13 crc kubenswrapper[4762]: I1009 13:47:13.904251 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5ccc5c4795-zzgz2"] Oct 09 13:47:13 crc kubenswrapper[4762]: I1009 13:47:13.912799 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5ccc5c4795-zzgz2"] Oct 09 13:47:14 crc kubenswrapper[4762]: I1009 13:47:14.386452 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-c7fd4cf7c-phtkg"] Oct 09 13:47:14 crc kubenswrapper[4762]: W1009 13:47:14.405369 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddd7f6272_8875_4adb_b8d3_1cfe2651f738.slice/crio-2877ed6338b6ef4354554fa46bb599052c2ab4f1b4dcc1fc7a413876bb015467 WatchSource:0}: Error finding container 2877ed6338b6ef4354554fa46bb599052c2ab4f1b4dcc1fc7a413876bb015467: Status 404 returned error can't find the container with id 2877ed6338b6ef4354554fa46bb599052c2ab4f1b4dcc1fc7a413876bb015467 Oct 09 13:47:14 crc kubenswrapper[4762]: I1009 13:47:14.806882 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-7ff674b5d8-5r7qh" event={"ID":"ac020a5d-4e49-4f85-bc3c-13769c5f418a","Type":"ContainerStarted","Data":"75fb11579ca65d227b26b52153b7cd447fed5fcfb5bfc704642e8c32edf4fd96"} Oct 09 13:47:14 crc kubenswrapper[4762]: I1009 13:47:14.807258 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-7ff674b5d8-5r7qh" Oct 09 13:47:14 crc kubenswrapper[4762]: I1009 13:47:14.807284 4762 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="" pod="openstack/placement-7ff674b5d8-5r7qh" Oct 09 13:47:14 crc kubenswrapper[4762]: I1009 13:47:14.809605 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-c7fd4cf7c-phtkg" event={"ID":"dd7f6272-8875-4adb-b8d3-1cfe2651f738","Type":"ContainerStarted","Data":"2877ed6338b6ef4354554fa46bb599052c2ab4f1b4dcc1fc7a413876bb015467"} Oct 09 13:47:14 crc kubenswrapper[4762]: I1009 13:47:14.856065 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-7ff674b5d8-5r7qh" podStartSLOduration=3.856047722 podStartE2EDuration="3.856047722s" podCreationTimestamp="2025-10-09 13:47:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:47:14.848183838 +0000 UTC m=+1310.621974897" watchObservedRunningTime="2025-10-09 13:47:14.856047722 +0000 UTC m=+1310.629838761" Oct 09 13:47:14 crc kubenswrapper[4762]: I1009 13:47:14.890761 4762 generic.go:334] "Generic (PLEG): container finished" podID="1f1fc0dd-7a28-4a27-853e-714add235884" containerID="eebda60b2bc08955b8447c9df8c9d6a20813d4f463c8adc8d75c1269ebc66f87" exitCode=0 Oct 09 13:47:14 crc kubenswrapper[4762]: I1009 13:47:14.890871 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-688c87cc99-pbdfg" event={"ID":"1f1fc0dd-7a28-4a27-853e-714add235884","Type":"ContainerDied","Data":"eebda60b2bc08955b8447c9df8c9d6a20813d4f463c8adc8d75c1269ebc66f87"} Oct 09 13:47:14 crc kubenswrapper[4762]: I1009 13:47:14.922019 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6fb867b946-xgc2n" event={"ID":"f4eb4eb3-a79e-4537-9401-37f948255f3b","Type":"ContainerStarted","Data":"2ce551ccc946b9b398ad2d34a89b9eef486d73862a6b24bc44f53327a17d6c18"} Oct 09 13:47:14 crc kubenswrapper[4762]: I1009 13:47:14.988929 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="81f68bfd-0fb6-4e99-8712-7d10bed183d8" path="/var/lib/kubelet/pods/81f68bfd-0fb6-4e99-8712-7d10bed183d8/volumes" Oct 09 13:47:15 crc kubenswrapper[4762]: I1009 13:47:15.932874 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6fb867b946-xgc2n" event={"ID":"f4eb4eb3-a79e-4537-9401-37f948255f3b","Type":"ContainerStarted","Data":"18a97f548d51347b9813f0a0c48f816a4d3eb9df11b78df8a79f131600833436"} Oct 09 13:47:15 crc kubenswrapper[4762]: I1009 13:47:15.933148 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-6fb867b946-xgc2n" Oct 09 13:47:15 crc kubenswrapper[4762]: I1009 13:47:15.933163 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-6fb867b946-xgc2n" Oct 09 13:47:15 crc kubenswrapper[4762]: I1009 13:47:15.934558 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-c7fd4cf7c-phtkg" event={"ID":"dd7f6272-8875-4adb-b8d3-1cfe2651f738","Type":"ContainerStarted","Data":"3987d6a050a1cadc43694af370f70bb4fba67a3d6a36a32d19d7167c8ae1ca1d"} Oct 09 13:47:15 crc kubenswrapper[4762]: I1009 13:47:15.936512 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"bfb04dec-2e18-4fcb-a065-e859a3573ee4","Type":"ContainerStarted","Data":"7da27e667e32bf44aad3e2ed6cf4fb05fb6f8f01d096a1d26339c6da6bfcee64"} Oct 09 13:47:15 crc kubenswrapper[4762]: I1009 13:47:15.959432 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-6fb867b946-xgc2n" 
podStartSLOduration=4.959411065 podStartE2EDuration="4.959411065s" podCreationTimestamp="2025-10-09 13:47:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:47:15.957887657 +0000 UTC m=+1311.731678716" watchObservedRunningTime="2025-10-09 13:47:15.959411065 +0000 UTC m=+1311.733202104" Oct 09 13:47:16 crc kubenswrapper[4762]: I1009 13:47:16.833210 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-7c78979758-qtqdp"] Oct 09 13:47:16 crc kubenswrapper[4762]: I1009 13:47:16.835228 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-7c78979758-qtqdp" Oct 09 13:47:16 crc kubenswrapper[4762]: I1009 13:47:16.839478 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-internal-svc" Oct 09 13:47:16 crc kubenswrapper[4762]: I1009 13:47:16.839613 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-public-svc" Oct 09 13:47:16 crc kubenswrapper[4762]: I1009 13:47:16.857924 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-7c78979758-qtqdp"] Oct 09 13:47:16 crc kubenswrapper[4762]: I1009 13:47:16.985455 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-68c6fd668c-fdnbq" event={"ID":"cebc2aaf-d953-4acb-a8cd-31119e6cd7fd","Type":"ContainerStarted","Data":"fb36555264bf34968ae7dc1aef1a9384eb6b884f9ddd43e4920dbc2bbb110a7c"} Oct 09 13:47:16 crc kubenswrapper[4762]: I1009 13:47:16.985500 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-c7fd4cf7c-phtkg" Oct 09 13:47:16 crc kubenswrapper[4762]: I1009 13:47:16.985515 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-68c6fd668c-fdnbq" event={"ID":"cebc2aaf-d953-4acb-a8cd-31119e6cd7fd","Type":"ContainerStarted","Data":"034dad204ce8e9094ec9b1c1d0b1940c69824cb50a907017058620a22d3c30ff"} Oct 09 13:47:16 crc kubenswrapper[4762]: I1009 13:47:16.985525 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-c7fd4cf7c-phtkg" event={"ID":"dd7f6272-8875-4adb-b8d3-1cfe2651f738","Type":"ContainerStarted","Data":"468ae0587ff3d81328ddb6eb43d3aa96c3ea9e2660cdd52df58d80465e0d4138"} Oct 09 13:47:16 crc kubenswrapper[4762]: I1009 13:47:16.998250 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-86957c694-98m78" event={"ID":"9f119cc4-119d-4535-8abd-fe380d546595","Type":"ContainerStarted","Data":"7e992ff8af3860151a7923e87bea63b52421d718aed5345d7541a06de728055e"} Oct 09 13:47:16 crc kubenswrapper[4762]: I1009 13:47:16.998292 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-86957c694-98m78" event={"ID":"9f119cc4-119d-4535-8abd-fe380d546595","Type":"ContainerStarted","Data":"41672cb485f557f009724012f729848819dca7d8e0da9f593acb894ad3571409"} Oct 09 13:47:17 crc kubenswrapper[4762]: I1009 13:47:17.002980 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1-internal-tls-certs\") pod \"barbican-api-7c78979758-qtqdp\" (UID: \"2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1\") " pod="openstack/barbican-api-7c78979758-qtqdp" Oct 09 13:47:17 crc kubenswrapper[4762]: I1009 13:47:17.003070 4762 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lnxbd\" (UniqueName: \"kubernetes.io/projected/2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1-kube-api-access-lnxbd\") pod \"barbican-api-7c78979758-qtqdp\" (UID: \"2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1\") " pod="openstack/barbican-api-7c78979758-qtqdp" Oct 09 13:47:17 crc kubenswrapper[4762]: I1009 13:47:17.004036 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-68c6fd668c-fdnbq" podStartSLOduration=2.222355962 podStartE2EDuration="6.004019181s" podCreationTimestamp="2025-10-09 13:47:11 +0000 UTC" firstStartedPulling="2025-10-09 13:47:12.439470694 +0000 UTC m=+1308.213261733" lastFinishedPulling="2025-10-09 13:47:16.221133913 +0000 UTC m=+1311.994924952" observedRunningTime="2025-10-09 13:47:16.995978203 +0000 UTC m=+1312.769769242" watchObservedRunningTime="2025-10-09 13:47:17.004019181 +0000 UTC m=+1312.777810220" Oct 09 13:47:17 crc kubenswrapper[4762]: I1009 13:47:17.006272 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1-config-data\") pod \"barbican-api-7c78979758-qtqdp\" (UID: \"2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1\") " pod="openstack/barbican-api-7c78979758-qtqdp" Oct 09 13:47:17 crc kubenswrapper[4762]: I1009 13:47:17.006347 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1-config-data-custom\") pod \"barbican-api-7c78979758-qtqdp\" (UID: \"2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1\") " pod="openstack/barbican-api-7c78979758-qtqdp" Oct 09 13:47:17 crc kubenswrapper[4762]: I1009 13:47:17.006610 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1-logs\") pod \"barbican-api-7c78979758-qtqdp\" (UID: \"2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1\") " pod="openstack/barbican-api-7c78979758-qtqdp" Oct 09 13:47:17 crc kubenswrapper[4762]: I1009 13:47:17.006827 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1-combined-ca-bundle\") pod \"barbican-api-7c78979758-qtqdp\" (UID: \"2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1\") " pod="openstack/barbican-api-7c78979758-qtqdp" Oct 09 13:47:17 crc kubenswrapper[4762]: I1009 13:47:17.006911 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1-public-tls-certs\") pod \"barbican-api-7c78979758-qtqdp\" (UID: \"2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1\") " pod="openstack/barbican-api-7c78979758-qtqdp" Oct 09 13:47:17 crc kubenswrapper[4762]: I1009 13:47:17.008604 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-688c87cc99-pbdfg" event={"ID":"1f1fc0dd-7a28-4a27-853e-714add235884","Type":"ContainerStarted","Data":"95ef9dd368129326da01ec626402c9aaedbcb36ccf25df923f0635b581848384"} Oct 09 13:47:17 crc kubenswrapper[4762]: I1009 13:47:17.008956 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-688c87cc99-pbdfg" Oct 09 13:47:17 crc kubenswrapper[4762]: I1009 13:47:17.067560 4762 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-688c87cc99-pbdfg" podStartSLOduration=6.067536057 podStartE2EDuration="6.067536057s" podCreationTimestamp="2025-10-09 13:47:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:47:17.052507914 +0000 UTC m=+1312.826298963" watchObservedRunningTime="2025-10-09 13:47:17.067536057 +0000 UTC m=+1312.841327096" Oct 09 13:47:17 crc kubenswrapper[4762]: I1009 13:47:17.068546 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-c7fd4cf7c-phtkg" podStartSLOduration=4.068536852 podStartE2EDuration="4.068536852s" podCreationTimestamp="2025-10-09 13:47:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:47:17.024540261 +0000 UTC m=+1312.798331300" watchObservedRunningTime="2025-10-09 13:47:17.068536852 +0000 UTC m=+1312.842327911" Oct 09 13:47:17 crc kubenswrapper[4762]: I1009 13:47:17.084554 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-86957c694-98m78" podStartSLOduration=2.993017537 podStartE2EDuration="6.084530328s" podCreationTimestamp="2025-10-09 13:47:11 +0000 UTC" firstStartedPulling="2025-10-09 13:47:13.171708996 +0000 UTC m=+1308.945500035" lastFinishedPulling="2025-10-09 13:47:16.263221777 +0000 UTC m=+1312.037012826" observedRunningTime="2025-10-09 13:47:17.074351726 +0000 UTC m=+1312.848142765" watchObservedRunningTime="2025-10-09 13:47:17.084530328 +0000 UTC m=+1312.858321367" Oct 09 13:47:17 crc kubenswrapper[4762]: I1009 13:47:17.108082 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1-combined-ca-bundle\") pod \"barbican-api-7c78979758-qtqdp\" (UID: \"2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1\") " pod="openstack/barbican-api-7c78979758-qtqdp" Oct 09 13:47:17 crc kubenswrapper[4762]: I1009 13:47:17.108137 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1-public-tls-certs\") pod \"barbican-api-7c78979758-qtqdp\" (UID: \"2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1\") " pod="openstack/barbican-api-7c78979758-qtqdp" Oct 09 13:47:17 crc kubenswrapper[4762]: I1009 13:47:17.108202 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1-internal-tls-certs\") pod \"barbican-api-7c78979758-qtqdp\" (UID: \"2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1\") " pod="openstack/barbican-api-7c78979758-qtqdp" Oct 09 13:47:17 crc kubenswrapper[4762]: I1009 13:47:17.108314 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lnxbd\" (UniqueName: \"kubernetes.io/projected/2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1-kube-api-access-lnxbd\") pod \"barbican-api-7c78979758-qtqdp\" (UID: \"2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1\") " pod="openstack/barbican-api-7c78979758-qtqdp" Oct 09 13:47:17 crc kubenswrapper[4762]: I1009 13:47:17.108376 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1-config-data\") pod 
\"barbican-api-7c78979758-qtqdp\" (UID: \"2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1\") " pod="openstack/barbican-api-7c78979758-qtqdp" Oct 09 13:47:17 crc kubenswrapper[4762]: I1009 13:47:17.108402 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1-config-data-custom\") pod \"barbican-api-7c78979758-qtqdp\" (UID: \"2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1\") " pod="openstack/barbican-api-7c78979758-qtqdp" Oct 09 13:47:17 crc kubenswrapper[4762]: I1009 13:47:17.108486 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1-logs\") pod \"barbican-api-7c78979758-qtqdp\" (UID: \"2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1\") " pod="openstack/barbican-api-7c78979758-qtqdp" Oct 09 13:47:17 crc kubenswrapper[4762]: I1009 13:47:17.109454 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1-logs\") pod \"barbican-api-7c78979758-qtqdp\" (UID: \"2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1\") " pod="openstack/barbican-api-7c78979758-qtqdp" Oct 09 13:47:17 crc kubenswrapper[4762]: I1009 13:47:17.113273 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1-config-data-custom\") pod \"barbican-api-7c78979758-qtqdp\" (UID: \"2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1\") " pod="openstack/barbican-api-7c78979758-qtqdp" Oct 09 13:47:17 crc kubenswrapper[4762]: I1009 13:47:17.113442 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1-combined-ca-bundle\") pod \"barbican-api-7c78979758-qtqdp\" (UID: \"2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1\") " pod="openstack/barbican-api-7c78979758-qtqdp" Oct 09 13:47:17 crc kubenswrapper[4762]: I1009 13:47:17.117038 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1-internal-tls-certs\") pod \"barbican-api-7c78979758-qtqdp\" (UID: \"2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1\") " pod="openstack/barbican-api-7c78979758-qtqdp" Oct 09 13:47:17 crc kubenswrapper[4762]: I1009 13:47:17.117354 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1-public-tls-certs\") pod \"barbican-api-7c78979758-qtqdp\" (UID: \"2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1\") " pod="openstack/barbican-api-7c78979758-qtqdp" Oct 09 13:47:17 crc kubenswrapper[4762]: I1009 13:47:17.119338 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1-config-data\") pod \"barbican-api-7c78979758-qtqdp\" (UID: \"2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1\") " pod="openstack/barbican-api-7c78979758-qtqdp" Oct 09 13:47:17 crc kubenswrapper[4762]: I1009 13:47:17.132207 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lnxbd\" (UniqueName: \"kubernetes.io/projected/2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1-kube-api-access-lnxbd\") pod \"barbican-api-7c78979758-qtqdp\" (UID: \"2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1\") " 
pod="openstack/barbican-api-7c78979758-qtqdp" Oct 09 13:47:17 crc kubenswrapper[4762]: I1009 13:47:17.306231 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-7c78979758-qtqdp" Oct 09 13:47:17 crc kubenswrapper[4762]: I1009 13:47:17.732964 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-7c78979758-qtqdp"] Oct 09 13:47:18 crc kubenswrapper[4762]: I1009 13:47:18.022010 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"bfb04dec-2e18-4fcb-a065-e859a3573ee4","Type":"ContainerStarted","Data":"6cb4cb71d0041ba3ef3f2c9b89eaed8bf58f9438ccfbb0b77627b674729d7b79"} Oct 09 13:47:18 crc kubenswrapper[4762]: I1009 13:47:18.023290 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"bfb04dec-2e18-4fcb-a065-e859a3573ee4","Type":"ContainerStarted","Data":"813e916f60e18760c92180d5f44be9adc64b7b48bc273515999d583d8aeb9f3a"} Oct 09 13:47:18 crc kubenswrapper[4762]: I1009 13:47:18.026010 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7c78979758-qtqdp" event={"ID":"2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1","Type":"ContainerStarted","Data":"503629b9433525adb8785606dd0badfc76c6ade4081c6a7b306be8a6a78e0554"} Oct 09 13:47:19 crc kubenswrapper[4762]: I1009 13:47:19.035367 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7c78979758-qtqdp" event={"ID":"2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1","Type":"ContainerStarted","Data":"940bb6da909be1f785661405cf69093008229cb06790274277183a2ddc92df99"} Oct 09 13:47:19 crc kubenswrapper[4762]: I1009 13:47:19.035823 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-7c78979758-qtqdp" Oct 09 13:47:19 crc kubenswrapper[4762]: I1009 13:47:19.035834 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7c78979758-qtqdp" event={"ID":"2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1","Type":"ContainerStarted","Data":"fc6d0ce7f1b2e1335bd5df1ffae809fadabf2813cdfe1e329a8458d677bea605"} Oct 09 13:47:19 crc kubenswrapper[4762]: I1009 13:47:19.037643 4762 generic.go:334] "Generic (PLEG): container finished" podID="5bba279a-35cf-4a4e-8632-2098cad8fa08" containerID="206fcc19fcfff363b71d18cc9c4ed2f9703f84ea849ddaea3eea31afd977e51e" exitCode=0 Oct 09 13:47:19 crc kubenswrapper[4762]: I1009 13:47:19.037675 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-glh4c" event={"ID":"5bba279a-35cf-4a4e-8632-2098cad8fa08","Type":"ContainerDied","Data":"206fcc19fcfff363b71d18cc9c4ed2f9703f84ea849ddaea3eea31afd977e51e"} Oct 09 13:47:19 crc kubenswrapper[4762]: I1009 13:47:19.056860 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-7c78979758-qtqdp" podStartSLOduration=3.056840793 podStartE2EDuration="3.056840793s" podCreationTimestamp="2025-10-09 13:47:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:47:19.05308452 +0000 UTC m=+1314.826875549" watchObservedRunningTime="2025-10-09 13:47:19.056840793 +0000 UTC m=+1314.830631832" Oct 09 13:47:20 crc kubenswrapper[4762]: I1009 13:47:20.054321 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"bfb04dec-2e18-4fcb-a065-e859a3573ee4","Type":"ContainerStarted","Data":"b84c270230770eba306f6f7c5ff2016fd300727513d4cbab2f10d8f2ac3f67db"} Oct 
09 13:47:20 crc kubenswrapper[4762]: I1009 13:47:20.054739 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 09 13:47:20 crc kubenswrapper[4762]: I1009 13:47:20.054759 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-7c78979758-qtqdp" Oct 09 13:47:20 crc kubenswrapper[4762]: I1009 13:47:20.082407 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.425351495 podStartE2EDuration="8.082372156s" podCreationTimestamp="2025-10-09 13:47:12 +0000 UTC" firstStartedPulling="2025-10-09 13:47:13.471778246 +0000 UTC m=+1309.245569295" lastFinishedPulling="2025-10-09 13:47:19.128798917 +0000 UTC m=+1314.902589956" observedRunningTime="2025-10-09 13:47:20.077475405 +0000 UTC m=+1315.851266444" watchObservedRunningTime="2025-10-09 13:47:20.082372156 +0000 UTC m=+1315.856163195" Oct 09 13:47:20 crc kubenswrapper[4762]: I1009 13:47:20.412960 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-glh4c" Oct 09 13:47:20 crc kubenswrapper[4762]: I1009 13:47:20.569423 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5bba279a-35cf-4a4e-8632-2098cad8fa08-config-data\") pod \"5bba279a-35cf-4a4e-8632-2098cad8fa08\" (UID: \"5bba279a-35cf-4a4e-8632-2098cad8fa08\") " Oct 09 13:47:20 crc kubenswrapper[4762]: I1009 13:47:20.570444 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/5bba279a-35cf-4a4e-8632-2098cad8fa08-db-sync-config-data\") pod \"5bba279a-35cf-4a4e-8632-2098cad8fa08\" (UID: \"5bba279a-35cf-4a4e-8632-2098cad8fa08\") " Oct 09 13:47:20 crc kubenswrapper[4762]: I1009 13:47:20.570482 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/5bba279a-35cf-4a4e-8632-2098cad8fa08-etc-machine-id\") pod \"5bba279a-35cf-4a4e-8632-2098cad8fa08\" (UID: \"5bba279a-35cf-4a4e-8632-2098cad8fa08\") " Oct 09 13:47:20 crc kubenswrapper[4762]: I1009 13:47:20.570596 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5bba279a-35cf-4a4e-8632-2098cad8fa08-scripts\") pod \"5bba279a-35cf-4a4e-8632-2098cad8fa08\" (UID: \"5bba279a-35cf-4a4e-8632-2098cad8fa08\") " Oct 09 13:47:20 crc kubenswrapper[4762]: I1009 13:47:20.570688 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5bba279a-35cf-4a4e-8632-2098cad8fa08-combined-ca-bundle\") pod \"5bba279a-35cf-4a4e-8632-2098cad8fa08\" (UID: \"5bba279a-35cf-4a4e-8632-2098cad8fa08\") " Oct 09 13:47:20 crc kubenswrapper[4762]: I1009 13:47:20.570731 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zp64z\" (UniqueName: \"kubernetes.io/projected/5bba279a-35cf-4a4e-8632-2098cad8fa08-kube-api-access-zp64z\") pod \"5bba279a-35cf-4a4e-8632-2098cad8fa08\" (UID: \"5bba279a-35cf-4a4e-8632-2098cad8fa08\") " Oct 09 13:47:20 crc kubenswrapper[4762]: I1009 13:47:20.571115 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5bba279a-35cf-4a4e-8632-2098cad8fa08-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "5bba279a-35cf-4a4e-8632-2098cad8fa08" 
(UID: "5bba279a-35cf-4a4e-8632-2098cad8fa08"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 13:47:20 crc kubenswrapper[4762]: I1009 13:47:20.571902 4762 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/5bba279a-35cf-4a4e-8632-2098cad8fa08-etc-machine-id\") on node \"crc\" DevicePath \"\"" Oct 09 13:47:20 crc kubenswrapper[4762]: I1009 13:47:20.575153 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5bba279a-35cf-4a4e-8632-2098cad8fa08-scripts" (OuterVolumeSpecName: "scripts") pod "5bba279a-35cf-4a4e-8632-2098cad8fa08" (UID: "5bba279a-35cf-4a4e-8632-2098cad8fa08"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:47:20 crc kubenswrapper[4762]: I1009 13:47:20.576380 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5bba279a-35cf-4a4e-8632-2098cad8fa08-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "5bba279a-35cf-4a4e-8632-2098cad8fa08" (UID: "5bba279a-35cf-4a4e-8632-2098cad8fa08"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:47:20 crc kubenswrapper[4762]: I1009 13:47:20.577901 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5bba279a-35cf-4a4e-8632-2098cad8fa08-kube-api-access-zp64z" (OuterVolumeSpecName: "kube-api-access-zp64z") pod "5bba279a-35cf-4a4e-8632-2098cad8fa08" (UID: "5bba279a-35cf-4a4e-8632-2098cad8fa08"). InnerVolumeSpecName "kube-api-access-zp64z". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:47:20 crc kubenswrapper[4762]: I1009 13:47:20.599301 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5bba279a-35cf-4a4e-8632-2098cad8fa08-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5bba279a-35cf-4a4e-8632-2098cad8fa08" (UID: "5bba279a-35cf-4a4e-8632-2098cad8fa08"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:47:20 crc kubenswrapper[4762]: I1009 13:47:20.626011 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5bba279a-35cf-4a4e-8632-2098cad8fa08-config-data" (OuterVolumeSpecName: "config-data") pod "5bba279a-35cf-4a4e-8632-2098cad8fa08" (UID: "5bba279a-35cf-4a4e-8632-2098cad8fa08"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:47:20 crc kubenswrapper[4762]: I1009 13:47:20.674140 4762 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5bba279a-35cf-4a4e-8632-2098cad8fa08-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 13:47:20 crc kubenswrapper[4762]: I1009 13:47:20.674183 4762 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/5bba279a-35cf-4a4e-8632-2098cad8fa08-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 13:47:20 crc kubenswrapper[4762]: I1009 13:47:20.674199 4762 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5bba279a-35cf-4a4e-8632-2098cad8fa08-scripts\") on node \"crc\" DevicePath \"\"" Oct 09 13:47:20 crc kubenswrapper[4762]: I1009 13:47:20.674210 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5bba279a-35cf-4a4e-8632-2098cad8fa08-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 13:47:20 crc kubenswrapper[4762]: I1009 13:47:20.674221 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zp64z\" (UniqueName: \"kubernetes.io/projected/5bba279a-35cf-4a4e-8632-2098cad8fa08-kube-api-access-zp64z\") on node \"crc\" DevicePath \"\"" Oct 09 13:47:21 crc kubenswrapper[4762]: I1009 13:47:21.063858 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-glh4c" event={"ID":"5bba279a-35cf-4a4e-8632-2098cad8fa08","Type":"ContainerDied","Data":"95ce3e0c8bd79e6f7d208870a253409f666f6c3cf158abbd7610baf5e9be47bc"} Oct 09 13:47:21 crc kubenswrapper[4762]: I1009 13:47:21.063921 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="95ce3e0c8bd79e6f7d208870a253409f666f6c3cf158abbd7610baf5e9be47bc" Oct 09 13:47:21 crc kubenswrapper[4762]: I1009 13:47:21.063919 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-glh4c" Oct 09 13:47:21 crc kubenswrapper[4762]: I1009 13:47:21.463792 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Oct 09 13:47:21 crc kubenswrapper[4762]: E1009 13:47:21.464200 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5bba279a-35cf-4a4e-8632-2098cad8fa08" containerName="cinder-db-sync" Oct 09 13:47:21 crc kubenswrapper[4762]: I1009 13:47:21.464217 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="5bba279a-35cf-4a4e-8632-2098cad8fa08" containerName="cinder-db-sync" Oct 09 13:47:21 crc kubenswrapper[4762]: I1009 13:47:21.464450 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="5bba279a-35cf-4a4e-8632-2098cad8fa08" containerName="cinder-db-sync" Oct 09 13:47:21 crc kubenswrapper[4762]: I1009 13:47:21.466202 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 09 13:47:21 crc kubenswrapper[4762]: I1009 13:47:21.470252 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Oct 09 13:47:21 crc kubenswrapper[4762]: I1009 13:47:21.470489 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Oct 09 13:47:21 crc kubenswrapper[4762]: I1009 13:47:21.470697 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Oct 09 13:47:21 crc kubenswrapper[4762]: I1009 13:47:21.472399 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-89vpf" Oct 09 13:47:21 crc kubenswrapper[4762]: I1009 13:47:21.527692 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-688c87cc99-pbdfg"] Oct 09 13:47:21 crc kubenswrapper[4762]: I1009 13:47:21.528213 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-688c87cc99-pbdfg" podUID="1f1fc0dd-7a28-4a27-853e-714add235884" containerName="dnsmasq-dns" containerID="cri-o://95ef9dd368129326da01ec626402c9aaedbcb36ccf25df923f0635b581848384" gracePeriod=10 Oct 09 13:47:21 crc kubenswrapper[4762]: I1009 13:47:21.530774 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-688c87cc99-pbdfg" Oct 09 13:47:21 crc kubenswrapper[4762]: I1009 13:47:21.549428 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 09 13:47:21 crc kubenswrapper[4762]: I1009 13:47:21.591824 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6bb4fc677f-xggsj"] Oct 09 13:47:21 crc kubenswrapper[4762]: I1009 13:47:21.593625 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6bb4fc677f-xggsj" Oct 09 13:47:21 crc kubenswrapper[4762]: I1009 13:47:21.599703 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/80d06660-db7a-4142-9fd5-d8f7b8d0d6b0-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"80d06660-db7a-4142-9fd5-d8f7b8d0d6b0\") " pod="openstack/cinder-scheduler-0" Oct 09 13:47:21 crc kubenswrapper[4762]: I1009 13:47:21.599769 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/80d06660-db7a-4142-9fd5-d8f7b8d0d6b0-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"80d06660-db7a-4142-9fd5-d8f7b8d0d6b0\") " pod="openstack/cinder-scheduler-0" Oct 09 13:47:21 crc kubenswrapper[4762]: I1009 13:47:21.599791 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/80d06660-db7a-4142-9fd5-d8f7b8d0d6b0-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"80d06660-db7a-4142-9fd5-d8f7b8d0d6b0\") " pod="openstack/cinder-scheduler-0" Oct 09 13:47:21 crc kubenswrapper[4762]: I1009 13:47:21.599821 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/80d06660-db7a-4142-9fd5-d8f7b8d0d6b0-config-data\") pod \"cinder-scheduler-0\" (UID: \"80d06660-db7a-4142-9fd5-d8f7b8d0d6b0\") " pod="openstack/cinder-scheduler-0" Oct 09 13:47:21 crc kubenswrapper[4762]: I1009 13:47:21.599843 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/80d06660-db7a-4142-9fd5-d8f7b8d0d6b0-scripts\") pod \"cinder-scheduler-0\" (UID: \"80d06660-db7a-4142-9fd5-d8f7b8d0d6b0\") " pod="openstack/cinder-scheduler-0" Oct 09 13:47:21 crc kubenswrapper[4762]: I1009 13:47:21.599944 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9jm6s\" (UniqueName: \"kubernetes.io/projected/80d06660-db7a-4142-9fd5-d8f7b8d0d6b0-kube-api-access-9jm6s\") pod \"cinder-scheduler-0\" (UID: \"80d06660-db7a-4142-9fd5-d8f7b8d0d6b0\") " pod="openstack/cinder-scheduler-0" Oct 09 13:47:21 crc kubenswrapper[4762]: I1009 13:47:21.642243 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6bb4fc677f-xggsj"] Oct 09 13:47:21 crc kubenswrapper[4762]: I1009 13:47:21.701537 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8nd55\" (UniqueName: \"kubernetes.io/projected/c881159f-991c-4817-b1e5-9ca70a30d8eb-kube-api-access-8nd55\") pod \"dnsmasq-dns-6bb4fc677f-xggsj\" (UID: \"c881159f-991c-4817-b1e5-9ca70a30d8eb\") " pod="openstack/dnsmasq-dns-6bb4fc677f-xggsj" Oct 09 13:47:21 crc kubenswrapper[4762]: I1009 13:47:21.701585 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c881159f-991c-4817-b1e5-9ca70a30d8eb-ovsdbserver-nb\") pod \"dnsmasq-dns-6bb4fc677f-xggsj\" (UID: \"c881159f-991c-4817-b1e5-9ca70a30d8eb\") " pod="openstack/dnsmasq-dns-6bb4fc677f-xggsj" Oct 09 13:47:21 crc kubenswrapper[4762]: I1009 13:47:21.701607 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-9jm6s\" (UniqueName: \"kubernetes.io/projected/80d06660-db7a-4142-9fd5-d8f7b8d0d6b0-kube-api-access-9jm6s\") pod \"cinder-scheduler-0\" (UID: \"80d06660-db7a-4142-9fd5-d8f7b8d0d6b0\") " pod="openstack/cinder-scheduler-0" Oct 09 13:47:21 crc kubenswrapper[4762]: I1009 13:47:21.701645 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c881159f-991c-4817-b1e5-9ca70a30d8eb-dns-swift-storage-0\") pod \"dnsmasq-dns-6bb4fc677f-xggsj\" (UID: \"c881159f-991c-4817-b1e5-9ca70a30d8eb\") " pod="openstack/dnsmasq-dns-6bb4fc677f-xggsj" Oct 09 13:47:21 crc kubenswrapper[4762]: I1009 13:47:21.701684 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c881159f-991c-4817-b1e5-9ca70a30d8eb-ovsdbserver-sb\") pod \"dnsmasq-dns-6bb4fc677f-xggsj\" (UID: \"c881159f-991c-4817-b1e5-9ca70a30d8eb\") " pod="openstack/dnsmasq-dns-6bb4fc677f-xggsj" Oct 09 13:47:21 crc kubenswrapper[4762]: I1009 13:47:21.701714 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/80d06660-db7a-4142-9fd5-d8f7b8d0d6b0-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"80d06660-db7a-4142-9fd5-d8f7b8d0d6b0\") " pod="openstack/cinder-scheduler-0" Oct 09 13:47:21 crc kubenswrapper[4762]: I1009 13:47:21.701764 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/80d06660-db7a-4142-9fd5-d8f7b8d0d6b0-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"80d06660-db7a-4142-9fd5-d8f7b8d0d6b0\") " pod="openstack/cinder-scheduler-0" Oct 09 13:47:21 crc kubenswrapper[4762]: I1009 13:47:21.701782 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c881159f-991c-4817-b1e5-9ca70a30d8eb-dns-svc\") pod \"dnsmasq-dns-6bb4fc677f-xggsj\" (UID: \"c881159f-991c-4817-b1e5-9ca70a30d8eb\") " pod="openstack/dnsmasq-dns-6bb4fc677f-xggsj" Oct 09 13:47:21 crc kubenswrapper[4762]: I1009 13:47:21.701807 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/80d06660-db7a-4142-9fd5-d8f7b8d0d6b0-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"80d06660-db7a-4142-9fd5-d8f7b8d0d6b0\") " pod="openstack/cinder-scheduler-0" Oct 09 13:47:21 crc kubenswrapper[4762]: I1009 13:47:21.701838 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/80d06660-db7a-4142-9fd5-d8f7b8d0d6b0-config-data\") pod \"cinder-scheduler-0\" (UID: \"80d06660-db7a-4142-9fd5-d8f7b8d0d6b0\") " pod="openstack/cinder-scheduler-0" Oct 09 13:47:21 crc kubenswrapper[4762]: I1009 13:47:21.701853 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/80d06660-db7a-4142-9fd5-d8f7b8d0d6b0-scripts\") pod \"cinder-scheduler-0\" (UID: \"80d06660-db7a-4142-9fd5-d8f7b8d0d6b0\") " pod="openstack/cinder-scheduler-0" Oct 09 13:47:21 crc kubenswrapper[4762]: I1009 13:47:21.701870 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c881159f-991c-4817-b1e5-9ca70a30d8eb-config\") pod 
\"dnsmasq-dns-6bb4fc677f-xggsj\" (UID: \"c881159f-991c-4817-b1e5-9ca70a30d8eb\") " pod="openstack/dnsmasq-dns-6bb4fc677f-xggsj" Oct 09 13:47:21 crc kubenswrapper[4762]: I1009 13:47:21.702247 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/80d06660-db7a-4142-9fd5-d8f7b8d0d6b0-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"80d06660-db7a-4142-9fd5-d8f7b8d0d6b0\") " pod="openstack/cinder-scheduler-0" Oct 09 13:47:21 crc kubenswrapper[4762]: I1009 13:47:21.702527 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Oct 09 13:47:21 crc kubenswrapper[4762]: I1009 13:47:21.710446 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Oct 09 13:47:21 crc kubenswrapper[4762]: I1009 13:47:21.710450 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/80d06660-db7a-4142-9fd5-d8f7b8d0d6b0-config-data\") pod \"cinder-scheduler-0\" (UID: \"80d06660-db7a-4142-9fd5-d8f7b8d0d6b0\") " pod="openstack/cinder-scheduler-0" Oct 09 13:47:21 crc kubenswrapper[4762]: I1009 13:47:21.724949 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Oct 09 13:47:21 crc kubenswrapper[4762]: I1009 13:47:21.725309 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/80d06660-db7a-4142-9fd5-d8f7b8d0d6b0-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"80d06660-db7a-4142-9fd5-d8f7b8d0d6b0\") " pod="openstack/cinder-scheduler-0" Oct 09 13:47:21 crc kubenswrapper[4762]: I1009 13:47:21.726125 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/80d06660-db7a-4142-9fd5-d8f7b8d0d6b0-scripts\") pod \"cinder-scheduler-0\" (UID: \"80d06660-db7a-4142-9fd5-d8f7b8d0d6b0\") " pod="openstack/cinder-scheduler-0" Oct 09 13:47:21 crc kubenswrapper[4762]: I1009 13:47:21.726846 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Oct 09 13:47:21 crc kubenswrapper[4762]: I1009 13:47:21.732762 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/80d06660-db7a-4142-9fd5-d8f7b8d0d6b0-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"80d06660-db7a-4142-9fd5-d8f7b8d0d6b0\") " pod="openstack/cinder-scheduler-0" Oct 09 13:47:21 crc kubenswrapper[4762]: I1009 13:47:21.733245 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9jm6s\" (UniqueName: \"kubernetes.io/projected/80d06660-db7a-4142-9fd5-d8f7b8d0d6b0-kube-api-access-9jm6s\") pod \"cinder-scheduler-0\" (UID: \"80d06660-db7a-4142-9fd5-d8f7b8d0d6b0\") " pod="openstack/cinder-scheduler-0" Oct 09 13:47:21 crc kubenswrapper[4762]: I1009 13:47:21.783140 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 09 13:47:21 crc kubenswrapper[4762]: I1009 13:47:21.802990 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8nd55\" (UniqueName: \"kubernetes.io/projected/c881159f-991c-4817-b1e5-9ca70a30d8eb-kube-api-access-8nd55\") pod \"dnsmasq-dns-6bb4fc677f-xggsj\" (UID: \"c881159f-991c-4817-b1e5-9ca70a30d8eb\") " pod="openstack/dnsmasq-dns-6bb4fc677f-xggsj" Oct 09 13:47:21 crc kubenswrapper[4762]: I1009 13:47:21.803032 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c881159f-991c-4817-b1e5-9ca70a30d8eb-ovsdbserver-nb\") pod \"dnsmasq-dns-6bb4fc677f-xggsj\" (UID: \"c881159f-991c-4817-b1e5-9ca70a30d8eb\") " pod="openstack/dnsmasq-dns-6bb4fc677f-xggsj" Oct 09 13:47:21 crc kubenswrapper[4762]: I1009 13:47:21.803058 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c881159f-991c-4817-b1e5-9ca70a30d8eb-dns-swift-storage-0\") pod \"dnsmasq-dns-6bb4fc677f-xggsj\" (UID: \"c881159f-991c-4817-b1e5-9ca70a30d8eb\") " pod="openstack/dnsmasq-dns-6bb4fc677f-xggsj" Oct 09 13:47:21 crc kubenswrapper[4762]: I1009 13:47:21.803089 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c881159f-991c-4817-b1e5-9ca70a30d8eb-ovsdbserver-sb\") pod \"dnsmasq-dns-6bb4fc677f-xggsj\" (UID: \"c881159f-991c-4817-b1e5-9ca70a30d8eb\") " pod="openstack/dnsmasq-dns-6bb4fc677f-xggsj" Oct 09 13:47:21 crc kubenswrapper[4762]: I1009 13:47:21.803140 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c881159f-991c-4817-b1e5-9ca70a30d8eb-dns-svc\") pod \"dnsmasq-dns-6bb4fc677f-xggsj\" (UID: \"c881159f-991c-4817-b1e5-9ca70a30d8eb\") " pod="openstack/dnsmasq-dns-6bb4fc677f-xggsj" Oct 09 13:47:21 crc kubenswrapper[4762]: I1009 13:47:21.803179 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c881159f-991c-4817-b1e5-9ca70a30d8eb-config\") pod \"dnsmasq-dns-6bb4fc677f-xggsj\" (UID: \"c881159f-991c-4817-b1e5-9ca70a30d8eb\") " pod="openstack/dnsmasq-dns-6bb4fc677f-xggsj" Oct 09 13:47:21 crc kubenswrapper[4762]: I1009 13:47:21.804024 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c881159f-991c-4817-b1e5-9ca70a30d8eb-config\") pod \"dnsmasq-dns-6bb4fc677f-xggsj\" (UID: \"c881159f-991c-4817-b1e5-9ca70a30d8eb\") " pod="openstack/dnsmasq-dns-6bb4fc677f-xggsj" Oct 09 13:47:21 crc kubenswrapper[4762]: I1009 13:47:21.804834 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c881159f-991c-4817-b1e5-9ca70a30d8eb-ovsdbserver-nb\") pod \"dnsmasq-dns-6bb4fc677f-xggsj\" (UID: \"c881159f-991c-4817-b1e5-9ca70a30d8eb\") " pod="openstack/dnsmasq-dns-6bb4fc677f-xggsj" Oct 09 13:47:21 crc kubenswrapper[4762]: I1009 13:47:21.805506 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c881159f-991c-4817-b1e5-9ca70a30d8eb-dns-svc\") pod \"dnsmasq-dns-6bb4fc677f-xggsj\" (UID: \"c881159f-991c-4817-b1e5-9ca70a30d8eb\") " pod="openstack/dnsmasq-dns-6bb4fc677f-xggsj" Oct 09 13:47:21 crc kubenswrapper[4762]: I1009 
13:47:21.805545 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c881159f-991c-4817-b1e5-9ca70a30d8eb-dns-swift-storage-0\") pod \"dnsmasq-dns-6bb4fc677f-xggsj\" (UID: \"c881159f-991c-4817-b1e5-9ca70a30d8eb\") " pod="openstack/dnsmasq-dns-6bb4fc677f-xggsj" Oct 09 13:47:21 crc kubenswrapper[4762]: I1009 13:47:21.807201 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c881159f-991c-4817-b1e5-9ca70a30d8eb-ovsdbserver-sb\") pod \"dnsmasq-dns-6bb4fc677f-xggsj\" (UID: \"c881159f-991c-4817-b1e5-9ca70a30d8eb\") " pod="openstack/dnsmasq-dns-6bb4fc677f-xggsj" Oct 09 13:47:21 crc kubenswrapper[4762]: I1009 13:47:21.832510 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8nd55\" (UniqueName: \"kubernetes.io/projected/c881159f-991c-4817-b1e5-9ca70a30d8eb-kube-api-access-8nd55\") pod \"dnsmasq-dns-6bb4fc677f-xggsj\" (UID: \"c881159f-991c-4817-b1e5-9ca70a30d8eb\") " pod="openstack/dnsmasq-dns-6bb4fc677f-xggsj" Oct 09 13:47:21 crc kubenswrapper[4762]: I1009 13:47:21.905222 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t6sjh\" (UniqueName: \"kubernetes.io/projected/e6279636-8997-4662-b1b1-54e82a4d3921-kube-api-access-t6sjh\") pod \"cinder-api-0\" (UID: \"e6279636-8997-4662-b1b1-54e82a4d3921\") " pod="openstack/cinder-api-0" Oct 09 13:47:21 crc kubenswrapper[4762]: I1009 13:47:21.905275 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e6279636-8997-4662-b1b1-54e82a4d3921-scripts\") pod \"cinder-api-0\" (UID: \"e6279636-8997-4662-b1b1-54e82a4d3921\") " pod="openstack/cinder-api-0" Oct 09 13:47:21 crc kubenswrapper[4762]: I1009 13:47:21.905344 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e6279636-8997-4662-b1b1-54e82a4d3921-logs\") pod \"cinder-api-0\" (UID: \"e6279636-8997-4662-b1b1-54e82a4d3921\") " pod="openstack/cinder-api-0" Oct 09 13:47:21 crc kubenswrapper[4762]: I1009 13:47:21.905365 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e6279636-8997-4662-b1b1-54e82a4d3921-config-data-custom\") pod \"cinder-api-0\" (UID: \"e6279636-8997-4662-b1b1-54e82a4d3921\") " pod="openstack/cinder-api-0" Oct 09 13:47:21 crc kubenswrapper[4762]: I1009 13:47:21.905499 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e6279636-8997-4662-b1b1-54e82a4d3921-config-data\") pod \"cinder-api-0\" (UID: \"e6279636-8997-4662-b1b1-54e82a4d3921\") " pod="openstack/cinder-api-0" Oct 09 13:47:21 crc kubenswrapper[4762]: I1009 13:47:21.905580 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e6279636-8997-4662-b1b1-54e82a4d3921-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"e6279636-8997-4662-b1b1-54e82a4d3921\") " pod="openstack/cinder-api-0" Oct 09 13:47:21 crc kubenswrapper[4762]: I1009 13:47:21.905606 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: 
\"kubernetes.io/host-path/e6279636-8997-4662-b1b1-54e82a4d3921-etc-machine-id\") pod \"cinder-api-0\" (UID: \"e6279636-8997-4662-b1b1-54e82a4d3921\") " pod="openstack/cinder-api-0" Oct 09 13:47:21 crc kubenswrapper[4762]: I1009 13:47:21.910863 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6bb4fc677f-xggsj" Oct 09 13:47:22 crc kubenswrapper[4762]: I1009 13:47:22.010562 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t6sjh\" (UniqueName: \"kubernetes.io/projected/e6279636-8997-4662-b1b1-54e82a4d3921-kube-api-access-t6sjh\") pod \"cinder-api-0\" (UID: \"e6279636-8997-4662-b1b1-54e82a4d3921\") " pod="openstack/cinder-api-0" Oct 09 13:47:22 crc kubenswrapper[4762]: I1009 13:47:22.010622 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e6279636-8997-4662-b1b1-54e82a4d3921-scripts\") pod \"cinder-api-0\" (UID: \"e6279636-8997-4662-b1b1-54e82a4d3921\") " pod="openstack/cinder-api-0" Oct 09 13:47:22 crc kubenswrapper[4762]: I1009 13:47:22.010681 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e6279636-8997-4662-b1b1-54e82a4d3921-logs\") pod \"cinder-api-0\" (UID: \"e6279636-8997-4662-b1b1-54e82a4d3921\") " pod="openstack/cinder-api-0" Oct 09 13:47:22 crc kubenswrapper[4762]: I1009 13:47:22.010703 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e6279636-8997-4662-b1b1-54e82a4d3921-config-data-custom\") pod \"cinder-api-0\" (UID: \"e6279636-8997-4662-b1b1-54e82a4d3921\") " pod="openstack/cinder-api-0" Oct 09 13:47:22 crc kubenswrapper[4762]: I1009 13:47:22.010782 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e6279636-8997-4662-b1b1-54e82a4d3921-config-data\") pod \"cinder-api-0\" (UID: \"e6279636-8997-4662-b1b1-54e82a4d3921\") " pod="openstack/cinder-api-0" Oct 09 13:47:22 crc kubenswrapper[4762]: I1009 13:47:22.010801 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e6279636-8997-4662-b1b1-54e82a4d3921-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"e6279636-8997-4662-b1b1-54e82a4d3921\") " pod="openstack/cinder-api-0" Oct 09 13:47:22 crc kubenswrapper[4762]: I1009 13:47:22.010816 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e6279636-8997-4662-b1b1-54e82a4d3921-etc-machine-id\") pod \"cinder-api-0\" (UID: \"e6279636-8997-4662-b1b1-54e82a4d3921\") " pod="openstack/cinder-api-0" Oct 09 13:47:22 crc kubenswrapper[4762]: I1009 13:47:22.010936 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e6279636-8997-4662-b1b1-54e82a4d3921-etc-machine-id\") pod \"cinder-api-0\" (UID: \"e6279636-8997-4662-b1b1-54e82a4d3921\") " pod="openstack/cinder-api-0" Oct 09 13:47:22 crc kubenswrapper[4762]: I1009 13:47:22.014784 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e6279636-8997-4662-b1b1-54e82a4d3921-logs\") pod \"cinder-api-0\" (UID: \"e6279636-8997-4662-b1b1-54e82a4d3921\") " pod="openstack/cinder-api-0" Oct 09 13:47:22 crc 
kubenswrapper[4762]: I1009 13:47:22.019592 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e6279636-8997-4662-b1b1-54e82a4d3921-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"e6279636-8997-4662-b1b1-54e82a4d3921\") " pod="openstack/cinder-api-0" Oct 09 13:47:22 crc kubenswrapper[4762]: I1009 13:47:22.019937 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e6279636-8997-4662-b1b1-54e82a4d3921-scripts\") pod \"cinder-api-0\" (UID: \"e6279636-8997-4662-b1b1-54e82a4d3921\") " pod="openstack/cinder-api-0" Oct 09 13:47:22 crc kubenswrapper[4762]: I1009 13:47:22.020144 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e6279636-8997-4662-b1b1-54e82a4d3921-config-data\") pod \"cinder-api-0\" (UID: \"e6279636-8997-4662-b1b1-54e82a4d3921\") " pod="openstack/cinder-api-0" Oct 09 13:47:22 crc kubenswrapper[4762]: I1009 13:47:22.037442 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t6sjh\" (UniqueName: \"kubernetes.io/projected/e6279636-8997-4662-b1b1-54e82a4d3921-kube-api-access-t6sjh\") pod \"cinder-api-0\" (UID: \"e6279636-8997-4662-b1b1-54e82a4d3921\") " pod="openstack/cinder-api-0" Oct 09 13:47:22 crc kubenswrapper[4762]: I1009 13:47:22.042527 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e6279636-8997-4662-b1b1-54e82a4d3921-config-data-custom\") pod \"cinder-api-0\" (UID: \"e6279636-8997-4662-b1b1-54e82a4d3921\") " pod="openstack/cinder-api-0" Oct 09 13:47:22 crc kubenswrapper[4762]: I1009 13:47:22.047771 4762 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-688c87cc99-pbdfg" podUID="1f1fc0dd-7a28-4a27-853e-714add235884" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.156:5353: connect: connection refused" Oct 09 13:47:22 crc kubenswrapper[4762]: I1009 13:47:22.109239 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Oct 09 13:47:22 crc kubenswrapper[4762]: I1009 13:47:22.110966 4762 generic.go:334] "Generic (PLEG): container finished" podID="1f1fc0dd-7a28-4a27-853e-714add235884" containerID="95ef9dd368129326da01ec626402c9aaedbcb36ccf25df923f0635b581848384" exitCode=0 Oct 09 13:47:22 crc kubenswrapper[4762]: I1009 13:47:22.111004 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-688c87cc99-pbdfg" event={"ID":"1f1fc0dd-7a28-4a27-853e-714add235884","Type":"ContainerDied","Data":"95ef9dd368129326da01ec626402c9aaedbcb36ccf25df923f0635b581848384"} Oct 09 13:47:22 crc kubenswrapper[4762]: I1009 13:47:22.311087 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6bb4fc677f-xggsj"] Oct 09 13:47:22 crc kubenswrapper[4762]: I1009 13:47:22.400358 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 09 13:47:22 crc kubenswrapper[4762]: I1009 13:47:22.628444 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-688c87cc99-pbdfg" Oct 09 13:47:22 crc kubenswrapper[4762]: I1009 13:47:22.631447 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4dsc\" (UniqueName: \"kubernetes.io/projected/1f1fc0dd-7a28-4a27-853e-714add235884-kube-api-access-s4dsc\") pod \"1f1fc0dd-7a28-4a27-853e-714add235884\" (UID: \"1f1fc0dd-7a28-4a27-853e-714add235884\") " Oct 09 13:47:22 crc kubenswrapper[4762]: I1009 13:47:22.631512 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1f1fc0dd-7a28-4a27-853e-714add235884-ovsdbserver-sb\") pod \"1f1fc0dd-7a28-4a27-853e-714add235884\" (UID: \"1f1fc0dd-7a28-4a27-853e-714add235884\") " Oct 09 13:47:22 crc kubenswrapper[4762]: I1009 13:47:22.631579 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1f1fc0dd-7a28-4a27-853e-714add235884-config\") pod \"1f1fc0dd-7a28-4a27-853e-714add235884\" (UID: \"1f1fc0dd-7a28-4a27-853e-714add235884\") " Oct 09 13:47:22 crc kubenswrapper[4762]: I1009 13:47:22.631681 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1f1fc0dd-7a28-4a27-853e-714add235884-dns-svc\") pod \"1f1fc0dd-7a28-4a27-853e-714add235884\" (UID: \"1f1fc0dd-7a28-4a27-853e-714add235884\") " Oct 09 13:47:22 crc kubenswrapper[4762]: I1009 13:47:22.631752 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1f1fc0dd-7a28-4a27-853e-714add235884-ovsdbserver-nb\") pod \"1f1fc0dd-7a28-4a27-853e-714add235884\" (UID: \"1f1fc0dd-7a28-4a27-853e-714add235884\") " Oct 09 13:47:22 crc kubenswrapper[4762]: I1009 13:47:22.631842 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1f1fc0dd-7a28-4a27-853e-714add235884-dns-swift-storage-0\") pod \"1f1fc0dd-7a28-4a27-853e-714add235884\" (UID: \"1f1fc0dd-7a28-4a27-853e-714add235884\") " Oct 09 13:47:22 crc kubenswrapper[4762]: I1009 13:47:22.704910 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1f1fc0dd-7a28-4a27-853e-714add235884-kube-api-access-s4dsc" (OuterVolumeSpecName: "kube-api-access-s4dsc") pod "1f1fc0dd-7a28-4a27-853e-714add235884" (UID: "1f1fc0dd-7a28-4a27-853e-714add235884"). InnerVolumeSpecName "kube-api-access-s4dsc". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:47:22 crc kubenswrapper[4762]: I1009 13:47:22.724519 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Oct 09 13:47:22 crc kubenswrapper[4762]: I1009 13:47:22.742924 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4dsc\" (UniqueName: \"kubernetes.io/projected/1f1fc0dd-7a28-4a27-853e-714add235884-kube-api-access-s4dsc\") on node \"crc\" DevicePath \"\"" Oct 09 13:47:22 crc kubenswrapper[4762]: I1009 13:47:22.751202 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1f1fc0dd-7a28-4a27-853e-714add235884-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "1f1fc0dd-7a28-4a27-853e-714add235884" (UID: "1f1fc0dd-7a28-4a27-853e-714add235884"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:47:22 crc kubenswrapper[4762]: I1009 13:47:22.769017 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1f1fc0dd-7a28-4a27-853e-714add235884-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "1f1fc0dd-7a28-4a27-853e-714add235884" (UID: "1f1fc0dd-7a28-4a27-853e-714add235884"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:47:22 crc kubenswrapper[4762]: I1009 13:47:22.784860 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1f1fc0dd-7a28-4a27-853e-714add235884-config" (OuterVolumeSpecName: "config") pod "1f1fc0dd-7a28-4a27-853e-714add235884" (UID: "1f1fc0dd-7a28-4a27-853e-714add235884"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:47:22 crc kubenswrapper[4762]: I1009 13:47:22.801701 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1f1fc0dd-7a28-4a27-853e-714add235884-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "1f1fc0dd-7a28-4a27-853e-714add235884" (UID: "1f1fc0dd-7a28-4a27-853e-714add235884"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:47:22 crc kubenswrapper[4762]: I1009 13:47:22.837914 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1f1fc0dd-7a28-4a27-853e-714add235884-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "1f1fc0dd-7a28-4a27-853e-714add235884" (UID: "1f1fc0dd-7a28-4a27-853e-714add235884"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:47:22 crc kubenswrapper[4762]: I1009 13:47:22.850370 4762 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1f1fc0dd-7a28-4a27-853e-714add235884-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 09 13:47:22 crc kubenswrapper[4762]: I1009 13:47:22.850407 4762 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1f1fc0dd-7a28-4a27-853e-714add235884-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 09 13:47:22 crc kubenswrapper[4762]: I1009 13:47:22.850420 4762 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1f1fc0dd-7a28-4a27-853e-714add235884-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 09 13:47:22 crc kubenswrapper[4762]: I1009 13:47:22.850430 4762 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1f1fc0dd-7a28-4a27-853e-714add235884-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 09 13:47:22 crc kubenswrapper[4762]: I1009 13:47:22.850438 4762 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1f1fc0dd-7a28-4a27-853e-714add235884-config\") on node \"crc\" DevicePath \"\"" Oct 09 13:47:23 crc kubenswrapper[4762]: I1009 13:47:23.171329 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"80d06660-db7a-4142-9fd5-d8f7b8d0d6b0","Type":"ContainerStarted","Data":"88f345dfc47a21edc6dcdfefcaf92ea050b5f8f3fe009e225037a632fe94f4fb"} Oct 09 13:47:23 crc kubenswrapper[4762]: I1009 13:47:23.173863 4762 generic.go:334] "Generic (PLEG): container finished" 
podID="c881159f-991c-4817-b1e5-9ca70a30d8eb" containerID="5774719b0d337714838e1ccd38e6d6b0f915fade2d424da24558d6f97b1b7481" exitCode=0 Oct 09 13:47:23 crc kubenswrapper[4762]: I1009 13:47:23.173928 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bb4fc677f-xggsj" event={"ID":"c881159f-991c-4817-b1e5-9ca70a30d8eb","Type":"ContainerDied","Data":"5774719b0d337714838e1ccd38e6d6b0f915fade2d424da24558d6f97b1b7481"} Oct 09 13:47:23 crc kubenswrapper[4762]: I1009 13:47:23.173955 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bb4fc677f-xggsj" event={"ID":"c881159f-991c-4817-b1e5-9ca70a30d8eb","Type":"ContainerStarted","Data":"feb0f500cd9c914a1a5f51b20192518cfafb75af694a32c25a62f4c57afc9b8f"} Oct 09 13:47:23 crc kubenswrapper[4762]: I1009 13:47:23.181355 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-688c87cc99-pbdfg" event={"ID":"1f1fc0dd-7a28-4a27-853e-714add235884","Type":"ContainerDied","Data":"018abaf844835c448d5d521260a9f283b0530f861ebae6bd9952ed1cca78e0d0"} Oct 09 13:47:23 crc kubenswrapper[4762]: I1009 13:47:23.181402 4762 scope.go:117] "RemoveContainer" containerID="95ef9dd368129326da01ec626402c9aaedbcb36ccf25df923f0635b581848384" Oct 09 13:47:23 crc kubenswrapper[4762]: I1009 13:47:23.181425 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-688c87cc99-pbdfg" Oct 09 13:47:23 crc kubenswrapper[4762]: I1009 13:47:23.183494 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"e6279636-8997-4662-b1b1-54e82a4d3921","Type":"ContainerStarted","Data":"5a8e019273aaf5b7414589a3eb8826b9151ced8c68ac6652949e002a78ffd305"} Oct 09 13:47:23 crc kubenswrapper[4762]: I1009 13:47:23.241721 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-688c87cc99-pbdfg"] Oct 09 13:47:23 crc kubenswrapper[4762]: I1009 13:47:23.250723 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-688c87cc99-pbdfg"] Oct 09 13:47:23 crc kubenswrapper[4762]: I1009 13:47:23.428780 4762 scope.go:117] "RemoveContainer" containerID="eebda60b2bc08955b8447c9df8c9d6a20813d4f463c8adc8d75c1269ebc66f87" Oct 09 13:47:24 crc kubenswrapper[4762]: I1009 13:47:24.150619 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Oct 09 13:47:24 crc kubenswrapper[4762]: I1009 13:47:24.207000 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bb4fc677f-xggsj" event={"ID":"c881159f-991c-4817-b1e5-9ca70a30d8eb","Type":"ContainerStarted","Data":"bb7094500d79c0b318ce5fd00a0464df84913dcb767170c8cc4e6662117a51d7"} Oct 09 13:47:24 crc kubenswrapper[4762]: I1009 13:47:24.208481 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6bb4fc677f-xggsj" Oct 09 13:47:24 crc kubenswrapper[4762]: I1009 13:47:24.232341 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"e6279636-8997-4662-b1b1-54e82a4d3921","Type":"ContainerStarted","Data":"73a5ff1ada42b70e17cbde9dfc52f7bf0e96fb2587832ad253c0d3bcab30b816"} Oct 09 13:47:24 crc kubenswrapper[4762]: I1009 13:47:24.234796 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6bb4fc677f-xggsj" podStartSLOduration=3.234772967 podStartE2EDuration="3.234772967s" podCreationTimestamp="2025-10-09 13:47:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" 
lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:47:24.232620603 +0000 UTC m=+1320.006411642" watchObservedRunningTime="2025-10-09 13:47:24.234772967 +0000 UTC m=+1320.008564006" Oct 09 13:47:24 crc kubenswrapper[4762]: I1009 13:47:24.998405 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1f1fc0dd-7a28-4a27-853e-714add235884" path="/var/lib/kubelet/pods/1f1fc0dd-7a28-4a27-853e-714add235884/volumes" Oct 09 13:47:25 crc kubenswrapper[4762]: I1009 13:47:25.294086 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"80d06660-db7a-4142-9fd5-d8f7b8d0d6b0","Type":"ContainerStarted","Data":"7ac811229727a3c8f0eb0f82aa12c351749d73f731bf2226728c9b4c61babe20"} Oct 09 13:47:25 crc kubenswrapper[4762]: I1009 13:47:25.323972 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="e6279636-8997-4662-b1b1-54e82a4d3921" containerName="cinder-api-log" containerID="cri-o://73a5ff1ada42b70e17cbde9dfc52f7bf0e96fb2587832ad253c0d3bcab30b816" gracePeriod=30 Oct 09 13:47:25 crc kubenswrapper[4762]: I1009 13:47:25.324377 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"e6279636-8997-4662-b1b1-54e82a4d3921","Type":"ContainerStarted","Data":"99acaa3d7661d027869c7971b4c8ec583b1a8ffc0bce46096ccd4ea00c314c74"} Oct 09 13:47:25 crc kubenswrapper[4762]: I1009 13:47:25.324419 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Oct 09 13:47:25 crc kubenswrapper[4762]: I1009 13:47:25.324734 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="e6279636-8997-4662-b1b1-54e82a4d3921" containerName="cinder-api" containerID="cri-o://99acaa3d7661d027869c7971b4c8ec583b1a8ffc0bce46096ccd4ea00c314c74" gracePeriod=30 Oct 09 13:47:25 crc kubenswrapper[4762]: I1009 13:47:25.376675 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=4.376655474 podStartE2EDuration="4.376655474s" podCreationTimestamp="2025-10-09 13:47:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:47:25.36394772 +0000 UTC m=+1321.137738749" watchObservedRunningTime="2025-10-09 13:47:25.376655474 +0000 UTC m=+1321.150446513" Oct 09 13:47:25 crc kubenswrapper[4762]: I1009 13:47:25.540611 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-6fb867b946-xgc2n" Oct 09 13:47:25 crc kubenswrapper[4762]: I1009 13:47:25.554906 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-6fb867b946-xgc2n" Oct 09 13:47:25 crc kubenswrapper[4762]: I1009 13:47:25.921545 4762 scope.go:117] "RemoveContainer" containerID="1da79f26359aee26752c54acdbfcef87849705856e9295f190304364682367f7" Oct 09 13:47:25 crc kubenswrapper[4762]: I1009 13:47:25.986967 4762 scope.go:117] "RemoveContainer" containerID="cd8fbedf164ef5c53010e752c2272dc7ecb5c43277adf73a78aad68b50176eb2" Oct 09 13:47:26 crc kubenswrapper[4762]: I1009 13:47:26.102949 4762 scope.go:117] "RemoveContainer" containerID="3852612f40e840980c8459a216578eb640eedfc511d9744d9ea1cc491588edab" Oct 09 13:47:26 crc kubenswrapper[4762]: I1009 13:47:26.279724 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Oct 09 13:47:26 crc kubenswrapper[4762]: I1009 13:47:26.360498 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"80d06660-db7a-4142-9fd5-d8f7b8d0d6b0","Type":"ContainerStarted","Data":"10f774138f452fa387d822227469296560a775c504e41fe66ea8198260246abe"} Oct 09 13:47:26 crc kubenswrapper[4762]: I1009 13:47:26.380425 4762 generic.go:334] "Generic (PLEG): container finished" podID="e6279636-8997-4662-b1b1-54e82a4d3921" containerID="99acaa3d7661d027869c7971b4c8ec583b1a8ffc0bce46096ccd4ea00c314c74" exitCode=0 Oct 09 13:47:26 crc kubenswrapper[4762]: I1009 13:47:26.380463 4762 generic.go:334] "Generic (PLEG): container finished" podID="e6279636-8997-4662-b1b1-54e82a4d3921" containerID="73a5ff1ada42b70e17cbde9dfc52f7bf0e96fb2587832ad253c0d3bcab30b816" exitCode=143 Oct 09 13:47:26 crc kubenswrapper[4762]: I1009 13:47:26.380888 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Oct 09 13:47:26 crc kubenswrapper[4762]: I1009 13:47:26.380973 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"e6279636-8997-4662-b1b1-54e82a4d3921","Type":"ContainerDied","Data":"99acaa3d7661d027869c7971b4c8ec583b1a8ffc0bce46096ccd4ea00c314c74"} Oct 09 13:47:26 crc kubenswrapper[4762]: I1009 13:47:26.381945 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"e6279636-8997-4662-b1b1-54e82a4d3921","Type":"ContainerDied","Data":"73a5ff1ada42b70e17cbde9dfc52f7bf0e96fb2587832ad253c0d3bcab30b816"} Oct 09 13:47:26 crc kubenswrapper[4762]: I1009 13:47:26.382057 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"e6279636-8997-4662-b1b1-54e82a4d3921","Type":"ContainerDied","Data":"5a8e019273aaf5b7414589a3eb8826b9151ced8c68ac6652949e002a78ffd305"} Oct 09 13:47:26 crc kubenswrapper[4762]: I1009 13:47:26.382035 4762 scope.go:117] "RemoveContainer" containerID="99acaa3d7661d027869c7971b4c8ec583b1a8ffc0bce46096ccd4ea00c314c74" Oct 09 13:47:26 crc kubenswrapper[4762]: I1009 13:47:26.410060 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=3.849301631 podStartE2EDuration="5.410034183s" podCreationTimestamp="2025-10-09 13:47:21 +0000 UTC" firstStartedPulling="2025-10-09 13:47:22.421763631 +0000 UTC m=+1318.195554670" lastFinishedPulling="2025-10-09 13:47:23.982496173 +0000 UTC m=+1319.756287222" observedRunningTime="2025-10-09 13:47:26.396373625 +0000 UTC m=+1322.170164664" watchObservedRunningTime="2025-10-09 13:47:26.410034183 +0000 UTC m=+1322.183825222" Oct 09 13:47:26 crc kubenswrapper[4762]: I1009 13:47:26.448551 4762 scope.go:117] "RemoveContainer" containerID="73a5ff1ada42b70e17cbde9dfc52f7bf0e96fb2587832ad253c0d3bcab30b816" Oct 09 13:47:26 crc kubenswrapper[4762]: I1009 13:47:26.466487 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e6279636-8997-4662-b1b1-54e82a4d3921-config-data\") pod \"e6279636-8997-4662-b1b1-54e82a4d3921\" (UID: \"e6279636-8997-4662-b1b1-54e82a4d3921\") " Oct 09 13:47:26 crc kubenswrapper[4762]: I1009 13:47:26.466589 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e6279636-8997-4662-b1b1-54e82a4d3921-scripts\") pod \"e6279636-8997-4662-b1b1-54e82a4d3921\" (UID: 
\"e6279636-8997-4662-b1b1-54e82a4d3921\") " Oct 09 13:47:26 crc kubenswrapper[4762]: I1009 13:47:26.467121 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e6279636-8997-4662-b1b1-54e82a4d3921-config-data-custom\") pod \"e6279636-8997-4662-b1b1-54e82a4d3921\" (UID: \"e6279636-8997-4662-b1b1-54e82a4d3921\") " Oct 09 13:47:26 crc kubenswrapper[4762]: I1009 13:47:26.467246 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e6279636-8997-4662-b1b1-54e82a4d3921-logs\") pod \"e6279636-8997-4662-b1b1-54e82a4d3921\" (UID: \"e6279636-8997-4662-b1b1-54e82a4d3921\") " Oct 09 13:47:26 crc kubenswrapper[4762]: I1009 13:47:26.467327 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e6279636-8997-4662-b1b1-54e82a4d3921-etc-machine-id\") pod \"e6279636-8997-4662-b1b1-54e82a4d3921\" (UID: \"e6279636-8997-4662-b1b1-54e82a4d3921\") " Oct 09 13:47:26 crc kubenswrapper[4762]: I1009 13:47:26.467409 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e6279636-8997-4662-b1b1-54e82a4d3921-combined-ca-bundle\") pod \"e6279636-8997-4662-b1b1-54e82a4d3921\" (UID: \"e6279636-8997-4662-b1b1-54e82a4d3921\") " Oct 09 13:47:26 crc kubenswrapper[4762]: I1009 13:47:26.467527 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t6sjh\" (UniqueName: \"kubernetes.io/projected/e6279636-8997-4662-b1b1-54e82a4d3921-kube-api-access-t6sjh\") pod \"e6279636-8997-4662-b1b1-54e82a4d3921\" (UID: \"e6279636-8997-4662-b1b1-54e82a4d3921\") " Oct 09 13:47:26 crc kubenswrapper[4762]: I1009 13:47:26.467748 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e6279636-8997-4662-b1b1-54e82a4d3921-logs" (OuterVolumeSpecName: "logs") pod "e6279636-8997-4662-b1b1-54e82a4d3921" (UID: "e6279636-8997-4662-b1b1-54e82a4d3921"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 13:47:26 crc kubenswrapper[4762]: I1009 13:47:26.468047 4762 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e6279636-8997-4662-b1b1-54e82a4d3921-logs\") on node \"crc\" DevicePath \"\"" Oct 09 13:47:26 crc kubenswrapper[4762]: I1009 13:47:26.469311 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e6279636-8997-4662-b1b1-54e82a4d3921-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "e6279636-8997-4662-b1b1-54e82a4d3921" (UID: "e6279636-8997-4662-b1b1-54e82a4d3921"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 13:47:26 crc kubenswrapper[4762]: I1009 13:47:26.473561 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e6279636-8997-4662-b1b1-54e82a4d3921-kube-api-access-t6sjh" (OuterVolumeSpecName: "kube-api-access-t6sjh") pod "e6279636-8997-4662-b1b1-54e82a4d3921" (UID: "e6279636-8997-4662-b1b1-54e82a4d3921"). InnerVolumeSpecName "kube-api-access-t6sjh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:47:26 crc kubenswrapper[4762]: I1009 13:47:26.477210 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e6279636-8997-4662-b1b1-54e82a4d3921-scripts" (OuterVolumeSpecName: "scripts") pod "e6279636-8997-4662-b1b1-54e82a4d3921" (UID: "e6279636-8997-4662-b1b1-54e82a4d3921"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:47:26 crc kubenswrapper[4762]: I1009 13:47:26.479121 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e6279636-8997-4662-b1b1-54e82a4d3921-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "e6279636-8997-4662-b1b1-54e82a4d3921" (UID: "e6279636-8997-4662-b1b1-54e82a4d3921"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:47:26 crc kubenswrapper[4762]: I1009 13:47:26.479335 4762 scope.go:117] "RemoveContainer" containerID="99acaa3d7661d027869c7971b4c8ec583b1a8ffc0bce46096ccd4ea00c314c74" Oct 09 13:47:26 crc kubenswrapper[4762]: E1009 13:47:26.479936 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"99acaa3d7661d027869c7971b4c8ec583b1a8ffc0bce46096ccd4ea00c314c74\": container with ID starting with 99acaa3d7661d027869c7971b4c8ec583b1a8ffc0bce46096ccd4ea00c314c74 not found: ID does not exist" containerID="99acaa3d7661d027869c7971b4c8ec583b1a8ffc0bce46096ccd4ea00c314c74" Oct 09 13:47:26 crc kubenswrapper[4762]: I1009 13:47:26.480032 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"99acaa3d7661d027869c7971b4c8ec583b1a8ffc0bce46096ccd4ea00c314c74"} err="failed to get container status \"99acaa3d7661d027869c7971b4c8ec583b1a8ffc0bce46096ccd4ea00c314c74\": rpc error: code = NotFound desc = could not find container \"99acaa3d7661d027869c7971b4c8ec583b1a8ffc0bce46096ccd4ea00c314c74\": container with ID starting with 99acaa3d7661d027869c7971b4c8ec583b1a8ffc0bce46096ccd4ea00c314c74 not found: ID does not exist" Oct 09 13:47:26 crc kubenswrapper[4762]: I1009 13:47:26.480147 4762 scope.go:117] "RemoveContainer" containerID="73a5ff1ada42b70e17cbde9dfc52f7bf0e96fb2587832ad253c0d3bcab30b816" Oct 09 13:47:26 crc kubenswrapper[4762]: E1009 13:47:26.480657 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"73a5ff1ada42b70e17cbde9dfc52f7bf0e96fb2587832ad253c0d3bcab30b816\": container with ID starting with 73a5ff1ada42b70e17cbde9dfc52f7bf0e96fb2587832ad253c0d3bcab30b816 not found: ID does not exist" containerID="73a5ff1ada42b70e17cbde9dfc52f7bf0e96fb2587832ad253c0d3bcab30b816" Oct 09 13:47:26 crc kubenswrapper[4762]: I1009 13:47:26.480803 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"73a5ff1ada42b70e17cbde9dfc52f7bf0e96fb2587832ad253c0d3bcab30b816"} err="failed to get container status \"73a5ff1ada42b70e17cbde9dfc52f7bf0e96fb2587832ad253c0d3bcab30b816\": rpc error: code = NotFound desc = could not find container \"73a5ff1ada42b70e17cbde9dfc52f7bf0e96fb2587832ad253c0d3bcab30b816\": container with ID starting with 73a5ff1ada42b70e17cbde9dfc52f7bf0e96fb2587832ad253c0d3bcab30b816 not found: ID does not exist" Oct 09 13:47:26 crc kubenswrapper[4762]: I1009 13:47:26.480963 4762 scope.go:117] "RemoveContainer" 
containerID="99acaa3d7661d027869c7971b4c8ec583b1a8ffc0bce46096ccd4ea00c314c74" Oct 09 13:47:26 crc kubenswrapper[4762]: I1009 13:47:26.481426 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"99acaa3d7661d027869c7971b4c8ec583b1a8ffc0bce46096ccd4ea00c314c74"} err="failed to get container status \"99acaa3d7661d027869c7971b4c8ec583b1a8ffc0bce46096ccd4ea00c314c74\": rpc error: code = NotFound desc = could not find container \"99acaa3d7661d027869c7971b4c8ec583b1a8ffc0bce46096ccd4ea00c314c74\": container with ID starting with 99acaa3d7661d027869c7971b4c8ec583b1a8ffc0bce46096ccd4ea00c314c74 not found: ID does not exist" Oct 09 13:47:26 crc kubenswrapper[4762]: I1009 13:47:26.481471 4762 scope.go:117] "RemoveContainer" containerID="73a5ff1ada42b70e17cbde9dfc52f7bf0e96fb2587832ad253c0d3bcab30b816" Oct 09 13:47:26 crc kubenswrapper[4762]: I1009 13:47:26.481818 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"73a5ff1ada42b70e17cbde9dfc52f7bf0e96fb2587832ad253c0d3bcab30b816"} err="failed to get container status \"73a5ff1ada42b70e17cbde9dfc52f7bf0e96fb2587832ad253c0d3bcab30b816\": rpc error: code = NotFound desc = could not find container \"73a5ff1ada42b70e17cbde9dfc52f7bf0e96fb2587832ad253c0d3bcab30b816\": container with ID starting with 73a5ff1ada42b70e17cbde9dfc52f7bf0e96fb2587832ad253c0d3bcab30b816 not found: ID does not exist" Oct 09 13:47:26 crc kubenswrapper[4762]: I1009 13:47:26.517603 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e6279636-8997-4662-b1b1-54e82a4d3921-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e6279636-8997-4662-b1b1-54e82a4d3921" (UID: "e6279636-8997-4662-b1b1-54e82a4d3921"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:47:26 crc kubenswrapper[4762]: I1009 13:47:26.528935 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e6279636-8997-4662-b1b1-54e82a4d3921-config-data" (OuterVolumeSpecName: "config-data") pod "e6279636-8997-4662-b1b1-54e82a4d3921" (UID: "e6279636-8997-4662-b1b1-54e82a4d3921"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:47:26 crc kubenswrapper[4762]: I1009 13:47:26.571960 4762 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e6279636-8997-4662-b1b1-54e82a4d3921-config-data-custom\") on node \"crc\" DevicePath \"\"" Oct 09 13:47:26 crc kubenswrapper[4762]: I1009 13:47:26.572563 4762 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e6279636-8997-4662-b1b1-54e82a4d3921-etc-machine-id\") on node \"crc\" DevicePath \"\"" Oct 09 13:47:26 crc kubenswrapper[4762]: I1009 13:47:26.572685 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e6279636-8997-4662-b1b1-54e82a4d3921-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 13:47:26 crc kubenswrapper[4762]: I1009 13:47:26.572813 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t6sjh\" (UniqueName: \"kubernetes.io/projected/e6279636-8997-4662-b1b1-54e82a4d3921-kube-api-access-t6sjh\") on node \"crc\" DevicePath \"\"" Oct 09 13:47:26 crc kubenswrapper[4762]: I1009 13:47:26.572998 4762 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e6279636-8997-4662-b1b1-54e82a4d3921-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 13:47:26 crc kubenswrapper[4762]: I1009 13:47:26.573098 4762 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e6279636-8997-4662-b1b1-54e82a4d3921-scripts\") on node \"crc\" DevicePath \"\"" Oct 09 13:47:26 crc kubenswrapper[4762]: I1009 13:47:26.760804 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Oct 09 13:47:26 crc kubenswrapper[4762]: I1009 13:47:26.769052 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Oct 09 13:47:26 crc kubenswrapper[4762]: I1009 13:47:26.784597 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Oct 09 13:47:26 crc kubenswrapper[4762]: I1009 13:47:26.789811 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Oct 09 13:47:26 crc kubenswrapper[4762]: E1009 13:47:26.790216 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e6279636-8997-4662-b1b1-54e82a4d3921" containerName="cinder-api" Oct 09 13:47:26 crc kubenswrapper[4762]: I1009 13:47:26.790235 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="e6279636-8997-4662-b1b1-54e82a4d3921" containerName="cinder-api" Oct 09 13:47:26 crc kubenswrapper[4762]: E1009 13:47:26.790249 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1f1fc0dd-7a28-4a27-853e-714add235884" containerName="init" Oct 09 13:47:26 crc kubenswrapper[4762]: I1009 13:47:26.790256 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="1f1fc0dd-7a28-4a27-853e-714add235884" containerName="init" Oct 09 13:47:26 crc kubenswrapper[4762]: E1009 13:47:26.790291 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1f1fc0dd-7a28-4a27-853e-714add235884" containerName="dnsmasq-dns" Oct 09 13:47:26 crc kubenswrapper[4762]: I1009 13:47:26.790298 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="1f1fc0dd-7a28-4a27-853e-714add235884" containerName="dnsmasq-dns" Oct 09 13:47:26 crc kubenswrapper[4762]: E1009 13:47:26.790312 4762 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="e6279636-8997-4662-b1b1-54e82a4d3921" containerName="cinder-api-log" Oct 09 13:47:26 crc kubenswrapper[4762]: I1009 13:47:26.790317 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="e6279636-8997-4662-b1b1-54e82a4d3921" containerName="cinder-api-log" Oct 09 13:47:26 crc kubenswrapper[4762]: I1009 13:47:26.790473 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="e6279636-8997-4662-b1b1-54e82a4d3921" containerName="cinder-api-log" Oct 09 13:47:26 crc kubenswrapper[4762]: I1009 13:47:26.790488 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="1f1fc0dd-7a28-4a27-853e-714add235884" containerName="dnsmasq-dns" Oct 09 13:47:26 crc kubenswrapper[4762]: I1009 13:47:26.790502 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="e6279636-8997-4662-b1b1-54e82a4d3921" containerName="cinder-api" Oct 09 13:47:26 crc kubenswrapper[4762]: I1009 13:47:26.791851 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Oct 09 13:47:26 crc kubenswrapper[4762]: I1009 13:47:26.793769 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc" Oct 09 13:47:26 crc kubenswrapper[4762]: I1009 13:47:26.794260 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc" Oct 09 13:47:26 crc kubenswrapper[4762]: I1009 13:47:26.798056 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Oct 09 13:47:26 crc kubenswrapper[4762]: I1009 13:47:26.807526 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Oct 09 13:47:26 crc kubenswrapper[4762]: I1009 13:47:26.890797 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4\") " pod="openstack/cinder-api-0" Oct 09 13:47:26 crc kubenswrapper[4762]: I1009 13:47:26.890843 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4-config-data-custom\") pod \"cinder-api-0\" (UID: \"a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4\") " pod="openstack/cinder-api-0" Oct 09 13:47:26 crc kubenswrapper[4762]: I1009 13:47:26.890904 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4-logs\") pod \"cinder-api-0\" (UID: \"a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4\") " pod="openstack/cinder-api-0" Oct 09 13:47:26 crc kubenswrapper[4762]: I1009 13:47:26.890964 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pl47c\" (UniqueName: \"kubernetes.io/projected/a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4-kube-api-access-pl47c\") pod \"cinder-api-0\" (UID: \"a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4\") " pod="openstack/cinder-api-0" Oct 09 13:47:26 crc kubenswrapper[4762]: I1009 13:47:26.891055 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4\") " 
pod="openstack/cinder-api-0" Oct 09 13:47:26 crc kubenswrapper[4762]: I1009 13:47:26.891089 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4-scripts\") pod \"cinder-api-0\" (UID: \"a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4\") " pod="openstack/cinder-api-0" Oct 09 13:47:26 crc kubenswrapper[4762]: I1009 13:47:26.891114 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4-config-data\") pod \"cinder-api-0\" (UID: \"a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4\") " pod="openstack/cinder-api-0" Oct 09 13:47:26 crc kubenswrapper[4762]: I1009 13:47:26.891138 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4-public-tls-certs\") pod \"cinder-api-0\" (UID: \"a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4\") " pod="openstack/cinder-api-0" Oct 09 13:47:26 crc kubenswrapper[4762]: I1009 13:47:26.891220 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4-etc-machine-id\") pod \"cinder-api-0\" (UID: \"a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4\") " pod="openstack/cinder-api-0" Oct 09 13:47:26 crc kubenswrapper[4762]: I1009 13:47:26.978864 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e6279636-8997-4662-b1b1-54e82a4d3921" path="/var/lib/kubelet/pods/e6279636-8997-4662-b1b1-54e82a4d3921/volumes" Oct 09 13:47:26 crc kubenswrapper[4762]: I1009 13:47:26.992548 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4\") " pod="openstack/cinder-api-0" Oct 09 13:47:26 crc kubenswrapper[4762]: I1009 13:47:26.992593 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4-config-data-custom\") pod \"cinder-api-0\" (UID: \"a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4\") " pod="openstack/cinder-api-0" Oct 09 13:47:26 crc kubenswrapper[4762]: I1009 13:47:26.992618 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4-logs\") pod \"cinder-api-0\" (UID: \"a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4\") " pod="openstack/cinder-api-0" Oct 09 13:47:26 crc kubenswrapper[4762]: I1009 13:47:26.992673 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pl47c\" (UniqueName: \"kubernetes.io/projected/a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4-kube-api-access-pl47c\") pod \"cinder-api-0\" (UID: \"a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4\") " pod="openstack/cinder-api-0" Oct 09 13:47:26 crc kubenswrapper[4762]: I1009 13:47:26.992733 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4\") " pod="openstack/cinder-api-0" 
Oct 09 13:47:26 crc kubenswrapper[4762]: I1009 13:47:26.992754 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4-scripts\") pod \"cinder-api-0\" (UID: \"a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4\") " pod="openstack/cinder-api-0" Oct 09 13:47:26 crc kubenswrapper[4762]: I1009 13:47:26.992771 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4-config-data\") pod \"cinder-api-0\" (UID: \"a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4\") " pod="openstack/cinder-api-0" Oct 09 13:47:26 crc kubenswrapper[4762]: I1009 13:47:26.992792 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4-public-tls-certs\") pod \"cinder-api-0\" (UID: \"a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4\") " pod="openstack/cinder-api-0" Oct 09 13:47:26 crc kubenswrapper[4762]: I1009 13:47:26.992820 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4-etc-machine-id\") pod \"cinder-api-0\" (UID: \"a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4\") " pod="openstack/cinder-api-0" Oct 09 13:47:26 crc kubenswrapper[4762]: I1009 13:47:26.992965 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4-etc-machine-id\") pod \"cinder-api-0\" (UID: \"a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4\") " pod="openstack/cinder-api-0" Oct 09 13:47:26 crc kubenswrapper[4762]: I1009 13:47:26.994844 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4-logs\") pod \"cinder-api-0\" (UID: \"a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4\") " pod="openstack/cinder-api-0" Oct 09 13:47:26 crc kubenswrapper[4762]: I1009 13:47:26.998376 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4-scripts\") pod \"cinder-api-0\" (UID: \"a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4\") " pod="openstack/cinder-api-0" Oct 09 13:47:26 crc kubenswrapper[4762]: I1009 13:47:26.999135 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4\") " pod="openstack/cinder-api-0" Oct 09 13:47:27 crc kubenswrapper[4762]: I1009 13:47:27.000157 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4-config-data\") pod \"cinder-api-0\" (UID: \"a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4\") " pod="openstack/cinder-api-0" Oct 09 13:47:27 crc kubenswrapper[4762]: I1009 13:47:27.002051 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4\") " pod="openstack/cinder-api-0" Oct 09 13:47:27 crc kubenswrapper[4762]: I1009 13:47:27.002321 4762 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4-config-data-custom\") pod \"cinder-api-0\" (UID: \"a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4\") " pod="openstack/cinder-api-0" Oct 09 13:47:27 crc kubenswrapper[4762]: I1009 13:47:27.005883 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4-public-tls-certs\") pod \"cinder-api-0\" (UID: \"a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4\") " pod="openstack/cinder-api-0" Oct 09 13:47:27 crc kubenswrapper[4762]: I1009 13:47:27.010000 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pl47c\" (UniqueName: \"kubernetes.io/projected/a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4-kube-api-access-pl47c\") pod \"cinder-api-0\" (UID: \"a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4\") " pod="openstack/cinder-api-0" Oct 09 13:47:27 crc kubenswrapper[4762]: I1009 13:47:27.120990 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Oct 09 13:47:27 crc kubenswrapper[4762]: W1009 13:47:27.620465 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda0a72b4a_cdc0_43e1_83a4_6d6629ec66c4.slice/crio-b4c8cce903c4ed09773d7bbd94bdc23af95ac37c50f004640ea2036a8318c001 WatchSource:0}: Error finding container b4c8cce903c4ed09773d7bbd94bdc23af95ac37c50f004640ea2036a8318c001: Status 404 returned error can't find the container with id b4c8cce903c4ed09773d7bbd94bdc23af95ac37c50f004640ea2036a8318c001 Oct 09 13:47:27 crc kubenswrapper[4762]: I1009 13:47:27.624515 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Oct 09 13:47:28 crc kubenswrapper[4762]: I1009 13:47:28.418929 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4","Type":"ContainerStarted","Data":"5acb9f40d37976787a3176932ab61dc744bb85f63d3756a35a7aa17489a36b17"} Oct 09 13:47:28 crc kubenswrapper[4762]: I1009 13:47:28.419180 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4","Type":"ContainerStarted","Data":"b4c8cce903c4ed09773d7bbd94bdc23af95ac37c50f004640ea2036a8318c001"} Oct 09 13:47:29 crc kubenswrapper[4762]: I1009 13:47:29.079734 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-7c78979758-qtqdp" Oct 09 13:47:29 crc kubenswrapper[4762]: I1009 13:47:29.245332 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-7c78979758-qtqdp" Oct 09 13:47:29 crc kubenswrapper[4762]: I1009 13:47:29.328011 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-6fb867b946-xgc2n"] Oct 09 13:47:29 crc kubenswrapper[4762]: I1009 13:47:29.329344 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-6fb867b946-xgc2n" podUID="f4eb4eb3-a79e-4537-9401-37f948255f3b" containerName="barbican-api-log" containerID="cri-o://2ce551ccc946b9b398ad2d34a89b9eef486d73862a6b24bc44f53327a17d6c18" gracePeriod=30 Oct 09 13:47:29 crc kubenswrapper[4762]: I1009 13:47:29.329545 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-6fb867b946-xgc2n" 
podUID="f4eb4eb3-a79e-4537-9401-37f948255f3b" containerName="barbican-api" containerID="cri-o://18a97f548d51347b9813f0a0c48f816a4d3eb9df11b78df8a79f131600833436" gracePeriod=30 Oct 09 13:47:29 crc kubenswrapper[4762]: I1009 13:47:29.436289 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4","Type":"ContainerStarted","Data":"c1be68916a3f81c7b6abc9ef288db0ad9f260fbe6b28ab389555e8e399229389"} Oct 09 13:47:29 crc kubenswrapper[4762]: I1009 13:47:29.436377 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Oct 09 13:47:29 crc kubenswrapper[4762]: I1009 13:47:29.485896 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=3.4858716149999998 podStartE2EDuration="3.485871615s" podCreationTimestamp="2025-10-09 13:47:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:47:29.473499248 +0000 UTC m=+1325.247290307" watchObservedRunningTime="2025-10-09 13:47:29.485871615 +0000 UTC m=+1325.259662654" Oct 09 13:47:30 crc kubenswrapper[4762]: I1009 13:47:30.442382 4762 generic.go:334] "Generic (PLEG): container finished" podID="f4eb4eb3-a79e-4537-9401-37f948255f3b" containerID="2ce551ccc946b9b398ad2d34a89b9eef486d73862a6b24bc44f53327a17d6c18" exitCode=143 Oct 09 13:47:30 crc kubenswrapper[4762]: I1009 13:47:30.443067 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6fb867b946-xgc2n" event={"ID":"f4eb4eb3-a79e-4537-9401-37f948255f3b","Type":"ContainerDied","Data":"2ce551ccc946b9b398ad2d34a89b9eef486d73862a6b24bc44f53327a17d6c18"} Oct 09 13:47:30 crc kubenswrapper[4762]: I1009 13:47:30.535156 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-98dbd4bdf-stq5g" Oct 09 13:47:31 crc kubenswrapper[4762]: I1009 13:47:31.911821 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6bb4fc677f-xggsj" Oct 09 13:47:31 crc kubenswrapper[4762]: I1009 13:47:31.975807 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57c957c4ff-xhvzb"] Oct 09 13:47:31 crc kubenswrapper[4762]: I1009 13:47:31.976081 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-57c957c4ff-xhvzb" podUID="3d330351-1e4e-497c-a765-028b31e5b570" containerName="dnsmasq-dns" containerID="cri-o://47711bb0d3cd74fc5f368a4255a6bbe6f5cd820a881368bd09d8039fe16b962a" gracePeriod=10 Oct 09 13:47:32 crc kubenswrapper[4762]: I1009 13:47:32.115123 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Oct 09 13:47:32 crc kubenswrapper[4762]: I1009 13:47:32.155783 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 09 13:47:32 crc kubenswrapper[4762]: I1009 13:47:32.464052 4762 generic.go:334] "Generic (PLEG): container finished" podID="3d330351-1e4e-497c-a765-028b31e5b570" containerID="47711bb0d3cd74fc5f368a4255a6bbe6f5cd820a881368bd09d8039fe16b962a" exitCode=0 Oct 09 13:47:32 crc kubenswrapper[4762]: I1009 13:47:32.464248 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57c957c4ff-xhvzb" event={"ID":"3d330351-1e4e-497c-a765-028b31e5b570","Type":"ContainerDied","Data":"47711bb0d3cd74fc5f368a4255a6bbe6f5cd820a881368bd09d8039fe16b962a"} Oct 09 
13:47:32 crc kubenswrapper[4762]: I1009 13:47:32.464426 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57c957c4ff-xhvzb" event={"ID":"3d330351-1e4e-497c-a765-028b31e5b570","Type":"ContainerDied","Data":"c6df809bfd1275712425b2b0f21773b14ea0105dcb0947818b9c91c2f4ddcd16"} Oct 09 13:47:32 crc kubenswrapper[4762]: I1009 13:47:32.464462 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c6df809bfd1275712425b2b0f21773b14ea0105dcb0947818b9c91c2f4ddcd16" Oct 09 13:47:32 crc kubenswrapper[4762]: I1009 13:47:32.464616 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="80d06660-db7a-4142-9fd5-d8f7b8d0d6b0" containerName="cinder-scheduler" containerID="cri-o://7ac811229727a3c8f0eb0f82aa12c351749d73f731bf2226728c9b4c61babe20" gracePeriod=30 Oct 09 13:47:32 crc kubenswrapper[4762]: I1009 13:47:32.464693 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="80d06660-db7a-4142-9fd5-d8f7b8d0d6b0" containerName="probe" containerID="cri-o://10f774138f452fa387d822227469296560a775c504e41fe66ea8198260246abe" gracePeriod=30 Oct 09 13:47:32 crc kubenswrapper[4762]: I1009 13:47:32.517590 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57c957c4ff-xhvzb" Oct 09 13:47:32 crc kubenswrapper[4762]: I1009 13:47:32.709507 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3d330351-1e4e-497c-a765-028b31e5b570-config\") pod \"3d330351-1e4e-497c-a765-028b31e5b570\" (UID: \"3d330351-1e4e-497c-a765-028b31e5b570\") " Oct 09 13:47:32 crc kubenswrapper[4762]: I1009 13:47:32.709697 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3d330351-1e4e-497c-a765-028b31e5b570-ovsdbserver-nb\") pod \"3d330351-1e4e-497c-a765-028b31e5b570\" (UID: \"3d330351-1e4e-497c-a765-028b31e5b570\") " Oct 09 13:47:32 crc kubenswrapper[4762]: I1009 13:47:32.709736 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j46sn\" (UniqueName: \"kubernetes.io/projected/3d330351-1e4e-497c-a765-028b31e5b570-kube-api-access-j46sn\") pod \"3d330351-1e4e-497c-a765-028b31e5b570\" (UID: \"3d330351-1e4e-497c-a765-028b31e5b570\") " Oct 09 13:47:32 crc kubenswrapper[4762]: I1009 13:47:32.709803 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3d330351-1e4e-497c-a765-028b31e5b570-ovsdbserver-sb\") pod \"3d330351-1e4e-497c-a765-028b31e5b570\" (UID: \"3d330351-1e4e-497c-a765-028b31e5b570\") " Oct 09 13:47:32 crc kubenswrapper[4762]: I1009 13:47:32.709868 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3d330351-1e4e-497c-a765-028b31e5b570-dns-swift-storage-0\") pod \"3d330351-1e4e-497c-a765-028b31e5b570\" (UID: \"3d330351-1e4e-497c-a765-028b31e5b570\") " Oct 09 13:47:32 crc kubenswrapper[4762]: I1009 13:47:32.709956 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3d330351-1e4e-497c-a765-028b31e5b570-dns-svc\") pod \"3d330351-1e4e-497c-a765-028b31e5b570\" (UID: \"3d330351-1e4e-497c-a765-028b31e5b570\") " Oct 09 13:47:32 crc 
kubenswrapper[4762]: I1009 13:47:32.717275 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3d330351-1e4e-497c-a765-028b31e5b570-kube-api-access-j46sn" (OuterVolumeSpecName: "kube-api-access-j46sn") pod "3d330351-1e4e-497c-a765-028b31e5b570" (UID: "3d330351-1e4e-497c-a765-028b31e5b570"). InnerVolumeSpecName "kube-api-access-j46sn". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:47:32 crc kubenswrapper[4762]: I1009 13:47:32.730430 4762 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-6fb867b946-xgc2n" podUID="f4eb4eb3-a79e-4537-9401-37f948255f3b" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.155:9311/healthcheck\": read tcp 10.217.0.2:47218->10.217.0.155:9311: read: connection reset by peer" Oct 09 13:47:32 crc kubenswrapper[4762]: I1009 13:47:32.730430 4762 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-6fb867b946-xgc2n" podUID="f4eb4eb3-a79e-4537-9401-37f948255f3b" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.155:9311/healthcheck\": read tcp 10.217.0.2:47202->10.217.0.155:9311: read: connection reset by peer" Oct 09 13:47:32 crc kubenswrapper[4762]: I1009 13:47:32.783221 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3d330351-1e4e-497c-a765-028b31e5b570-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "3d330351-1e4e-497c-a765-028b31e5b570" (UID: "3d330351-1e4e-497c-a765-028b31e5b570"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:47:32 crc kubenswrapper[4762]: I1009 13:47:32.792454 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3d330351-1e4e-497c-a765-028b31e5b570-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "3d330351-1e4e-497c-a765-028b31e5b570" (UID: "3d330351-1e4e-497c-a765-028b31e5b570"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:47:32 crc kubenswrapper[4762]: I1009 13:47:32.795602 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3d330351-1e4e-497c-a765-028b31e5b570-config" (OuterVolumeSpecName: "config") pod "3d330351-1e4e-497c-a765-028b31e5b570" (UID: "3d330351-1e4e-497c-a765-028b31e5b570"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:47:32 crc kubenswrapper[4762]: I1009 13:47:32.812829 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j46sn\" (UniqueName: \"kubernetes.io/projected/3d330351-1e4e-497c-a765-028b31e5b570-kube-api-access-j46sn\") on node \"crc\" DevicePath \"\"" Oct 09 13:47:32 crc kubenswrapper[4762]: I1009 13:47:32.812863 4762 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3d330351-1e4e-497c-a765-028b31e5b570-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 09 13:47:32 crc kubenswrapper[4762]: I1009 13:47:32.812875 4762 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3d330351-1e4e-497c-a765-028b31e5b570-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 09 13:47:32 crc kubenswrapper[4762]: I1009 13:47:32.812888 4762 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3d330351-1e4e-497c-a765-028b31e5b570-config\") on node \"crc\" DevicePath \"\"" Oct 09 13:47:32 crc kubenswrapper[4762]: I1009 13:47:32.813835 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3d330351-1e4e-497c-a765-028b31e5b570-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "3d330351-1e4e-497c-a765-028b31e5b570" (UID: "3d330351-1e4e-497c-a765-028b31e5b570"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:47:32 crc kubenswrapper[4762]: I1009 13:47:32.816908 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3d330351-1e4e-497c-a765-028b31e5b570-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "3d330351-1e4e-497c-a765-028b31e5b570" (UID: "3d330351-1e4e-497c-a765-028b31e5b570"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:47:32 crc kubenswrapper[4762]: I1009 13:47:32.914430 4762 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3d330351-1e4e-497c-a765-028b31e5b570-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 09 13:47:32 crc kubenswrapper[4762]: I1009 13:47:32.914465 4762 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3d330351-1e4e-497c-a765-028b31e5b570-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 09 13:47:33 crc kubenswrapper[4762]: I1009 13:47:33.140836 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-6fb867b946-xgc2n" Oct 09 13:47:33 crc kubenswrapper[4762]: I1009 13:47:33.320868 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f4eb4eb3-a79e-4537-9401-37f948255f3b-config-data-custom\") pod \"f4eb4eb3-a79e-4537-9401-37f948255f3b\" (UID: \"f4eb4eb3-a79e-4537-9401-37f948255f3b\") " Oct 09 13:47:33 crc kubenswrapper[4762]: I1009 13:47:33.320925 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f4eb4eb3-a79e-4537-9401-37f948255f3b-config-data\") pod \"f4eb4eb3-a79e-4537-9401-37f948255f3b\" (UID: \"f4eb4eb3-a79e-4537-9401-37f948255f3b\") " Oct 09 13:47:33 crc kubenswrapper[4762]: I1009 13:47:33.320950 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f4eb4eb3-a79e-4537-9401-37f948255f3b-logs\") pod \"f4eb4eb3-a79e-4537-9401-37f948255f3b\" (UID: \"f4eb4eb3-a79e-4537-9401-37f948255f3b\") " Oct 09 13:47:33 crc kubenswrapper[4762]: I1009 13:47:33.320976 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mwz29\" (UniqueName: \"kubernetes.io/projected/f4eb4eb3-a79e-4537-9401-37f948255f3b-kube-api-access-mwz29\") pod \"f4eb4eb3-a79e-4537-9401-37f948255f3b\" (UID: \"f4eb4eb3-a79e-4537-9401-37f948255f3b\") " Oct 09 13:47:33 crc kubenswrapper[4762]: I1009 13:47:33.321004 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4eb4eb3-a79e-4537-9401-37f948255f3b-combined-ca-bundle\") pod \"f4eb4eb3-a79e-4537-9401-37f948255f3b\" (UID: \"f4eb4eb3-a79e-4537-9401-37f948255f3b\") " Oct 09 13:47:33 crc kubenswrapper[4762]: I1009 13:47:33.321893 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f4eb4eb3-a79e-4537-9401-37f948255f3b-logs" (OuterVolumeSpecName: "logs") pod "f4eb4eb3-a79e-4537-9401-37f948255f3b" (UID: "f4eb4eb3-a79e-4537-9401-37f948255f3b"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 13:47:33 crc kubenswrapper[4762]: I1009 13:47:33.326942 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f4eb4eb3-a79e-4537-9401-37f948255f3b-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "f4eb4eb3-a79e-4537-9401-37f948255f3b" (UID: "f4eb4eb3-a79e-4537-9401-37f948255f3b"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:47:33 crc kubenswrapper[4762]: I1009 13:47:33.328508 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f4eb4eb3-a79e-4537-9401-37f948255f3b-kube-api-access-mwz29" (OuterVolumeSpecName: "kube-api-access-mwz29") pod "f4eb4eb3-a79e-4537-9401-37f948255f3b" (UID: "f4eb4eb3-a79e-4537-9401-37f948255f3b"). InnerVolumeSpecName "kube-api-access-mwz29". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:47:33 crc kubenswrapper[4762]: I1009 13:47:33.351355 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f4eb4eb3-a79e-4537-9401-37f948255f3b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f4eb4eb3-a79e-4537-9401-37f948255f3b" (UID: "f4eb4eb3-a79e-4537-9401-37f948255f3b"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:47:33 crc kubenswrapper[4762]: I1009 13:47:33.374918 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f4eb4eb3-a79e-4537-9401-37f948255f3b-config-data" (OuterVolumeSpecName: "config-data") pod "f4eb4eb3-a79e-4537-9401-37f948255f3b" (UID: "f4eb4eb3-a79e-4537-9401-37f948255f3b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:47:33 crc kubenswrapper[4762]: I1009 13:47:33.422298 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4eb4eb3-a79e-4537-9401-37f948255f3b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 13:47:33 crc kubenswrapper[4762]: I1009 13:47:33.422342 4762 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f4eb4eb3-a79e-4537-9401-37f948255f3b-config-data-custom\") on node \"crc\" DevicePath \"\"" Oct 09 13:47:33 crc kubenswrapper[4762]: I1009 13:47:33.422357 4762 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f4eb4eb3-a79e-4537-9401-37f948255f3b-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 13:47:33 crc kubenswrapper[4762]: I1009 13:47:33.422368 4762 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f4eb4eb3-a79e-4537-9401-37f948255f3b-logs\") on node \"crc\" DevicePath \"\"" Oct 09 13:47:33 crc kubenswrapper[4762]: I1009 13:47:33.422380 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mwz29\" (UniqueName: \"kubernetes.io/projected/f4eb4eb3-a79e-4537-9401-37f948255f3b-kube-api-access-mwz29\") on node \"crc\" DevicePath \"\"" Oct 09 13:47:33 crc kubenswrapper[4762]: I1009 13:47:33.489664 4762 generic.go:334] "Generic (PLEG): container finished" podID="80d06660-db7a-4142-9fd5-d8f7b8d0d6b0" containerID="10f774138f452fa387d822227469296560a775c504e41fe66ea8198260246abe" exitCode=0 Oct 09 13:47:33 crc kubenswrapper[4762]: I1009 13:47:33.489769 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"80d06660-db7a-4142-9fd5-d8f7b8d0d6b0","Type":"ContainerDied","Data":"10f774138f452fa387d822227469296560a775c504e41fe66ea8198260246abe"} Oct 09 13:47:33 crc kubenswrapper[4762]: I1009 13:47:33.496029 4762 generic.go:334] "Generic (PLEG): container finished" podID="f4eb4eb3-a79e-4537-9401-37f948255f3b" containerID="18a97f548d51347b9813f0a0c48f816a4d3eb9df11b78df8a79f131600833436" exitCode=0 Oct 09 13:47:33 crc kubenswrapper[4762]: I1009 13:47:33.496145 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57c957c4ff-xhvzb" Oct 09 13:47:33 crc kubenswrapper[4762]: I1009 13:47:33.496676 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6fb867b946-xgc2n" event={"ID":"f4eb4eb3-a79e-4537-9401-37f948255f3b","Type":"ContainerDied","Data":"18a97f548d51347b9813f0a0c48f816a4d3eb9df11b78df8a79f131600833436"} Oct 09 13:47:33 crc kubenswrapper[4762]: I1009 13:47:33.496736 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-6fb867b946-xgc2n" Oct 09 13:47:33 crc kubenswrapper[4762]: I1009 13:47:33.496755 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6fb867b946-xgc2n" event={"ID":"f4eb4eb3-a79e-4537-9401-37f948255f3b","Type":"ContainerDied","Data":"12985e866a9257b9c4f35126eb1bc5d81fc3d4e06d004290e0686df1519b296e"} Oct 09 13:47:33 crc kubenswrapper[4762]: I1009 13:47:33.496779 4762 scope.go:117] "RemoveContainer" containerID="18a97f548d51347b9813f0a0c48f816a4d3eb9df11b78df8a79f131600833436" Oct 09 13:47:33 crc kubenswrapper[4762]: I1009 13:47:33.522847 4762 scope.go:117] "RemoveContainer" containerID="2ce551ccc946b9b398ad2d34a89b9eef486d73862a6b24bc44f53327a17d6c18" Oct 09 13:47:33 crc kubenswrapper[4762]: I1009 13:47:33.527929 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57c957c4ff-xhvzb"] Oct 09 13:47:33 crc kubenswrapper[4762]: I1009 13:47:33.537277 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-57c957c4ff-xhvzb"] Oct 09 13:47:33 crc kubenswrapper[4762]: I1009 13:47:33.548197 4762 scope.go:117] "RemoveContainer" containerID="18a97f548d51347b9813f0a0c48f816a4d3eb9df11b78df8a79f131600833436" Oct 09 13:47:33 crc kubenswrapper[4762]: E1009 13:47:33.549396 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"18a97f548d51347b9813f0a0c48f816a4d3eb9df11b78df8a79f131600833436\": container with ID starting with 18a97f548d51347b9813f0a0c48f816a4d3eb9df11b78df8a79f131600833436 not found: ID does not exist" containerID="18a97f548d51347b9813f0a0c48f816a4d3eb9df11b78df8a79f131600833436" Oct 09 13:47:33 crc kubenswrapper[4762]: I1009 13:47:33.549467 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"18a97f548d51347b9813f0a0c48f816a4d3eb9df11b78df8a79f131600833436"} err="failed to get container status \"18a97f548d51347b9813f0a0c48f816a4d3eb9df11b78df8a79f131600833436\": rpc error: code = NotFound desc = could not find container \"18a97f548d51347b9813f0a0c48f816a4d3eb9df11b78df8a79f131600833436\": container with ID starting with 18a97f548d51347b9813f0a0c48f816a4d3eb9df11b78df8a79f131600833436 not found: ID does not exist" Oct 09 13:47:33 crc kubenswrapper[4762]: I1009 13:47:33.549502 4762 scope.go:117] "RemoveContainer" containerID="2ce551ccc946b9b398ad2d34a89b9eef486d73862a6b24bc44f53327a17d6c18" Oct 09 13:47:33 crc kubenswrapper[4762]: E1009 13:47:33.550045 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2ce551ccc946b9b398ad2d34a89b9eef486d73862a6b24bc44f53327a17d6c18\": container with ID starting with 2ce551ccc946b9b398ad2d34a89b9eef486d73862a6b24bc44f53327a17d6c18 not found: ID does not exist" containerID="2ce551ccc946b9b398ad2d34a89b9eef486d73862a6b24bc44f53327a17d6c18" Oct 09 13:47:33 crc kubenswrapper[4762]: I1009 13:47:33.550095 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2ce551ccc946b9b398ad2d34a89b9eef486d73862a6b24bc44f53327a17d6c18"} err="failed to get container status \"2ce551ccc946b9b398ad2d34a89b9eef486d73862a6b24bc44f53327a17d6c18\": rpc error: code = NotFound desc = could not find container \"2ce551ccc946b9b398ad2d34a89b9eef486d73862a6b24bc44f53327a17d6c18\": container with ID starting with 2ce551ccc946b9b398ad2d34a89b9eef486d73862a6b24bc44f53327a17d6c18 not found: ID does not exist" Oct 09 
13:47:33 crc kubenswrapper[4762]: I1009 13:47:33.556274 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-6fb867b946-xgc2n"] Oct 09 13:47:33 crc kubenswrapper[4762]: I1009 13:47:33.563219 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-6fb867b946-xgc2n"] Oct 09 13:47:33 crc kubenswrapper[4762]: I1009 13:47:33.580187 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Oct 09 13:47:33 crc kubenswrapper[4762]: E1009 13:47:33.580680 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d330351-1e4e-497c-a765-028b31e5b570" containerName="init" Oct 09 13:47:33 crc kubenswrapper[4762]: I1009 13:47:33.580706 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d330351-1e4e-497c-a765-028b31e5b570" containerName="init" Oct 09 13:47:33 crc kubenswrapper[4762]: E1009 13:47:33.580754 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4eb4eb3-a79e-4537-9401-37f948255f3b" containerName="barbican-api" Oct 09 13:47:33 crc kubenswrapper[4762]: I1009 13:47:33.580764 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4eb4eb3-a79e-4537-9401-37f948255f3b" containerName="barbican-api" Oct 09 13:47:33 crc kubenswrapper[4762]: E1009 13:47:33.580794 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d330351-1e4e-497c-a765-028b31e5b570" containerName="dnsmasq-dns" Oct 09 13:47:33 crc kubenswrapper[4762]: I1009 13:47:33.580802 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d330351-1e4e-497c-a765-028b31e5b570" containerName="dnsmasq-dns" Oct 09 13:47:33 crc kubenswrapper[4762]: E1009 13:47:33.580815 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4eb4eb3-a79e-4537-9401-37f948255f3b" containerName="barbican-api-log" Oct 09 13:47:33 crc kubenswrapper[4762]: I1009 13:47:33.580823 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4eb4eb3-a79e-4537-9401-37f948255f3b" containerName="barbican-api-log" Oct 09 13:47:33 crc kubenswrapper[4762]: I1009 13:47:33.581048 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="3d330351-1e4e-497c-a765-028b31e5b570" containerName="dnsmasq-dns" Oct 09 13:47:33 crc kubenswrapper[4762]: I1009 13:47:33.581070 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4eb4eb3-a79e-4537-9401-37f948255f3b" containerName="barbican-api-log" Oct 09 13:47:33 crc kubenswrapper[4762]: I1009 13:47:33.581081 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4eb4eb3-a79e-4537-9401-37f948255f3b" containerName="barbican-api" Oct 09 13:47:33 crc kubenswrapper[4762]: I1009 13:47:33.581795 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Oct 09 13:47:33 crc kubenswrapper[4762]: I1009 13:47:33.589893 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-64hmz" Oct 09 13:47:33 crc kubenswrapper[4762]: I1009 13:47:33.590360 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Oct 09 13:47:33 crc kubenswrapper[4762]: I1009 13:47:33.592171 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Oct 09 13:47:33 crc kubenswrapper[4762]: I1009 13:47:33.605155 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Oct 09 13:47:33 crc kubenswrapper[4762]: I1009 13:47:33.728091 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9j68j\" (UniqueName: \"kubernetes.io/projected/1862d6d3-5d91-47cf-8b78-c0298569ee90-kube-api-access-9j68j\") pod \"openstackclient\" (UID: \"1862d6d3-5d91-47cf-8b78-c0298569ee90\") " pod="openstack/openstackclient" Oct 09 13:47:33 crc kubenswrapper[4762]: I1009 13:47:33.728264 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1862d6d3-5d91-47cf-8b78-c0298569ee90-combined-ca-bundle\") pod \"openstackclient\" (UID: \"1862d6d3-5d91-47cf-8b78-c0298569ee90\") " pod="openstack/openstackclient" Oct 09 13:47:33 crc kubenswrapper[4762]: I1009 13:47:33.728566 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/1862d6d3-5d91-47cf-8b78-c0298569ee90-openstack-config-secret\") pod \"openstackclient\" (UID: \"1862d6d3-5d91-47cf-8b78-c0298569ee90\") " pod="openstack/openstackclient" Oct 09 13:47:33 crc kubenswrapper[4762]: I1009 13:47:33.728622 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/1862d6d3-5d91-47cf-8b78-c0298569ee90-openstack-config\") pod \"openstackclient\" (UID: \"1862d6d3-5d91-47cf-8b78-c0298569ee90\") " pod="openstack/openstackclient" Oct 09 13:47:33 crc kubenswrapper[4762]: I1009 13:47:33.830465 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/1862d6d3-5d91-47cf-8b78-c0298569ee90-openstack-config-secret\") pod \"openstackclient\" (UID: \"1862d6d3-5d91-47cf-8b78-c0298569ee90\") " pod="openstack/openstackclient" Oct 09 13:47:33 crc kubenswrapper[4762]: I1009 13:47:33.830571 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/1862d6d3-5d91-47cf-8b78-c0298569ee90-openstack-config\") pod \"openstackclient\" (UID: \"1862d6d3-5d91-47cf-8b78-c0298569ee90\") " pod="openstack/openstackclient" Oct 09 13:47:33 crc kubenswrapper[4762]: I1009 13:47:33.830618 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9j68j\" (UniqueName: \"kubernetes.io/projected/1862d6d3-5d91-47cf-8b78-c0298569ee90-kube-api-access-9j68j\") pod \"openstackclient\" (UID: \"1862d6d3-5d91-47cf-8b78-c0298569ee90\") " pod="openstack/openstackclient" Oct 09 13:47:33 crc kubenswrapper[4762]: I1009 13:47:33.830718 4762 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1862d6d3-5d91-47cf-8b78-c0298569ee90-combined-ca-bundle\") pod \"openstackclient\" (UID: \"1862d6d3-5d91-47cf-8b78-c0298569ee90\") " pod="openstack/openstackclient" Oct 09 13:47:33 crc kubenswrapper[4762]: I1009 13:47:33.831954 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/1862d6d3-5d91-47cf-8b78-c0298569ee90-openstack-config\") pod \"openstackclient\" (UID: \"1862d6d3-5d91-47cf-8b78-c0298569ee90\") " pod="openstack/openstackclient" Oct 09 13:47:33 crc kubenswrapper[4762]: I1009 13:47:33.835916 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/1862d6d3-5d91-47cf-8b78-c0298569ee90-openstack-config-secret\") pod \"openstackclient\" (UID: \"1862d6d3-5d91-47cf-8b78-c0298569ee90\") " pod="openstack/openstackclient" Oct 09 13:47:33 crc kubenswrapper[4762]: I1009 13:47:33.836141 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1862d6d3-5d91-47cf-8b78-c0298569ee90-combined-ca-bundle\") pod \"openstackclient\" (UID: \"1862d6d3-5d91-47cf-8b78-c0298569ee90\") " pod="openstack/openstackclient" Oct 09 13:47:33 crc kubenswrapper[4762]: I1009 13:47:33.860745 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9j68j\" (UniqueName: \"kubernetes.io/projected/1862d6d3-5d91-47cf-8b78-c0298569ee90-kube-api-access-9j68j\") pod \"openstackclient\" (UID: \"1862d6d3-5d91-47cf-8b78-c0298569ee90\") " pod="openstack/openstackclient" Oct 09 13:47:33 crc kubenswrapper[4762]: I1009 13:47:33.909178 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Oct 09 13:47:34 crc kubenswrapper[4762]: I1009 13:47:34.378897 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Oct 09 13:47:34 crc kubenswrapper[4762]: I1009 13:47:34.508013 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"1862d6d3-5d91-47cf-8b78-c0298569ee90","Type":"ContainerStarted","Data":"33abc55d08282f25d3be056f8c399320b144196ad5d77042ce24c81da4ccdc1f"} Oct 09 13:47:34 crc kubenswrapper[4762]: I1009 13:47:34.986516 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3d330351-1e4e-497c-a765-028b31e5b570" path="/var/lib/kubelet/pods/3d330351-1e4e-497c-a765-028b31e5b570/volumes" Oct 09 13:47:34 crc kubenswrapper[4762]: I1009 13:47:34.991068 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4eb4eb3-a79e-4537-9401-37f948255f3b" path="/var/lib/kubelet/pods/f4eb4eb3-a79e-4537-9401-37f948255f3b/volumes" Oct 09 13:47:36 crc kubenswrapper[4762]: I1009 13:47:36.533914 4762 generic.go:334] "Generic (PLEG): container finished" podID="80d06660-db7a-4142-9fd5-d8f7b8d0d6b0" containerID="7ac811229727a3c8f0eb0f82aa12c351749d73f731bf2226728c9b4c61babe20" exitCode=0 Oct 09 13:47:36 crc kubenswrapper[4762]: I1009 13:47:36.534008 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"80d06660-db7a-4142-9fd5-d8f7b8d0d6b0","Type":"ContainerDied","Data":"7ac811229727a3c8f0eb0f82aa12c351749d73f731bf2226728c9b4c61babe20"} Oct 09 13:47:36 crc kubenswrapper[4762]: I1009 13:47:36.923236 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 09 13:47:36 crc kubenswrapper[4762]: I1009 13:47:36.947890 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-proxy-6d457f6487-fjznb"] Oct 09 13:47:36 crc kubenswrapper[4762]: E1009 13:47:36.948252 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="80d06660-db7a-4142-9fd5-d8f7b8d0d6b0" containerName="probe" Oct 09 13:47:36 crc kubenswrapper[4762]: I1009 13:47:36.948272 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="80d06660-db7a-4142-9fd5-d8f7b8d0d6b0" containerName="probe" Oct 09 13:47:36 crc kubenswrapper[4762]: E1009 13:47:36.948284 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="80d06660-db7a-4142-9fd5-d8f7b8d0d6b0" containerName="cinder-scheduler" Oct 09 13:47:36 crc kubenswrapper[4762]: I1009 13:47:36.948292 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="80d06660-db7a-4142-9fd5-d8f7b8d0d6b0" containerName="cinder-scheduler" Oct 09 13:47:36 crc kubenswrapper[4762]: I1009 13:47:36.948461 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="80d06660-db7a-4142-9fd5-d8f7b8d0d6b0" containerName="cinder-scheduler" Oct 09 13:47:36 crc kubenswrapper[4762]: I1009 13:47:36.948481 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="80d06660-db7a-4142-9fd5-d8f7b8d0d6b0" containerName="probe" Oct 09 13:47:36 crc kubenswrapper[4762]: I1009 13:47:36.949348 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-6d457f6487-fjznb" Oct 09 13:47:36 crc kubenswrapper[4762]: I1009 13:47:36.953719 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-internal-svc" Oct 09 13:47:36 crc kubenswrapper[4762]: I1009 13:47:36.954011 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-public-svc" Oct 09 13:47:36 crc kubenswrapper[4762]: I1009 13:47:36.954162 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Oct 09 13:47:36 crc kubenswrapper[4762]: I1009 13:47:36.995547 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5b55cc23-05bb-4df9-9876-d725071b9838-public-tls-certs\") pod \"swift-proxy-6d457f6487-fjznb\" (UID: \"5b55cc23-05bb-4df9-9876-d725071b9838\") " pod="openstack/swift-proxy-6d457f6487-fjznb" Oct 09 13:47:36 crc kubenswrapper[4762]: I1009 13:47:36.995644 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/5b55cc23-05bb-4df9-9876-d725071b9838-etc-swift\") pod \"swift-proxy-6d457f6487-fjznb\" (UID: \"5b55cc23-05bb-4df9-9876-d725071b9838\") " pod="openstack/swift-proxy-6d457f6487-fjznb" Oct 09 13:47:36 crc kubenswrapper[4762]: I1009 13:47:36.995674 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5b55cc23-05bb-4df9-9876-d725071b9838-log-httpd\") pod \"swift-proxy-6d457f6487-fjznb\" (UID: \"5b55cc23-05bb-4df9-9876-d725071b9838\") " pod="openstack/swift-proxy-6d457f6487-fjznb" Oct 09 13:47:36 crc kubenswrapper[4762]: I1009 13:47:36.995698 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ltzlw\" (UniqueName: 
\"kubernetes.io/projected/5b55cc23-05bb-4df9-9876-d725071b9838-kube-api-access-ltzlw\") pod \"swift-proxy-6d457f6487-fjznb\" (UID: \"5b55cc23-05bb-4df9-9876-d725071b9838\") " pod="openstack/swift-proxy-6d457f6487-fjznb" Oct 09 13:47:36 crc kubenswrapper[4762]: I1009 13:47:36.995769 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5b55cc23-05bb-4df9-9876-d725071b9838-internal-tls-certs\") pod \"swift-proxy-6d457f6487-fjznb\" (UID: \"5b55cc23-05bb-4df9-9876-d725071b9838\") " pod="openstack/swift-proxy-6d457f6487-fjznb" Oct 09 13:47:36 crc kubenswrapper[4762]: I1009 13:47:36.995847 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5b55cc23-05bb-4df9-9876-d725071b9838-config-data\") pod \"swift-proxy-6d457f6487-fjznb\" (UID: \"5b55cc23-05bb-4df9-9876-d725071b9838\") " pod="openstack/swift-proxy-6d457f6487-fjznb" Oct 09 13:47:36 crc kubenswrapper[4762]: I1009 13:47:36.995868 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5b55cc23-05bb-4df9-9876-d725071b9838-run-httpd\") pod \"swift-proxy-6d457f6487-fjznb\" (UID: \"5b55cc23-05bb-4df9-9876-d725071b9838\") " pod="openstack/swift-proxy-6d457f6487-fjznb" Oct 09 13:47:36 crc kubenswrapper[4762]: I1009 13:47:36.996010 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5b55cc23-05bb-4df9-9876-d725071b9838-combined-ca-bundle\") pod \"swift-proxy-6d457f6487-fjznb\" (UID: \"5b55cc23-05bb-4df9-9876-d725071b9838\") " pod="openstack/swift-proxy-6d457f6487-fjznb" Oct 09 13:47:37 crc kubenswrapper[4762]: I1009 13:47:37.003040 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-6d457f6487-fjznb"] Oct 09 13:47:37 crc kubenswrapper[4762]: I1009 13:47:37.098860 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/80d06660-db7a-4142-9fd5-d8f7b8d0d6b0-config-data\") pod \"80d06660-db7a-4142-9fd5-d8f7b8d0d6b0\" (UID: \"80d06660-db7a-4142-9fd5-d8f7b8d0d6b0\") " Oct 09 13:47:37 crc kubenswrapper[4762]: I1009 13:47:37.099195 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/80d06660-db7a-4142-9fd5-d8f7b8d0d6b0-etc-machine-id\") pod \"80d06660-db7a-4142-9fd5-d8f7b8d0d6b0\" (UID: \"80d06660-db7a-4142-9fd5-d8f7b8d0d6b0\") " Oct 09 13:47:37 crc kubenswrapper[4762]: I1009 13:47:37.099307 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/80d06660-db7a-4142-9fd5-d8f7b8d0d6b0-combined-ca-bundle\") pod \"80d06660-db7a-4142-9fd5-d8f7b8d0d6b0\" (UID: \"80d06660-db7a-4142-9fd5-d8f7b8d0d6b0\") " Oct 09 13:47:37 crc kubenswrapper[4762]: I1009 13:47:37.099791 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9jm6s\" (UniqueName: \"kubernetes.io/projected/80d06660-db7a-4142-9fd5-d8f7b8d0d6b0-kube-api-access-9jm6s\") pod \"80d06660-db7a-4142-9fd5-d8f7b8d0d6b0\" (UID: \"80d06660-db7a-4142-9fd5-d8f7b8d0d6b0\") " Oct 09 13:47:37 crc kubenswrapper[4762]: I1009 13:47:37.099931 4762 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/80d06660-db7a-4142-9fd5-d8f7b8d0d6b0-scripts\") pod \"80d06660-db7a-4142-9fd5-d8f7b8d0d6b0\" (UID: \"80d06660-db7a-4142-9fd5-d8f7b8d0d6b0\") " Oct 09 13:47:37 crc kubenswrapper[4762]: I1009 13:47:37.100043 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/80d06660-db7a-4142-9fd5-d8f7b8d0d6b0-config-data-custom\") pod \"80d06660-db7a-4142-9fd5-d8f7b8d0d6b0\" (UID: \"80d06660-db7a-4142-9fd5-d8f7b8d0d6b0\") " Oct 09 13:47:37 crc kubenswrapper[4762]: I1009 13:47:37.099658 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/80d06660-db7a-4142-9fd5-d8f7b8d0d6b0-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "80d06660-db7a-4142-9fd5-d8f7b8d0d6b0" (UID: "80d06660-db7a-4142-9fd5-d8f7b8d0d6b0"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 13:47:37 crc kubenswrapper[4762]: I1009 13:47:37.100424 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/5b55cc23-05bb-4df9-9876-d725071b9838-etc-swift\") pod \"swift-proxy-6d457f6487-fjznb\" (UID: \"5b55cc23-05bb-4df9-9876-d725071b9838\") " pod="openstack/swift-proxy-6d457f6487-fjznb" Oct 09 13:47:37 crc kubenswrapper[4762]: I1009 13:47:37.100518 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5b55cc23-05bb-4df9-9876-d725071b9838-log-httpd\") pod \"swift-proxy-6d457f6487-fjznb\" (UID: \"5b55cc23-05bb-4df9-9876-d725071b9838\") " pod="openstack/swift-proxy-6d457f6487-fjznb" Oct 09 13:47:37 crc kubenswrapper[4762]: I1009 13:47:37.100627 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ltzlw\" (UniqueName: \"kubernetes.io/projected/5b55cc23-05bb-4df9-9876-d725071b9838-kube-api-access-ltzlw\") pod \"swift-proxy-6d457f6487-fjznb\" (UID: \"5b55cc23-05bb-4df9-9876-d725071b9838\") " pod="openstack/swift-proxy-6d457f6487-fjznb" Oct 09 13:47:37 crc kubenswrapper[4762]: I1009 13:47:37.100780 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5b55cc23-05bb-4df9-9876-d725071b9838-internal-tls-certs\") pod \"swift-proxy-6d457f6487-fjznb\" (UID: \"5b55cc23-05bb-4df9-9876-d725071b9838\") " pod="openstack/swift-proxy-6d457f6487-fjznb" Oct 09 13:47:37 crc kubenswrapper[4762]: I1009 13:47:37.103731 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5b55cc23-05bb-4df9-9876-d725071b9838-config-data\") pod \"swift-proxy-6d457f6487-fjznb\" (UID: \"5b55cc23-05bb-4df9-9876-d725071b9838\") " pod="openstack/swift-proxy-6d457f6487-fjznb" Oct 09 13:47:37 crc kubenswrapper[4762]: I1009 13:47:37.103838 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5b55cc23-05bb-4df9-9876-d725071b9838-run-httpd\") pod \"swift-proxy-6d457f6487-fjznb\" (UID: \"5b55cc23-05bb-4df9-9876-d725071b9838\") " pod="openstack/swift-proxy-6d457f6487-fjznb" Oct 09 13:47:37 crc kubenswrapper[4762]: I1009 13:47:37.104006 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/5b55cc23-05bb-4df9-9876-d725071b9838-combined-ca-bundle\") pod \"swift-proxy-6d457f6487-fjznb\" (UID: \"5b55cc23-05bb-4df9-9876-d725071b9838\") " pod="openstack/swift-proxy-6d457f6487-fjznb" Oct 09 13:47:37 crc kubenswrapper[4762]: I1009 13:47:37.104275 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5b55cc23-05bb-4df9-9876-d725071b9838-public-tls-certs\") pod \"swift-proxy-6d457f6487-fjznb\" (UID: \"5b55cc23-05bb-4df9-9876-d725071b9838\") " pod="openstack/swift-proxy-6d457f6487-fjznb" Oct 09 13:47:37 crc kubenswrapper[4762]: I1009 13:47:37.104438 4762 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/80d06660-db7a-4142-9fd5-d8f7b8d0d6b0-etc-machine-id\") on node \"crc\" DevicePath \"\"" Oct 09 13:47:37 crc kubenswrapper[4762]: I1009 13:47:37.102247 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5b55cc23-05bb-4df9-9876-d725071b9838-log-httpd\") pod \"swift-proxy-6d457f6487-fjznb\" (UID: \"5b55cc23-05bb-4df9-9876-d725071b9838\") " pod="openstack/swift-proxy-6d457f6487-fjznb" Oct 09 13:47:37 crc kubenswrapper[4762]: I1009 13:47:37.106577 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5b55cc23-05bb-4df9-9876-d725071b9838-run-httpd\") pod \"swift-proxy-6d457f6487-fjznb\" (UID: \"5b55cc23-05bb-4df9-9876-d725071b9838\") " pod="openstack/swift-proxy-6d457f6487-fjznb" Oct 09 13:47:37 crc kubenswrapper[4762]: I1009 13:47:37.107590 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/80d06660-db7a-4142-9fd5-d8f7b8d0d6b0-kube-api-access-9jm6s" (OuterVolumeSpecName: "kube-api-access-9jm6s") pod "80d06660-db7a-4142-9fd5-d8f7b8d0d6b0" (UID: "80d06660-db7a-4142-9fd5-d8f7b8d0d6b0"). InnerVolumeSpecName "kube-api-access-9jm6s". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:47:37 crc kubenswrapper[4762]: I1009 13:47:37.109509 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/80d06660-db7a-4142-9fd5-d8f7b8d0d6b0-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "80d06660-db7a-4142-9fd5-d8f7b8d0d6b0" (UID: "80d06660-db7a-4142-9fd5-d8f7b8d0d6b0"). InnerVolumeSpecName "config-data-custom". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:47:37 crc kubenswrapper[4762]: I1009 13:47:37.110860 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5b55cc23-05bb-4df9-9876-d725071b9838-internal-tls-certs\") pod \"swift-proxy-6d457f6487-fjznb\" (UID: \"5b55cc23-05bb-4df9-9876-d725071b9838\") " pod="openstack/swift-proxy-6d457f6487-fjznb" Oct 09 13:47:37 crc kubenswrapper[4762]: I1009 13:47:37.114143 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5b55cc23-05bb-4df9-9876-d725071b9838-config-data\") pod \"swift-proxy-6d457f6487-fjznb\" (UID: \"5b55cc23-05bb-4df9-9876-d725071b9838\") " pod="openstack/swift-proxy-6d457f6487-fjznb" Oct 09 13:47:37 crc kubenswrapper[4762]: I1009 13:47:37.114803 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5b55cc23-05bb-4df9-9876-d725071b9838-public-tls-certs\") pod \"swift-proxy-6d457f6487-fjznb\" (UID: \"5b55cc23-05bb-4df9-9876-d725071b9838\") " pod="openstack/swift-proxy-6d457f6487-fjznb" Oct 09 13:47:37 crc kubenswrapper[4762]: I1009 13:47:37.116088 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/80d06660-db7a-4142-9fd5-d8f7b8d0d6b0-scripts" (OuterVolumeSpecName: "scripts") pod "80d06660-db7a-4142-9fd5-d8f7b8d0d6b0" (UID: "80d06660-db7a-4142-9fd5-d8f7b8d0d6b0"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:47:37 crc kubenswrapper[4762]: I1009 13:47:37.121932 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ltzlw\" (UniqueName: \"kubernetes.io/projected/5b55cc23-05bb-4df9-9876-d725071b9838-kube-api-access-ltzlw\") pod \"swift-proxy-6d457f6487-fjznb\" (UID: \"5b55cc23-05bb-4df9-9876-d725071b9838\") " pod="openstack/swift-proxy-6d457f6487-fjznb" Oct 09 13:47:37 crc kubenswrapper[4762]: I1009 13:47:37.125272 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/5b55cc23-05bb-4df9-9876-d725071b9838-etc-swift\") pod \"swift-proxy-6d457f6487-fjznb\" (UID: \"5b55cc23-05bb-4df9-9876-d725071b9838\") " pod="openstack/swift-proxy-6d457f6487-fjznb" Oct 09 13:47:37 crc kubenswrapper[4762]: I1009 13:47:37.126423 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5b55cc23-05bb-4df9-9876-d725071b9838-combined-ca-bundle\") pod \"swift-proxy-6d457f6487-fjznb\" (UID: \"5b55cc23-05bb-4df9-9876-d725071b9838\") " pod="openstack/swift-proxy-6d457f6487-fjznb" Oct 09 13:47:37 crc kubenswrapper[4762]: I1009 13:47:37.193808 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/80d06660-db7a-4142-9fd5-d8f7b8d0d6b0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "80d06660-db7a-4142-9fd5-d8f7b8d0d6b0" (UID: "80d06660-db7a-4142-9fd5-d8f7b8d0d6b0"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:47:37 crc kubenswrapper[4762]: I1009 13:47:37.205608 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/80d06660-db7a-4142-9fd5-d8f7b8d0d6b0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 13:47:37 crc kubenswrapper[4762]: I1009 13:47:37.205662 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9jm6s\" (UniqueName: \"kubernetes.io/projected/80d06660-db7a-4142-9fd5-d8f7b8d0d6b0-kube-api-access-9jm6s\") on node \"crc\" DevicePath \"\"" Oct 09 13:47:37 crc kubenswrapper[4762]: I1009 13:47:37.205676 4762 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/80d06660-db7a-4142-9fd5-d8f7b8d0d6b0-scripts\") on node \"crc\" DevicePath \"\"" Oct 09 13:47:37 crc kubenswrapper[4762]: I1009 13:47:37.205689 4762 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/80d06660-db7a-4142-9fd5-d8f7b8d0d6b0-config-data-custom\") on node \"crc\" DevicePath \"\"" Oct 09 13:47:37 crc kubenswrapper[4762]: I1009 13:47:37.254248 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/80d06660-db7a-4142-9fd5-d8f7b8d0d6b0-config-data" (OuterVolumeSpecName: "config-data") pod "80d06660-db7a-4142-9fd5-d8f7b8d0d6b0" (UID: "80d06660-db7a-4142-9fd5-d8f7b8d0d6b0"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:47:37 crc kubenswrapper[4762]: I1009 13:47:37.300760 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-6d457f6487-fjznb" Oct 09 13:47:37 crc kubenswrapper[4762]: I1009 13:47:37.308369 4762 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/80d06660-db7a-4142-9fd5-d8f7b8d0d6b0-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 13:47:37 crc kubenswrapper[4762]: I1009 13:47:37.552907 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"80d06660-db7a-4142-9fd5-d8f7b8d0d6b0","Type":"ContainerDied","Data":"88f345dfc47a21edc6dcdfefcaf92ea050b5f8f3fe009e225037a632fe94f4fb"} Oct 09 13:47:37 crc kubenswrapper[4762]: I1009 13:47:37.553233 4762 scope.go:117] "RemoveContainer" containerID="10f774138f452fa387d822227469296560a775c504e41fe66ea8198260246abe" Oct 09 13:47:37 crc kubenswrapper[4762]: I1009 13:47:37.553424 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 09 13:47:37 crc kubenswrapper[4762]: I1009 13:47:37.598857 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 09 13:47:37 crc kubenswrapper[4762]: I1009 13:47:37.602812 4762 scope.go:117] "RemoveContainer" containerID="7ac811229727a3c8f0eb0f82aa12c351749d73f731bf2226728c9b4c61babe20" Oct 09 13:47:37 crc kubenswrapper[4762]: I1009 13:47:37.617595 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 09 13:47:37 crc kubenswrapper[4762]: I1009 13:47:37.644777 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Oct 09 13:47:37 crc kubenswrapper[4762]: I1009 13:47:37.652505 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 09 13:47:37 crc kubenswrapper[4762]: I1009 13:47:37.656170 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Oct 09 13:47:37 crc kubenswrapper[4762]: I1009 13:47:37.693721 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 09 13:47:37 crc kubenswrapper[4762]: I1009 13:47:37.755117 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-6d457f6487-fjznb"] Oct 09 13:47:37 crc kubenswrapper[4762]: I1009 13:47:37.794753 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 09 13:47:37 crc kubenswrapper[4762]: I1009 13:47:37.795121 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="bfb04dec-2e18-4fcb-a065-e859a3573ee4" containerName="ceilometer-central-agent" containerID="cri-o://7da27e667e32bf44aad3e2ed6cf4fb05fb6f8f01d096a1d26339c6da6bfcee64" gracePeriod=30 Oct 09 13:47:37 crc kubenswrapper[4762]: I1009 13:47:37.796653 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="bfb04dec-2e18-4fcb-a065-e859a3573ee4" containerName="proxy-httpd" containerID="cri-o://b84c270230770eba306f6f7c5ff2016fd300727513d4cbab2f10d8f2ac3f67db" gracePeriod=30 Oct 09 13:47:37 crc kubenswrapper[4762]: I1009 13:47:37.796846 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="bfb04dec-2e18-4fcb-a065-e859a3573ee4" containerName="ceilometer-notification-agent" containerID="cri-o://813e916f60e18760c92180d5f44be9adc64b7b48bc273515999d583d8aeb9f3a" gracePeriod=30 Oct 09 13:47:37 crc kubenswrapper[4762]: I1009 13:47:37.796930 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="bfb04dec-2e18-4fcb-a065-e859a3573ee4" containerName="sg-core" containerID="cri-o://6cb4cb71d0041ba3ef3f2c9b89eaed8bf58f9438ccfbb0b77627b674729d7b79" gracePeriod=30 Oct 09 13:47:37 crc kubenswrapper[4762]: I1009 13:47:37.808664 4762 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="bfb04dec-2e18-4fcb-a065-e859a3573ee4" containerName="proxy-httpd" probeResult="failure" output="Get \"http://10.217.0.157:3000/\": EOF" Oct 09 13:47:37 crc kubenswrapper[4762]: I1009 13:47:37.822048 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/46b3e9f8-479c-45b8-afc9-4c8344da0797-scripts\") pod \"cinder-scheduler-0\" (UID: \"46b3e9f8-479c-45b8-afc9-4c8344da0797\") " pod="openstack/cinder-scheduler-0" Oct 09 13:47:37 crc kubenswrapper[4762]: I1009 13:47:37.822095 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/46b3e9f8-479c-45b8-afc9-4c8344da0797-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"46b3e9f8-479c-45b8-afc9-4c8344da0797\") " pod="openstack/cinder-scheduler-0" Oct 09 13:47:37 crc kubenswrapper[4762]: I1009 13:47:37.822176 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/46b3e9f8-479c-45b8-afc9-4c8344da0797-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"46b3e9f8-479c-45b8-afc9-4c8344da0797\") " pod="openstack/cinder-scheduler-0" Oct 09 
13:47:37 crc kubenswrapper[4762]: I1009 13:47:37.822190 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/46b3e9f8-479c-45b8-afc9-4c8344da0797-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"46b3e9f8-479c-45b8-afc9-4c8344da0797\") " pod="openstack/cinder-scheduler-0" Oct 09 13:47:37 crc kubenswrapper[4762]: I1009 13:47:37.822227 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/46b3e9f8-479c-45b8-afc9-4c8344da0797-config-data\") pod \"cinder-scheduler-0\" (UID: \"46b3e9f8-479c-45b8-afc9-4c8344da0797\") " pod="openstack/cinder-scheduler-0" Oct 09 13:47:37 crc kubenswrapper[4762]: I1009 13:47:37.822272 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2bpwg\" (UniqueName: \"kubernetes.io/projected/46b3e9f8-479c-45b8-afc9-4c8344da0797-kube-api-access-2bpwg\") pod \"cinder-scheduler-0\" (UID: \"46b3e9f8-479c-45b8-afc9-4c8344da0797\") " pod="openstack/cinder-scheduler-0" Oct 09 13:47:37 crc kubenswrapper[4762]: I1009 13:47:37.923620 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/46b3e9f8-479c-45b8-afc9-4c8344da0797-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"46b3e9f8-479c-45b8-afc9-4c8344da0797\") " pod="openstack/cinder-scheduler-0" Oct 09 13:47:37 crc kubenswrapper[4762]: I1009 13:47:37.923977 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/46b3e9f8-479c-45b8-afc9-4c8344da0797-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"46b3e9f8-479c-45b8-afc9-4c8344da0797\") " pod="openstack/cinder-scheduler-0" Oct 09 13:47:37 crc kubenswrapper[4762]: I1009 13:47:37.924026 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/46b3e9f8-479c-45b8-afc9-4c8344da0797-config-data\") pod \"cinder-scheduler-0\" (UID: \"46b3e9f8-479c-45b8-afc9-4c8344da0797\") " pod="openstack/cinder-scheduler-0" Oct 09 13:47:37 crc kubenswrapper[4762]: I1009 13:47:37.924080 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2bpwg\" (UniqueName: \"kubernetes.io/projected/46b3e9f8-479c-45b8-afc9-4c8344da0797-kube-api-access-2bpwg\") pod \"cinder-scheduler-0\" (UID: \"46b3e9f8-479c-45b8-afc9-4c8344da0797\") " pod="openstack/cinder-scheduler-0" Oct 09 13:47:37 crc kubenswrapper[4762]: I1009 13:47:37.924125 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/46b3e9f8-479c-45b8-afc9-4c8344da0797-scripts\") pod \"cinder-scheduler-0\" (UID: \"46b3e9f8-479c-45b8-afc9-4c8344da0797\") " pod="openstack/cinder-scheduler-0" Oct 09 13:47:37 crc kubenswrapper[4762]: I1009 13:47:37.924155 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/46b3e9f8-479c-45b8-afc9-4c8344da0797-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"46b3e9f8-479c-45b8-afc9-4c8344da0797\") " pod="openstack/cinder-scheduler-0" Oct 09 13:47:37 crc kubenswrapper[4762]: I1009 13:47:37.924241 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: 
\"kubernetes.io/host-path/46b3e9f8-479c-45b8-afc9-4c8344da0797-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"46b3e9f8-479c-45b8-afc9-4c8344da0797\") " pod="openstack/cinder-scheduler-0" Oct 09 13:47:37 crc kubenswrapper[4762]: I1009 13:47:37.929702 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/46b3e9f8-479c-45b8-afc9-4c8344da0797-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"46b3e9f8-479c-45b8-afc9-4c8344da0797\") " pod="openstack/cinder-scheduler-0" Oct 09 13:47:37 crc kubenswrapper[4762]: I1009 13:47:37.931159 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/46b3e9f8-479c-45b8-afc9-4c8344da0797-config-data\") pod \"cinder-scheduler-0\" (UID: \"46b3e9f8-479c-45b8-afc9-4c8344da0797\") " pod="openstack/cinder-scheduler-0" Oct 09 13:47:37 crc kubenswrapper[4762]: I1009 13:47:37.934019 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/46b3e9f8-479c-45b8-afc9-4c8344da0797-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"46b3e9f8-479c-45b8-afc9-4c8344da0797\") " pod="openstack/cinder-scheduler-0" Oct 09 13:47:37 crc kubenswrapper[4762]: I1009 13:47:37.938128 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/46b3e9f8-479c-45b8-afc9-4c8344da0797-scripts\") pod \"cinder-scheduler-0\" (UID: \"46b3e9f8-479c-45b8-afc9-4c8344da0797\") " pod="openstack/cinder-scheduler-0" Oct 09 13:47:37 crc kubenswrapper[4762]: I1009 13:47:37.945130 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2bpwg\" (UniqueName: \"kubernetes.io/projected/46b3e9f8-479c-45b8-afc9-4c8344da0797-kube-api-access-2bpwg\") pod \"cinder-scheduler-0\" (UID: \"46b3e9f8-479c-45b8-afc9-4c8344da0797\") " pod="openstack/cinder-scheduler-0" Oct 09 13:47:37 crc kubenswrapper[4762]: I1009 13:47:37.990399 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 09 13:47:38 crc kubenswrapper[4762]: I1009 13:47:38.578367 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-6d457f6487-fjznb" event={"ID":"5b55cc23-05bb-4df9-9876-d725071b9838","Type":"ContainerStarted","Data":"8f98e1f5816b4f5c7f33e6e177e326aadaed2d9b0b12f6488966a24db673528b"} Oct 09 13:47:38 crc kubenswrapper[4762]: I1009 13:47:38.578735 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-6d457f6487-fjznb" event={"ID":"5b55cc23-05bb-4df9-9876-d725071b9838","Type":"ContainerStarted","Data":"b490b4f39e6ea2a1af800e6ebc54b51119c9ebfcf3135bde77ab2dd308d6f0a1"} Oct 09 13:47:38 crc kubenswrapper[4762]: I1009 13:47:38.578753 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-6d457f6487-fjznb" event={"ID":"5b55cc23-05bb-4df9-9876-d725071b9838","Type":"ContainerStarted","Data":"ebaf6d9d15a742a3028e27d7a8fada2a6b24cdf909945eca4a67d427756bb271"} Oct 09 13:47:38 crc kubenswrapper[4762]: I1009 13:47:38.579286 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-6d457f6487-fjznb" Oct 09 13:47:38 crc kubenswrapper[4762]: I1009 13:47:38.579497 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-6d457f6487-fjznb" Oct 09 13:47:38 crc kubenswrapper[4762]: I1009 13:47:38.579889 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 09 13:47:38 crc kubenswrapper[4762]: I1009 13:47:38.588676 4762 generic.go:334] "Generic (PLEG): container finished" podID="bfb04dec-2e18-4fcb-a065-e859a3573ee4" containerID="b84c270230770eba306f6f7c5ff2016fd300727513d4cbab2f10d8f2ac3f67db" exitCode=0 Oct 09 13:47:38 crc kubenswrapper[4762]: I1009 13:47:38.588735 4762 generic.go:334] "Generic (PLEG): container finished" podID="bfb04dec-2e18-4fcb-a065-e859a3573ee4" containerID="6cb4cb71d0041ba3ef3f2c9b89eaed8bf58f9438ccfbb0b77627b674729d7b79" exitCode=2 Oct 09 13:47:38 crc kubenswrapper[4762]: I1009 13:47:38.588743 4762 generic.go:334] "Generic (PLEG): container finished" podID="bfb04dec-2e18-4fcb-a065-e859a3573ee4" containerID="7da27e667e32bf44aad3e2ed6cf4fb05fb6f8f01d096a1d26339c6da6bfcee64" exitCode=0 Oct 09 13:47:38 crc kubenswrapper[4762]: I1009 13:47:38.588766 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"bfb04dec-2e18-4fcb-a065-e859a3573ee4","Type":"ContainerDied","Data":"b84c270230770eba306f6f7c5ff2016fd300727513d4cbab2f10d8f2ac3f67db"} Oct 09 13:47:38 crc kubenswrapper[4762]: I1009 13:47:38.588791 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"bfb04dec-2e18-4fcb-a065-e859a3573ee4","Type":"ContainerDied","Data":"6cb4cb71d0041ba3ef3f2c9b89eaed8bf58f9438ccfbb0b77627b674729d7b79"} Oct 09 13:47:38 crc kubenswrapper[4762]: I1009 13:47:38.588803 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"bfb04dec-2e18-4fcb-a065-e859a3573ee4","Type":"ContainerDied","Data":"7da27e667e32bf44aad3e2ed6cf4fb05fb6f8f01d096a1d26339c6da6bfcee64"} Oct 09 13:47:38 crc kubenswrapper[4762]: I1009 13:47:38.619854 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-proxy-6d457f6487-fjznb" podStartSLOduration=2.619817842 podStartE2EDuration="2.619817842s" podCreationTimestamp="2025-10-09 13:47:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 
00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:47:38.609882095 +0000 UTC m=+1334.383673134" watchObservedRunningTime="2025-10-09 13:47:38.619817842 +0000 UTC m=+1334.393608891" Oct 09 13:47:38 crc kubenswrapper[4762]: I1009 13:47:38.979733 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="80d06660-db7a-4142-9fd5-d8f7b8d0d6b0" path="/var/lib/kubelet/pods/80d06660-db7a-4142-9fd5-d8f7b8d0d6b0/volumes" Oct 09 13:47:39 crc kubenswrapper[4762]: I1009 13:47:39.481117 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Oct 09 13:47:39 crc kubenswrapper[4762]: I1009 13:47:39.540943 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-d57f8f89d-6fc59" Oct 09 13:47:39 crc kubenswrapper[4762]: I1009 13:47:39.709244 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"46b3e9f8-479c-45b8-afc9-4c8344da0797","Type":"ContainerStarted","Data":"62e66f771f074a66fee0a95196e2984ab7d958da1caa8b50ceaee3f6fd946317"} Oct 09 13:47:39 crc kubenswrapper[4762]: I1009 13:47:39.709288 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"46b3e9f8-479c-45b8-afc9-4c8344da0797","Type":"ContainerStarted","Data":"f367df9c259cccde59cafbfd916f63c303f3d56e11c0b92da7112d53f666f700"} Oct 09 13:47:40 crc kubenswrapper[4762]: I1009 13:47:40.729367 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"46b3e9f8-479c-45b8-afc9-4c8344da0797","Type":"ContainerStarted","Data":"be1b69b00d7fde9a938acf59f47549f46af90d5b80b3f6b68e63608f6eeeefc0"} Oct 09 13:47:40 crc kubenswrapper[4762]: I1009 13:47:40.749212 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=3.74919036 podStartE2EDuration="3.74919036s" podCreationTimestamp="2025-10-09 13:47:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:47:40.746504074 +0000 UTC m=+1336.520295113" watchObservedRunningTime="2025-10-09 13:47:40.74919036 +0000 UTC m=+1336.522981399" Oct 09 13:47:41 crc kubenswrapper[4762]: I1009 13:47:41.756588 4762 generic.go:334] "Generic (PLEG): container finished" podID="bfb04dec-2e18-4fcb-a065-e859a3573ee4" containerID="813e916f60e18760c92180d5f44be9adc64b7b48bc273515999d583d8aeb9f3a" exitCode=0 Oct 09 13:47:41 crc kubenswrapper[4762]: I1009 13:47:41.756662 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"bfb04dec-2e18-4fcb-a065-e859a3573ee4","Type":"ContainerDied","Data":"813e916f60e18760c92180d5f44be9adc64b7b48bc273515999d583d8aeb9f3a"} Oct 09 13:47:42 crc kubenswrapper[4762]: I1009 13:47:42.513621 4762 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="bfb04dec-2e18-4fcb-a065-e859a3573ee4" containerName="proxy-httpd" probeResult="failure" output="Get \"http://10.217.0.157:3000/\": dial tcp 10.217.0.157:3000: connect: connection refused" Oct 09 13:47:42 crc kubenswrapper[4762]: I1009 13:47:42.991729 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Oct 09 13:47:43 crc kubenswrapper[4762]: I1009 13:47:43.383522 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-7ff674b5d8-5r7qh" Oct 09 13:47:43 crc kubenswrapper[4762]: I1009 
13:47:43.383843 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-7ff674b5d8-5r7qh" Oct 09 13:47:43 crc kubenswrapper[4762]: I1009 13:47:43.643128 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-c7fd4cf7c-phtkg" Oct 09 13:47:43 crc kubenswrapper[4762]: I1009 13:47:43.722011 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-d57f8f89d-6fc59"] Oct 09 13:47:43 crc kubenswrapper[4762]: I1009 13:47:43.722462 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-d57f8f89d-6fc59" podUID="60dea6d8-cdd4-4cf1-bcac-eb6babcfaac6" containerName="neutron-httpd" containerID="cri-o://3622a8256ddb6de752f1030721eca2f21adbf00ed0b2d1c1143da9c780959d68" gracePeriod=30 Oct 09 13:47:43 crc kubenswrapper[4762]: I1009 13:47:43.722275 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-d57f8f89d-6fc59" podUID="60dea6d8-cdd4-4cf1-bcac-eb6babcfaac6" containerName="neutron-api" containerID="cri-o://4ef0705bb5b5f705730dcbf274c818e7737e519b60eb4c19dd124e106257e058" gracePeriod=30 Oct 09 13:47:44 crc kubenswrapper[4762]: I1009 13:47:44.796230 4762 generic.go:334] "Generic (PLEG): container finished" podID="60dea6d8-cdd4-4cf1-bcac-eb6babcfaac6" containerID="3622a8256ddb6de752f1030721eca2f21adbf00ed0b2d1c1143da9c780959d68" exitCode=0 Oct 09 13:47:44 crc kubenswrapper[4762]: I1009 13:47:44.796286 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-d57f8f89d-6fc59" event={"ID":"60dea6d8-cdd4-4cf1-bcac-eb6babcfaac6","Type":"ContainerDied","Data":"3622a8256ddb6de752f1030721eca2f21adbf00ed0b2d1c1143da9c780959d68"} Oct 09 13:47:46 crc kubenswrapper[4762]: I1009 13:47:46.552471 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 09 13:47:46 crc kubenswrapper[4762]: I1009 13:47:46.553020 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="4b8ec4f0-adb7-41f8-8552-f23a5dfca100" containerName="glance-log" containerID="cri-o://3ee2ecd42d3541592dfa60b557278e6f9269df0e7fe3828bf670134b10634c48" gracePeriod=30 Oct 09 13:47:46 crc kubenswrapper[4762]: I1009 13:47:46.553163 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="4b8ec4f0-adb7-41f8-8552-f23a5dfca100" containerName="glance-httpd" containerID="cri-o://398df6e246619b4b62d1ebd1853ebe221dc9d0e416c32b6e225df3d6fc27772e" gracePeriod=30 Oct 09 13:47:46 crc kubenswrapper[4762]: I1009 13:47:46.839089 4762 generic.go:334] "Generic (PLEG): container finished" podID="4b8ec4f0-adb7-41f8-8552-f23a5dfca100" containerID="3ee2ecd42d3541592dfa60b557278e6f9269df0e7fe3828bf670134b10634c48" exitCode=143 Oct 09 13:47:46 crc kubenswrapper[4762]: I1009 13:47:46.839364 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"4b8ec4f0-adb7-41f8-8552-f23a5dfca100","Type":"ContainerDied","Data":"3ee2ecd42d3541592dfa60b557278e6f9269df0e7fe3828bf670134b10634c48"} Oct 09 13:47:46 crc kubenswrapper[4762]: I1009 13:47:46.866993 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-tz9c8"] Oct 09 13:47:46 crc kubenswrapper[4762]: I1009 13:47:46.868218 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-tz9c8" Oct 09 13:47:46 crc kubenswrapper[4762]: I1009 13:47:46.876371 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-tz9c8"] Oct 09 13:47:46 crc kubenswrapper[4762]: I1009 13:47:46.937837 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dc7rt\" (UniqueName: \"kubernetes.io/projected/ea5d9438-f638-4ddb-9c18-56bad695b73f-kube-api-access-dc7rt\") pod \"nova-api-db-create-tz9c8\" (UID: \"ea5d9438-f638-4ddb-9c18-56bad695b73f\") " pod="openstack/nova-api-db-create-tz9c8" Oct 09 13:47:46 crc kubenswrapper[4762]: I1009 13:47:46.960680 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-q674c"] Oct 09 13:47:46 crc kubenswrapper[4762]: I1009 13:47:46.961905 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-q674c" Oct 09 13:47:46 crc kubenswrapper[4762]: I1009 13:47:46.990749 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-q674c"] Oct 09 13:47:47 crc kubenswrapper[4762]: I1009 13:47:47.041075 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dc7rt\" (UniqueName: \"kubernetes.io/projected/ea5d9438-f638-4ddb-9c18-56bad695b73f-kube-api-access-dc7rt\") pod \"nova-api-db-create-tz9c8\" (UID: \"ea5d9438-f638-4ddb-9c18-56bad695b73f\") " pod="openstack/nova-api-db-create-tz9c8" Oct 09 13:47:47 crc kubenswrapper[4762]: I1009 13:47:47.041162 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7b5t4\" (UniqueName: \"kubernetes.io/projected/06c5c8d1-2d20-4747-91ff-703b4fa7d1ac-kube-api-access-7b5t4\") pod \"nova-cell0-db-create-q674c\" (UID: \"06c5c8d1-2d20-4747-91ff-703b4fa7d1ac\") " pod="openstack/nova-cell0-db-create-q674c" Oct 09 13:47:47 crc kubenswrapper[4762]: I1009 13:47:47.063442 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dc7rt\" (UniqueName: \"kubernetes.io/projected/ea5d9438-f638-4ddb-9c18-56bad695b73f-kube-api-access-dc7rt\") pod \"nova-api-db-create-tz9c8\" (UID: \"ea5d9438-f638-4ddb-9c18-56bad695b73f\") " pod="openstack/nova-api-db-create-tz9c8" Oct 09 13:47:47 crc kubenswrapper[4762]: I1009 13:47:47.143023 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7b5t4\" (UniqueName: \"kubernetes.io/projected/06c5c8d1-2d20-4747-91ff-703b4fa7d1ac-kube-api-access-7b5t4\") pod \"nova-cell0-db-create-q674c\" (UID: \"06c5c8d1-2d20-4747-91ff-703b4fa7d1ac\") " pod="openstack/nova-cell0-db-create-q674c" Oct 09 13:47:47 crc kubenswrapper[4762]: I1009 13:47:47.162065 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-r4x7g"] Oct 09 13:47:47 crc kubenswrapper[4762]: I1009 13:47:47.163352 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7b5t4\" (UniqueName: \"kubernetes.io/projected/06c5c8d1-2d20-4747-91ff-703b4fa7d1ac-kube-api-access-7b5t4\") pod \"nova-cell0-db-create-q674c\" (UID: \"06c5c8d1-2d20-4747-91ff-703b4fa7d1ac\") " pod="openstack/nova-cell0-db-create-q674c" Oct 09 13:47:47 crc kubenswrapper[4762]: I1009 13:47:47.163469 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-r4x7g" Oct 09 13:47:47 crc kubenswrapper[4762]: I1009 13:47:47.179261 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-r4x7g"] Oct 09 13:47:47 crc kubenswrapper[4762]: I1009 13:47:47.187705 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-tz9c8" Oct 09 13:47:47 crc kubenswrapper[4762]: I1009 13:47:47.215478 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 09 13:47:47 crc kubenswrapper[4762]: I1009 13:47:47.246228 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h2mhv\" (UniqueName: \"kubernetes.io/projected/82614827-1884-4faa-9f97-2fd96ec29941-kube-api-access-h2mhv\") pod \"nova-cell1-db-create-r4x7g\" (UID: \"82614827-1884-4faa-9f97-2fd96ec29941\") " pod="openstack/nova-cell1-db-create-r4x7g" Oct 09 13:47:47 crc kubenswrapper[4762]: I1009 13:47:47.309724 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-6d457f6487-fjznb" Oct 09 13:47:47 crc kubenswrapper[4762]: I1009 13:47:47.317548 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-6d457f6487-fjznb" Oct 09 13:47:47 crc kubenswrapper[4762]: I1009 13:47:47.347979 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bfb04dec-2e18-4fcb-a065-e859a3573ee4-scripts\") pod \"bfb04dec-2e18-4fcb-a065-e859a3573ee4\" (UID: \"bfb04dec-2e18-4fcb-a065-e859a3573ee4\") " Oct 09 13:47:47 crc kubenswrapper[4762]: I1009 13:47:47.348029 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bfb04dec-2e18-4fcb-a065-e859a3573ee4-log-httpd\") pod \"bfb04dec-2e18-4fcb-a065-e859a3573ee4\" (UID: \"bfb04dec-2e18-4fcb-a065-e859a3573ee4\") " Oct 09 13:47:47 crc kubenswrapper[4762]: I1009 13:47:47.348062 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bfb04dec-2e18-4fcb-a065-e859a3573ee4-config-data\") pod \"bfb04dec-2e18-4fcb-a065-e859a3573ee4\" (UID: \"bfb04dec-2e18-4fcb-a065-e859a3573ee4\") " Oct 09 13:47:47 crc kubenswrapper[4762]: I1009 13:47:47.348084 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bfb04dec-2e18-4fcb-a065-e859a3573ee4-combined-ca-bundle\") pod \"bfb04dec-2e18-4fcb-a065-e859a3573ee4\" (UID: \"bfb04dec-2e18-4fcb-a065-e859a3573ee4\") " Oct 09 13:47:47 crc kubenswrapper[4762]: I1009 13:47:47.348287 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bfb04dec-2e18-4fcb-a065-e859a3573ee4-run-httpd\") pod \"bfb04dec-2e18-4fcb-a065-e859a3573ee4\" (UID: \"bfb04dec-2e18-4fcb-a065-e859a3573ee4\") " Oct 09 13:47:47 crc kubenswrapper[4762]: I1009 13:47:47.348333 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/bfb04dec-2e18-4fcb-a065-e859a3573ee4-sg-core-conf-yaml\") pod \"bfb04dec-2e18-4fcb-a065-e859a3573ee4\" (UID: \"bfb04dec-2e18-4fcb-a065-e859a3573ee4\") " Oct 09 13:47:47 crc kubenswrapper[4762]: I1009 13:47:47.348397 4762 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"kube-api-access-5wlst\" (UniqueName: \"kubernetes.io/projected/bfb04dec-2e18-4fcb-a065-e859a3573ee4-kube-api-access-5wlst\") pod \"bfb04dec-2e18-4fcb-a065-e859a3573ee4\" (UID: \"bfb04dec-2e18-4fcb-a065-e859a3573ee4\") " Oct 09 13:47:47 crc kubenswrapper[4762]: I1009 13:47:47.349841 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bfb04dec-2e18-4fcb-a065-e859a3573ee4-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "bfb04dec-2e18-4fcb-a065-e859a3573ee4" (UID: "bfb04dec-2e18-4fcb-a065-e859a3573ee4"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 13:47:47 crc kubenswrapper[4762]: I1009 13:47:47.350517 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bfb04dec-2e18-4fcb-a065-e859a3573ee4-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "bfb04dec-2e18-4fcb-a065-e859a3573ee4" (UID: "bfb04dec-2e18-4fcb-a065-e859a3573ee4"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 13:47:47 crc kubenswrapper[4762]: I1009 13:47:47.355097 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h2mhv\" (UniqueName: \"kubernetes.io/projected/82614827-1884-4faa-9f97-2fd96ec29941-kube-api-access-h2mhv\") pod \"nova-cell1-db-create-r4x7g\" (UID: \"82614827-1884-4faa-9f97-2fd96ec29941\") " pod="openstack/nova-cell1-db-create-r4x7g" Oct 09 13:47:47 crc kubenswrapper[4762]: I1009 13:47:47.362779 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-q674c" Oct 09 13:47:47 crc kubenswrapper[4762]: I1009 13:47:47.364851 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bfb04dec-2e18-4fcb-a065-e859a3573ee4-scripts" (OuterVolumeSpecName: "scripts") pod "bfb04dec-2e18-4fcb-a065-e859a3573ee4" (UID: "bfb04dec-2e18-4fcb-a065-e859a3573ee4"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:47:47 crc kubenswrapper[4762]: I1009 13:47:47.365017 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bfb04dec-2e18-4fcb-a065-e859a3573ee4-kube-api-access-5wlst" (OuterVolumeSpecName: "kube-api-access-5wlst") pod "bfb04dec-2e18-4fcb-a065-e859a3573ee4" (UID: "bfb04dec-2e18-4fcb-a065-e859a3573ee4"). InnerVolumeSpecName "kube-api-access-5wlst". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:47:47 crc kubenswrapper[4762]: I1009 13:47:47.377125 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h2mhv\" (UniqueName: \"kubernetes.io/projected/82614827-1884-4faa-9f97-2fd96ec29941-kube-api-access-h2mhv\") pod \"nova-cell1-db-create-r4x7g\" (UID: \"82614827-1884-4faa-9f97-2fd96ec29941\") " pod="openstack/nova-cell1-db-create-r4x7g" Oct 09 13:47:47 crc kubenswrapper[4762]: I1009 13:47:47.409177 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bfb04dec-2e18-4fcb-a065-e859a3573ee4-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "bfb04dec-2e18-4fcb-a065-e859a3573ee4" (UID: "bfb04dec-2e18-4fcb-a065-e859a3573ee4"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:47:47 crc kubenswrapper[4762]: I1009 13:47:47.457396 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5wlst\" (UniqueName: \"kubernetes.io/projected/bfb04dec-2e18-4fcb-a065-e859a3573ee4-kube-api-access-5wlst\") on node \"crc\" DevicePath \"\"" Oct 09 13:47:47 crc kubenswrapper[4762]: I1009 13:47:47.457430 4762 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bfb04dec-2e18-4fcb-a065-e859a3573ee4-scripts\") on node \"crc\" DevicePath \"\"" Oct 09 13:47:47 crc kubenswrapper[4762]: I1009 13:47:47.457443 4762 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bfb04dec-2e18-4fcb-a065-e859a3573ee4-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 09 13:47:47 crc kubenswrapper[4762]: I1009 13:47:47.457454 4762 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bfb04dec-2e18-4fcb-a065-e859a3573ee4-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 09 13:47:47 crc kubenswrapper[4762]: I1009 13:47:47.457464 4762 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/bfb04dec-2e18-4fcb-a065-e859a3573ee4-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 09 13:47:47 crc kubenswrapper[4762]: I1009 13:47:47.485766 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bfb04dec-2e18-4fcb-a065-e859a3573ee4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "bfb04dec-2e18-4fcb-a065-e859a3573ee4" (UID: "bfb04dec-2e18-4fcb-a065-e859a3573ee4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:47:47 crc kubenswrapper[4762]: I1009 13:47:47.500541 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bfb04dec-2e18-4fcb-a065-e859a3573ee4-config-data" (OuterVolumeSpecName: "config-data") pod "bfb04dec-2e18-4fcb-a065-e859a3573ee4" (UID: "bfb04dec-2e18-4fcb-a065-e859a3573ee4"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:47:47 crc kubenswrapper[4762]: I1009 13:47:47.503017 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-r4x7g" Oct 09 13:47:47 crc kubenswrapper[4762]: I1009 13:47:47.561513 4762 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bfb04dec-2e18-4fcb-a065-e859a3573ee4-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 13:47:47 crc kubenswrapper[4762]: I1009 13:47:47.561547 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bfb04dec-2e18-4fcb-a065-e859a3573ee4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 13:47:47 crc kubenswrapper[4762]: I1009 13:47:47.638962 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 09 13:47:47 crc kubenswrapper[4762]: I1009 13:47:47.639532 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="32c327f2-9986-410e-972f-4913811fb1d8" containerName="glance-log" containerID="cri-o://00d36f7c2beef733a43b4d1caf4716e4c4c2c7fdd8d236f178fa3e8ccfc978d5" gracePeriod=30 Oct 09 13:47:47 crc kubenswrapper[4762]: I1009 13:47:47.639837 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="32c327f2-9986-410e-972f-4913811fb1d8" containerName="glance-httpd" containerID="cri-o://d4a0bba0c2ec293c5641e19b646eb1580a7b1bbcfd4e0c9265398b15bcaca662" gracePeriod=30 Oct 09 13:47:47 crc kubenswrapper[4762]: I1009 13:47:47.702944 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-tz9c8"] Oct 09 13:47:47 crc kubenswrapper[4762]: W1009 13:47:47.704031 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podea5d9438_f638_4ddb_9c18_56bad695b73f.slice/crio-6cdc4470c11442884194efd1400eb687edc9bda5990f4e266718da4cb0b30c5f WatchSource:0}: Error finding container 6cdc4470c11442884194efd1400eb687edc9bda5990f4e266718da4cb0b30c5f: Status 404 returned error can't find the container with id 6cdc4470c11442884194efd1400eb687edc9bda5990f4e266718da4cb0b30c5f Oct 09 13:47:47 crc kubenswrapper[4762]: I1009 13:47:47.861096 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-tz9c8" event={"ID":"ea5d9438-f638-4ddb-9c18-56bad695b73f","Type":"ContainerStarted","Data":"6cdc4470c11442884194efd1400eb687edc9bda5990f4e266718da4cb0b30c5f"} Oct 09 13:47:47 crc kubenswrapper[4762]: I1009 13:47:47.867776 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"1862d6d3-5d91-47cf-8b78-c0298569ee90","Type":"ContainerStarted","Data":"4df5c5e992cba5a503875e5ca7fa91f0471d8c3722c04f695117bfa01ff8843e"} Oct 09 13:47:47 crc kubenswrapper[4762]: I1009 13:47:47.870028 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-q674c"] Oct 09 13:47:47 crc kubenswrapper[4762]: I1009 13:47:47.873867 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"bfb04dec-2e18-4fcb-a065-e859a3573ee4","Type":"ContainerDied","Data":"4f55ff853efdbb8502fda62a7ac534409ccbb3edb1a2ac61f71cf54811321910"} Oct 09 13:47:47 crc kubenswrapper[4762]: I1009 13:47:47.873917 4762 scope.go:117] "RemoveContainer" containerID="b84c270230770eba306f6f7c5ff2016fd300727513d4cbab2f10d8f2ac3f67db" Oct 09 13:47:47 crc kubenswrapper[4762]: I1009 13:47:47.874075 4762 util.go:48] "No ready sandbox for pod can be 
found. Need to start a new one" pod="openstack/ceilometer-0" Oct 09 13:47:47 crc kubenswrapper[4762]: I1009 13:47:47.878993 4762 generic.go:334] "Generic (PLEG): container finished" podID="32c327f2-9986-410e-972f-4913811fb1d8" containerID="00d36f7c2beef733a43b4d1caf4716e4c4c2c7fdd8d236f178fa3e8ccfc978d5" exitCode=143 Oct 09 13:47:47 crc kubenswrapper[4762]: I1009 13:47:47.879790 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"32c327f2-9986-410e-972f-4913811fb1d8","Type":"ContainerDied","Data":"00d36f7c2beef733a43b4d1caf4716e4c4c2c7fdd8d236f178fa3e8ccfc978d5"} Oct 09 13:47:47 crc kubenswrapper[4762]: W1009 13:47:47.888026 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod06c5c8d1_2d20_4747_91ff_703b4fa7d1ac.slice/crio-f6058eb6251049cab9fbbfb95cec2107d1a256c9e5a74f9427ad70028a05fedc WatchSource:0}: Error finding container f6058eb6251049cab9fbbfb95cec2107d1a256c9e5a74f9427ad70028a05fedc: Status 404 returned error can't find the container with id f6058eb6251049cab9fbbfb95cec2107d1a256c9e5a74f9427ad70028a05fedc Oct 09 13:47:47 crc kubenswrapper[4762]: I1009 13:47:47.890059 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=2.371542525 podStartE2EDuration="14.890037436s" podCreationTimestamp="2025-10-09 13:47:33 +0000 UTC" firstStartedPulling="2025-10-09 13:47:34.383563182 +0000 UTC m=+1330.157354221" lastFinishedPulling="2025-10-09 13:47:46.902058093 +0000 UTC m=+1342.675849132" observedRunningTime="2025-10-09 13:47:47.882652643 +0000 UTC m=+1343.656443692" watchObservedRunningTime="2025-10-09 13:47:47.890037436 +0000 UTC m=+1343.663828475" Oct 09 13:47:47 crc kubenswrapper[4762]: I1009 13:47:47.935159 4762 scope.go:117] "RemoveContainer" containerID="6cb4cb71d0041ba3ef3f2c9b89eaed8bf58f9438ccfbb0b77627b674729d7b79" Oct 09 13:47:47 crc kubenswrapper[4762]: I1009 13:47:47.962937 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 09 13:47:47 crc kubenswrapper[4762]: I1009 13:47:47.976302 4762 scope.go:117] "RemoveContainer" containerID="813e916f60e18760c92180d5f44be9adc64b7b48bc273515999d583d8aeb9f3a" Oct 09 13:47:47 crc kubenswrapper[4762]: I1009 13:47:47.979340 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 09 13:47:47 crc kubenswrapper[4762]: I1009 13:47:47.999070 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 09 13:47:47 crc kubenswrapper[4762]: E1009 13:47:47.999508 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bfb04dec-2e18-4fcb-a065-e859a3573ee4" containerName="sg-core" Oct 09 13:47:47 crc kubenswrapper[4762]: I1009 13:47:47.999522 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="bfb04dec-2e18-4fcb-a065-e859a3573ee4" containerName="sg-core" Oct 09 13:47:47 crc kubenswrapper[4762]: E1009 13:47:47.999536 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bfb04dec-2e18-4fcb-a065-e859a3573ee4" containerName="ceilometer-notification-agent" Oct 09 13:47:47 crc kubenswrapper[4762]: I1009 13:47:47.999542 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="bfb04dec-2e18-4fcb-a065-e859a3573ee4" containerName="ceilometer-notification-agent" Oct 09 13:47:47 crc kubenswrapper[4762]: E1009 13:47:47.999560 4762 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="bfb04dec-2e18-4fcb-a065-e859a3573ee4" containerName="ceilometer-central-agent" Oct 09 13:47:47 crc kubenswrapper[4762]: I1009 13:47:47.999566 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="bfb04dec-2e18-4fcb-a065-e859a3573ee4" containerName="ceilometer-central-agent" Oct 09 13:47:47 crc kubenswrapper[4762]: E1009 13:47:47.999587 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bfb04dec-2e18-4fcb-a065-e859a3573ee4" containerName="proxy-httpd" Oct 09 13:47:47 crc kubenswrapper[4762]: I1009 13:47:47.999592 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="bfb04dec-2e18-4fcb-a065-e859a3573ee4" containerName="proxy-httpd" Oct 09 13:47:48 crc kubenswrapper[4762]: I1009 13:47:48.000317 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="bfb04dec-2e18-4fcb-a065-e859a3573ee4" containerName="proxy-httpd" Oct 09 13:47:48 crc kubenswrapper[4762]: I1009 13:47:48.000333 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="bfb04dec-2e18-4fcb-a065-e859a3573ee4" containerName="sg-core" Oct 09 13:47:48 crc kubenswrapper[4762]: I1009 13:47:48.000343 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="bfb04dec-2e18-4fcb-a065-e859a3573ee4" containerName="ceilometer-central-agent" Oct 09 13:47:48 crc kubenswrapper[4762]: I1009 13:47:48.000351 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="bfb04dec-2e18-4fcb-a065-e859a3573ee4" containerName="ceilometer-notification-agent" Oct 09 13:47:48 crc kubenswrapper[4762]: I1009 13:47:48.002353 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 09 13:47:48 crc kubenswrapper[4762]: I1009 13:47:48.016752 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 09 13:47:48 crc kubenswrapper[4762]: I1009 13:47:48.017064 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 09 13:47:48 crc kubenswrapper[4762]: I1009 13:47:48.019362 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 09 13:47:48 crc kubenswrapper[4762]: I1009 13:47:48.082378 4762 scope.go:117] "RemoveContainer" containerID="7da27e667e32bf44aad3e2ed6cf4fb05fb6f8f01d096a1d26339c6da6bfcee64" Oct 09 13:47:48 crc kubenswrapper[4762]: I1009 13:47:48.086489 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123-config-data\") pod \"ceilometer-0\" (UID: \"7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123\") " pod="openstack/ceilometer-0" Oct 09 13:47:48 crc kubenswrapper[4762]: I1009 13:47:48.086633 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123-scripts\") pod \"ceilometer-0\" (UID: \"7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123\") " pod="openstack/ceilometer-0" Oct 09 13:47:48 crc kubenswrapper[4762]: I1009 13:47:48.086726 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123\") " pod="openstack/ceilometer-0" Oct 09 13:47:48 crc kubenswrapper[4762]: I1009 13:47:48.086841 4762 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123-run-httpd\") pod \"ceilometer-0\" (UID: \"7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123\") " pod="openstack/ceilometer-0" Oct 09 13:47:48 crc kubenswrapper[4762]: I1009 13:47:48.087121 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8mnnq\" (UniqueName: \"kubernetes.io/projected/7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123-kube-api-access-8mnnq\") pod \"ceilometer-0\" (UID: \"7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123\") " pod="openstack/ceilometer-0" Oct 09 13:47:48 crc kubenswrapper[4762]: I1009 13:47:48.087267 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123-log-httpd\") pod \"ceilometer-0\" (UID: \"7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123\") " pod="openstack/ceilometer-0" Oct 09 13:47:48 crc kubenswrapper[4762]: I1009 13:47:48.087347 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123\") " pod="openstack/ceilometer-0" Oct 09 13:47:48 crc kubenswrapper[4762]: I1009 13:47:48.090480 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-r4x7g"] Oct 09 13:47:48 crc kubenswrapper[4762]: I1009 13:47:48.188848 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8mnnq\" (UniqueName: \"kubernetes.io/projected/7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123-kube-api-access-8mnnq\") pod \"ceilometer-0\" (UID: \"7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123\") " pod="openstack/ceilometer-0" Oct 09 13:47:48 crc kubenswrapper[4762]: I1009 13:47:48.188967 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123-log-httpd\") pod \"ceilometer-0\" (UID: \"7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123\") " pod="openstack/ceilometer-0" Oct 09 13:47:48 crc kubenswrapper[4762]: I1009 13:47:48.188993 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123\") " pod="openstack/ceilometer-0" Oct 09 13:47:48 crc kubenswrapper[4762]: I1009 13:47:48.189055 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123-config-data\") pod \"ceilometer-0\" (UID: \"7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123\") " pod="openstack/ceilometer-0" Oct 09 13:47:48 crc kubenswrapper[4762]: I1009 13:47:48.189105 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123-scripts\") pod \"ceilometer-0\" (UID: \"7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123\") " pod="openstack/ceilometer-0" Oct 09 13:47:48 crc kubenswrapper[4762]: I1009 13:47:48.189125 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: 
\"kubernetes.io/secret/7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123\") " pod="openstack/ceilometer-0" Oct 09 13:47:48 crc kubenswrapper[4762]: I1009 13:47:48.189155 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123-run-httpd\") pod \"ceilometer-0\" (UID: \"7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123\") " pod="openstack/ceilometer-0" Oct 09 13:47:48 crc kubenswrapper[4762]: I1009 13:47:48.189686 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123-run-httpd\") pod \"ceilometer-0\" (UID: \"7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123\") " pod="openstack/ceilometer-0" Oct 09 13:47:48 crc kubenswrapper[4762]: I1009 13:47:48.189929 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123-log-httpd\") pod \"ceilometer-0\" (UID: \"7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123\") " pod="openstack/ceilometer-0" Oct 09 13:47:48 crc kubenswrapper[4762]: I1009 13:47:48.200518 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123-scripts\") pod \"ceilometer-0\" (UID: \"7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123\") " pod="openstack/ceilometer-0" Oct 09 13:47:48 crc kubenswrapper[4762]: I1009 13:47:48.200690 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123\") " pod="openstack/ceilometer-0" Oct 09 13:47:48 crc kubenswrapper[4762]: I1009 13:47:48.200782 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123-config-data\") pod \"ceilometer-0\" (UID: \"7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123\") " pod="openstack/ceilometer-0" Oct 09 13:47:48 crc kubenswrapper[4762]: I1009 13:47:48.206266 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123\") " pod="openstack/ceilometer-0" Oct 09 13:47:48 crc kubenswrapper[4762]: I1009 13:47:48.211005 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8mnnq\" (UniqueName: \"kubernetes.io/projected/7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123-kube-api-access-8mnnq\") pod \"ceilometer-0\" (UID: \"7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123\") " pod="openstack/ceilometer-0" Oct 09 13:47:48 crc kubenswrapper[4762]: I1009 13:47:48.280213 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Oct 09 13:47:48 crc kubenswrapper[4762]: I1009 13:47:48.367197 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0"
Oct 09 13:47:48 crc kubenswrapper[4762]: I1009 13:47:48.843175 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Oct 09 13:47:48 crc kubenswrapper[4762]: W1009 13:47:48.853378 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7b7fd2eb_19c6_4e4c_bc6b_ac907ef01123.slice/crio-368e8767fa204c9479349d7ff55b9fb374694899b415dee2cddba6e2fa03a666 WatchSource:0}: Error finding container 368e8767fa204c9479349d7ff55b9fb374694899b415dee2cddba6e2fa03a666: Status 404 returned error can't find the container with id 368e8767fa204c9479349d7ff55b9fb374694899b415dee2cddba6e2fa03a666
Oct 09 13:47:48 crc kubenswrapper[4762]: I1009 13:47:48.892004 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123","Type":"ContainerStarted","Data":"368e8767fa204c9479349d7ff55b9fb374694899b415dee2cddba6e2fa03a666"}
Oct 09 13:47:48 crc kubenswrapper[4762]: I1009 13:47:48.894128 4762 generic.go:334] "Generic (PLEG): container finished" podID="82614827-1884-4faa-9f97-2fd96ec29941" containerID="abaaec191e407bc97217c5f92e259a12b30db1309fdd44b36367c5b12adcc2c9" exitCode=0
Oct 09 13:47:48 crc kubenswrapper[4762]: I1009 13:47:48.894191 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-r4x7g" event={"ID":"82614827-1884-4faa-9f97-2fd96ec29941","Type":"ContainerDied","Data":"abaaec191e407bc97217c5f92e259a12b30db1309fdd44b36367c5b12adcc2c9"}
Oct 09 13:47:48 crc kubenswrapper[4762]: I1009 13:47:48.894213 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-r4x7g" event={"ID":"82614827-1884-4faa-9f97-2fd96ec29941","Type":"ContainerStarted","Data":"8f4552f8d8e1f93072528d0a1964605722f71b720bf8489390ef8bb6c7b1af61"}
Oct 09 13:47:48 crc kubenswrapper[4762]: I1009 13:47:48.896034 4762 generic.go:334] "Generic (PLEG): container finished" podID="06c5c8d1-2d20-4747-91ff-703b4fa7d1ac" containerID="f3b57929bf460bb7882b9c839b7ea6b53b8415ab3d700492d992cd90071380dc" exitCode=0
Oct 09 13:47:48 crc kubenswrapper[4762]: I1009 13:47:48.896108 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-q674c" event={"ID":"06c5c8d1-2d20-4747-91ff-703b4fa7d1ac","Type":"ContainerDied","Data":"f3b57929bf460bb7882b9c839b7ea6b53b8415ab3d700492d992cd90071380dc"}
Oct 09 13:47:48 crc kubenswrapper[4762]: I1009 13:47:48.896135 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-q674c" event={"ID":"06c5c8d1-2d20-4747-91ff-703b4fa7d1ac","Type":"ContainerStarted","Data":"f6058eb6251049cab9fbbfb95cec2107d1a256c9e5a74f9427ad70028a05fedc"}
Oct 09 13:47:48 crc kubenswrapper[4762]: I1009 13:47:48.900025 4762 generic.go:334] "Generic (PLEG): container finished" podID="ea5d9438-f638-4ddb-9c18-56bad695b73f" containerID="5134850a825fe3c5d0245bded97f8dc054bc2356ca74ae7f948894a5bc3d4c48" exitCode=0
Oct 09 13:47:48 crc kubenswrapper[4762]: I1009 13:47:48.900108 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-tz9c8" event={"ID":"ea5d9438-f638-4ddb-9c18-56bad695b73f","Type":"ContainerDied","Data":"5134850a825fe3c5d0245bded97f8dc054bc2356ca74ae7f948894a5bc3d4c48"}
Oct 09 13:47:48 crc kubenswrapper[4762]: I1009 13:47:48.976552 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bfb04dec-2e18-4fcb-a065-e859a3573ee4" path="/var/lib/kubelet/pods/bfb04dec-2e18-4fcb-a065-e859a3573ee4/volumes"
Oct 09 13:47:49 crc kubenswrapper[4762]: I1009 13:47:49.720497 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Oct 09 13:47:50 crc kubenswrapper[4762]: I1009 13:47:50.460799 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-q674c"
Oct 09 13:47:50 crc kubenswrapper[4762]: I1009 13:47:50.470907 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-r4x7g"
Oct 09 13:47:50 crc kubenswrapper[4762]: I1009 13:47:50.488327 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-tz9c8"
Oct 09 13:47:50 crc kubenswrapper[4762]: I1009 13:47:50.509490 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Oct 09 13:47:50 crc kubenswrapper[4762]: I1009 13:47:50.557843 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7b5t4\" (UniqueName: \"kubernetes.io/projected/06c5c8d1-2d20-4747-91ff-703b4fa7d1ac-kube-api-access-7b5t4\") pod \"06c5c8d1-2d20-4747-91ff-703b4fa7d1ac\" (UID: \"06c5c8d1-2d20-4747-91ff-703b4fa7d1ac\") "
Oct 09 13:47:50 crc kubenswrapper[4762]: I1009 13:47:50.557975 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h2mhv\" (UniqueName: \"kubernetes.io/projected/82614827-1884-4faa-9f97-2fd96ec29941-kube-api-access-h2mhv\") pod \"82614827-1884-4faa-9f97-2fd96ec29941\" (UID: \"82614827-1884-4faa-9f97-2fd96ec29941\") "
Oct 09 13:47:50 crc kubenswrapper[4762]: I1009 13:47:50.558026 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dc7rt\" (UniqueName: \"kubernetes.io/projected/ea5d9438-f638-4ddb-9c18-56bad695b73f-kube-api-access-dc7rt\") pod \"ea5d9438-f638-4ddb-9c18-56bad695b73f\" (UID: \"ea5d9438-f638-4ddb-9c18-56bad695b73f\") "
Oct 09 13:47:50 crc kubenswrapper[4762]: I1009 13:47:50.566989 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ea5d9438-f638-4ddb-9c18-56bad695b73f-kube-api-access-dc7rt" (OuterVolumeSpecName: "kube-api-access-dc7rt") pod "ea5d9438-f638-4ddb-9c18-56bad695b73f" (UID: "ea5d9438-f638-4ddb-9c18-56bad695b73f"). InnerVolumeSpecName "kube-api-access-dc7rt". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 09 13:47:50 crc kubenswrapper[4762]: I1009 13:47:50.571268 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/82614827-1884-4faa-9f97-2fd96ec29941-kube-api-access-h2mhv" (OuterVolumeSpecName: "kube-api-access-h2mhv") pod "82614827-1884-4faa-9f97-2fd96ec29941" (UID: "82614827-1884-4faa-9f97-2fd96ec29941"). InnerVolumeSpecName "kube-api-access-h2mhv". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 09 13:47:50 crc kubenswrapper[4762]: I1009 13:47:50.571465 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/06c5c8d1-2d20-4747-91ff-703b4fa7d1ac-kube-api-access-7b5t4" (OuterVolumeSpecName: "kube-api-access-7b5t4") pod "06c5c8d1-2d20-4747-91ff-703b4fa7d1ac" (UID: "06c5c8d1-2d20-4747-91ff-703b4fa7d1ac"). InnerVolumeSpecName "kube-api-access-7b5t4". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 09 13:47:50 crc kubenswrapper[4762]: I1009 13:47:50.659774 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b8ec4f0-adb7-41f8-8552-f23a5dfca100-combined-ca-bundle\") pod \"4b8ec4f0-adb7-41f8-8552-f23a5dfca100\" (UID: \"4b8ec4f0-adb7-41f8-8552-f23a5dfca100\") "
Oct 09 13:47:50 crc kubenswrapper[4762]: I1009 13:47:50.659908 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4b8ec4f0-adb7-41f8-8552-f23a5dfca100-scripts\") pod \"4b8ec4f0-adb7-41f8-8552-f23a5dfca100\" (UID: \"4b8ec4f0-adb7-41f8-8552-f23a5dfca100\") "
Oct 09 13:47:50 crc kubenswrapper[4762]: I1009 13:47:50.659943 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4b8ec4f0-adb7-41f8-8552-f23a5dfca100-config-data\") pod \"4b8ec4f0-adb7-41f8-8552-f23a5dfca100\" (UID: \"4b8ec4f0-adb7-41f8-8552-f23a5dfca100\") "
Oct 09 13:47:50 crc kubenswrapper[4762]: I1009 13:47:50.660054 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4b8ec4f0-adb7-41f8-8552-f23a5dfca100-httpd-run\") pod \"4b8ec4f0-adb7-41f8-8552-f23a5dfca100\" (UID: \"4b8ec4f0-adb7-41f8-8552-f23a5dfca100\") "
Oct 09 13:47:50 crc kubenswrapper[4762]: I1009 13:47:50.660074 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4b8ec4f0-adb7-41f8-8552-f23a5dfca100-public-tls-certs\") pod \"4b8ec4f0-adb7-41f8-8552-f23a5dfca100\" (UID: \"4b8ec4f0-adb7-41f8-8552-f23a5dfca100\") "
Oct 09 13:47:50 crc kubenswrapper[4762]: I1009 13:47:50.660156 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcrfz\" (UniqueName: \"kubernetes.io/projected/4b8ec4f0-adb7-41f8-8552-f23a5dfca100-kube-api-access-xcrfz\") pod \"4b8ec4f0-adb7-41f8-8552-f23a5dfca100\" (UID: \"4b8ec4f0-adb7-41f8-8552-f23a5dfca100\") "
Oct 09 13:47:50 crc kubenswrapper[4762]: I1009 13:47:50.660189 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4b8ec4f0-adb7-41f8-8552-f23a5dfca100-logs\") pod \"4b8ec4f0-adb7-41f8-8552-f23a5dfca100\" (UID: \"4b8ec4f0-adb7-41f8-8552-f23a5dfca100\") "
Oct 09 13:47:50 crc kubenswrapper[4762]: I1009 13:47:50.660296 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"4b8ec4f0-adb7-41f8-8552-f23a5dfca100\" (UID: \"4b8ec4f0-adb7-41f8-8552-f23a5dfca100\") "
Oct 09 13:47:50 crc kubenswrapper[4762]: I1009 13:47:50.660775 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4b8ec4f0-adb7-41f8-8552-f23a5dfca100-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "4b8ec4f0-adb7-41f8-8552-f23a5dfca100" (UID: "4b8ec4f0-adb7-41f8-8552-f23a5dfca100"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 09 13:47:50 crc kubenswrapper[4762]: I1009 13:47:50.660805 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7b5t4\" (UniqueName: \"kubernetes.io/projected/06c5c8d1-2d20-4747-91ff-703b4fa7d1ac-kube-api-access-7b5t4\") on node \"crc\" DevicePath \"\""
Oct 09 13:47:50 crc kubenswrapper[4762]: I1009 13:47:50.660904 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h2mhv\" (UniqueName: \"kubernetes.io/projected/82614827-1884-4faa-9f97-2fd96ec29941-kube-api-access-h2mhv\") on node \"crc\" DevicePath \"\""
Oct 09 13:47:50 crc kubenswrapper[4762]: I1009 13:47:50.660924 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dc7rt\" (UniqueName: \"kubernetes.io/projected/ea5d9438-f638-4ddb-9c18-56bad695b73f-kube-api-access-dc7rt\") on node \"crc\" DevicePath \"\""
Oct 09 13:47:50 crc kubenswrapper[4762]: I1009 13:47:50.661087 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4b8ec4f0-adb7-41f8-8552-f23a5dfca100-logs" (OuterVolumeSpecName: "logs") pod "4b8ec4f0-adb7-41f8-8552-f23a5dfca100" (UID: "4b8ec4f0-adb7-41f8-8552-f23a5dfca100"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 09 13:47:50 crc kubenswrapper[4762]: I1009 13:47:50.663920 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4b8ec4f0-adb7-41f8-8552-f23a5dfca100-scripts" (OuterVolumeSpecName: "scripts") pod "4b8ec4f0-adb7-41f8-8552-f23a5dfca100" (UID: "4b8ec4f0-adb7-41f8-8552-f23a5dfca100"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 09 13:47:50 crc kubenswrapper[4762]: I1009 13:47:50.664621 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage07-crc" (OuterVolumeSpecName: "glance") pod "4b8ec4f0-adb7-41f8-8552-f23a5dfca100" (UID: "4b8ec4f0-adb7-41f8-8552-f23a5dfca100"). InnerVolumeSpecName "local-storage07-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Oct 09 13:47:50 crc kubenswrapper[4762]: I1009 13:47:50.665000 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4b8ec4f0-adb7-41f8-8552-f23a5dfca100-kube-api-access-xcrfz" (OuterVolumeSpecName: "kube-api-access-xcrfz") pod "4b8ec4f0-adb7-41f8-8552-f23a5dfca100" (UID: "4b8ec4f0-adb7-41f8-8552-f23a5dfca100"). InnerVolumeSpecName "kube-api-access-xcrfz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 09 13:47:50 crc kubenswrapper[4762]: I1009 13:47:50.708854 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4b8ec4f0-adb7-41f8-8552-f23a5dfca100-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4b8ec4f0-adb7-41f8-8552-f23a5dfca100" (UID: "4b8ec4f0-adb7-41f8-8552-f23a5dfca100"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 09 13:47:50 crc kubenswrapper[4762]: I1009 13:47:50.748683 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4b8ec4f0-adb7-41f8-8552-f23a5dfca100-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "4b8ec4f0-adb7-41f8-8552-f23a5dfca100" (UID: "4b8ec4f0-adb7-41f8-8552-f23a5dfca100"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 09 13:47:50 crc kubenswrapper[4762]: I1009 13:47:50.765569 4762 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" "
Oct 09 13:47:50 crc kubenswrapper[4762]: I1009 13:47:50.765609 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b8ec4f0-adb7-41f8-8552-f23a5dfca100-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 09 13:47:50 crc kubenswrapper[4762]: I1009 13:47:50.765620 4762 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4b8ec4f0-adb7-41f8-8552-f23a5dfca100-scripts\") on node \"crc\" DevicePath \"\""
Oct 09 13:47:50 crc kubenswrapper[4762]: I1009 13:47:50.765628 4762 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4b8ec4f0-adb7-41f8-8552-f23a5dfca100-httpd-run\") on node \"crc\" DevicePath \"\""
Oct 09 13:47:50 crc kubenswrapper[4762]: I1009 13:47:50.765657 4762 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4b8ec4f0-adb7-41f8-8552-f23a5dfca100-public-tls-certs\") on node \"crc\" DevicePath \"\""
Oct 09 13:47:50 crc kubenswrapper[4762]: I1009 13:47:50.765672 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcrfz\" (UniqueName: \"kubernetes.io/projected/4b8ec4f0-adb7-41f8-8552-f23a5dfca100-kube-api-access-xcrfz\") on node \"crc\" DevicePath \"\""
Oct 09 13:47:50 crc kubenswrapper[4762]: I1009 13:47:50.765685 4762 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4b8ec4f0-adb7-41f8-8552-f23a5dfca100-logs\") on node \"crc\" DevicePath \"\""
Oct 09 13:47:50 crc kubenswrapper[4762]: I1009 13:47:50.785601 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4b8ec4f0-adb7-41f8-8552-f23a5dfca100-config-data" (OuterVolumeSpecName: "config-data") pod "4b8ec4f0-adb7-41f8-8552-f23a5dfca100" (UID: "4b8ec4f0-adb7-41f8-8552-f23a5dfca100"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 09 13:47:50 crc kubenswrapper[4762]: I1009 13:47:50.803580 4762 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage07-crc" (UniqueName: "kubernetes.io/local-volume/local-storage07-crc") on node "crc"
Oct 09 13:47:50 crc kubenswrapper[4762]: I1009 13:47:50.867542 4762 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4b8ec4f0-adb7-41f8-8552-f23a5dfca100-config-data\") on node \"crc\" DevicePath \"\""
Oct 09 13:47:50 crc kubenswrapper[4762]: I1009 13:47:50.867581 4762 reconciler_common.go:293] "Volume detached for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" DevicePath \"\""
Oct 09 13:47:50 crc kubenswrapper[4762]: I1009 13:47:50.925556 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-r4x7g" event={"ID":"82614827-1884-4faa-9f97-2fd96ec29941","Type":"ContainerDied","Data":"8f4552f8d8e1f93072528d0a1964605722f71b720bf8489390ef8bb6c7b1af61"}
Oct 09 13:47:50 crc kubenswrapper[4762]: I1009 13:47:50.925599 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8f4552f8d8e1f93072528d0a1964605722f71b720bf8489390ef8bb6c7b1af61"
Oct 09 13:47:50 crc kubenswrapper[4762]: I1009 13:47:50.925678 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-r4x7g"
Oct 09 13:47:50 crc kubenswrapper[4762]: I1009 13:47:50.936952 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-q674c" event={"ID":"06c5c8d1-2d20-4747-91ff-703b4fa7d1ac","Type":"ContainerDied","Data":"f6058eb6251049cab9fbbfb95cec2107d1a256c9e5a74f9427ad70028a05fedc"}
Oct 09 13:47:50 crc kubenswrapper[4762]: I1009 13:47:50.937306 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f6058eb6251049cab9fbbfb95cec2107d1a256c9e5a74f9427ad70028a05fedc"
Oct 09 13:47:50 crc kubenswrapper[4762]: I1009 13:47:50.936995 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-q674c"
Oct 09 13:47:50 crc kubenswrapper[4762]: I1009 13:47:50.940738 4762 generic.go:334] "Generic (PLEG): container finished" podID="32c327f2-9986-410e-972f-4913811fb1d8" containerID="d4a0bba0c2ec293c5641e19b646eb1580a7b1bbcfd4e0c9265398b15bcaca662" exitCode=0
Oct 09 13:47:50 crc kubenswrapper[4762]: I1009 13:47:50.940795 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"32c327f2-9986-410e-972f-4913811fb1d8","Type":"ContainerDied","Data":"d4a0bba0c2ec293c5641e19b646eb1580a7b1bbcfd4e0c9265398b15bcaca662"}
Oct 09 13:47:50 crc kubenswrapper[4762]: I1009 13:47:50.944246 4762 generic.go:334] "Generic (PLEG): container finished" podID="4b8ec4f0-adb7-41f8-8552-f23a5dfca100" containerID="398df6e246619b4b62d1ebd1853ebe221dc9d0e416c32b6e225df3d6fc27772e" exitCode=0
Oct 09 13:47:50 crc kubenswrapper[4762]: I1009 13:47:50.944320 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"4b8ec4f0-adb7-41f8-8552-f23a5dfca100","Type":"ContainerDied","Data":"398df6e246619b4b62d1ebd1853ebe221dc9d0e416c32b6e225df3d6fc27772e"}
Oct 09 13:47:50 crc kubenswrapper[4762]: I1009 13:47:50.944346 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"4b8ec4f0-adb7-41f8-8552-f23a5dfca100","Type":"ContainerDied","Data":"74f2ca44dc16b904dd3391648182e0bc22ee69e28356595de3f675b706d33d9f"}
Oct 09 13:47:50 crc kubenswrapper[4762]: I1009 13:47:50.944365 4762 scope.go:117] "RemoveContainer" containerID="398df6e246619b4b62d1ebd1853ebe221dc9d0e416c32b6e225df3d6fc27772e"
Oct 09 13:47:50 crc kubenswrapper[4762]: I1009 13:47:50.944946 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Oct 09 13:47:50 crc kubenswrapper[4762]: I1009 13:47:50.954891 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-tz9c8" event={"ID":"ea5d9438-f638-4ddb-9c18-56bad695b73f","Type":"ContainerDied","Data":"6cdc4470c11442884194efd1400eb687edc9bda5990f4e266718da4cb0b30c5f"}
Oct 09 13:47:50 crc kubenswrapper[4762]: I1009 13:47:50.954929 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6cdc4470c11442884194efd1400eb687edc9bda5990f4e266718da4cb0b30c5f"
Oct 09 13:47:50 crc kubenswrapper[4762]: I1009 13:47:50.954998 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-tz9c8"
Oct 09 13:47:50 crc kubenswrapper[4762]: I1009 13:47:50.962856 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123","Type":"ContainerStarted","Data":"4360b7182b12248ec01332c42aea20c221f3505281751393202dd297e3042f4a"}
Oct 09 13:47:50 crc kubenswrapper[4762]: I1009 13:47:50.962901 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123","Type":"ContainerStarted","Data":"8e515cd068684b20f7535bcda6ad9e60e777251bb78a300bd0b81c6531d0beca"}
Oct 09 13:47:50 crc kubenswrapper[4762]: I1009 13:47:50.980481 4762 scope.go:117] "RemoveContainer" containerID="3ee2ecd42d3541592dfa60b557278e6f9269df0e7fe3828bf670134b10634c48"
Oct 09 13:47:51 crc kubenswrapper[4762]: I1009 13:47:51.088526 4762 scope.go:117] "RemoveContainer" containerID="398df6e246619b4b62d1ebd1853ebe221dc9d0e416c32b6e225df3d6fc27772e"
Oct 09 13:47:51 crc kubenswrapper[4762]: E1009 13:47:51.092824 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"398df6e246619b4b62d1ebd1853ebe221dc9d0e416c32b6e225df3d6fc27772e\": container with ID starting with 398df6e246619b4b62d1ebd1853ebe221dc9d0e416c32b6e225df3d6fc27772e not found: ID does not exist" containerID="398df6e246619b4b62d1ebd1853ebe221dc9d0e416c32b6e225df3d6fc27772e"
Oct 09 13:47:51 crc kubenswrapper[4762]: I1009 13:47:51.092882 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"398df6e246619b4b62d1ebd1853ebe221dc9d0e416c32b6e225df3d6fc27772e"} err="failed to get container status \"398df6e246619b4b62d1ebd1853ebe221dc9d0e416c32b6e225df3d6fc27772e\": rpc error: code = NotFound desc = could not find container \"398df6e246619b4b62d1ebd1853ebe221dc9d0e416c32b6e225df3d6fc27772e\": container with ID starting with 398df6e246619b4b62d1ebd1853ebe221dc9d0e416c32b6e225df3d6fc27772e not found: ID does not exist"
Oct 09 13:47:51 crc kubenswrapper[4762]: I1009 13:47:51.092917 4762 scope.go:117] "RemoveContainer" containerID="3ee2ecd42d3541592dfa60b557278e6f9269df0e7fe3828bf670134b10634c48"
Oct 09 13:47:51 crc kubenswrapper[4762]: E1009 13:47:51.093354 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3ee2ecd42d3541592dfa60b557278e6f9269df0e7fe3828bf670134b10634c48\": container with ID starting with 3ee2ecd42d3541592dfa60b557278e6f9269df0e7fe3828bf670134b10634c48 not found: ID does not exist" containerID="3ee2ecd42d3541592dfa60b557278e6f9269df0e7fe3828bf670134b10634c48"
Oct 09 13:47:51 crc kubenswrapper[4762]: I1009 13:47:51.093386 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3ee2ecd42d3541592dfa60b557278e6f9269df0e7fe3828bf670134b10634c48"} err="failed to get container status \"3ee2ecd42d3541592dfa60b557278e6f9269df0e7fe3828bf670134b10634c48\": rpc error: code = NotFound desc = could not find container \"3ee2ecd42d3541592dfa60b557278e6f9269df0e7fe3828bf670134b10634c48\": container with ID starting with 3ee2ecd42d3541592dfa60b557278e6f9269df0e7fe3828bf670134b10634c48 not found: ID does not exist"
Oct 09 13:47:51 crc kubenswrapper[4762]: I1009 13:47:51.133095 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"]
Oct 09 13:47:51 crc kubenswrapper[4762]: I1009 13:47:51.145593 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"]
Oct 09 13:47:51 crc kubenswrapper[4762]: I1009 13:47:51.161501 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"]
Oct 09 13:47:51 crc kubenswrapper[4762]: E1009 13:47:51.162309 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06c5c8d1-2d20-4747-91ff-703b4fa7d1ac" containerName="mariadb-database-create"
Oct 09 13:47:51 crc kubenswrapper[4762]: I1009 13:47:51.162328 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="06c5c8d1-2d20-4747-91ff-703b4fa7d1ac" containerName="mariadb-database-create"
Oct 09 13:47:51 crc kubenswrapper[4762]: E1009 13:47:51.162354 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="82614827-1884-4faa-9f97-2fd96ec29941" containerName="mariadb-database-create"
Oct 09 13:47:51 crc kubenswrapper[4762]: I1009 13:47:51.162362 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="82614827-1884-4faa-9f97-2fd96ec29941" containerName="mariadb-database-create"
Oct 09 13:47:51 crc kubenswrapper[4762]: E1009 13:47:51.162385 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4b8ec4f0-adb7-41f8-8552-f23a5dfca100" containerName="glance-log"
Oct 09 13:47:51 crc kubenswrapper[4762]: I1009 13:47:51.162395 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="4b8ec4f0-adb7-41f8-8552-f23a5dfca100" containerName="glance-log"
Oct 09 13:47:51 crc kubenswrapper[4762]: E1009 13:47:51.162411 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea5d9438-f638-4ddb-9c18-56bad695b73f" containerName="mariadb-database-create"
Oct 09 13:47:51 crc kubenswrapper[4762]: I1009 13:47:51.162420 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea5d9438-f638-4ddb-9c18-56bad695b73f" containerName="mariadb-database-create"
Oct 09 13:47:51 crc kubenswrapper[4762]: E1009 13:47:51.162431 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4b8ec4f0-adb7-41f8-8552-f23a5dfca100" containerName="glance-httpd"
Oct 09 13:47:51 crc kubenswrapper[4762]: I1009 13:47:51.162439 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="4b8ec4f0-adb7-41f8-8552-f23a5dfca100" containerName="glance-httpd"
Oct 09 13:47:51 crc kubenswrapper[4762]: I1009 13:47:51.162659 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="4b8ec4f0-adb7-41f8-8552-f23a5dfca100" containerName="glance-httpd"
Oct 09 13:47:51 crc kubenswrapper[4762]: I1009 13:47:51.162677 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="82614827-1884-4faa-9f97-2fd96ec29941" containerName="mariadb-database-create"
Oct 09 13:47:51 crc kubenswrapper[4762]: I1009 13:47:51.162696 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="4b8ec4f0-adb7-41f8-8552-f23a5dfca100" containerName="glance-log"
Oct 09 13:47:51 crc kubenswrapper[4762]: I1009 13:47:51.162707 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="ea5d9438-f638-4ddb-9c18-56bad695b73f" containerName="mariadb-database-create"
Oct 09 13:47:51 crc kubenswrapper[4762]: I1009 13:47:51.162729 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="06c5c8d1-2d20-4747-91ff-703b4fa7d1ac" containerName="mariadb-database-create"
Oct 09 13:47:51 crc kubenswrapper[4762]: I1009 13:47:51.163885 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Oct 09 13:47:51 crc kubenswrapper[4762]: I1009 13:47:51.165976 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data"
Oct 09 13:47:51 crc kubenswrapper[4762]: I1009 13:47:51.169028 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc"
Oct 09 13:47:51 crc kubenswrapper[4762]: I1009 13:47:51.179145 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"]
Oct 09 13:47:51 crc kubenswrapper[4762]: I1009 13:47:51.282492 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/96d5f387-4c72-4cc6-9776-56fcb49b8851-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"96d5f387-4c72-4cc6-9776-56fcb49b8851\") " pod="openstack/glance-default-external-api-0"
Oct 09 13:47:51 crc kubenswrapper[4762]: I1009 13:47:51.282558 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kn2zx\" (UniqueName: \"kubernetes.io/projected/96d5f387-4c72-4cc6-9776-56fcb49b8851-kube-api-access-kn2zx\") pod \"glance-default-external-api-0\" (UID: \"96d5f387-4c72-4cc6-9776-56fcb49b8851\") " pod="openstack/glance-default-external-api-0"
Oct 09 13:47:51 crc kubenswrapper[4762]: I1009 13:47:51.282584 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96d5f387-4c72-4cc6-9776-56fcb49b8851-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"96d5f387-4c72-4cc6-9776-56fcb49b8851\") " pod="openstack/glance-default-external-api-0"
Oct 09 13:47:51 crc kubenswrapper[4762]: I1009 13:47:51.282757 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/96d5f387-4c72-4cc6-9776-56fcb49b8851-config-data\") pod \"glance-default-external-api-0\" (UID: \"96d5f387-4c72-4cc6-9776-56fcb49b8851\") " pod="openstack/glance-default-external-api-0"
Oct 09 13:47:51 crc kubenswrapper[4762]: I1009 13:47:51.282891 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/96d5f387-4c72-4cc6-9776-56fcb49b8851-scripts\") pod \"glance-default-external-api-0\" (UID: \"96d5f387-4c72-4cc6-9776-56fcb49b8851\") " pod="openstack/glance-default-external-api-0"
Oct 09 13:47:51 crc kubenswrapper[4762]: I1009 13:47:51.283006 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/96d5f387-4c72-4cc6-9776-56fcb49b8851-logs\") pod \"glance-default-external-api-0\" (UID: \"96d5f387-4c72-4cc6-9776-56fcb49b8851\") " pod="openstack/glance-default-external-api-0"
Oct 09 13:47:51 crc kubenswrapper[4762]: I1009 13:47:51.283025 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/96d5f387-4c72-4cc6-9776-56fcb49b8851-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"96d5f387-4c72-4cc6-9776-56fcb49b8851\") " pod="openstack/glance-default-external-api-0"
Oct 09 13:47:51 crc kubenswrapper[4762]: I1009 13:47:51.283137 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"96d5f387-4c72-4cc6-9776-56fcb49b8851\") " pod="openstack/glance-default-external-api-0"
Oct 09 13:47:51 crc kubenswrapper[4762]: I1009 13:47:51.385215 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kn2zx\" (UniqueName: \"kubernetes.io/projected/96d5f387-4c72-4cc6-9776-56fcb49b8851-kube-api-access-kn2zx\") pod \"glance-default-external-api-0\" (UID: \"96d5f387-4c72-4cc6-9776-56fcb49b8851\") " pod="openstack/glance-default-external-api-0"
Oct 09 13:47:51 crc kubenswrapper[4762]: I1009 13:47:51.385270 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96d5f387-4c72-4cc6-9776-56fcb49b8851-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"96d5f387-4c72-4cc6-9776-56fcb49b8851\") " pod="openstack/glance-default-external-api-0"
Oct 09 13:47:51 crc kubenswrapper[4762]: I1009 13:47:51.385320 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/96d5f387-4c72-4cc6-9776-56fcb49b8851-config-data\") pod \"glance-default-external-api-0\" (UID: \"96d5f387-4c72-4cc6-9776-56fcb49b8851\") " pod="openstack/glance-default-external-api-0"
Oct 09 13:47:51 crc kubenswrapper[4762]: I1009 13:47:51.385365 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/96d5f387-4c72-4cc6-9776-56fcb49b8851-scripts\") pod \"glance-default-external-api-0\" (UID: \"96d5f387-4c72-4cc6-9776-56fcb49b8851\") " pod="openstack/glance-default-external-api-0"
Oct 09 13:47:51 crc kubenswrapper[4762]: I1009 13:47:51.385427 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/96d5f387-4c72-4cc6-9776-56fcb49b8851-logs\") pod \"glance-default-external-api-0\" (UID: \"96d5f387-4c72-4cc6-9776-56fcb49b8851\") " pod="openstack/glance-default-external-api-0"
Oct 09 13:47:51 crc kubenswrapper[4762]: I1009 13:47:51.385446 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/96d5f387-4c72-4cc6-9776-56fcb49b8851-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"96d5f387-4c72-4cc6-9776-56fcb49b8851\") " pod="openstack/glance-default-external-api-0"
Oct 09 13:47:51 crc kubenswrapper[4762]: I1009 13:47:51.385534 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"96d5f387-4c72-4cc6-9776-56fcb49b8851\") " pod="openstack/glance-default-external-api-0"
Oct 09 13:47:51 crc kubenswrapper[4762]: I1009 13:47:51.385621 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/96d5f387-4c72-4cc6-9776-56fcb49b8851-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"96d5f387-4c72-4cc6-9776-56fcb49b8851\") " pod="openstack/glance-default-external-api-0"
Oct 09 13:47:51 crc kubenswrapper[4762]: I1009 13:47:51.387342 4762 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"96d5f387-4c72-4cc6-9776-56fcb49b8851\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/glance-default-external-api-0"
Oct 09 13:47:51 crc kubenswrapper[4762]: I1009 13:47:51.387743 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/96d5f387-4c72-4cc6-9776-56fcb49b8851-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"96d5f387-4c72-4cc6-9776-56fcb49b8851\") " pod="openstack/glance-default-external-api-0"
Oct 09 13:47:51 crc kubenswrapper[4762]: I1009 13:47:51.388940 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/96d5f387-4c72-4cc6-9776-56fcb49b8851-logs\") pod \"glance-default-external-api-0\" (UID: \"96d5f387-4c72-4cc6-9776-56fcb49b8851\") " pod="openstack/glance-default-external-api-0"
Oct 09 13:47:51 crc kubenswrapper[4762]: I1009 13:47:51.391049 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/96d5f387-4c72-4cc6-9776-56fcb49b8851-scripts\") pod \"glance-default-external-api-0\" (UID: \"96d5f387-4c72-4cc6-9776-56fcb49b8851\") " pod="openstack/glance-default-external-api-0"
Oct 09 13:47:51 crc kubenswrapper[4762]: I1009 13:47:51.393207 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96d5f387-4c72-4cc6-9776-56fcb49b8851-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"96d5f387-4c72-4cc6-9776-56fcb49b8851\") " pod="openstack/glance-default-external-api-0"
Oct 09 13:47:51 crc kubenswrapper[4762]: I1009 13:47:51.394355 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/96d5f387-4c72-4cc6-9776-56fcb49b8851-config-data\") pod \"glance-default-external-api-0\" (UID: \"96d5f387-4c72-4cc6-9776-56fcb49b8851\") " pod="openstack/glance-default-external-api-0"
Oct 09 13:47:51 crc kubenswrapper[4762]: I1009 13:47:51.396263 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/96d5f387-4c72-4cc6-9776-56fcb49b8851-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"96d5f387-4c72-4cc6-9776-56fcb49b8851\") " pod="openstack/glance-default-external-api-0"
Oct 09 13:47:51 crc kubenswrapper[4762]: I1009 13:47:51.422100 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kn2zx\" (UniqueName: \"kubernetes.io/projected/96d5f387-4c72-4cc6-9776-56fcb49b8851-kube-api-access-kn2zx\") pod \"glance-default-external-api-0\" (UID: \"96d5f387-4c72-4cc6-9776-56fcb49b8851\") " pod="openstack/glance-default-external-api-0"
Oct 09 13:47:51 crc kubenswrapper[4762]: I1009 13:47:51.423471 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"96d5f387-4c72-4cc6-9776-56fcb49b8851\") " pod="openstack/glance-default-external-api-0"
Oct 09 13:47:51 crc kubenswrapper[4762]: I1009 13:47:51.482332 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Oct 09 13:47:51 crc kubenswrapper[4762]: I1009 13:47:51.549504 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Oct 09 13:47:51 crc kubenswrapper[4762]: I1009 13:47:51.691578 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/32c327f2-9986-410e-972f-4913811fb1d8-internal-tls-certs\") pod \"32c327f2-9986-410e-972f-4913811fb1d8\" (UID: \"32c327f2-9986-410e-972f-4913811fb1d8\") "
Oct 09 13:47:51 crc kubenswrapper[4762]: I1009 13:47:51.691866 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/32c327f2-9986-410e-972f-4913811fb1d8-logs\") pod \"32c327f2-9986-410e-972f-4913811fb1d8\" (UID: \"32c327f2-9986-410e-972f-4913811fb1d8\") "
Oct 09 13:47:51 crc kubenswrapper[4762]: I1009 13:47:51.691969 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/32c327f2-9986-410e-972f-4913811fb1d8-httpd-run\") pod \"32c327f2-9986-410e-972f-4913811fb1d8\" (UID: \"32c327f2-9986-410e-972f-4913811fb1d8\") "
Oct 09 13:47:51 crc kubenswrapper[4762]: I1009 13:47:51.692018 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/32c327f2-9986-410e-972f-4913811fb1d8-config-data\") pod \"32c327f2-9986-410e-972f-4913811fb1d8\" (UID: \"32c327f2-9986-410e-972f-4913811fb1d8\") "
Oct 09 13:47:51 crc kubenswrapper[4762]: I1009 13:47:51.692074 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"32c327f2-9986-410e-972f-4913811fb1d8\" (UID: \"32c327f2-9986-410e-972f-4913811fb1d8\") "
Oct 09 13:47:51 crc kubenswrapper[4762]: I1009 13:47:51.692134 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/32c327f2-9986-410e-972f-4913811fb1d8-scripts\") pod \"32c327f2-9986-410e-972f-4913811fb1d8\" (UID: \"32c327f2-9986-410e-972f-4913811fb1d8\") "
Oct 09 13:47:51 crc kubenswrapper[4762]: I1009 13:47:51.692168 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z5x77\" (UniqueName: \"kubernetes.io/projected/32c327f2-9986-410e-972f-4913811fb1d8-kube-api-access-z5x77\") pod \"32c327f2-9986-410e-972f-4913811fb1d8\" (UID: \"32c327f2-9986-410e-972f-4913811fb1d8\") "
Oct 09 13:47:51 crc kubenswrapper[4762]: I1009 13:47:51.692216 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32c327f2-9986-410e-972f-4913811fb1d8-combined-ca-bundle\") pod \"32c327f2-9986-410e-972f-4913811fb1d8\" (UID: \"32c327f2-9986-410e-972f-4913811fb1d8\") "
Oct 09 13:47:51 crc kubenswrapper[4762]: I1009 13:47:51.693305 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/32c327f2-9986-410e-972f-4913811fb1d8-logs" (OuterVolumeSpecName: "logs") pod "32c327f2-9986-410e-972f-4913811fb1d8" (UID: "32c327f2-9986-410e-972f-4913811fb1d8"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 09 13:47:51 crc kubenswrapper[4762]: I1009 13:47:51.695936 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/32c327f2-9986-410e-972f-4913811fb1d8-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "32c327f2-9986-410e-972f-4913811fb1d8" (UID: "32c327f2-9986-410e-972f-4913811fb1d8"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 09 13:47:51 crc kubenswrapper[4762]: I1009 13:47:51.700075 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/32c327f2-9986-410e-972f-4913811fb1d8-kube-api-access-z5x77" (OuterVolumeSpecName: "kube-api-access-z5x77") pod "32c327f2-9986-410e-972f-4913811fb1d8" (UID: "32c327f2-9986-410e-972f-4913811fb1d8"). InnerVolumeSpecName "kube-api-access-z5x77". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 09 13:47:51 crc kubenswrapper[4762]: I1009 13:47:51.707871 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage11-crc" (OuterVolumeSpecName: "glance") pod "32c327f2-9986-410e-972f-4913811fb1d8" (UID: "32c327f2-9986-410e-972f-4913811fb1d8"). InnerVolumeSpecName "local-storage11-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Oct 09 13:47:51 crc kubenswrapper[4762]: I1009 13:47:51.709084 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/32c327f2-9986-410e-972f-4913811fb1d8-scripts" (OuterVolumeSpecName: "scripts") pod "32c327f2-9986-410e-972f-4913811fb1d8" (UID: "32c327f2-9986-410e-972f-4913811fb1d8"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 09 13:47:51 crc kubenswrapper[4762]: I1009 13:47:51.766540 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/32c327f2-9986-410e-972f-4913811fb1d8-config-data" (OuterVolumeSpecName: "config-data") pod "32c327f2-9986-410e-972f-4913811fb1d8" (UID: "32c327f2-9986-410e-972f-4913811fb1d8"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 09 13:47:51 crc kubenswrapper[4762]: I1009 13:47:51.780764 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/32c327f2-9986-410e-972f-4913811fb1d8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "32c327f2-9986-410e-972f-4913811fb1d8" (UID: "32c327f2-9986-410e-972f-4913811fb1d8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 09 13:47:51 crc kubenswrapper[4762]: I1009 13:47:51.794314 4762 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/32c327f2-9986-410e-972f-4913811fb1d8-logs\") on node \"crc\" DevicePath \"\""
Oct 09 13:47:51 crc kubenswrapper[4762]: I1009 13:47:51.794344 4762 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/32c327f2-9986-410e-972f-4913811fb1d8-httpd-run\") on node \"crc\" DevicePath \"\""
Oct 09 13:47:51 crc kubenswrapper[4762]: I1009 13:47:51.794355 4762 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/32c327f2-9986-410e-972f-4913811fb1d8-config-data\") on node \"crc\" DevicePath \"\""
Oct 09 13:47:51 crc kubenswrapper[4762]: I1009 13:47:51.794381 4762 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" "
Oct 09 13:47:51 crc kubenswrapper[4762]: I1009 13:47:51.794390 4762 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/32c327f2-9986-410e-972f-4913811fb1d8-scripts\") on node \"crc\" DevicePath \"\""
Oct 09 13:47:51 crc kubenswrapper[4762]: I1009 13:47:51.794399 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z5x77\" (UniqueName: \"kubernetes.io/projected/32c327f2-9986-410e-972f-4913811fb1d8-kube-api-access-z5x77\") on node \"crc\" DevicePath \"\""
Oct 09 13:47:51 crc kubenswrapper[4762]: I1009 13:47:51.794408 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32c327f2-9986-410e-972f-4913811fb1d8-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 09 13:47:51 crc kubenswrapper[4762]: I1009 13:47:51.812799 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/32c327f2-9986-410e-972f-4913811fb1d8-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "32c327f2-9986-410e-972f-4913811fb1d8" (UID: "32c327f2-9986-410e-972f-4913811fb1d8"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 09 13:47:51 crc kubenswrapper[4762]: I1009 13:47:51.813412 4762 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage11-crc" (UniqueName: "kubernetes.io/local-volume/local-storage11-crc") on node "crc"
Oct 09 13:47:51 crc kubenswrapper[4762]: I1009 13:47:51.895716 4762 reconciler_common.go:293] "Volume detached for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" DevicePath \"\""
Oct 09 13:47:51 crc kubenswrapper[4762]: I1009 13:47:51.895751 4762 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/32c327f2-9986-410e-972f-4913811fb1d8-internal-tls-certs\") on node \"crc\" DevicePath \"\""
Oct 09 13:47:51 crc kubenswrapper[4762]: I1009 13:47:51.973893 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"32c327f2-9986-410e-972f-4913811fb1d8","Type":"ContainerDied","Data":"b1640c29a8e318609d2ba4aaca20d4844f5e228bbb80d9c3110f56c114c554f5"}
Oct 09 13:47:51 crc kubenswrapper[4762]: I1009 13:47:51.973906 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Oct 09 13:47:51 crc kubenswrapper[4762]: I1009 13:47:51.973956 4762 scope.go:117] "RemoveContainer" containerID="d4a0bba0c2ec293c5641e19b646eb1580a7b1bbcfd4e0c9265398b15bcaca662"
Oct 09 13:47:51 crc kubenswrapper[4762]: I1009 13:47:51.981673 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123","Type":"ContainerStarted","Data":"6ecd20af377a3a628e49182f39bbc41ab37268d7ce7bfe51c86b780a40aa3346"}
Oct 09 13:47:52 crc kubenswrapper[4762]: I1009 13:47:52.000109 4762 scope.go:117] "RemoveContainer" containerID="00d36f7c2beef733a43b4d1caf4716e4c4c2c7fdd8d236f178fa3e8ccfc978d5"
Oct 09 13:47:52 crc kubenswrapper[4762]: I1009 13:47:52.040629 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"]
Oct 09 13:47:52 crc kubenswrapper[4762]: I1009 13:47:52.095196 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"]
Oct 09 13:47:52 crc kubenswrapper[4762]: I1009 13:47:52.102606 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"]
Oct 09 13:47:52 crc kubenswrapper[4762]: E1009 13:47:52.103161 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32c327f2-9986-410e-972f-4913811fb1d8" containerName="glance-httpd"
Oct 09 13:47:52 crc kubenswrapper[4762]: I1009 13:47:52.103187 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="32c327f2-9986-410e-972f-4913811fb1d8" containerName="glance-httpd"
Oct 09 13:47:52 crc kubenswrapper[4762]: E1009 13:47:52.103206 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32c327f2-9986-410e-972f-4913811fb1d8" containerName="glance-log"
Oct 09 13:47:52 crc kubenswrapper[4762]: I1009 13:47:52.103213 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="32c327f2-9986-410e-972f-4913811fb1d8" containerName="glance-log"
Oct 09 13:47:52 crc kubenswrapper[4762]: I1009 13:47:52.103425 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="32c327f2-9986-410e-972f-4913811fb1d8" containerName="glance-log"
Oct 09 13:47:52 crc kubenswrapper[4762]: I1009 13:47:52.103447 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="32c327f2-9986-410e-972f-4913811fb1d8" containerName="glance-httpd"
Oct 09 13:47:52 crc kubenswrapper[4762]: I1009 13:47:52.105113 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Oct 09 13:47:52 crc kubenswrapper[4762]: I1009 13:47:52.107884 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data"
Oct 09 13:47:52 crc kubenswrapper[4762]: I1009 13:47:52.108114 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc"
Oct 09 13:47:52 crc kubenswrapper[4762]: I1009 13:47:52.112288 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"]
Oct 09 13:47:52 crc kubenswrapper[4762]: I1009 13:47:52.216848 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-26dvs\" (UniqueName: \"kubernetes.io/projected/b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181-kube-api-access-26dvs\") pod \"glance-default-internal-api-0\" (UID: \"b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181\") " pod="openstack/glance-default-internal-api-0"
Oct 09 13:47:52 crc kubenswrapper[4762]: I1009 13:47:52.217347 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181\") " pod="openstack/glance-default-internal-api-0"
Oct 09 13:47:52 crc kubenswrapper[4762]: I1009 13:47:52.217388 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181-logs\") pod \"glance-default-internal-api-0\" (UID: \"b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181\") " pod="openstack/glance-default-internal-api-0"
Oct 09 13:47:52 crc kubenswrapper[4762]: I1009 13:47:52.217423 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181\") " pod="openstack/glance-default-internal-api-0"
Oct 09 13:47:52 crc kubenswrapper[4762]: I1009 13:47:52.217447 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181\") " pod="openstack/glance-default-internal-api-0"
Oct 09 13:47:52 crc kubenswrapper[4762]: I1009 13:47:52.217468 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181\") " pod="openstack/glance-default-internal-api-0"
Oct 09 13:47:52 crc kubenswrapper[4762]: I1009 13:47:52.217506 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181-config-data\") pod \"glance-default-internal-api-0\" (UID: \"b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181\") " pod="openstack/glance-default-internal-api-0"
Oct 09 13:47:52 crc kubenswrapper[4762]: I1009 13:47:52.217677 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181-scripts\") pod \"glance-default-internal-api-0\" (UID: \"b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181\") " pod="openstack/glance-default-internal-api-0"
Oct 09 13:47:52 crc kubenswrapper[4762]: I1009 13:47:52.225964 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"]
Oct 09 13:47:52 crc kubenswrapper[4762]: I1009 13:47:52.319830 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181-scripts\") pod \"glance-default-internal-api-0\" (UID: \"b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181\") " pod="openstack/glance-default-internal-api-0"
Oct 09 13:47:52 crc kubenswrapper[4762]: I1009 13:47:52.319904 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-26dvs\" (UniqueName: \"kubernetes.io/projected/b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181-kube-api-access-26dvs\") pod \"glance-default-internal-api-0\" (UID: \"b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181\") " pod="openstack/glance-default-internal-api-0"
Oct 09 13:47:52 crc kubenswrapper[4762]: I1009 13:47:52.319923 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181\") " pod="openstack/glance-default-internal-api-0"
Oct 09 13:47:52 crc kubenswrapper[4762]: I1009 13:47:52.319944 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181-logs\") pod \"glance-default-internal-api-0\" (UID: \"b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181\") " pod="openstack/glance-default-internal-api-0"
Oct 09 13:47:52 crc kubenswrapper[4762]: I1009 13:47:52.319968 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181\") " pod="openstack/glance-default-internal-api-0"
Oct 09 13:47:52 crc kubenswrapper[4762]: I1009 13:47:52.319983 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181\") " pod="openstack/glance-default-internal-api-0"
Oct 09 13:47:52 crc kubenswrapper[4762]: I1009 13:47:52.319997 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181\") " pod="openstack/glance-default-internal-api-0"
Oct 09 13:47:52 crc kubenswrapper[4762]: I1009 13:47:52.320035 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181-config-data\") pod \"glance-default-internal-api-0\" (UID: \"b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181\") " pod="openstack/glance-default-internal-api-0"
Oct 09 13:47:52 crc kubenswrapper[4762]: I1009 13:47:52.321487 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181-logs\") pod \"glance-default-internal-api-0\" (UID: \"b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181\") " pod="openstack/glance-default-internal-api-0"
Oct 09 13:47:52 crc kubenswrapper[4762]: I1009 13:47:52.321533 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181\") " pod="openstack/glance-default-internal-api-0"
Oct 09 13:47:52 crc kubenswrapper[4762]: I1009 13:47:52.321908 4762 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/glance-default-internal-api-0"
Oct 09 13:47:52 crc kubenswrapper[4762]: I1009 13:47:52.332462 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181\") " pod="openstack/glance-default-internal-api-0"
Oct 09 13:47:52 crc kubenswrapper[4762]: I1009 13:47:52.332481 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181-config-data\") pod \"glance-default-internal-api-0\" (UID: \"b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181\") " pod="openstack/glance-default-internal-api-0"
Oct 09 13:47:52 crc kubenswrapper[4762]: I1009 13:47:52.332989 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181-scripts\") pod \"glance-default-internal-api-0\" (UID: \"b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181\") " pod="openstack/glance-default-internal-api-0"
Oct 09 13:47:52 crc kubenswrapper[4762]: I1009 13:47:52.338043 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181\") " pod="openstack/glance-default-internal-api-0"
Oct 09 13:47:52 crc kubenswrapper[4762]: I1009 13:47:52.346024 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-26dvs\" (UniqueName: \"kubernetes.io/projected/b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181-kube-api-access-26dvs\") pod \"glance-default-internal-api-0\" (UID: \"b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181\") " pod="openstack/glance-default-internal-api-0"
Oct 09 13:47:52 crc kubenswrapper[4762]: I1009 13:47:52.367464 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181\") " pod="openstack/glance-default-internal-api-0"
Oct 09 13:47:52 crc kubenswrapper[4762]: I1009 13:47:52.443941 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Oct 09 13:47:52 crc kubenswrapper[4762]: I1009 13:47:52.983777 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="32c327f2-9986-410e-972f-4913811fb1d8" path="/var/lib/kubelet/pods/32c327f2-9986-410e-972f-4913811fb1d8/volumes"
Oct 09 13:47:52 crc kubenswrapper[4762]: I1009 13:47:52.985273 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4b8ec4f0-adb7-41f8-8552-f23a5dfca100" path="/var/lib/kubelet/pods/4b8ec4f0-adb7-41f8-8552-f23a5dfca100/volumes"
Oct 09 13:47:52 crc kubenswrapper[4762]: I1009 13:47:52.991180 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"]
Oct 09 13:47:53 crc kubenswrapper[4762]: I1009 13:47:53.003852 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"96d5f387-4c72-4cc6-9776-56fcb49b8851","Type":"ContainerStarted","Data":"f76d55bec71856953e9258a33e1c4393f28326a0fc15237e8dc359904b57e3fa"}
Oct 09 13:47:53 crc kubenswrapper[4762]: I1009 13:47:53.003906 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"96d5f387-4c72-4cc6-9776-56fcb49b8851","Type":"ContainerStarted","Data":"24687b1f80f20771e10a0ca92079f324b5ab3c58e49b31d7149cb999c71606bf"}
Oct 09 13:47:54 crc kubenswrapper[4762]: I1009 13:47:54.029516 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181","Type":"ContainerStarted","Data":"00be9b377751e0bad21e7467727a5bb1ef9635151ae430c7dd73ffcc83e71cc3"}
Oct 09 13:47:54 crc kubenswrapper[4762]: I1009 13:47:54.029816 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181","Type":"ContainerStarted","Data":"7b0dfec2a83c65060fee92926a69b329defaf1a934d53b00c8a4b63777d1197f"}
Oct 09 13:47:54 crc kubenswrapper[4762]: I1009 13:47:54.033505 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"96d5f387-4c72-4cc6-9776-56fcb49b8851","Type":"ContainerStarted","Data":"e1cf42e1e55e57e01124a220f5977d775fad1808eb503cb4e273e0937ebaa12d"}
Oct 09 13:47:54 crc kubenswrapper[4762]: I1009 13:47:54.045284 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123","Type":"ContainerStarted","Data":"3ad5e682628724dc339b551bed3b214c07ac3ebffe24dc0f7e96b5a478e1c131"}
Oct 09 13:47:54 crc kubenswrapper[4762]: I1009 13:47:54.045449 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123" containerName="ceilometer-central-agent" containerID="cri-o://8e515cd068684b20f7535bcda6ad9e60e777251bb78a300bd0b81c6531d0beca" gracePeriod=30
Oct 09 13:47:54 crc kubenswrapper[4762]: I1009 13:47:54.045514 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0"
Oct 09 13:47:54 crc kubenswrapper[4762]: I1009 13:47:54.045537 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123" containerName="sg-core" containerID="cri-o://6ecd20af377a3a628e49182f39bbc41ab37268d7ce7bfe51c86b780a40aa3346" gracePeriod=30
Oct 09 13:47:54 crc kubenswrapper[4762]: I1009 13:47:54.045576 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123" containerName="ceilometer-notification-agent" containerID="cri-o://4360b7182b12248ec01332c42aea20c221f3505281751393202dd297e3042f4a" gracePeriod=30
Oct 09 13:47:54 crc kubenswrapper[4762]: I1009 13:47:54.045714 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123" containerName="proxy-httpd" containerID="cri-o://3ad5e682628724dc339b551bed3b214c07ac3ebffe24dc0f7e96b5a478e1c131" gracePeriod=30
Oct 09 13:47:54 crc kubenswrapper[4762]: I1009 13:47:54.062385 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=3.062322891 podStartE2EDuration="3.062322891s" podCreationTimestamp="2025-10-09 13:47:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:47:54.056739213 +0000 UTC m=+1349.830530252" watchObservedRunningTime="2025-10-09 13:47:54.062322891 +0000 UTC m=+1349.836113920"
Oct 09 13:47:54 crc kubenswrapper[4762]: I1009 13:47:54.078276 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.959879108 podStartE2EDuration="7.078258236s" podCreationTimestamp="2025-10-09 13:47:47 +0000 UTC" firstStartedPulling="2025-10-09 13:47:48.87081573 +0000 UTC m=+1344.644606769" lastFinishedPulling="2025-10-09 13:47:52.989194858 +0000 UTC m=+1348.762985897" observedRunningTime="2025-10-09 13:47:54.075160409 +0000 UTC m=+1349.848951438" watchObservedRunningTime="2025-10-09 13:47:54.078258236 +0000 UTC m=+1349.852049275"
Oct 09 13:47:55 crc kubenswrapper[4762]: I1009 13:47:55.059076 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181","Type":"ContainerStarted","Data":"13924ee637a2d8e6d84bc0168317a287fc78cd42d90586f3f1c1c46d4723c487"}
Oct 09 13:47:55 crc kubenswrapper[4762]: I1009 13:47:55.062935 4762 generic.go:334] "Generic (PLEG): container finished" podID="7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123" containerID="3ad5e682628724dc339b551bed3b214c07ac3ebffe24dc0f7e96b5a478e1c131" exitCode=0
Oct 09 13:47:55 crc kubenswrapper[4762]: I1009 13:47:55.062973 4762 generic.go:334] "Generic (PLEG): container finished" podID="7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123" containerID="6ecd20af377a3a628e49182f39bbc41ab37268d7ce7bfe51c86b780a40aa3346" exitCode=2
Oct 09 13:47:55 crc kubenswrapper[4762]: I1009 13:47:55.062987 4762 generic.go:334] "Generic (PLEG): container finished" podID="7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123" containerID="4360b7182b12248ec01332c42aea20c221f3505281751393202dd297e3042f4a" exitCode=0
Oct 09 13:47:55 crc kubenswrapper[4762]: I1009 13:47:55.063964 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123","Type":"ContainerDied","Data":"3ad5e682628724dc339b551bed3b214c07ac3ebffe24dc0f7e96b5a478e1c131"}
Oct 09 13:47:55 crc kubenswrapper[4762]: I1009 13:47:55.064123 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123","Type":"ContainerDied","Data":"6ecd20af377a3a628e49182f39bbc41ab37268d7ce7bfe51c86b780a40aa3346"}
Oct 09 13:47:55 crc kubenswrapper[4762]: I1009 13:47:55.064142
4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123","Type":"ContainerDied","Data":"4360b7182b12248ec01332c42aea20c221f3505281751393202dd297e3042f4a"} Oct 09 13:47:55 crc kubenswrapper[4762]: I1009 13:47:55.090725 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=3.090706505 podStartE2EDuration="3.090706505s" podCreationTimestamp="2025-10-09 13:47:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:47:55.089477695 +0000 UTC m=+1350.863268734" watchObservedRunningTime="2025-10-09 13:47:55.090706505 +0000 UTC m=+1350.864497564" Oct 09 13:47:56 crc kubenswrapper[4762]: I1009 13:47:56.075666 4762 generic.go:334] "Generic (PLEG): container finished" podID="60dea6d8-cdd4-4cf1-bcac-eb6babcfaac6" containerID="4ef0705bb5b5f705730dcbf274c818e7737e519b60eb4c19dd124e106257e058" exitCode=0 Oct 09 13:47:56 crc kubenswrapper[4762]: I1009 13:47:56.075705 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-d57f8f89d-6fc59" event={"ID":"60dea6d8-cdd4-4cf1-bcac-eb6babcfaac6","Type":"ContainerDied","Data":"4ef0705bb5b5f705730dcbf274c818e7737e519b60eb4c19dd124e106257e058"} Oct 09 13:47:56 crc kubenswrapper[4762]: I1009 13:47:56.206390 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-d57f8f89d-6fc59" Oct 09 13:47:56 crc kubenswrapper[4762]: I1009 13:47:56.296142 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/60dea6d8-cdd4-4cf1-bcac-eb6babcfaac6-config\") pod \"60dea6d8-cdd4-4cf1-bcac-eb6babcfaac6\" (UID: \"60dea6d8-cdd4-4cf1-bcac-eb6babcfaac6\") " Oct 09 13:47:56 crc kubenswrapper[4762]: I1009 13:47:56.296305 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5xvbd\" (UniqueName: \"kubernetes.io/projected/60dea6d8-cdd4-4cf1-bcac-eb6babcfaac6-kube-api-access-5xvbd\") pod \"60dea6d8-cdd4-4cf1-bcac-eb6babcfaac6\" (UID: \"60dea6d8-cdd4-4cf1-bcac-eb6babcfaac6\") " Oct 09 13:47:56 crc kubenswrapper[4762]: I1009 13:47:56.296358 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/60dea6d8-cdd4-4cf1-bcac-eb6babcfaac6-httpd-config\") pod \"60dea6d8-cdd4-4cf1-bcac-eb6babcfaac6\" (UID: \"60dea6d8-cdd4-4cf1-bcac-eb6babcfaac6\") " Oct 09 13:47:56 crc kubenswrapper[4762]: I1009 13:47:56.296377 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/60dea6d8-cdd4-4cf1-bcac-eb6babcfaac6-combined-ca-bundle\") pod \"60dea6d8-cdd4-4cf1-bcac-eb6babcfaac6\" (UID: \"60dea6d8-cdd4-4cf1-bcac-eb6babcfaac6\") " Oct 09 13:47:56 crc kubenswrapper[4762]: I1009 13:47:56.296417 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/60dea6d8-cdd4-4cf1-bcac-eb6babcfaac6-ovndb-tls-certs\") pod \"60dea6d8-cdd4-4cf1-bcac-eb6babcfaac6\" (UID: \"60dea6d8-cdd4-4cf1-bcac-eb6babcfaac6\") " Oct 09 13:47:56 crc kubenswrapper[4762]: I1009 13:47:56.303939 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/60dea6d8-cdd4-4cf1-bcac-eb6babcfaac6-kube-api-access-5xvbd" 
(OuterVolumeSpecName: "kube-api-access-5xvbd") pod "60dea6d8-cdd4-4cf1-bcac-eb6babcfaac6" (UID: "60dea6d8-cdd4-4cf1-bcac-eb6babcfaac6"). InnerVolumeSpecName "kube-api-access-5xvbd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:47:56 crc kubenswrapper[4762]: I1009 13:47:56.304149 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/60dea6d8-cdd4-4cf1-bcac-eb6babcfaac6-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "60dea6d8-cdd4-4cf1-bcac-eb6babcfaac6" (UID: "60dea6d8-cdd4-4cf1-bcac-eb6babcfaac6"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:47:56 crc kubenswrapper[4762]: I1009 13:47:56.356770 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/60dea6d8-cdd4-4cf1-bcac-eb6babcfaac6-config" (OuterVolumeSpecName: "config") pod "60dea6d8-cdd4-4cf1-bcac-eb6babcfaac6" (UID: "60dea6d8-cdd4-4cf1-bcac-eb6babcfaac6"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:47:56 crc kubenswrapper[4762]: I1009 13:47:56.370503 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/60dea6d8-cdd4-4cf1-bcac-eb6babcfaac6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "60dea6d8-cdd4-4cf1-bcac-eb6babcfaac6" (UID: "60dea6d8-cdd4-4cf1-bcac-eb6babcfaac6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:47:56 crc kubenswrapper[4762]: I1009 13:47:56.385747 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/60dea6d8-cdd4-4cf1-bcac-eb6babcfaac6-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "60dea6d8-cdd4-4cf1-bcac-eb6babcfaac6" (UID: "60dea6d8-cdd4-4cf1-bcac-eb6babcfaac6"). InnerVolumeSpecName "ovndb-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:47:56 crc kubenswrapper[4762]: I1009 13:47:56.398918 4762 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/60dea6d8-cdd4-4cf1-bcac-eb6babcfaac6-config\") on node \"crc\" DevicePath \"\"" Oct 09 13:47:56 crc kubenswrapper[4762]: I1009 13:47:56.398983 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5xvbd\" (UniqueName: \"kubernetes.io/projected/60dea6d8-cdd4-4cf1-bcac-eb6babcfaac6-kube-api-access-5xvbd\") on node \"crc\" DevicePath \"\"" Oct 09 13:47:56 crc kubenswrapper[4762]: I1009 13:47:56.399001 4762 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/60dea6d8-cdd4-4cf1-bcac-eb6babcfaac6-httpd-config\") on node \"crc\" DevicePath \"\"" Oct 09 13:47:56 crc kubenswrapper[4762]: I1009 13:47:56.399013 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/60dea6d8-cdd4-4cf1-bcac-eb6babcfaac6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 13:47:56 crc kubenswrapper[4762]: I1009 13:47:56.399024 4762 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/60dea6d8-cdd4-4cf1-bcac-eb6babcfaac6-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 09 13:47:57 crc kubenswrapper[4762]: I1009 13:47:57.002513 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-3399-account-create-5j8mn"] Oct 09 13:47:57 crc kubenswrapper[4762]: E1009 13:47:57.002930 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="60dea6d8-cdd4-4cf1-bcac-eb6babcfaac6" containerName="neutron-api" Oct 09 13:47:57 crc kubenswrapper[4762]: I1009 13:47:57.002948 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="60dea6d8-cdd4-4cf1-bcac-eb6babcfaac6" containerName="neutron-api" Oct 09 13:47:57 crc kubenswrapper[4762]: E1009 13:47:57.002975 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="60dea6d8-cdd4-4cf1-bcac-eb6babcfaac6" containerName="neutron-httpd" Oct 09 13:47:57 crc kubenswrapper[4762]: I1009 13:47:57.002983 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="60dea6d8-cdd4-4cf1-bcac-eb6babcfaac6" containerName="neutron-httpd" Oct 09 13:47:57 crc kubenswrapper[4762]: I1009 13:47:57.003186 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="60dea6d8-cdd4-4cf1-bcac-eb6babcfaac6" containerName="neutron-httpd" Oct 09 13:47:57 crc kubenswrapper[4762]: I1009 13:47:57.003212 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="60dea6d8-cdd4-4cf1-bcac-eb6babcfaac6" containerName="neutron-api" Oct 09 13:47:57 crc kubenswrapper[4762]: I1009 13:47:57.003898 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-3399-account-create-5j8mn" Oct 09 13:47:57 crc kubenswrapper[4762]: I1009 13:47:57.006041 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Oct 09 13:47:57 crc kubenswrapper[4762]: I1009 13:47:57.010443 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x7ksd\" (UniqueName: \"kubernetes.io/projected/947cbe0a-2c1a-490f-b5c3-760760edeb29-kube-api-access-x7ksd\") pod \"nova-api-3399-account-create-5j8mn\" (UID: \"947cbe0a-2c1a-490f-b5c3-760760edeb29\") " pod="openstack/nova-api-3399-account-create-5j8mn" Oct 09 13:47:57 crc kubenswrapper[4762]: I1009 13:47:57.027264 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-3399-account-create-5j8mn"] Oct 09 13:47:57 crc kubenswrapper[4762]: I1009 13:47:57.087219 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-d57f8f89d-6fc59" event={"ID":"60dea6d8-cdd4-4cf1-bcac-eb6babcfaac6","Type":"ContainerDied","Data":"3624264e2bae1f9bd265fd4e638c225f20d7825d82dbc58f581bb669f679a5f6"} Oct 09 13:47:57 crc kubenswrapper[4762]: I1009 13:47:57.087256 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-d57f8f89d-6fc59" Oct 09 13:47:57 crc kubenswrapper[4762]: I1009 13:47:57.087281 4762 scope.go:117] "RemoveContainer" containerID="3622a8256ddb6de752f1030721eca2f21adbf00ed0b2d1c1143da9c780959d68" Oct 09 13:47:57 crc kubenswrapper[4762]: I1009 13:47:57.109780 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-d57f8f89d-6fc59"] Oct 09 13:47:57 crc kubenswrapper[4762]: I1009 13:47:57.112027 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x7ksd\" (UniqueName: \"kubernetes.io/projected/947cbe0a-2c1a-490f-b5c3-760760edeb29-kube-api-access-x7ksd\") pod \"nova-api-3399-account-create-5j8mn\" (UID: \"947cbe0a-2c1a-490f-b5c3-760760edeb29\") " pod="openstack/nova-api-3399-account-create-5j8mn" Oct 09 13:47:57 crc kubenswrapper[4762]: I1009 13:47:57.118891 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-d57f8f89d-6fc59"] Oct 09 13:47:57 crc kubenswrapper[4762]: I1009 13:47:57.128757 4762 scope.go:117] "RemoveContainer" containerID="4ef0705bb5b5f705730dcbf274c818e7737e519b60eb4c19dd124e106257e058" Oct 09 13:47:57 crc kubenswrapper[4762]: I1009 13:47:57.141440 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x7ksd\" (UniqueName: \"kubernetes.io/projected/947cbe0a-2c1a-490f-b5c3-760760edeb29-kube-api-access-x7ksd\") pod \"nova-api-3399-account-create-5j8mn\" (UID: \"947cbe0a-2c1a-490f-b5c3-760760edeb29\") " pod="openstack/nova-api-3399-account-create-5j8mn" Oct 09 13:47:57 crc kubenswrapper[4762]: I1009 13:47:57.185842 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-2601-account-create-8jl77"] Oct 09 13:47:57 crc kubenswrapper[4762]: I1009 13:47:57.187352 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-2601-account-create-8jl77" Oct 09 13:47:57 crc kubenswrapper[4762]: I1009 13:47:57.190910 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret" Oct 09 13:47:57 crc kubenswrapper[4762]: I1009 13:47:57.199841 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-2601-account-create-8jl77"] Oct 09 13:47:57 crc kubenswrapper[4762]: I1009 13:47:57.215056 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rj5tz\" (UniqueName: \"kubernetes.io/projected/3c047522-42b9-40b8-89df-020222968ff6-kube-api-access-rj5tz\") pod \"nova-cell0-2601-account-create-8jl77\" (UID: \"3c047522-42b9-40b8-89df-020222968ff6\") " pod="openstack/nova-cell0-2601-account-create-8jl77" Oct 09 13:47:57 crc kubenswrapper[4762]: I1009 13:47:57.316799 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rj5tz\" (UniqueName: \"kubernetes.io/projected/3c047522-42b9-40b8-89df-020222968ff6-kube-api-access-rj5tz\") pod \"nova-cell0-2601-account-create-8jl77\" (UID: \"3c047522-42b9-40b8-89df-020222968ff6\") " pod="openstack/nova-cell0-2601-account-create-8jl77" Oct 09 13:47:57 crc kubenswrapper[4762]: I1009 13:47:57.325930 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-3399-account-create-5j8mn" Oct 09 13:47:57 crc kubenswrapper[4762]: I1009 13:47:57.334224 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rj5tz\" (UniqueName: \"kubernetes.io/projected/3c047522-42b9-40b8-89df-020222968ff6-kube-api-access-rj5tz\") pod \"nova-cell0-2601-account-create-8jl77\" (UID: \"3c047522-42b9-40b8-89df-020222968ff6\") " pod="openstack/nova-cell0-2601-account-create-8jl77" Oct 09 13:47:57 crc kubenswrapper[4762]: I1009 13:47:57.397942 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-52a6-account-create-472b8"] Oct 09 13:47:57 crc kubenswrapper[4762]: I1009 13:47:57.399067 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-52a6-account-create-472b8" Oct 09 13:47:57 crc kubenswrapper[4762]: I1009 13:47:57.402535 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret" Oct 09 13:47:57 crc kubenswrapper[4762]: I1009 13:47:57.415762 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-52a6-account-create-472b8"] Oct 09 13:47:57 crc kubenswrapper[4762]: I1009 13:47:57.525011 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-2601-account-create-8jl77" Oct 09 13:47:57 crc kubenswrapper[4762]: I1009 13:47:57.539829 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m4bmp\" (UniqueName: \"kubernetes.io/projected/f63ec52b-58fb-4fc2-97ac-2752df71322a-kube-api-access-m4bmp\") pod \"nova-cell1-52a6-account-create-472b8\" (UID: \"f63ec52b-58fb-4fc2-97ac-2752df71322a\") " pod="openstack/nova-cell1-52a6-account-create-472b8" Oct 09 13:47:57 crc kubenswrapper[4762]: I1009 13:47:57.641451 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m4bmp\" (UniqueName: \"kubernetes.io/projected/f63ec52b-58fb-4fc2-97ac-2752df71322a-kube-api-access-m4bmp\") pod \"nova-cell1-52a6-account-create-472b8\" (UID: \"f63ec52b-58fb-4fc2-97ac-2752df71322a\") " pod="openstack/nova-cell1-52a6-account-create-472b8" Oct 09 13:47:57 crc kubenswrapper[4762]: I1009 13:47:57.660560 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m4bmp\" (UniqueName: \"kubernetes.io/projected/f63ec52b-58fb-4fc2-97ac-2752df71322a-kube-api-access-m4bmp\") pod \"nova-cell1-52a6-account-create-472b8\" (UID: \"f63ec52b-58fb-4fc2-97ac-2752df71322a\") " pod="openstack/nova-cell1-52a6-account-create-472b8" Oct 09 13:47:57 crc kubenswrapper[4762]: I1009 13:47:57.761009 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-52a6-account-create-472b8" Oct 09 13:47:57 crc kubenswrapper[4762]: I1009 13:47:57.836104 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-3399-account-create-5j8mn"] Oct 09 13:47:57 crc kubenswrapper[4762]: W1009 13:47:57.837875 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod947cbe0a_2c1a_490f_b5c3_760760edeb29.slice/crio-e925234f8dfa22fcd2a65f862004f64c9dce877a8fe5909bf0d0c14d40fe8ad3 WatchSource:0}: Error finding container e925234f8dfa22fcd2a65f862004f64c9dce877a8fe5909bf0d0c14d40fe8ad3: Status 404 returned error can't find the container with id e925234f8dfa22fcd2a65f862004f64c9dce877a8fe5909bf0d0c14d40fe8ad3 Oct 09 13:47:57 crc kubenswrapper[4762]: I1009 13:47:57.975923 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-2601-account-create-8jl77"] Oct 09 13:47:58 crc kubenswrapper[4762]: W1009 13:47:58.020821 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3c047522_42b9_40b8_89df_020222968ff6.slice/crio-8249406c9be1bb8956a22e2de03686509f42ef002718bb40fb97dfe61704d8c3 WatchSource:0}: Error finding container 8249406c9be1bb8956a22e2de03686509f42ef002718bb40fb97dfe61704d8c3: Status 404 returned error can't find the container with id 8249406c9be1bb8956a22e2de03686509f42ef002718bb40fb97dfe61704d8c3 Oct 09 13:47:58 crc kubenswrapper[4762]: I1009 13:47:58.113663 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-3399-account-create-5j8mn" event={"ID":"947cbe0a-2c1a-490f-b5c3-760760edeb29","Type":"ContainerStarted","Data":"33532cdc1385d8d48d5a4f7e806770037c4ebb7b34636a19d9790e0370215882"} Oct 09 13:47:58 crc kubenswrapper[4762]: I1009 13:47:58.113720 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-3399-account-create-5j8mn" 
event={"ID":"947cbe0a-2c1a-490f-b5c3-760760edeb29","Type":"ContainerStarted","Data":"e925234f8dfa22fcd2a65f862004f64c9dce877a8fe5909bf0d0c14d40fe8ad3"} Oct 09 13:47:58 crc kubenswrapper[4762]: I1009 13:47:58.119905 4762 generic.go:334] "Generic (PLEG): container finished" podID="7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123" containerID="8e515cd068684b20f7535bcda6ad9e60e777251bb78a300bd0b81c6531d0beca" exitCode=0 Oct 09 13:47:58 crc kubenswrapper[4762]: I1009 13:47:58.119963 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123","Type":"ContainerDied","Data":"8e515cd068684b20f7535bcda6ad9e60e777251bb78a300bd0b81c6531d0beca"} Oct 09 13:47:58 crc kubenswrapper[4762]: I1009 13:47:58.121735 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-2601-account-create-8jl77" event={"ID":"3c047522-42b9-40b8-89df-020222968ff6","Type":"ContainerStarted","Data":"8249406c9be1bb8956a22e2de03686509f42ef002718bb40fb97dfe61704d8c3"} Oct 09 13:47:58 crc kubenswrapper[4762]: I1009 13:47:58.134838 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-3399-account-create-5j8mn" podStartSLOduration=2.134815371 podStartE2EDuration="2.134815371s" podCreationTimestamp="2025-10-09 13:47:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:47:58.131441297 +0000 UTC m=+1353.905232346" watchObservedRunningTime="2025-10-09 13:47:58.134815371 +0000 UTC m=+1353.908606410" Oct 09 13:47:58 crc kubenswrapper[4762]: I1009 13:47:58.225300 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-52a6-account-create-472b8"] Oct 09 13:47:58 crc kubenswrapper[4762]: I1009 13:47:58.282399 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 09 13:47:58 crc kubenswrapper[4762]: I1009 13:47:58.460841 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123-log-httpd\") pod \"7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123\" (UID: \"7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123\") " Oct 09 13:47:58 crc kubenswrapper[4762]: I1009 13:47:58.460899 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123-run-httpd\") pod \"7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123\" (UID: \"7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123\") " Oct 09 13:47:58 crc kubenswrapper[4762]: I1009 13:47:58.460964 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123-sg-core-conf-yaml\") pod \"7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123\" (UID: \"7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123\") " Oct 09 13:47:58 crc kubenswrapper[4762]: I1009 13:47:58.461066 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8mnnq\" (UniqueName: \"kubernetes.io/projected/7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123-kube-api-access-8mnnq\") pod \"7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123\" (UID: \"7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123\") " Oct 09 13:47:58 crc kubenswrapper[4762]: I1009 13:47:58.461113 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123-scripts\") pod \"7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123\" (UID: \"7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123\") " Oct 09 13:47:58 crc kubenswrapper[4762]: I1009 13:47:58.461174 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123-config-data\") pod \"7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123\" (UID: \"7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123\") " Oct 09 13:47:58 crc kubenswrapper[4762]: I1009 13:47:58.461194 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123-combined-ca-bundle\") pod \"7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123\" (UID: \"7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123\") " Oct 09 13:47:58 crc kubenswrapper[4762]: I1009 13:47:58.463514 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123" (UID: "7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 13:47:58 crc kubenswrapper[4762]: I1009 13:47:58.465044 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123" (UID: "7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 13:47:58 crc kubenswrapper[4762]: I1009 13:47:58.470011 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123-kube-api-access-8mnnq" (OuterVolumeSpecName: "kube-api-access-8mnnq") pod "7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123" (UID: "7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123"). InnerVolumeSpecName "kube-api-access-8mnnq". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:47:58 crc kubenswrapper[4762]: I1009 13:47:58.475108 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123-scripts" (OuterVolumeSpecName: "scripts") pod "7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123" (UID: "7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:47:58 crc kubenswrapper[4762]: I1009 13:47:58.506061 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123" (UID: "7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:47:58 crc kubenswrapper[4762]: I1009 13:47:58.558661 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123" (UID: "7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:47:58 crc kubenswrapper[4762]: I1009 13:47:58.563809 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8mnnq\" (UniqueName: \"kubernetes.io/projected/7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123-kube-api-access-8mnnq\") on node \"crc\" DevicePath \"\"" Oct 09 13:47:58 crc kubenswrapper[4762]: I1009 13:47:58.563835 4762 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123-scripts\") on node \"crc\" DevicePath \"\"" Oct 09 13:47:58 crc kubenswrapper[4762]: I1009 13:47:58.563844 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 13:47:58 crc kubenswrapper[4762]: I1009 13:47:58.563854 4762 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 09 13:47:58 crc kubenswrapper[4762]: I1009 13:47:58.563864 4762 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 09 13:47:58 crc kubenswrapper[4762]: I1009 13:47:58.563873 4762 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 09 13:47:58 crc kubenswrapper[4762]: I1009 13:47:58.581036 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for 
volume "kubernetes.io/secret/7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123-config-data" (OuterVolumeSpecName: "config-data") pod "7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123" (UID: "7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:47:58 crc kubenswrapper[4762]: I1009 13:47:58.666462 4762 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 13:47:58 crc kubenswrapper[4762]: I1009 13:47:58.979879 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="60dea6d8-cdd4-4cf1-bcac-eb6babcfaac6" path="/var/lib/kubelet/pods/60dea6d8-cdd4-4cf1-bcac-eb6babcfaac6/volumes" Oct 09 13:47:59 crc kubenswrapper[4762]: I1009 13:47:59.137437 4762 generic.go:334] "Generic (PLEG): container finished" podID="f63ec52b-58fb-4fc2-97ac-2752df71322a" containerID="deec9ce762a4c213a0d8e47f10bd04dbcd4e036735519ad0afeea374a17c4aed" exitCode=0 Oct 09 13:47:59 crc kubenswrapper[4762]: I1009 13:47:59.137538 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-52a6-account-create-472b8" event={"ID":"f63ec52b-58fb-4fc2-97ac-2752df71322a","Type":"ContainerDied","Data":"deec9ce762a4c213a0d8e47f10bd04dbcd4e036735519ad0afeea374a17c4aed"} Oct 09 13:47:59 crc kubenswrapper[4762]: I1009 13:47:59.137645 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-52a6-account-create-472b8" event={"ID":"f63ec52b-58fb-4fc2-97ac-2752df71322a","Type":"ContainerStarted","Data":"9e0c8855ea18258d53ef85751495276888270008a2947907ca11c6e6ad04333d"} Oct 09 13:47:59 crc kubenswrapper[4762]: I1009 13:47:59.139648 4762 generic.go:334] "Generic (PLEG): container finished" podID="3c047522-42b9-40b8-89df-020222968ff6" containerID="36299bd2c4061638873528aea794b7532cb17d870f20137ce2bcd06e839991cf" exitCode=0 Oct 09 13:47:59 crc kubenswrapper[4762]: I1009 13:47:59.139722 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-2601-account-create-8jl77" event={"ID":"3c047522-42b9-40b8-89df-020222968ff6","Type":"ContainerDied","Data":"36299bd2c4061638873528aea794b7532cb17d870f20137ce2bcd06e839991cf"} Oct 09 13:47:59 crc kubenswrapper[4762]: I1009 13:47:59.141572 4762 generic.go:334] "Generic (PLEG): container finished" podID="947cbe0a-2c1a-490f-b5c3-760760edeb29" containerID="33532cdc1385d8d48d5a4f7e806770037c4ebb7b34636a19d9790e0370215882" exitCode=0 Oct 09 13:47:59 crc kubenswrapper[4762]: I1009 13:47:59.141622 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-3399-account-create-5j8mn" event={"ID":"947cbe0a-2c1a-490f-b5c3-760760edeb29","Type":"ContainerDied","Data":"33532cdc1385d8d48d5a4f7e806770037c4ebb7b34636a19d9790e0370215882"} Oct 09 13:47:59 crc kubenswrapper[4762]: I1009 13:47:59.143890 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123","Type":"ContainerDied","Data":"368e8767fa204c9479349d7ff55b9fb374694899b415dee2cddba6e2fa03a666"} Oct 09 13:47:59 crc kubenswrapper[4762]: I1009 13:47:59.143935 4762 scope.go:117] "RemoveContainer" containerID="3ad5e682628724dc339b551bed3b214c07ac3ebffe24dc0f7e96b5a478e1c131" Oct 09 13:47:59 crc kubenswrapper[4762]: I1009 13:47:59.144086 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 09 13:47:59 crc kubenswrapper[4762]: I1009 13:47:59.167806 4762 scope.go:117] "RemoveContainer" containerID="6ecd20af377a3a628e49182f39bbc41ab37268d7ce7bfe51c86b780a40aa3346" Oct 09 13:47:59 crc kubenswrapper[4762]: I1009 13:47:59.173835 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 09 13:47:59 crc kubenswrapper[4762]: I1009 13:47:59.197621 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 09 13:47:59 crc kubenswrapper[4762]: I1009 13:47:59.199196 4762 scope.go:117] "RemoveContainer" containerID="4360b7182b12248ec01332c42aea20c221f3505281751393202dd297e3042f4a" Oct 09 13:47:59 crc kubenswrapper[4762]: I1009 13:47:59.218892 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 09 13:47:59 crc kubenswrapper[4762]: E1009 13:47:59.219243 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123" containerName="ceilometer-central-agent" Oct 09 13:47:59 crc kubenswrapper[4762]: I1009 13:47:59.219261 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123" containerName="ceilometer-central-agent" Oct 09 13:47:59 crc kubenswrapper[4762]: E1009 13:47:59.219275 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123" containerName="proxy-httpd" Oct 09 13:47:59 crc kubenswrapper[4762]: I1009 13:47:59.219280 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123" containerName="proxy-httpd" Oct 09 13:47:59 crc kubenswrapper[4762]: E1009 13:47:59.219295 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123" containerName="ceilometer-notification-agent" Oct 09 13:47:59 crc kubenswrapper[4762]: I1009 13:47:59.219302 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123" containerName="ceilometer-notification-agent" Oct 09 13:47:59 crc kubenswrapper[4762]: E1009 13:47:59.219323 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123" containerName="sg-core" Oct 09 13:47:59 crc kubenswrapper[4762]: I1009 13:47:59.219328 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123" containerName="sg-core" Oct 09 13:47:59 crc kubenswrapper[4762]: I1009 13:47:59.219485 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123" containerName="proxy-httpd" Oct 09 13:47:59 crc kubenswrapper[4762]: I1009 13:47:59.219506 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123" containerName="sg-core" Oct 09 13:47:59 crc kubenswrapper[4762]: I1009 13:47:59.219526 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123" containerName="ceilometer-central-agent" Oct 09 13:47:59 crc kubenswrapper[4762]: I1009 13:47:59.219533 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123" containerName="ceilometer-notification-agent" Oct 09 13:47:59 crc kubenswrapper[4762]: I1009 13:47:59.221088 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 09 13:47:59 crc kubenswrapper[4762]: I1009 13:47:59.223560 4762 scope.go:117] "RemoveContainer" containerID="8e515cd068684b20f7535bcda6ad9e60e777251bb78a300bd0b81c6531d0beca" Oct 09 13:47:59 crc kubenswrapper[4762]: I1009 13:47:59.223580 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 09 13:47:59 crc kubenswrapper[4762]: I1009 13:47:59.224228 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 09 13:47:59 crc kubenswrapper[4762]: I1009 13:47:59.243806 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 09 13:47:59 crc kubenswrapper[4762]: I1009 13:47:59.379057 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7716dbd-8885-4d9d-b94e-6f0deb73e11e-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f7716dbd-8885-4d9d-b94e-6f0deb73e11e\") " pod="openstack/ceilometer-0" Oct 09 13:47:59 crc kubenswrapper[4762]: I1009 13:47:59.379112 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f7716dbd-8885-4d9d-b94e-6f0deb73e11e-scripts\") pod \"ceilometer-0\" (UID: \"f7716dbd-8885-4d9d-b94e-6f0deb73e11e\") " pod="openstack/ceilometer-0" Oct 09 13:47:59 crc kubenswrapper[4762]: I1009 13:47:59.379184 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lm7ts\" (UniqueName: \"kubernetes.io/projected/f7716dbd-8885-4d9d-b94e-6f0deb73e11e-kube-api-access-lm7ts\") pod \"ceilometer-0\" (UID: \"f7716dbd-8885-4d9d-b94e-6f0deb73e11e\") " pod="openstack/ceilometer-0" Oct 09 13:47:59 crc kubenswrapper[4762]: I1009 13:47:59.379248 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f7716dbd-8885-4d9d-b94e-6f0deb73e11e-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f7716dbd-8885-4d9d-b94e-6f0deb73e11e\") " pod="openstack/ceilometer-0" Oct 09 13:47:59 crc kubenswrapper[4762]: I1009 13:47:59.379272 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f7716dbd-8885-4d9d-b94e-6f0deb73e11e-config-data\") pod \"ceilometer-0\" (UID: \"f7716dbd-8885-4d9d-b94e-6f0deb73e11e\") " pod="openstack/ceilometer-0" Oct 09 13:47:59 crc kubenswrapper[4762]: I1009 13:47:59.379400 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f7716dbd-8885-4d9d-b94e-6f0deb73e11e-log-httpd\") pod \"ceilometer-0\" (UID: \"f7716dbd-8885-4d9d-b94e-6f0deb73e11e\") " pod="openstack/ceilometer-0" Oct 09 13:47:59 crc kubenswrapper[4762]: I1009 13:47:59.379506 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f7716dbd-8885-4d9d-b94e-6f0deb73e11e-run-httpd\") pod \"ceilometer-0\" (UID: \"f7716dbd-8885-4d9d-b94e-6f0deb73e11e\") " pod="openstack/ceilometer-0" Oct 09 13:47:59 crc kubenswrapper[4762]: I1009 13:47:59.483033 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: 
\"kubernetes.io/secret/f7716dbd-8885-4d9d-b94e-6f0deb73e11e-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f7716dbd-8885-4d9d-b94e-6f0deb73e11e\") " pod="openstack/ceilometer-0" Oct 09 13:47:59 crc kubenswrapper[4762]: I1009 13:47:59.483151 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f7716dbd-8885-4d9d-b94e-6f0deb73e11e-config-data\") pod \"ceilometer-0\" (UID: \"f7716dbd-8885-4d9d-b94e-6f0deb73e11e\") " pod="openstack/ceilometer-0" Oct 09 13:47:59 crc kubenswrapper[4762]: I1009 13:47:59.483211 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f7716dbd-8885-4d9d-b94e-6f0deb73e11e-log-httpd\") pod \"ceilometer-0\" (UID: \"f7716dbd-8885-4d9d-b94e-6f0deb73e11e\") " pod="openstack/ceilometer-0" Oct 09 13:47:59 crc kubenswrapper[4762]: I1009 13:47:59.484069 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f7716dbd-8885-4d9d-b94e-6f0deb73e11e-log-httpd\") pod \"ceilometer-0\" (UID: \"f7716dbd-8885-4d9d-b94e-6f0deb73e11e\") " pod="openstack/ceilometer-0" Oct 09 13:47:59 crc kubenswrapper[4762]: I1009 13:47:59.484291 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f7716dbd-8885-4d9d-b94e-6f0deb73e11e-run-httpd\") pod \"ceilometer-0\" (UID: \"f7716dbd-8885-4d9d-b94e-6f0deb73e11e\") " pod="openstack/ceilometer-0" Oct 09 13:47:59 crc kubenswrapper[4762]: I1009 13:47:59.484442 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7716dbd-8885-4d9d-b94e-6f0deb73e11e-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f7716dbd-8885-4d9d-b94e-6f0deb73e11e\") " pod="openstack/ceilometer-0" Oct 09 13:47:59 crc kubenswrapper[4762]: I1009 13:47:59.484518 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f7716dbd-8885-4d9d-b94e-6f0deb73e11e-scripts\") pod \"ceilometer-0\" (UID: \"f7716dbd-8885-4d9d-b94e-6f0deb73e11e\") " pod="openstack/ceilometer-0" Oct 09 13:47:59 crc kubenswrapper[4762]: I1009 13:47:59.484768 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lm7ts\" (UniqueName: \"kubernetes.io/projected/f7716dbd-8885-4d9d-b94e-6f0deb73e11e-kube-api-access-lm7ts\") pod \"ceilometer-0\" (UID: \"f7716dbd-8885-4d9d-b94e-6f0deb73e11e\") " pod="openstack/ceilometer-0" Oct 09 13:47:59 crc kubenswrapper[4762]: I1009 13:47:59.485958 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f7716dbd-8885-4d9d-b94e-6f0deb73e11e-run-httpd\") pod \"ceilometer-0\" (UID: \"f7716dbd-8885-4d9d-b94e-6f0deb73e11e\") " pod="openstack/ceilometer-0" Oct 09 13:47:59 crc kubenswrapper[4762]: I1009 13:47:59.488578 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f7716dbd-8885-4d9d-b94e-6f0deb73e11e-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f7716dbd-8885-4d9d-b94e-6f0deb73e11e\") " pod="openstack/ceilometer-0" Oct 09 13:47:59 crc kubenswrapper[4762]: I1009 13:47:59.488912 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/f7716dbd-8885-4d9d-b94e-6f0deb73e11e-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f7716dbd-8885-4d9d-b94e-6f0deb73e11e\") " pod="openstack/ceilometer-0" Oct 09 13:47:59 crc kubenswrapper[4762]: I1009 13:47:59.491460 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f7716dbd-8885-4d9d-b94e-6f0deb73e11e-scripts\") pod \"ceilometer-0\" (UID: \"f7716dbd-8885-4d9d-b94e-6f0deb73e11e\") " pod="openstack/ceilometer-0" Oct 09 13:47:59 crc kubenswrapper[4762]: I1009 13:47:59.499484 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f7716dbd-8885-4d9d-b94e-6f0deb73e11e-config-data\") pod \"ceilometer-0\" (UID: \"f7716dbd-8885-4d9d-b94e-6f0deb73e11e\") " pod="openstack/ceilometer-0" Oct 09 13:47:59 crc kubenswrapper[4762]: I1009 13:47:59.506621 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lm7ts\" (UniqueName: \"kubernetes.io/projected/f7716dbd-8885-4d9d-b94e-6f0deb73e11e-kube-api-access-lm7ts\") pod \"ceilometer-0\" (UID: \"f7716dbd-8885-4d9d-b94e-6f0deb73e11e\") " pod="openstack/ceilometer-0" Oct 09 13:47:59 crc kubenswrapper[4762]: I1009 13:47:59.551208 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 09 13:48:00 crc kubenswrapper[4762]: I1009 13:48:00.059808 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 09 13:48:00 crc kubenswrapper[4762]: W1009 13:48:00.068897 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf7716dbd_8885_4d9d_b94e_6f0deb73e11e.slice/crio-4db3597dda824f37f8887e7fc095efff332f25eeb203f278611c502490016498 WatchSource:0}: Error finding container 4db3597dda824f37f8887e7fc095efff332f25eeb203f278611c502490016498: Status 404 returned error can't find the container with id 4db3597dda824f37f8887e7fc095efff332f25eeb203f278611c502490016498 Oct 09 13:48:00 crc kubenswrapper[4762]: I1009 13:48:00.156588 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f7716dbd-8885-4d9d-b94e-6f0deb73e11e","Type":"ContainerStarted","Data":"4db3597dda824f37f8887e7fc095efff332f25eeb203f278611c502490016498"} Oct 09 13:48:00 crc kubenswrapper[4762]: I1009 13:48:00.521307 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-52a6-account-create-472b8" Oct 09 13:48:00 crc kubenswrapper[4762]: I1009 13:48:00.618229 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m4bmp\" (UniqueName: \"kubernetes.io/projected/f63ec52b-58fb-4fc2-97ac-2752df71322a-kube-api-access-m4bmp\") pod \"f63ec52b-58fb-4fc2-97ac-2752df71322a\" (UID: \"f63ec52b-58fb-4fc2-97ac-2752df71322a\") " Oct 09 13:48:00 crc kubenswrapper[4762]: I1009 13:48:00.627957 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f63ec52b-58fb-4fc2-97ac-2752df71322a-kube-api-access-m4bmp" (OuterVolumeSpecName: "kube-api-access-m4bmp") pod "f63ec52b-58fb-4fc2-97ac-2752df71322a" (UID: "f63ec52b-58fb-4fc2-97ac-2752df71322a"). InnerVolumeSpecName "kube-api-access-m4bmp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:48:00 crc kubenswrapper[4762]: I1009 13:48:00.725891 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m4bmp\" (UniqueName: \"kubernetes.io/projected/f63ec52b-58fb-4fc2-97ac-2752df71322a-kube-api-access-m4bmp\") on node \"crc\" DevicePath \"\"" Oct 09 13:48:00 crc kubenswrapper[4762]: I1009 13:48:00.778972 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-2601-account-create-8jl77" Oct 09 13:48:00 crc kubenswrapper[4762]: I1009 13:48:00.789028 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-3399-account-create-5j8mn" Oct 09 13:48:00 crc kubenswrapper[4762]: I1009 13:48:00.827482 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rj5tz\" (UniqueName: \"kubernetes.io/projected/3c047522-42b9-40b8-89df-020222968ff6-kube-api-access-rj5tz\") pod \"3c047522-42b9-40b8-89df-020222968ff6\" (UID: \"3c047522-42b9-40b8-89df-020222968ff6\") " Oct 09 13:48:00 crc kubenswrapper[4762]: I1009 13:48:00.827531 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7ksd\" (UniqueName: \"kubernetes.io/projected/947cbe0a-2c1a-490f-b5c3-760760edeb29-kube-api-access-x7ksd\") pod \"947cbe0a-2c1a-490f-b5c3-760760edeb29\" (UID: \"947cbe0a-2c1a-490f-b5c3-760760edeb29\") " Oct 09 13:48:00 crc kubenswrapper[4762]: I1009 13:48:00.832275 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3c047522-42b9-40b8-89df-020222968ff6-kube-api-access-rj5tz" (OuterVolumeSpecName: "kube-api-access-rj5tz") pod "3c047522-42b9-40b8-89df-020222968ff6" (UID: "3c047522-42b9-40b8-89df-020222968ff6"). InnerVolumeSpecName "kube-api-access-rj5tz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:48:00 crc kubenswrapper[4762]: I1009 13:48:00.835947 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/947cbe0a-2c1a-490f-b5c3-760760edeb29-kube-api-access-x7ksd" (OuterVolumeSpecName: "kube-api-access-x7ksd") pod "947cbe0a-2c1a-490f-b5c3-760760edeb29" (UID: "947cbe0a-2c1a-490f-b5c3-760760edeb29"). InnerVolumeSpecName "kube-api-access-x7ksd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:48:00 crc kubenswrapper[4762]: I1009 13:48:00.929251 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rj5tz\" (UniqueName: \"kubernetes.io/projected/3c047522-42b9-40b8-89df-020222968ff6-kube-api-access-rj5tz\") on node \"crc\" DevicePath \"\"" Oct 09 13:48:00 crc kubenswrapper[4762]: I1009 13:48:00.929817 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7ksd\" (UniqueName: \"kubernetes.io/projected/947cbe0a-2c1a-490f-b5c3-760760edeb29-kube-api-access-x7ksd\") on node \"crc\" DevicePath \"\"" Oct 09 13:48:00 crc kubenswrapper[4762]: I1009 13:48:00.978739 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123" path="/var/lib/kubelet/pods/7b7fd2eb-19c6-4e4c-bc6b-ac907ef01123/volumes" Oct 09 13:48:01 crc kubenswrapper[4762]: I1009 13:48:01.167403 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-52a6-account-create-472b8" event={"ID":"f63ec52b-58fb-4fc2-97ac-2752df71322a","Type":"ContainerDied","Data":"9e0c8855ea18258d53ef85751495276888270008a2947907ca11c6e6ad04333d"} Oct 09 13:48:01 crc kubenswrapper[4762]: I1009 13:48:01.167437 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-52a6-account-create-472b8" Oct 09 13:48:01 crc kubenswrapper[4762]: I1009 13:48:01.167442 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9e0c8855ea18258d53ef85751495276888270008a2947907ca11c6e6ad04333d" Oct 09 13:48:01 crc kubenswrapper[4762]: I1009 13:48:01.169588 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-2601-account-create-8jl77" event={"ID":"3c047522-42b9-40b8-89df-020222968ff6","Type":"ContainerDied","Data":"8249406c9be1bb8956a22e2de03686509f42ef002718bb40fb97dfe61704d8c3"} Oct 09 13:48:01 crc kubenswrapper[4762]: I1009 13:48:01.169651 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8249406c9be1bb8956a22e2de03686509f42ef002718bb40fb97dfe61704d8c3" Oct 09 13:48:01 crc kubenswrapper[4762]: I1009 13:48:01.169720 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-2601-account-create-8jl77" Oct 09 13:48:01 crc kubenswrapper[4762]: I1009 13:48:01.172888 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f7716dbd-8885-4d9d-b94e-6f0deb73e11e","Type":"ContainerStarted","Data":"f4aade3578beae9c6962a47e24fa5bb75e253b2b2f38d24fb82263549e048c70"} Oct 09 13:48:01 crc kubenswrapper[4762]: I1009 13:48:01.175076 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-3399-account-create-5j8mn" event={"ID":"947cbe0a-2c1a-490f-b5c3-760760edeb29","Type":"ContainerDied","Data":"e925234f8dfa22fcd2a65f862004f64c9dce877a8fe5909bf0d0c14d40fe8ad3"} Oct 09 13:48:01 crc kubenswrapper[4762]: I1009 13:48:01.175105 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-3399-account-create-5j8mn" Oct 09 13:48:01 crc kubenswrapper[4762]: I1009 13:48:01.175110 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e925234f8dfa22fcd2a65f862004f64c9dce877a8fe5909bf0d0c14d40fe8ad3" Oct 09 13:48:01 crc kubenswrapper[4762]: I1009 13:48:01.483878 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Oct 09 13:48:01 crc kubenswrapper[4762]: I1009 13:48:01.483918 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Oct 09 13:48:01 crc kubenswrapper[4762]: I1009 13:48:01.527285 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Oct 09 13:48:01 crc kubenswrapper[4762]: I1009 13:48:01.536247 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Oct 09 13:48:02 crc kubenswrapper[4762]: I1009 13:48:02.185749 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f7716dbd-8885-4d9d-b94e-6f0deb73e11e","Type":"ContainerStarted","Data":"c1bcb80aca61a9852c30cf183459ab2c565862e987cdec666fedc41c49258b75"} Oct 09 13:48:02 crc kubenswrapper[4762]: I1009 13:48:02.187056 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Oct 09 13:48:02 crc kubenswrapper[4762]: I1009 13:48:02.187135 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Oct 09 13:48:02 crc kubenswrapper[4762]: I1009 13:48:02.445238 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Oct 09 13:48:02 crc kubenswrapper[4762]: I1009 13:48:02.445580 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Oct 09 13:48:02 crc kubenswrapper[4762]: I1009 13:48:02.505530 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-vtlrb"] Oct 09 13:48:02 crc kubenswrapper[4762]: E1009 13:48:02.506043 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3c047522-42b9-40b8-89df-020222968ff6" containerName="mariadb-account-create" Oct 09 13:48:02 crc kubenswrapper[4762]: I1009 13:48:02.506070 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="3c047522-42b9-40b8-89df-020222968ff6" containerName="mariadb-account-create" Oct 09 13:48:02 crc kubenswrapper[4762]: E1009 13:48:02.506116 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f63ec52b-58fb-4fc2-97ac-2752df71322a" containerName="mariadb-account-create" Oct 09 13:48:02 crc kubenswrapper[4762]: I1009 13:48:02.506128 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="f63ec52b-58fb-4fc2-97ac-2752df71322a" containerName="mariadb-account-create" Oct 09 13:48:02 crc kubenswrapper[4762]: E1009 13:48:02.506138 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="947cbe0a-2c1a-490f-b5c3-760760edeb29" containerName="mariadb-account-create" Oct 09 13:48:02 crc kubenswrapper[4762]: I1009 13:48:02.506163 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="947cbe0a-2c1a-490f-b5c3-760760edeb29" containerName="mariadb-account-create" Oct 09 13:48:02 crc kubenswrapper[4762]: I1009 13:48:02.506408 4762 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="3c047522-42b9-40b8-89df-020222968ff6" containerName="mariadb-account-create" Oct 09 13:48:02 crc kubenswrapper[4762]: I1009 13:48:02.506433 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="947cbe0a-2c1a-490f-b5c3-760760edeb29" containerName="mariadb-account-create" Oct 09 13:48:02 crc kubenswrapper[4762]: I1009 13:48:02.506446 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="f63ec52b-58fb-4fc2-97ac-2752df71322a" containerName="mariadb-account-create" Oct 09 13:48:02 crc kubenswrapper[4762]: I1009 13:48:02.507230 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Oct 09 13:48:02 crc kubenswrapper[4762]: I1009 13:48:02.507332 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-vtlrb" Oct 09 13:48:02 crc kubenswrapper[4762]: I1009 13:48:02.514341 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Oct 09 13:48:02 crc kubenswrapper[4762]: I1009 13:48:02.514392 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-wkv9z" Oct 09 13:48:02 crc kubenswrapper[4762]: I1009 13:48:02.514399 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts" Oct 09 13:48:02 crc kubenswrapper[4762]: I1009 13:48:02.519143 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Oct 09 13:48:02 crc kubenswrapper[4762]: I1009 13:48:02.519892 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-vtlrb"] Oct 09 13:48:02 crc kubenswrapper[4762]: I1009 13:48:02.664347 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d8a82ea9-76b8-467c-970d-38cd752458bf-config-data\") pod \"nova-cell0-conductor-db-sync-vtlrb\" (UID: \"d8a82ea9-76b8-467c-970d-38cd752458bf\") " pod="openstack/nova-cell0-conductor-db-sync-vtlrb" Oct 09 13:48:02 crc kubenswrapper[4762]: I1009 13:48:02.664579 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8j74x\" (UniqueName: \"kubernetes.io/projected/d8a82ea9-76b8-467c-970d-38cd752458bf-kube-api-access-8j74x\") pod \"nova-cell0-conductor-db-sync-vtlrb\" (UID: \"d8a82ea9-76b8-467c-970d-38cd752458bf\") " pod="openstack/nova-cell0-conductor-db-sync-vtlrb" Oct 09 13:48:02 crc kubenswrapper[4762]: I1009 13:48:02.664661 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d8a82ea9-76b8-467c-970d-38cd752458bf-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-vtlrb\" (UID: \"d8a82ea9-76b8-467c-970d-38cd752458bf\") " pod="openstack/nova-cell0-conductor-db-sync-vtlrb" Oct 09 13:48:02 crc kubenswrapper[4762]: I1009 13:48:02.664731 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d8a82ea9-76b8-467c-970d-38cd752458bf-scripts\") pod \"nova-cell0-conductor-db-sync-vtlrb\" (UID: \"d8a82ea9-76b8-467c-970d-38cd752458bf\") " pod="openstack/nova-cell0-conductor-db-sync-vtlrb" Oct 09 13:48:02 crc kubenswrapper[4762]: I1009 13:48:02.766935 4762 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d8a82ea9-76b8-467c-970d-38cd752458bf-config-data\") pod \"nova-cell0-conductor-db-sync-vtlrb\" (UID: \"d8a82ea9-76b8-467c-970d-38cd752458bf\") " pod="openstack/nova-cell0-conductor-db-sync-vtlrb" Oct 09 13:48:02 crc kubenswrapper[4762]: I1009 13:48:02.767058 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8j74x\" (UniqueName: \"kubernetes.io/projected/d8a82ea9-76b8-467c-970d-38cd752458bf-kube-api-access-8j74x\") pod \"nova-cell0-conductor-db-sync-vtlrb\" (UID: \"d8a82ea9-76b8-467c-970d-38cd752458bf\") " pod="openstack/nova-cell0-conductor-db-sync-vtlrb" Oct 09 13:48:02 crc kubenswrapper[4762]: I1009 13:48:02.767091 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d8a82ea9-76b8-467c-970d-38cd752458bf-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-vtlrb\" (UID: \"d8a82ea9-76b8-467c-970d-38cd752458bf\") " pod="openstack/nova-cell0-conductor-db-sync-vtlrb" Oct 09 13:48:02 crc kubenswrapper[4762]: I1009 13:48:02.767120 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d8a82ea9-76b8-467c-970d-38cd752458bf-scripts\") pod \"nova-cell0-conductor-db-sync-vtlrb\" (UID: \"d8a82ea9-76b8-467c-970d-38cd752458bf\") " pod="openstack/nova-cell0-conductor-db-sync-vtlrb" Oct 09 13:48:02 crc kubenswrapper[4762]: I1009 13:48:02.856390 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d8a82ea9-76b8-467c-970d-38cd752458bf-config-data\") pod \"nova-cell0-conductor-db-sync-vtlrb\" (UID: \"d8a82ea9-76b8-467c-970d-38cd752458bf\") " pod="openstack/nova-cell0-conductor-db-sync-vtlrb" Oct 09 13:48:02 crc kubenswrapper[4762]: I1009 13:48:02.861414 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d8a82ea9-76b8-467c-970d-38cd752458bf-scripts\") pod \"nova-cell0-conductor-db-sync-vtlrb\" (UID: \"d8a82ea9-76b8-467c-970d-38cd752458bf\") " pod="openstack/nova-cell0-conductor-db-sync-vtlrb" Oct 09 13:48:02 crc kubenswrapper[4762]: I1009 13:48:02.863293 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8j74x\" (UniqueName: \"kubernetes.io/projected/d8a82ea9-76b8-467c-970d-38cd752458bf-kube-api-access-8j74x\") pod \"nova-cell0-conductor-db-sync-vtlrb\" (UID: \"d8a82ea9-76b8-467c-970d-38cd752458bf\") " pod="openstack/nova-cell0-conductor-db-sync-vtlrb" Oct 09 13:48:02 crc kubenswrapper[4762]: I1009 13:48:02.874575 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d8a82ea9-76b8-467c-970d-38cd752458bf-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-vtlrb\" (UID: \"d8a82ea9-76b8-467c-970d-38cd752458bf\") " pod="openstack/nova-cell0-conductor-db-sync-vtlrb" Oct 09 13:48:03 crc kubenswrapper[4762]: I1009 13:48:03.131891 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-vtlrb" Oct 09 13:48:03 crc kubenswrapper[4762]: I1009 13:48:03.208226 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Oct 09 13:48:03 crc kubenswrapper[4762]: I1009 13:48:03.208572 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Oct 09 13:48:03 crc kubenswrapper[4762]: I1009 13:48:03.691746 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-vtlrb"] Oct 09 13:48:03 crc kubenswrapper[4762]: W1009 13:48:03.701988 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd8a82ea9_76b8_467c_970d_38cd752458bf.slice/crio-fa0149ed3b4f5939f13eedeee429b12e381f16d7d9c78add3c295bda63002dba WatchSource:0}: Error finding container fa0149ed3b4f5939f13eedeee429b12e381f16d7d9c78add3c295bda63002dba: Status 404 returned error can't find the container with id fa0149ed3b4f5939f13eedeee429b12e381f16d7d9c78add3c295bda63002dba Oct 09 13:48:04 crc kubenswrapper[4762]: I1009 13:48:04.102186 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 09 13:48:04 crc kubenswrapper[4762]: I1009 13:48:04.221896 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-vtlrb" event={"ID":"d8a82ea9-76b8-467c-970d-38cd752458bf","Type":"ContainerStarted","Data":"fa0149ed3b4f5939f13eedeee429b12e381f16d7d9c78add3c295bda63002dba"} Oct 09 13:48:04 crc kubenswrapper[4762]: I1009 13:48:04.226174 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f7716dbd-8885-4d9d-b94e-6f0deb73e11e","Type":"ContainerStarted","Data":"ba3a39794b5322469a790dec47f2496c20bce2a8a0c678b23526cf2965f9e53d"} Oct 09 13:48:04 crc kubenswrapper[4762]: I1009 13:48:04.226406 4762 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 09 13:48:04 crc kubenswrapper[4762]: I1009 13:48:04.226618 4762 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 09 13:48:05 crc kubenswrapper[4762]: I1009 13:48:05.175067 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Oct 09 13:48:05 crc kubenswrapper[4762]: I1009 13:48:05.212696 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Oct 09 13:48:05 crc kubenswrapper[4762]: I1009 13:48:05.798149 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Oct 09 13:48:05 crc kubenswrapper[4762]: I1009 13:48:05.798259 4762 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 09 13:48:05 crc kubenswrapper[4762]: I1009 13:48:05.802414 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Oct 09 13:48:06 crc kubenswrapper[4762]: I1009 13:48:06.276182 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f7716dbd-8885-4d9d-b94e-6f0deb73e11e","Type":"ContainerStarted","Data":"b63f084a58f579855dc0bb699f749bbb868e82f32bf04da2d704d4e17f63aa58"} Oct 09 13:48:06 crc kubenswrapper[4762]: I1009 13:48:06.276689 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" 
podUID="f7716dbd-8885-4d9d-b94e-6f0deb73e11e" containerName="ceilometer-central-agent" containerID="cri-o://f4aade3578beae9c6962a47e24fa5bb75e253b2b2f38d24fb82263549e048c70" gracePeriod=30 Oct 09 13:48:06 crc kubenswrapper[4762]: I1009 13:48:06.276746 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f7716dbd-8885-4d9d-b94e-6f0deb73e11e" containerName="sg-core" containerID="cri-o://ba3a39794b5322469a790dec47f2496c20bce2a8a0c678b23526cf2965f9e53d" gracePeriod=30 Oct 09 13:48:06 crc kubenswrapper[4762]: I1009 13:48:06.276774 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f7716dbd-8885-4d9d-b94e-6f0deb73e11e" containerName="ceilometer-notification-agent" containerID="cri-o://c1bcb80aca61a9852c30cf183459ab2c565862e987cdec666fedc41c49258b75" gracePeriod=30 Oct 09 13:48:06 crc kubenswrapper[4762]: I1009 13:48:06.276807 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f7716dbd-8885-4d9d-b94e-6f0deb73e11e" containerName="proxy-httpd" containerID="cri-o://b63f084a58f579855dc0bb699f749bbb868e82f32bf04da2d704d4e17f63aa58" gracePeriod=30 Oct 09 13:48:07 crc kubenswrapper[4762]: I1009 13:48:07.301549 4762 generic.go:334] "Generic (PLEG): container finished" podID="f7716dbd-8885-4d9d-b94e-6f0deb73e11e" containerID="b63f084a58f579855dc0bb699f749bbb868e82f32bf04da2d704d4e17f63aa58" exitCode=0 Oct 09 13:48:07 crc kubenswrapper[4762]: I1009 13:48:07.301813 4762 generic.go:334] "Generic (PLEG): container finished" podID="f7716dbd-8885-4d9d-b94e-6f0deb73e11e" containerID="ba3a39794b5322469a790dec47f2496c20bce2a8a0c678b23526cf2965f9e53d" exitCode=2 Oct 09 13:48:07 crc kubenswrapper[4762]: I1009 13:48:07.301822 4762 generic.go:334] "Generic (PLEG): container finished" podID="f7716dbd-8885-4d9d-b94e-6f0deb73e11e" containerID="c1bcb80aca61a9852c30cf183459ab2c565862e987cdec666fedc41c49258b75" exitCode=0 Oct 09 13:48:07 crc kubenswrapper[4762]: I1009 13:48:07.301606 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f7716dbd-8885-4d9d-b94e-6f0deb73e11e","Type":"ContainerDied","Data":"b63f084a58f579855dc0bb699f749bbb868e82f32bf04da2d704d4e17f63aa58"} Oct 09 13:48:07 crc kubenswrapper[4762]: I1009 13:48:07.301855 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f7716dbd-8885-4d9d-b94e-6f0deb73e11e","Type":"ContainerDied","Data":"ba3a39794b5322469a790dec47f2496c20bce2a8a0c678b23526cf2965f9e53d"} Oct 09 13:48:07 crc kubenswrapper[4762]: I1009 13:48:07.301868 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f7716dbd-8885-4d9d-b94e-6f0deb73e11e","Type":"ContainerDied","Data":"c1bcb80aca61a9852c30cf183459ab2c565862e987cdec666fedc41c49258b75"} Oct 09 13:48:12 crc kubenswrapper[4762]: I1009 13:48:12.936774 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 09 13:48:13 crc kubenswrapper[4762]: I1009 13:48:13.078828 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f7716dbd-8885-4d9d-b94e-6f0deb73e11e-sg-core-conf-yaml\") pod \"f7716dbd-8885-4d9d-b94e-6f0deb73e11e\" (UID: \"f7716dbd-8885-4d9d-b94e-6f0deb73e11e\") " Oct 09 13:48:13 crc kubenswrapper[4762]: I1009 13:48:13.078938 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f7716dbd-8885-4d9d-b94e-6f0deb73e11e-run-httpd\") pod \"f7716dbd-8885-4d9d-b94e-6f0deb73e11e\" (UID: \"f7716dbd-8885-4d9d-b94e-6f0deb73e11e\") " Oct 09 13:48:13 crc kubenswrapper[4762]: I1009 13:48:13.078990 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f7716dbd-8885-4d9d-b94e-6f0deb73e11e-scripts\") pod \"f7716dbd-8885-4d9d-b94e-6f0deb73e11e\" (UID: \"f7716dbd-8885-4d9d-b94e-6f0deb73e11e\") " Oct 09 13:48:13 crc kubenswrapper[4762]: I1009 13:48:13.079135 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f7716dbd-8885-4d9d-b94e-6f0deb73e11e-config-data\") pod \"f7716dbd-8885-4d9d-b94e-6f0deb73e11e\" (UID: \"f7716dbd-8885-4d9d-b94e-6f0deb73e11e\") " Oct 09 13:48:13 crc kubenswrapper[4762]: I1009 13:48:13.079169 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7716dbd-8885-4d9d-b94e-6f0deb73e11e-combined-ca-bundle\") pod \"f7716dbd-8885-4d9d-b94e-6f0deb73e11e\" (UID: \"f7716dbd-8885-4d9d-b94e-6f0deb73e11e\") " Oct 09 13:48:13 crc kubenswrapper[4762]: I1009 13:48:13.079256 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lm7ts\" (UniqueName: \"kubernetes.io/projected/f7716dbd-8885-4d9d-b94e-6f0deb73e11e-kube-api-access-lm7ts\") pod \"f7716dbd-8885-4d9d-b94e-6f0deb73e11e\" (UID: \"f7716dbd-8885-4d9d-b94e-6f0deb73e11e\") " Oct 09 13:48:13 crc kubenswrapper[4762]: I1009 13:48:13.079336 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f7716dbd-8885-4d9d-b94e-6f0deb73e11e-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "f7716dbd-8885-4d9d-b94e-6f0deb73e11e" (UID: "f7716dbd-8885-4d9d-b94e-6f0deb73e11e"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 13:48:13 crc kubenswrapper[4762]: I1009 13:48:13.079987 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f7716dbd-8885-4d9d-b94e-6f0deb73e11e-log-httpd\") pod \"f7716dbd-8885-4d9d-b94e-6f0deb73e11e\" (UID: \"f7716dbd-8885-4d9d-b94e-6f0deb73e11e\") " Oct 09 13:48:13 crc kubenswrapper[4762]: I1009 13:48:13.080658 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f7716dbd-8885-4d9d-b94e-6f0deb73e11e-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "f7716dbd-8885-4d9d-b94e-6f0deb73e11e" (UID: "f7716dbd-8885-4d9d-b94e-6f0deb73e11e"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 13:48:13 crc kubenswrapper[4762]: I1009 13:48:13.081128 4762 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f7716dbd-8885-4d9d-b94e-6f0deb73e11e-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 09 13:48:13 crc kubenswrapper[4762]: I1009 13:48:13.081159 4762 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f7716dbd-8885-4d9d-b94e-6f0deb73e11e-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 09 13:48:13 crc kubenswrapper[4762]: I1009 13:48:13.084314 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f7716dbd-8885-4d9d-b94e-6f0deb73e11e-scripts" (OuterVolumeSpecName: "scripts") pod "f7716dbd-8885-4d9d-b94e-6f0deb73e11e" (UID: "f7716dbd-8885-4d9d-b94e-6f0deb73e11e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:48:13 crc kubenswrapper[4762]: I1009 13:48:13.085835 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f7716dbd-8885-4d9d-b94e-6f0deb73e11e-kube-api-access-lm7ts" (OuterVolumeSpecName: "kube-api-access-lm7ts") pod "f7716dbd-8885-4d9d-b94e-6f0deb73e11e" (UID: "f7716dbd-8885-4d9d-b94e-6f0deb73e11e"). InnerVolumeSpecName "kube-api-access-lm7ts". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:48:13 crc kubenswrapper[4762]: I1009 13:48:13.112881 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f7716dbd-8885-4d9d-b94e-6f0deb73e11e-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "f7716dbd-8885-4d9d-b94e-6f0deb73e11e" (UID: "f7716dbd-8885-4d9d-b94e-6f0deb73e11e"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:48:13 crc kubenswrapper[4762]: I1009 13:48:13.167162 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f7716dbd-8885-4d9d-b94e-6f0deb73e11e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f7716dbd-8885-4d9d-b94e-6f0deb73e11e" (UID: "f7716dbd-8885-4d9d-b94e-6f0deb73e11e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:48:13 crc kubenswrapper[4762]: I1009 13:48:13.173369 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f7716dbd-8885-4d9d-b94e-6f0deb73e11e-config-data" (OuterVolumeSpecName: "config-data") pod "f7716dbd-8885-4d9d-b94e-6f0deb73e11e" (UID: "f7716dbd-8885-4d9d-b94e-6f0deb73e11e"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:48:13 crc kubenswrapper[4762]: I1009 13:48:13.182481 4762 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f7716dbd-8885-4d9d-b94e-6f0deb73e11e-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 09 13:48:13 crc kubenswrapper[4762]: I1009 13:48:13.182566 4762 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f7716dbd-8885-4d9d-b94e-6f0deb73e11e-scripts\") on node \"crc\" DevicePath \"\"" Oct 09 13:48:13 crc kubenswrapper[4762]: I1009 13:48:13.182582 4762 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f7716dbd-8885-4d9d-b94e-6f0deb73e11e-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 13:48:13 crc kubenswrapper[4762]: I1009 13:48:13.182594 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7716dbd-8885-4d9d-b94e-6f0deb73e11e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 13:48:13 crc kubenswrapper[4762]: I1009 13:48:13.182611 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lm7ts\" (UniqueName: \"kubernetes.io/projected/f7716dbd-8885-4d9d-b94e-6f0deb73e11e-kube-api-access-lm7ts\") on node \"crc\" DevicePath \"\"" Oct 09 13:48:13 crc kubenswrapper[4762]: I1009 13:48:13.378414 4762 generic.go:334] "Generic (PLEG): container finished" podID="f7716dbd-8885-4d9d-b94e-6f0deb73e11e" containerID="f4aade3578beae9c6962a47e24fa5bb75e253b2b2f38d24fb82263549e048c70" exitCode=0 Oct 09 13:48:13 crc kubenswrapper[4762]: I1009 13:48:13.378803 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f7716dbd-8885-4d9d-b94e-6f0deb73e11e","Type":"ContainerDied","Data":"f4aade3578beae9c6962a47e24fa5bb75e253b2b2f38d24fb82263549e048c70"} Oct 09 13:48:13 crc kubenswrapper[4762]: I1009 13:48:13.378785 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 09 13:48:13 crc kubenswrapper[4762]: I1009 13:48:13.378878 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f7716dbd-8885-4d9d-b94e-6f0deb73e11e","Type":"ContainerDied","Data":"4db3597dda824f37f8887e7fc095efff332f25eeb203f278611c502490016498"} Oct 09 13:48:13 crc kubenswrapper[4762]: I1009 13:48:13.378902 4762 scope.go:117] "RemoveContainer" containerID="b63f084a58f579855dc0bb699f749bbb868e82f32bf04da2d704d4e17f63aa58" Oct 09 13:48:13 crc kubenswrapper[4762]: I1009 13:48:13.398734 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-vtlrb" event={"ID":"d8a82ea9-76b8-467c-970d-38cd752458bf","Type":"ContainerStarted","Data":"a0e83bcda39754cb99b6d3b5627fe4751f64cf85b83b8ef9245ca2828a780a9f"} Oct 09 13:48:13 crc kubenswrapper[4762]: I1009 13:48:13.417821 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-vtlrb" podStartSLOduration=2.77981115 podStartE2EDuration="11.417798903s" podCreationTimestamp="2025-10-09 13:48:02 +0000 UTC" firstStartedPulling="2025-10-09 13:48:03.704852476 +0000 UTC m=+1359.478643515" lastFinishedPulling="2025-10-09 13:48:12.342840229 +0000 UTC m=+1368.116631268" observedRunningTime="2025-10-09 13:48:13.416051068 +0000 UTC m=+1369.189842127" watchObservedRunningTime="2025-10-09 13:48:13.417798903 +0000 UTC m=+1369.191589942" Oct 09 13:48:13 crc kubenswrapper[4762]: I1009 13:48:13.452986 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 09 13:48:13 crc kubenswrapper[4762]: I1009 13:48:13.464356 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 09 13:48:13 crc kubenswrapper[4762]: I1009 13:48:13.465156 4762 scope.go:117] "RemoveContainer" containerID="ba3a39794b5322469a790dec47f2496c20bce2a8a0c678b23526cf2965f9e53d" Oct 09 13:48:13 crc kubenswrapper[4762]: I1009 13:48:13.476024 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 09 13:48:13 crc kubenswrapper[4762]: E1009 13:48:13.476403 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f7716dbd-8885-4d9d-b94e-6f0deb73e11e" containerName="ceilometer-notification-agent" Oct 09 13:48:13 crc kubenswrapper[4762]: I1009 13:48:13.476417 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7716dbd-8885-4d9d-b94e-6f0deb73e11e" containerName="ceilometer-notification-agent" Oct 09 13:48:13 crc kubenswrapper[4762]: E1009 13:48:13.476437 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f7716dbd-8885-4d9d-b94e-6f0deb73e11e" containerName="ceilometer-central-agent" Oct 09 13:48:13 crc kubenswrapper[4762]: I1009 13:48:13.476444 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7716dbd-8885-4d9d-b94e-6f0deb73e11e" containerName="ceilometer-central-agent" Oct 09 13:48:13 crc kubenswrapper[4762]: E1009 13:48:13.476470 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f7716dbd-8885-4d9d-b94e-6f0deb73e11e" containerName="sg-core" Oct 09 13:48:13 crc kubenswrapper[4762]: I1009 13:48:13.476476 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7716dbd-8885-4d9d-b94e-6f0deb73e11e" containerName="sg-core" Oct 09 13:48:13 crc kubenswrapper[4762]: E1009 13:48:13.476485 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f7716dbd-8885-4d9d-b94e-6f0deb73e11e" containerName="proxy-httpd" Oct 09 13:48:13 crc 
kubenswrapper[4762]: I1009 13:48:13.476490 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7716dbd-8885-4d9d-b94e-6f0deb73e11e" containerName="proxy-httpd" Oct 09 13:48:13 crc kubenswrapper[4762]: I1009 13:48:13.476690 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="f7716dbd-8885-4d9d-b94e-6f0deb73e11e" containerName="ceilometer-notification-agent" Oct 09 13:48:13 crc kubenswrapper[4762]: I1009 13:48:13.476700 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="f7716dbd-8885-4d9d-b94e-6f0deb73e11e" containerName="sg-core" Oct 09 13:48:13 crc kubenswrapper[4762]: I1009 13:48:13.476709 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="f7716dbd-8885-4d9d-b94e-6f0deb73e11e" containerName="proxy-httpd" Oct 09 13:48:13 crc kubenswrapper[4762]: I1009 13:48:13.476717 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="f7716dbd-8885-4d9d-b94e-6f0deb73e11e" containerName="ceilometer-central-agent" Oct 09 13:48:13 crc kubenswrapper[4762]: I1009 13:48:13.478356 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 09 13:48:13 crc kubenswrapper[4762]: I1009 13:48:13.480988 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 09 13:48:13 crc kubenswrapper[4762]: I1009 13:48:13.486133 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 09 13:48:13 crc kubenswrapper[4762]: I1009 13:48:13.495515 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 09 13:48:13 crc kubenswrapper[4762]: I1009 13:48:13.531373 4762 scope.go:117] "RemoveContainer" containerID="c1bcb80aca61a9852c30cf183459ab2c565862e987cdec666fedc41c49258b75" Oct 09 13:48:13 crc kubenswrapper[4762]: I1009 13:48:13.551411 4762 scope.go:117] "RemoveContainer" containerID="f4aade3578beae9c6962a47e24fa5bb75e253b2b2f38d24fb82263549e048c70" Oct 09 13:48:13 crc kubenswrapper[4762]: I1009 13:48:13.572490 4762 scope.go:117] "RemoveContainer" containerID="b63f084a58f579855dc0bb699f749bbb868e82f32bf04da2d704d4e17f63aa58" Oct 09 13:48:13 crc kubenswrapper[4762]: E1009 13:48:13.573028 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b63f084a58f579855dc0bb699f749bbb868e82f32bf04da2d704d4e17f63aa58\": container with ID starting with b63f084a58f579855dc0bb699f749bbb868e82f32bf04da2d704d4e17f63aa58 not found: ID does not exist" containerID="b63f084a58f579855dc0bb699f749bbb868e82f32bf04da2d704d4e17f63aa58" Oct 09 13:48:13 crc kubenswrapper[4762]: I1009 13:48:13.573075 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b63f084a58f579855dc0bb699f749bbb868e82f32bf04da2d704d4e17f63aa58"} err="failed to get container status \"b63f084a58f579855dc0bb699f749bbb868e82f32bf04da2d704d4e17f63aa58\": rpc error: code = NotFound desc = could not find container \"b63f084a58f579855dc0bb699f749bbb868e82f32bf04da2d704d4e17f63aa58\": container with ID starting with b63f084a58f579855dc0bb699f749bbb868e82f32bf04da2d704d4e17f63aa58 not found: ID does not exist" Oct 09 13:48:13 crc kubenswrapper[4762]: I1009 13:48:13.573106 4762 scope.go:117] "RemoveContainer" containerID="ba3a39794b5322469a790dec47f2496c20bce2a8a0c678b23526cf2965f9e53d" Oct 09 13:48:13 crc kubenswrapper[4762]: E1009 13:48:13.573752 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: 
code = NotFound desc = could not find container \"ba3a39794b5322469a790dec47f2496c20bce2a8a0c678b23526cf2965f9e53d\": container with ID starting with ba3a39794b5322469a790dec47f2496c20bce2a8a0c678b23526cf2965f9e53d not found: ID does not exist" containerID="ba3a39794b5322469a790dec47f2496c20bce2a8a0c678b23526cf2965f9e53d" Oct 09 13:48:13 crc kubenswrapper[4762]: I1009 13:48:13.573822 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ba3a39794b5322469a790dec47f2496c20bce2a8a0c678b23526cf2965f9e53d"} err="failed to get container status \"ba3a39794b5322469a790dec47f2496c20bce2a8a0c678b23526cf2965f9e53d\": rpc error: code = NotFound desc = could not find container \"ba3a39794b5322469a790dec47f2496c20bce2a8a0c678b23526cf2965f9e53d\": container with ID starting with ba3a39794b5322469a790dec47f2496c20bce2a8a0c678b23526cf2965f9e53d not found: ID does not exist" Oct 09 13:48:13 crc kubenswrapper[4762]: I1009 13:48:13.573876 4762 scope.go:117] "RemoveContainer" containerID="c1bcb80aca61a9852c30cf183459ab2c565862e987cdec666fedc41c49258b75" Oct 09 13:48:13 crc kubenswrapper[4762]: E1009 13:48:13.574240 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c1bcb80aca61a9852c30cf183459ab2c565862e987cdec666fedc41c49258b75\": container with ID starting with c1bcb80aca61a9852c30cf183459ab2c565862e987cdec666fedc41c49258b75 not found: ID does not exist" containerID="c1bcb80aca61a9852c30cf183459ab2c565862e987cdec666fedc41c49258b75" Oct 09 13:48:13 crc kubenswrapper[4762]: I1009 13:48:13.574277 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c1bcb80aca61a9852c30cf183459ab2c565862e987cdec666fedc41c49258b75"} err="failed to get container status \"c1bcb80aca61a9852c30cf183459ab2c565862e987cdec666fedc41c49258b75\": rpc error: code = NotFound desc = could not find container \"c1bcb80aca61a9852c30cf183459ab2c565862e987cdec666fedc41c49258b75\": container with ID starting with c1bcb80aca61a9852c30cf183459ab2c565862e987cdec666fedc41c49258b75 not found: ID does not exist" Oct 09 13:48:13 crc kubenswrapper[4762]: I1009 13:48:13.574309 4762 scope.go:117] "RemoveContainer" containerID="f4aade3578beae9c6962a47e24fa5bb75e253b2b2f38d24fb82263549e048c70" Oct 09 13:48:13 crc kubenswrapper[4762]: E1009 13:48:13.575208 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f4aade3578beae9c6962a47e24fa5bb75e253b2b2f38d24fb82263549e048c70\": container with ID starting with f4aade3578beae9c6962a47e24fa5bb75e253b2b2f38d24fb82263549e048c70 not found: ID does not exist" containerID="f4aade3578beae9c6962a47e24fa5bb75e253b2b2f38d24fb82263549e048c70" Oct 09 13:48:13 crc kubenswrapper[4762]: I1009 13:48:13.575239 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f4aade3578beae9c6962a47e24fa5bb75e253b2b2f38d24fb82263549e048c70"} err="failed to get container status \"f4aade3578beae9c6962a47e24fa5bb75e253b2b2f38d24fb82263549e048c70\": rpc error: code = NotFound desc = could not find container \"f4aade3578beae9c6962a47e24fa5bb75e253b2b2f38d24fb82263549e048c70\": container with ID starting with f4aade3578beae9c6962a47e24fa5bb75e253b2b2f38d24fb82263549e048c70 not found: ID does not exist" Oct 09 13:48:13 crc kubenswrapper[4762]: I1009 13:48:13.600531 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/81246bf0-7924-4dab-9b12-34318201ee1b-run-httpd\") pod \"ceilometer-0\" (UID: \"81246bf0-7924-4dab-9b12-34318201ee1b\") " pod="openstack/ceilometer-0" Oct 09 13:48:13 crc kubenswrapper[4762]: I1009 13:48:13.600620 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/81246bf0-7924-4dab-9b12-34318201ee1b-config-data\") pod \"ceilometer-0\" (UID: \"81246bf0-7924-4dab-9b12-34318201ee1b\") " pod="openstack/ceilometer-0" Oct 09 13:48:13 crc kubenswrapper[4762]: I1009 13:48:13.600712 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/81246bf0-7924-4dab-9b12-34318201ee1b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"81246bf0-7924-4dab-9b12-34318201ee1b\") " pod="openstack/ceilometer-0" Oct 09 13:48:13 crc kubenswrapper[4762]: I1009 13:48:13.600754 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/81246bf0-7924-4dab-9b12-34318201ee1b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"81246bf0-7924-4dab-9b12-34318201ee1b\") " pod="openstack/ceilometer-0" Oct 09 13:48:13 crc kubenswrapper[4762]: I1009 13:48:13.600786 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/81246bf0-7924-4dab-9b12-34318201ee1b-scripts\") pod \"ceilometer-0\" (UID: \"81246bf0-7924-4dab-9b12-34318201ee1b\") " pod="openstack/ceilometer-0" Oct 09 13:48:13 crc kubenswrapper[4762]: I1009 13:48:13.601027 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fpxqt\" (UniqueName: \"kubernetes.io/projected/81246bf0-7924-4dab-9b12-34318201ee1b-kube-api-access-fpxqt\") pod \"ceilometer-0\" (UID: \"81246bf0-7924-4dab-9b12-34318201ee1b\") " pod="openstack/ceilometer-0" Oct 09 13:48:13 crc kubenswrapper[4762]: I1009 13:48:13.601085 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/81246bf0-7924-4dab-9b12-34318201ee1b-log-httpd\") pod \"ceilometer-0\" (UID: \"81246bf0-7924-4dab-9b12-34318201ee1b\") " pod="openstack/ceilometer-0" Oct 09 13:48:13 crc kubenswrapper[4762]: I1009 13:48:13.702848 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/81246bf0-7924-4dab-9b12-34318201ee1b-config-data\") pod \"ceilometer-0\" (UID: \"81246bf0-7924-4dab-9b12-34318201ee1b\") " pod="openstack/ceilometer-0" Oct 09 13:48:13 crc kubenswrapper[4762]: I1009 13:48:13.702991 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/81246bf0-7924-4dab-9b12-34318201ee1b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"81246bf0-7924-4dab-9b12-34318201ee1b\") " pod="openstack/ceilometer-0" Oct 09 13:48:13 crc kubenswrapper[4762]: I1009 13:48:13.703035 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/81246bf0-7924-4dab-9b12-34318201ee1b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"81246bf0-7924-4dab-9b12-34318201ee1b\") " pod="openstack/ceilometer-0" Oct 09 13:48:13 crc 
kubenswrapper[4762]: I1009 13:48:13.703066 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/81246bf0-7924-4dab-9b12-34318201ee1b-scripts\") pod \"ceilometer-0\" (UID: \"81246bf0-7924-4dab-9b12-34318201ee1b\") " pod="openstack/ceilometer-0" Oct 09 13:48:13 crc kubenswrapper[4762]: I1009 13:48:13.703148 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fpxqt\" (UniqueName: \"kubernetes.io/projected/81246bf0-7924-4dab-9b12-34318201ee1b-kube-api-access-fpxqt\") pod \"ceilometer-0\" (UID: \"81246bf0-7924-4dab-9b12-34318201ee1b\") " pod="openstack/ceilometer-0" Oct 09 13:48:13 crc kubenswrapper[4762]: I1009 13:48:13.703178 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/81246bf0-7924-4dab-9b12-34318201ee1b-log-httpd\") pod \"ceilometer-0\" (UID: \"81246bf0-7924-4dab-9b12-34318201ee1b\") " pod="openstack/ceilometer-0" Oct 09 13:48:13 crc kubenswrapper[4762]: I1009 13:48:13.703703 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/81246bf0-7924-4dab-9b12-34318201ee1b-run-httpd\") pod \"ceilometer-0\" (UID: \"81246bf0-7924-4dab-9b12-34318201ee1b\") " pod="openstack/ceilometer-0" Oct 09 13:48:13 crc kubenswrapper[4762]: I1009 13:48:13.703850 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/81246bf0-7924-4dab-9b12-34318201ee1b-log-httpd\") pod \"ceilometer-0\" (UID: \"81246bf0-7924-4dab-9b12-34318201ee1b\") " pod="openstack/ceilometer-0" Oct 09 13:48:13 crc kubenswrapper[4762]: I1009 13:48:13.704013 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/81246bf0-7924-4dab-9b12-34318201ee1b-run-httpd\") pod \"ceilometer-0\" (UID: \"81246bf0-7924-4dab-9b12-34318201ee1b\") " pod="openstack/ceilometer-0" Oct 09 13:48:13 crc kubenswrapper[4762]: I1009 13:48:13.706994 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/81246bf0-7924-4dab-9b12-34318201ee1b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"81246bf0-7924-4dab-9b12-34318201ee1b\") " pod="openstack/ceilometer-0" Oct 09 13:48:13 crc kubenswrapper[4762]: I1009 13:48:13.707200 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/81246bf0-7924-4dab-9b12-34318201ee1b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"81246bf0-7924-4dab-9b12-34318201ee1b\") " pod="openstack/ceilometer-0" Oct 09 13:48:13 crc kubenswrapper[4762]: I1009 13:48:13.707438 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/81246bf0-7924-4dab-9b12-34318201ee1b-scripts\") pod \"ceilometer-0\" (UID: \"81246bf0-7924-4dab-9b12-34318201ee1b\") " pod="openstack/ceilometer-0" Oct 09 13:48:13 crc kubenswrapper[4762]: I1009 13:48:13.717662 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/81246bf0-7924-4dab-9b12-34318201ee1b-config-data\") pod \"ceilometer-0\" (UID: \"81246bf0-7924-4dab-9b12-34318201ee1b\") " pod="openstack/ceilometer-0" Oct 09 13:48:13 crc kubenswrapper[4762]: I1009 13:48:13.726433 4762 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-fpxqt\" (UniqueName: \"kubernetes.io/projected/81246bf0-7924-4dab-9b12-34318201ee1b-kube-api-access-fpxqt\") pod \"ceilometer-0\" (UID: \"81246bf0-7924-4dab-9b12-34318201ee1b\") " pod="openstack/ceilometer-0" Oct 09 13:48:13 crc kubenswrapper[4762]: I1009 13:48:13.816863 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 09 13:48:14 crc kubenswrapper[4762]: I1009 13:48:14.297652 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 09 13:48:14 crc kubenswrapper[4762]: W1009 13:48:14.300811 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod81246bf0_7924_4dab_9b12_34318201ee1b.slice/crio-fc169d5873b6790f9fde5a2e59028a459160af62f26039cfbd598159e34c86d2 WatchSource:0}: Error finding container fc169d5873b6790f9fde5a2e59028a459160af62f26039cfbd598159e34c86d2: Status 404 returned error can't find the container with id fc169d5873b6790f9fde5a2e59028a459160af62f26039cfbd598159e34c86d2 Oct 09 13:48:14 crc kubenswrapper[4762]: I1009 13:48:14.434142 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"81246bf0-7924-4dab-9b12-34318201ee1b","Type":"ContainerStarted","Data":"fc169d5873b6790f9fde5a2e59028a459160af62f26039cfbd598159e34c86d2"} Oct 09 13:48:14 crc kubenswrapper[4762]: I1009 13:48:14.978798 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f7716dbd-8885-4d9d-b94e-6f0deb73e11e" path="/var/lib/kubelet/pods/f7716dbd-8885-4d9d-b94e-6f0deb73e11e/volumes" Oct 09 13:48:16 crc kubenswrapper[4762]: I1009 13:48:16.456059 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"81246bf0-7924-4dab-9b12-34318201ee1b","Type":"ContainerStarted","Data":"ddff5505cdaae7406498b36dc6a33cc805fd78d6db7091f2671121f93f3fc2fe"} Oct 09 13:48:17 crc kubenswrapper[4762]: I1009 13:48:17.467517 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"81246bf0-7924-4dab-9b12-34318201ee1b","Type":"ContainerStarted","Data":"7b622c4e8be4b3ebfc65fd480d88050cae421d3d093bbb796ba6f721cff95afa"} Oct 09 13:48:17 crc kubenswrapper[4762]: I1009 13:48:17.467796 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"81246bf0-7924-4dab-9b12-34318201ee1b","Type":"ContainerStarted","Data":"778adf9f17dd022719c8e858cb57b87bacfb7d7375bbf9009b0c47bc70ff2f1a"} Oct 09 13:48:18 crc kubenswrapper[4762]: I1009 13:48:18.268097 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 09 13:48:19 crc kubenswrapper[4762]: I1009 13:48:19.491918 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"81246bf0-7924-4dab-9b12-34318201ee1b","Type":"ContainerStarted","Data":"f8c922f4632f69ffcad6fab1ab4905f77e30900de416b42d4c41ad62567aa7c1"} Oct 09 13:48:19 crc kubenswrapper[4762]: I1009 13:48:19.492486 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="81246bf0-7924-4dab-9b12-34318201ee1b" containerName="ceilometer-notification-agent" containerID="cri-o://778adf9f17dd022719c8e858cb57b87bacfb7d7375bbf9009b0c47bc70ff2f1a" gracePeriod=30 Oct 09 13:48:19 crc kubenswrapper[4762]: I1009 13:48:19.492387 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" 
podUID="81246bf0-7924-4dab-9b12-34318201ee1b" containerName="sg-core" containerID="cri-o://7b622c4e8be4b3ebfc65fd480d88050cae421d3d093bbb796ba6f721cff95afa" gracePeriod=30 Oct 09 13:48:19 crc kubenswrapper[4762]: I1009 13:48:19.492474 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="81246bf0-7924-4dab-9b12-34318201ee1b" containerName="proxy-httpd" containerID="cri-o://f8c922f4632f69ffcad6fab1ab4905f77e30900de416b42d4c41ad62567aa7c1" gracePeriod=30 Oct 09 13:48:19 crc kubenswrapper[4762]: I1009 13:48:19.492252 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="81246bf0-7924-4dab-9b12-34318201ee1b" containerName="ceilometer-central-agent" containerID="cri-o://ddff5505cdaae7406498b36dc6a33cc805fd78d6db7091f2671121f93f3fc2fe" gracePeriod=30 Oct 09 13:48:19 crc kubenswrapper[4762]: I1009 13:48:19.492812 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 09 13:48:19 crc kubenswrapper[4762]: I1009 13:48:19.528026 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.7948922729999999 podStartE2EDuration="6.528003904s" podCreationTimestamp="2025-10-09 13:48:13 +0000 UTC" firstStartedPulling="2025-10-09 13:48:14.303897953 +0000 UTC m=+1370.077688992" lastFinishedPulling="2025-10-09 13:48:19.037009584 +0000 UTC m=+1374.810800623" observedRunningTime="2025-10-09 13:48:19.521214377 +0000 UTC m=+1375.295005416" watchObservedRunningTime="2025-10-09 13:48:19.528003904 +0000 UTC m=+1375.301794943" Oct 09 13:48:20 crc kubenswrapper[4762]: I1009 13:48:20.502976 4762 generic.go:334] "Generic (PLEG): container finished" podID="81246bf0-7924-4dab-9b12-34318201ee1b" containerID="f8c922f4632f69ffcad6fab1ab4905f77e30900de416b42d4c41ad62567aa7c1" exitCode=0 Oct 09 13:48:20 crc kubenswrapper[4762]: I1009 13:48:20.504167 4762 generic.go:334] "Generic (PLEG): container finished" podID="81246bf0-7924-4dab-9b12-34318201ee1b" containerID="7b622c4e8be4b3ebfc65fd480d88050cae421d3d093bbb796ba6f721cff95afa" exitCode=2 Oct 09 13:48:20 crc kubenswrapper[4762]: I1009 13:48:20.504268 4762 generic.go:334] "Generic (PLEG): container finished" podID="81246bf0-7924-4dab-9b12-34318201ee1b" containerID="778adf9f17dd022719c8e858cb57b87bacfb7d7375bbf9009b0c47bc70ff2f1a" exitCode=0 Oct 09 13:48:20 crc kubenswrapper[4762]: I1009 13:48:20.504374 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"81246bf0-7924-4dab-9b12-34318201ee1b","Type":"ContainerDied","Data":"f8c922f4632f69ffcad6fab1ab4905f77e30900de416b42d4c41ad62567aa7c1"} Oct 09 13:48:20 crc kubenswrapper[4762]: I1009 13:48:20.504539 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"81246bf0-7924-4dab-9b12-34318201ee1b","Type":"ContainerDied","Data":"7b622c4e8be4b3ebfc65fd480d88050cae421d3d093bbb796ba6f721cff95afa"} Oct 09 13:48:20 crc kubenswrapper[4762]: I1009 13:48:20.504646 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"81246bf0-7924-4dab-9b12-34318201ee1b","Type":"ContainerDied","Data":"778adf9f17dd022719c8e858cb57b87bacfb7d7375bbf9009b0c47bc70ff2f1a"} Oct 09 13:48:20 crc kubenswrapper[4762]: I1009 13:48:20.832386 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 09 13:48:20 crc kubenswrapper[4762]: I1009 13:48:20.939832 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/81246bf0-7924-4dab-9b12-34318201ee1b-sg-core-conf-yaml\") pod \"81246bf0-7924-4dab-9b12-34318201ee1b\" (UID: \"81246bf0-7924-4dab-9b12-34318201ee1b\") " Oct 09 13:48:20 crc kubenswrapper[4762]: I1009 13:48:20.939976 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/81246bf0-7924-4dab-9b12-34318201ee1b-scripts\") pod \"81246bf0-7924-4dab-9b12-34318201ee1b\" (UID: \"81246bf0-7924-4dab-9b12-34318201ee1b\") " Oct 09 13:48:20 crc kubenswrapper[4762]: I1009 13:48:20.940047 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/81246bf0-7924-4dab-9b12-34318201ee1b-combined-ca-bundle\") pod \"81246bf0-7924-4dab-9b12-34318201ee1b\" (UID: \"81246bf0-7924-4dab-9b12-34318201ee1b\") " Oct 09 13:48:20 crc kubenswrapper[4762]: I1009 13:48:20.940083 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fpxqt\" (UniqueName: \"kubernetes.io/projected/81246bf0-7924-4dab-9b12-34318201ee1b-kube-api-access-fpxqt\") pod \"81246bf0-7924-4dab-9b12-34318201ee1b\" (UID: \"81246bf0-7924-4dab-9b12-34318201ee1b\") " Oct 09 13:48:20 crc kubenswrapper[4762]: I1009 13:48:20.940175 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/81246bf0-7924-4dab-9b12-34318201ee1b-log-httpd\") pod \"81246bf0-7924-4dab-9b12-34318201ee1b\" (UID: \"81246bf0-7924-4dab-9b12-34318201ee1b\") " Oct 09 13:48:20 crc kubenswrapper[4762]: I1009 13:48:20.940219 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/81246bf0-7924-4dab-9b12-34318201ee1b-config-data\") pod \"81246bf0-7924-4dab-9b12-34318201ee1b\" (UID: \"81246bf0-7924-4dab-9b12-34318201ee1b\") " Oct 09 13:48:20 crc kubenswrapper[4762]: I1009 13:48:20.940242 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/81246bf0-7924-4dab-9b12-34318201ee1b-run-httpd\") pod \"81246bf0-7924-4dab-9b12-34318201ee1b\" (UID: \"81246bf0-7924-4dab-9b12-34318201ee1b\") " Oct 09 13:48:20 crc kubenswrapper[4762]: I1009 13:48:20.941115 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/81246bf0-7924-4dab-9b12-34318201ee1b-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "81246bf0-7924-4dab-9b12-34318201ee1b" (UID: "81246bf0-7924-4dab-9b12-34318201ee1b"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 13:48:20 crc kubenswrapper[4762]: I1009 13:48:20.941440 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/81246bf0-7924-4dab-9b12-34318201ee1b-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "81246bf0-7924-4dab-9b12-34318201ee1b" (UID: "81246bf0-7924-4dab-9b12-34318201ee1b"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 13:48:20 crc kubenswrapper[4762]: I1009 13:48:20.946070 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/81246bf0-7924-4dab-9b12-34318201ee1b-kube-api-access-fpxqt" (OuterVolumeSpecName: "kube-api-access-fpxqt") pod "81246bf0-7924-4dab-9b12-34318201ee1b" (UID: "81246bf0-7924-4dab-9b12-34318201ee1b"). InnerVolumeSpecName "kube-api-access-fpxqt". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:48:20 crc kubenswrapper[4762]: I1009 13:48:20.946388 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/81246bf0-7924-4dab-9b12-34318201ee1b-scripts" (OuterVolumeSpecName: "scripts") pod "81246bf0-7924-4dab-9b12-34318201ee1b" (UID: "81246bf0-7924-4dab-9b12-34318201ee1b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:48:20 crc kubenswrapper[4762]: I1009 13:48:20.967724 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/81246bf0-7924-4dab-9b12-34318201ee1b-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "81246bf0-7924-4dab-9b12-34318201ee1b" (UID: "81246bf0-7924-4dab-9b12-34318201ee1b"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:48:21 crc kubenswrapper[4762]: I1009 13:48:21.012283 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/81246bf0-7924-4dab-9b12-34318201ee1b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "81246bf0-7924-4dab-9b12-34318201ee1b" (UID: "81246bf0-7924-4dab-9b12-34318201ee1b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:48:21 crc kubenswrapper[4762]: I1009 13:48:21.042533 4762 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/81246bf0-7924-4dab-9b12-34318201ee1b-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 09 13:48:21 crc kubenswrapper[4762]: I1009 13:48:21.042571 4762 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/81246bf0-7924-4dab-9b12-34318201ee1b-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 09 13:48:21 crc kubenswrapper[4762]: I1009 13:48:21.042583 4762 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/81246bf0-7924-4dab-9b12-34318201ee1b-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 09 13:48:21 crc kubenswrapper[4762]: I1009 13:48:21.042597 4762 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/81246bf0-7924-4dab-9b12-34318201ee1b-scripts\") on node \"crc\" DevicePath \"\"" Oct 09 13:48:21 crc kubenswrapper[4762]: I1009 13:48:21.042609 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/81246bf0-7924-4dab-9b12-34318201ee1b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 13:48:21 crc kubenswrapper[4762]: I1009 13:48:21.042622 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fpxqt\" (UniqueName: \"kubernetes.io/projected/81246bf0-7924-4dab-9b12-34318201ee1b-kube-api-access-fpxqt\") on node \"crc\" DevicePath \"\"" Oct 09 13:48:21 crc kubenswrapper[4762]: I1009 13:48:21.043370 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for 
volume "kubernetes.io/secret/81246bf0-7924-4dab-9b12-34318201ee1b-config-data" (OuterVolumeSpecName: "config-data") pod "81246bf0-7924-4dab-9b12-34318201ee1b" (UID: "81246bf0-7924-4dab-9b12-34318201ee1b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:48:21 crc kubenswrapper[4762]: I1009 13:48:21.144719 4762 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/81246bf0-7924-4dab-9b12-34318201ee1b-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 13:48:21 crc kubenswrapper[4762]: I1009 13:48:21.516751 4762 generic.go:334] "Generic (PLEG): container finished" podID="81246bf0-7924-4dab-9b12-34318201ee1b" containerID="ddff5505cdaae7406498b36dc6a33cc805fd78d6db7091f2671121f93f3fc2fe" exitCode=0 Oct 09 13:48:21 crc kubenswrapper[4762]: I1009 13:48:21.516797 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"81246bf0-7924-4dab-9b12-34318201ee1b","Type":"ContainerDied","Data":"ddff5505cdaae7406498b36dc6a33cc805fd78d6db7091f2671121f93f3fc2fe"} Oct 09 13:48:21 crc kubenswrapper[4762]: I1009 13:48:21.516816 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 09 13:48:21 crc kubenswrapper[4762]: I1009 13:48:21.516836 4762 scope.go:117] "RemoveContainer" containerID="f8c922f4632f69ffcad6fab1ab4905f77e30900de416b42d4c41ad62567aa7c1" Oct 09 13:48:21 crc kubenswrapper[4762]: I1009 13:48:21.516824 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"81246bf0-7924-4dab-9b12-34318201ee1b","Type":"ContainerDied","Data":"fc169d5873b6790f9fde5a2e59028a459160af62f26039cfbd598159e34c86d2"} Oct 09 13:48:21 crc kubenswrapper[4762]: I1009 13:48:21.538389 4762 scope.go:117] "RemoveContainer" containerID="7b622c4e8be4b3ebfc65fd480d88050cae421d3d093bbb796ba6f721cff95afa" Oct 09 13:48:21 crc kubenswrapper[4762]: I1009 13:48:21.564331 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 09 13:48:21 crc kubenswrapper[4762]: I1009 13:48:21.567094 4762 scope.go:117] "RemoveContainer" containerID="778adf9f17dd022719c8e858cb57b87bacfb7d7375bbf9009b0c47bc70ff2f1a" Oct 09 13:48:21 crc kubenswrapper[4762]: I1009 13:48:21.582827 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 09 13:48:21 crc kubenswrapper[4762]: I1009 13:48:21.595502 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 09 13:48:21 crc kubenswrapper[4762]: E1009 13:48:21.595879 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81246bf0-7924-4dab-9b12-34318201ee1b" containerName="ceilometer-notification-agent" Oct 09 13:48:21 crc kubenswrapper[4762]: I1009 13:48:21.595895 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="81246bf0-7924-4dab-9b12-34318201ee1b" containerName="ceilometer-notification-agent" Oct 09 13:48:21 crc kubenswrapper[4762]: E1009 13:48:21.595920 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81246bf0-7924-4dab-9b12-34318201ee1b" containerName="proxy-httpd" Oct 09 13:48:21 crc kubenswrapper[4762]: I1009 13:48:21.595926 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="81246bf0-7924-4dab-9b12-34318201ee1b" containerName="proxy-httpd" Oct 09 13:48:21 crc kubenswrapper[4762]: E1009 13:48:21.595942 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81246bf0-7924-4dab-9b12-34318201ee1b" 
containerName="sg-core" Oct 09 13:48:21 crc kubenswrapper[4762]: I1009 13:48:21.595948 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="81246bf0-7924-4dab-9b12-34318201ee1b" containerName="sg-core" Oct 09 13:48:21 crc kubenswrapper[4762]: E1009 13:48:21.595963 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81246bf0-7924-4dab-9b12-34318201ee1b" containerName="ceilometer-central-agent" Oct 09 13:48:21 crc kubenswrapper[4762]: I1009 13:48:21.595968 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="81246bf0-7924-4dab-9b12-34318201ee1b" containerName="ceilometer-central-agent" Oct 09 13:48:21 crc kubenswrapper[4762]: I1009 13:48:21.596109 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="81246bf0-7924-4dab-9b12-34318201ee1b" containerName="proxy-httpd" Oct 09 13:48:21 crc kubenswrapper[4762]: I1009 13:48:21.596126 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="81246bf0-7924-4dab-9b12-34318201ee1b" containerName="ceilometer-central-agent" Oct 09 13:48:21 crc kubenswrapper[4762]: I1009 13:48:21.596136 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="81246bf0-7924-4dab-9b12-34318201ee1b" containerName="sg-core" Oct 09 13:48:21 crc kubenswrapper[4762]: I1009 13:48:21.596153 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="81246bf0-7924-4dab-9b12-34318201ee1b" containerName="ceilometer-notification-agent" Oct 09 13:48:21 crc kubenswrapper[4762]: I1009 13:48:21.596429 4762 scope.go:117] "RemoveContainer" containerID="ddff5505cdaae7406498b36dc6a33cc805fd78d6db7091f2671121f93f3fc2fe" Oct 09 13:48:21 crc kubenswrapper[4762]: I1009 13:48:21.597931 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 09 13:48:21 crc kubenswrapper[4762]: I1009 13:48:21.604186 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 09 13:48:21 crc kubenswrapper[4762]: I1009 13:48:21.604435 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 09 13:48:21 crc kubenswrapper[4762]: I1009 13:48:21.613753 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 09 13:48:21 crc kubenswrapper[4762]: I1009 13:48:21.634940 4762 scope.go:117] "RemoveContainer" containerID="f8c922f4632f69ffcad6fab1ab4905f77e30900de416b42d4c41ad62567aa7c1" Oct 09 13:48:21 crc kubenswrapper[4762]: E1009 13:48:21.639428 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f8c922f4632f69ffcad6fab1ab4905f77e30900de416b42d4c41ad62567aa7c1\": container with ID starting with f8c922f4632f69ffcad6fab1ab4905f77e30900de416b42d4c41ad62567aa7c1 not found: ID does not exist" containerID="f8c922f4632f69ffcad6fab1ab4905f77e30900de416b42d4c41ad62567aa7c1" Oct 09 13:48:21 crc kubenswrapper[4762]: I1009 13:48:21.639479 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f8c922f4632f69ffcad6fab1ab4905f77e30900de416b42d4c41ad62567aa7c1"} err="failed to get container status \"f8c922f4632f69ffcad6fab1ab4905f77e30900de416b42d4c41ad62567aa7c1\": rpc error: code = NotFound desc = could not find container \"f8c922f4632f69ffcad6fab1ab4905f77e30900de416b42d4c41ad62567aa7c1\": container with ID starting with f8c922f4632f69ffcad6fab1ab4905f77e30900de416b42d4c41ad62567aa7c1 not found: ID does not exist" Oct 09 13:48:21 crc kubenswrapper[4762]: I1009 
Oct 09 13:48:21 crc kubenswrapper[4762]: I1009 13:48:21.639507 4762 scope.go:117] "RemoveContainer" containerID="7b622c4e8be4b3ebfc65fd480d88050cae421d3d093bbb796ba6f721cff95afa"
Oct 09 13:48:21 crc kubenswrapper[4762]: E1009 13:48:21.643120 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7b622c4e8be4b3ebfc65fd480d88050cae421d3d093bbb796ba6f721cff95afa\": container with ID starting with 7b622c4e8be4b3ebfc65fd480d88050cae421d3d093bbb796ba6f721cff95afa not found: ID does not exist" containerID="7b622c4e8be4b3ebfc65fd480d88050cae421d3d093bbb796ba6f721cff95afa"
Oct 09 13:48:21 crc kubenswrapper[4762]: I1009 13:48:21.643182 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7b622c4e8be4b3ebfc65fd480d88050cae421d3d093bbb796ba6f721cff95afa"} err="failed to get container status \"7b622c4e8be4b3ebfc65fd480d88050cae421d3d093bbb796ba6f721cff95afa\": rpc error: code = NotFound desc = could not find container \"7b622c4e8be4b3ebfc65fd480d88050cae421d3d093bbb796ba6f721cff95afa\": container with ID starting with 7b622c4e8be4b3ebfc65fd480d88050cae421d3d093bbb796ba6f721cff95afa not found: ID does not exist"
Oct 09 13:48:21 crc kubenswrapper[4762]: I1009 13:48:21.643220 4762 scope.go:117] "RemoveContainer" containerID="778adf9f17dd022719c8e858cb57b87bacfb7d7375bbf9009b0c47bc70ff2f1a"
Oct 09 13:48:21 crc kubenswrapper[4762]: E1009 13:48:21.643780 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"778adf9f17dd022719c8e858cb57b87bacfb7d7375bbf9009b0c47bc70ff2f1a\": container with ID starting with 778adf9f17dd022719c8e858cb57b87bacfb7d7375bbf9009b0c47bc70ff2f1a not found: ID does not exist" containerID="778adf9f17dd022719c8e858cb57b87bacfb7d7375bbf9009b0c47bc70ff2f1a"
Oct 09 13:48:21 crc kubenswrapper[4762]: I1009 13:48:21.643834 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"778adf9f17dd022719c8e858cb57b87bacfb7d7375bbf9009b0c47bc70ff2f1a"} err="failed to get container status \"778adf9f17dd022719c8e858cb57b87bacfb7d7375bbf9009b0c47bc70ff2f1a\": rpc error: code = NotFound desc = could not find container \"778adf9f17dd022719c8e858cb57b87bacfb7d7375bbf9009b0c47bc70ff2f1a\": container with ID starting with 778adf9f17dd022719c8e858cb57b87bacfb7d7375bbf9009b0c47bc70ff2f1a not found: ID does not exist"
Oct 09 13:48:21 crc kubenswrapper[4762]: I1009 13:48:21.643858 4762 scope.go:117] "RemoveContainer" containerID="ddff5505cdaae7406498b36dc6a33cc805fd78d6db7091f2671121f93f3fc2fe"
Oct 09 13:48:21 crc kubenswrapper[4762]: E1009 13:48:21.644207 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ddff5505cdaae7406498b36dc6a33cc805fd78d6db7091f2671121f93f3fc2fe\": container with ID starting with ddff5505cdaae7406498b36dc6a33cc805fd78d6db7091f2671121f93f3fc2fe not found: ID does not exist" containerID="ddff5505cdaae7406498b36dc6a33cc805fd78d6db7091f2671121f93f3fc2fe"
Oct 09 13:48:21 crc kubenswrapper[4762]: I1009 13:48:21.644236 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ddff5505cdaae7406498b36dc6a33cc805fd78d6db7091f2671121f93f3fc2fe"} err="failed to get container status \"ddff5505cdaae7406498b36dc6a33cc805fd78d6db7091f2671121f93f3fc2fe\": rpc error: code = NotFound desc = could not find container \"ddff5505cdaae7406498b36dc6a33cc805fd78d6db7091f2671121f93f3fc2fe\": container with ID starting with ddff5505cdaae7406498b36dc6a33cc805fd78d6db7091f2671121f93f3fc2fe not found: ID does not exist"
Oct 09 13:48:21 crc kubenswrapper[4762]: I1009 13:48:21.757513 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f22f1383-fdcf-4018-8a3d-a28c32c0c816-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f22f1383-fdcf-4018-8a3d-a28c32c0c816\") " pod="openstack/ceilometer-0"
Oct 09 13:48:21 crc kubenswrapper[4762]: I1009 13:48:21.757609 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f22f1383-fdcf-4018-8a3d-a28c32c0c816-config-data\") pod \"ceilometer-0\" (UID: \"f22f1383-fdcf-4018-8a3d-a28c32c0c816\") " pod="openstack/ceilometer-0"
Oct 09 13:48:21 crc kubenswrapper[4762]: I1009 13:48:21.757651 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vlt92\" (UniqueName: \"kubernetes.io/projected/f22f1383-fdcf-4018-8a3d-a28c32c0c816-kube-api-access-vlt92\") pod \"ceilometer-0\" (UID: \"f22f1383-fdcf-4018-8a3d-a28c32c0c816\") " pod="openstack/ceilometer-0"
Oct 09 13:48:21 crc kubenswrapper[4762]: I1009 13:48:21.757707 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f22f1383-fdcf-4018-8a3d-a28c32c0c816-scripts\") pod \"ceilometer-0\" (UID: \"f22f1383-fdcf-4018-8a3d-a28c32c0c816\") " pod="openstack/ceilometer-0"
Oct 09 13:48:21 crc kubenswrapper[4762]: I1009 13:48:21.757731 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f22f1383-fdcf-4018-8a3d-a28c32c0c816-log-httpd\") pod \"ceilometer-0\" (UID: \"f22f1383-fdcf-4018-8a3d-a28c32c0c816\") " pod="openstack/ceilometer-0"
Oct 09 13:48:21 crc kubenswrapper[4762]: I1009 13:48:21.757763 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f22f1383-fdcf-4018-8a3d-a28c32c0c816-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f22f1383-fdcf-4018-8a3d-a28c32c0c816\") " pod="openstack/ceilometer-0"
Oct 09 13:48:21 crc kubenswrapper[4762]: I1009 13:48:21.757793 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f22f1383-fdcf-4018-8a3d-a28c32c0c816-run-httpd\") pod \"ceilometer-0\" (UID: \"f22f1383-fdcf-4018-8a3d-a28c32c0c816\") " pod="openstack/ceilometer-0"
Oct 09 13:48:21 crc kubenswrapper[4762]: I1009 13:48:21.859562 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f22f1383-fdcf-4018-8a3d-a28c32c0c816-scripts\") pod \"ceilometer-0\" (UID: \"f22f1383-fdcf-4018-8a3d-a28c32c0c816\") " pod="openstack/ceilometer-0"
Oct 09 13:48:21 crc kubenswrapper[4762]: I1009 13:48:21.859953 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f22f1383-fdcf-4018-8a3d-a28c32c0c816-log-httpd\") pod \"ceilometer-0\" (UID: \"f22f1383-fdcf-4018-8a3d-a28c32c0c816\") " pod="openstack/ceilometer-0"
Oct 09 13:48:21 crc kubenswrapper[4762]: I1009 13:48:21.859993 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f22f1383-fdcf-4018-8a3d-a28c32c0c816-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f22f1383-fdcf-4018-8a3d-a28c32c0c816\") " pod="openstack/ceilometer-0"
Oct 09 13:48:21 crc kubenswrapper[4762]: I1009 13:48:21.860030 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f22f1383-fdcf-4018-8a3d-a28c32c0c816-run-httpd\") pod \"ceilometer-0\" (UID: \"f22f1383-fdcf-4018-8a3d-a28c32c0c816\") " pod="openstack/ceilometer-0"
Oct 09 13:48:21 crc kubenswrapper[4762]: I1009 13:48:21.860082 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f22f1383-fdcf-4018-8a3d-a28c32c0c816-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f22f1383-fdcf-4018-8a3d-a28c32c0c816\") " pod="openstack/ceilometer-0"
Oct 09 13:48:21 crc kubenswrapper[4762]: I1009 13:48:21.860168 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f22f1383-fdcf-4018-8a3d-a28c32c0c816-config-data\") pod \"ceilometer-0\" (UID: \"f22f1383-fdcf-4018-8a3d-a28c32c0c816\") " pod="openstack/ceilometer-0"
Oct 09 13:48:21 crc kubenswrapper[4762]: I1009 13:48:21.860206 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vlt92\" (UniqueName: \"kubernetes.io/projected/f22f1383-fdcf-4018-8a3d-a28c32c0c816-kube-api-access-vlt92\") pod \"ceilometer-0\" (UID: \"f22f1383-fdcf-4018-8a3d-a28c32c0c816\") " pod="openstack/ceilometer-0"
Oct 09 13:48:21 crc kubenswrapper[4762]: I1009 13:48:21.860412 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f22f1383-fdcf-4018-8a3d-a28c32c0c816-log-httpd\") pod \"ceilometer-0\" (UID: \"f22f1383-fdcf-4018-8a3d-a28c32c0c816\") " pod="openstack/ceilometer-0"
Oct 09 13:48:21 crc kubenswrapper[4762]: I1009 13:48:21.860933 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f22f1383-fdcf-4018-8a3d-a28c32c0c816-run-httpd\") pod \"ceilometer-0\" (UID: \"f22f1383-fdcf-4018-8a3d-a28c32c0c816\") " pod="openstack/ceilometer-0"
Oct 09 13:48:21 crc kubenswrapper[4762]: I1009 13:48:21.864592 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f22f1383-fdcf-4018-8a3d-a28c32c0c816-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f22f1383-fdcf-4018-8a3d-a28c32c0c816\") " pod="openstack/ceilometer-0"
Oct 09 13:48:21 crc kubenswrapper[4762]: I1009 13:48:21.864606 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f22f1383-fdcf-4018-8a3d-a28c32c0c816-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f22f1383-fdcf-4018-8a3d-a28c32c0c816\") " pod="openstack/ceilometer-0"
Oct 09 13:48:21 crc kubenswrapper[4762]: I1009 13:48:21.866415 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f22f1383-fdcf-4018-8a3d-a28c32c0c816-scripts\") pod \"ceilometer-0\" (UID: \"f22f1383-fdcf-4018-8a3d-a28c32c0c816\") " pod="openstack/ceilometer-0"
Oct 09 13:48:21 crc kubenswrapper[4762]: I1009 13:48:21.872823 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f22f1383-fdcf-4018-8a3d-a28c32c0c816-config-data\") pod \"ceilometer-0\" (UID: \"f22f1383-fdcf-4018-8a3d-a28c32c0c816\") " pod="openstack/ceilometer-0"
Oct 09 13:48:21 crc kubenswrapper[4762]: I1009 13:48:21.877237 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vlt92\" (UniqueName: \"kubernetes.io/projected/f22f1383-fdcf-4018-8a3d-a28c32c0c816-kube-api-access-vlt92\") pod \"ceilometer-0\" (UID: \"f22f1383-fdcf-4018-8a3d-a28c32c0c816\") " pod="openstack/ceilometer-0"
Oct 09 13:48:21 crc kubenswrapper[4762]: I1009 13:48:21.926472 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Oct 09 13:48:22 crc kubenswrapper[4762]: W1009 13:48:22.371524 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf22f1383_fdcf_4018_8a3d_a28c32c0c816.slice/crio-f1d100458e3fd065ef92133ab63468e8bb6e93fb02a77b97a78f5df84682869b WatchSource:0}: Error finding container f1d100458e3fd065ef92133ab63468e8bb6e93fb02a77b97a78f5df84682869b: Status 404 returned error can't find the container with id f1d100458e3fd065ef92133ab63468e8bb6e93fb02a77b97a78f5df84682869b
Oct 09 13:48:22 crc kubenswrapper[4762]: I1009 13:48:22.375998 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Oct 09 13:48:22 crc kubenswrapper[4762]: I1009 13:48:22.527850 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f22f1383-fdcf-4018-8a3d-a28c32c0c816","Type":"ContainerStarted","Data":"f1d100458e3fd065ef92133ab63468e8bb6e93fb02a77b97a78f5df84682869b"}
Oct 09 13:48:22 crc kubenswrapper[4762]: I1009 13:48:22.978798 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="81246bf0-7924-4dab-9b12-34318201ee1b" path="/var/lib/kubelet/pods/81246bf0-7924-4dab-9b12-34318201ee1b/volumes"
Oct 09 13:48:23 crc kubenswrapper[4762]: I1009 13:48:23.540122 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f22f1383-fdcf-4018-8a3d-a28c32c0c816","Type":"ContainerStarted","Data":"ded23220fc58a5b74270cccae60e80e61f61a7352f87c7b55bac7f027c6fd843"}
Oct 09 13:48:24 crc kubenswrapper[4762]: I1009 13:48:24.552998 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f22f1383-fdcf-4018-8a3d-a28c32c0c816","Type":"ContainerStarted","Data":"e754d109af3252f9935f63e8a2ddadcdf2ca9788b6c2d4ad3eb33eef9084d86e"}
Oct 09 13:48:25 crc kubenswrapper[4762]: I1009 13:48:25.565129 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f22f1383-fdcf-4018-8a3d-a28c32c0c816","Type":"ContainerStarted","Data":"06ad52967d34b9ea32fc5f3c7c7e479bcb7cb8cc39f8342d66ee809550e9024b"}
Oct 09 13:48:26 crc kubenswrapper[4762]: I1009 13:48:26.581698 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f22f1383-fdcf-4018-8a3d-a28c32c0c816","Type":"ContainerStarted","Data":"cc703fa7263b50d7eb08c5c4f871a1a3194f30fcacedc96a95b33de13c8fcb82"}
Oct 09 13:48:26 crc kubenswrapper[4762]: I1009 13:48:26.582272 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0"
Oct 09 13:48:26 crc kubenswrapper[4762]: I1009 13:48:26.585844 4762 generic.go:334] "Generic (PLEG): container finished" podID="d8a82ea9-76b8-467c-970d-38cd752458bf" containerID="a0e83bcda39754cb99b6d3b5627fe4751f64cf85b83b8ef9245ca2828a780a9f" exitCode=0
Oct 09 13:48:26 crc kubenswrapper[4762]: I1009 13:48:26.585881 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-vtlrb" event={"ID":"d8a82ea9-76b8-467c-970d-38cd752458bf","Type":"ContainerDied","Data":"a0e83bcda39754cb99b6d3b5627fe4751f64cf85b83b8ef9245ca2828a780a9f"}
Oct 09 13:48:26 crc kubenswrapper[4762]: I1009 13:48:26.611322 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.90612238 podStartE2EDuration="5.611306813s" podCreationTimestamp="2025-10-09 13:48:21 +0000 UTC" firstStartedPulling="2025-10-09 13:48:22.374155022 +0000 UTC m=+1378.147946061" lastFinishedPulling="2025-10-09 13:48:26.079339445 +0000 UTC m=+1381.853130494" observedRunningTime="2025-10-09 13:48:26.602461363 +0000 UTC m=+1382.376252422" watchObservedRunningTime="2025-10-09 13:48:26.611306813 +0000 UTC m=+1382.385097852"
Oct 09 13:48:27 crc kubenswrapper[4762]: I1009 13:48:27.956429 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-vtlrb"
Oct 09 13:48:28 crc kubenswrapper[4762]: I1009 13:48:28.042869 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d8a82ea9-76b8-467c-970d-38cd752458bf-combined-ca-bundle\") pod \"d8a82ea9-76b8-467c-970d-38cd752458bf\" (UID: \"d8a82ea9-76b8-467c-970d-38cd752458bf\") "
Oct 09 13:48:28 crc kubenswrapper[4762]: I1009 13:48:28.043035 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d8a82ea9-76b8-467c-970d-38cd752458bf-scripts\") pod \"d8a82ea9-76b8-467c-970d-38cd752458bf\" (UID: \"d8a82ea9-76b8-467c-970d-38cd752458bf\") "
Oct 09 13:48:28 crc kubenswrapper[4762]: I1009 13:48:28.043109 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d8a82ea9-76b8-467c-970d-38cd752458bf-config-data\") pod \"d8a82ea9-76b8-467c-970d-38cd752458bf\" (UID: \"d8a82ea9-76b8-467c-970d-38cd752458bf\") "
Oct 09 13:48:28 crc kubenswrapper[4762]: I1009 13:48:28.043223 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8j74x\" (UniqueName: \"kubernetes.io/projected/d8a82ea9-76b8-467c-970d-38cd752458bf-kube-api-access-8j74x\") pod \"d8a82ea9-76b8-467c-970d-38cd752458bf\" (UID: \"d8a82ea9-76b8-467c-970d-38cd752458bf\") "
Oct 09 13:48:28 crc kubenswrapper[4762]: I1009 13:48:28.049579 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d8a82ea9-76b8-467c-970d-38cd752458bf-kube-api-access-8j74x" (OuterVolumeSpecName: "kube-api-access-8j74x") pod "d8a82ea9-76b8-467c-970d-38cd752458bf" (UID: "d8a82ea9-76b8-467c-970d-38cd752458bf"). InnerVolumeSpecName "kube-api-access-8j74x". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 09 13:48:28 crc kubenswrapper[4762]: I1009 13:48:28.049656 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d8a82ea9-76b8-467c-970d-38cd752458bf-scripts" (OuterVolumeSpecName: "scripts") pod "d8a82ea9-76b8-467c-970d-38cd752458bf" (UID: "d8a82ea9-76b8-467c-970d-38cd752458bf"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 09 13:48:28 crc kubenswrapper[4762]: I1009 13:48:28.070422 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d8a82ea9-76b8-467c-970d-38cd752458bf-config-data" (OuterVolumeSpecName: "config-data") pod "d8a82ea9-76b8-467c-970d-38cd752458bf" (UID: "d8a82ea9-76b8-467c-970d-38cd752458bf"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 09 13:48:28 crc kubenswrapper[4762]: I1009 13:48:28.071702 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d8a82ea9-76b8-467c-970d-38cd752458bf-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d8a82ea9-76b8-467c-970d-38cd752458bf" (UID: "d8a82ea9-76b8-467c-970d-38cd752458bf"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 09 13:48:28 crc kubenswrapper[4762]: I1009 13:48:28.145958 4762 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d8a82ea9-76b8-467c-970d-38cd752458bf-scripts\") on node \"crc\" DevicePath \"\""
Oct 09 13:48:28 crc kubenswrapper[4762]: I1009 13:48:28.146005 4762 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d8a82ea9-76b8-467c-970d-38cd752458bf-config-data\") on node \"crc\" DevicePath \"\""
Oct 09 13:48:28 crc kubenswrapper[4762]: I1009 13:48:28.146022 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8j74x\" (UniqueName: \"kubernetes.io/projected/d8a82ea9-76b8-467c-970d-38cd752458bf-kube-api-access-8j74x\") on node \"crc\" DevicePath \"\""
Oct 09 13:48:28 crc kubenswrapper[4762]: I1009 13:48:28.146037 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d8a82ea9-76b8-467c-970d-38cd752458bf-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 09 13:48:28 crc kubenswrapper[4762]: I1009 13:48:28.603902 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-vtlrb" event={"ID":"d8a82ea9-76b8-467c-970d-38cd752458bf","Type":"ContainerDied","Data":"fa0149ed3b4f5939f13eedeee429b12e381f16d7d9c78add3c295bda63002dba"}
Oct 09 13:48:28 crc kubenswrapper[4762]: I1009 13:48:28.604753 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fa0149ed3b4f5939f13eedeee429b12e381f16d7d9c78add3c295bda63002dba"
Oct 09 13:48:28 crc kubenswrapper[4762]: I1009 13:48:28.603939 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-vtlrb"
Oct 09 13:48:28 crc kubenswrapper[4762]: I1009 13:48:28.726189 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"]
Oct 09 13:48:28 crc kubenswrapper[4762]: E1009 13:48:28.726655 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d8a82ea9-76b8-467c-970d-38cd752458bf" containerName="nova-cell0-conductor-db-sync"
Oct 09 13:48:28 crc kubenswrapper[4762]: I1009 13:48:28.726678 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="d8a82ea9-76b8-467c-970d-38cd752458bf" containerName="nova-cell0-conductor-db-sync"
Oct 09 13:48:28 crc kubenswrapper[4762]: I1009 13:48:28.726878 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="d8a82ea9-76b8-467c-970d-38cd752458bf" containerName="nova-cell0-conductor-db-sync"
Oct 09 13:48:28 crc kubenswrapper[4762]: I1009 13:48:28.727683 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0"
Oct 09 13:48:28 crc kubenswrapper[4762]: I1009 13:48:28.729853 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data"
Oct 09 13:48:28 crc kubenswrapper[4762]: I1009 13:48:28.739265 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-wkv9z"
Oct 09 13:48:28 crc kubenswrapper[4762]: I1009 13:48:28.744118 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"]
Oct 09 13:48:28 crc kubenswrapper[4762]: I1009 13:48:28.862449 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h4k7c\" (UniqueName: \"kubernetes.io/projected/b6e56749-167c-4542-b79f-a374a2f7ef20-kube-api-access-h4k7c\") pod \"nova-cell0-conductor-0\" (UID: \"b6e56749-167c-4542-b79f-a374a2f7ef20\") " pod="openstack/nova-cell0-conductor-0"
Oct 09 13:48:28 crc kubenswrapper[4762]: I1009 13:48:28.862489 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b6e56749-167c-4542-b79f-a374a2f7ef20-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"b6e56749-167c-4542-b79f-a374a2f7ef20\") " pod="openstack/nova-cell0-conductor-0"
Oct 09 13:48:28 crc kubenswrapper[4762]: I1009 13:48:28.862581 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b6e56749-167c-4542-b79f-a374a2f7ef20-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"b6e56749-167c-4542-b79f-a374a2f7ef20\") " pod="openstack/nova-cell0-conductor-0"
Oct 09 13:48:28 crc kubenswrapper[4762]: I1009 13:48:28.964418 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b6e56749-167c-4542-b79f-a374a2f7ef20-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"b6e56749-167c-4542-b79f-a374a2f7ef20\") " pod="openstack/nova-cell0-conductor-0"
Oct 09 13:48:28 crc kubenswrapper[4762]: I1009 13:48:28.964547 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h4k7c\" (UniqueName: \"kubernetes.io/projected/b6e56749-167c-4542-b79f-a374a2f7ef20-kube-api-access-h4k7c\") pod \"nova-cell0-conductor-0\" (UID: \"b6e56749-167c-4542-b79f-a374a2f7ef20\") " pod="openstack/nova-cell0-conductor-0"
Oct 09 13:48:28 crc kubenswrapper[4762]: I1009 13:48:28.964571 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b6e56749-167c-4542-b79f-a374a2f7ef20-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"b6e56749-167c-4542-b79f-a374a2f7ef20\") " pod="openstack/nova-cell0-conductor-0"
Oct 09 13:48:28 crc kubenswrapper[4762]: I1009 13:48:28.976512 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b6e56749-167c-4542-b79f-a374a2f7ef20-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"b6e56749-167c-4542-b79f-a374a2f7ef20\") " pod="openstack/nova-cell0-conductor-0"
Oct 09 13:48:28 crc kubenswrapper[4762]: I1009 13:48:28.976566 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b6e56749-167c-4542-b79f-a374a2f7ef20-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"b6e56749-167c-4542-b79f-a374a2f7ef20\") " pod="openstack/nova-cell0-conductor-0"
Oct 09 13:48:28 crc kubenswrapper[4762]: I1009 13:48:28.981201 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h4k7c\" (UniqueName: \"kubernetes.io/projected/b6e56749-167c-4542-b79f-a374a2f7ef20-kube-api-access-h4k7c\") pod \"nova-cell0-conductor-0\" (UID: \"b6e56749-167c-4542-b79f-a374a2f7ef20\") " pod="openstack/nova-cell0-conductor-0"
Oct 09 13:48:29 crc kubenswrapper[4762]: I1009 13:48:29.051828 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0"
Oct 09 13:48:29 crc kubenswrapper[4762]: I1009 13:48:29.462414 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"]
Oct 09 13:48:29 crc kubenswrapper[4762]: I1009 13:48:29.613127 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"b6e56749-167c-4542-b79f-a374a2f7ef20","Type":"ContainerStarted","Data":"06523f730bc831b54be0e92c7cb67cb3a3928c817bb3ad98e48e86c42f3addbe"}
Oct 09 13:48:30 crc kubenswrapper[4762]: I1009 13:48:30.636842 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"b6e56749-167c-4542-b79f-a374a2f7ef20","Type":"ContainerStarted","Data":"82d9cb1e06b181f7531ea03f76bb4438570b9f73b7cd80ea245f725456672933"}
Oct 09 13:48:30 crc kubenswrapper[4762]: I1009 13:48:30.637091 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0"
Oct 09 13:48:30 crc kubenswrapper[4762]: I1009 13:48:30.656548 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.656531031 podStartE2EDuration="2.656531031s" podCreationTimestamp="2025-10-09 13:48:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:48:30.652017783 +0000 UTC m=+1386.425808822" watchObservedRunningTime="2025-10-09 13:48:30.656531031 +0000 UTC m=+1386.430322070"
Oct 09 13:48:34 crc kubenswrapper[4762]: I1009 13:48:34.079010 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0"
Oct 09 13:48:34 crc kubenswrapper[4762]: I1009 13:48:34.526539 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-fbmj6"]
Oct 09 13:48:34 crc kubenswrapper[4762]: I1009 13:48:34.528061 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-fbmj6"
Oct 09 13:48:34 crc kubenswrapper[4762]: I1009 13:48:34.530237 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts"
Oct 09 13:48:34 crc kubenswrapper[4762]: I1009 13:48:34.530266 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data"
Oct 09 13:48:34 crc kubenswrapper[4762]: I1009 13:48:34.538787 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-fbmj6"]
Oct 09 13:48:34 crc kubenswrapper[4762]: I1009 13:48:34.675041 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wkcxd\" (UniqueName: \"kubernetes.io/projected/21e8bb8c-2024-4cc3-8887-17e4b8794601-kube-api-access-wkcxd\") pod \"nova-cell0-cell-mapping-fbmj6\" (UID: \"21e8bb8c-2024-4cc3-8887-17e4b8794601\") " pod="openstack/nova-cell0-cell-mapping-fbmj6"
Oct 09 13:48:34 crc kubenswrapper[4762]: I1009 13:48:34.675391 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/21e8bb8c-2024-4cc3-8887-17e4b8794601-config-data\") pod \"nova-cell0-cell-mapping-fbmj6\" (UID: \"21e8bb8c-2024-4cc3-8887-17e4b8794601\") " pod="openstack/nova-cell0-cell-mapping-fbmj6"
Oct 09 13:48:34 crc kubenswrapper[4762]: I1009 13:48:34.675437 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21e8bb8c-2024-4cc3-8887-17e4b8794601-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-fbmj6\" (UID: \"21e8bb8c-2024-4cc3-8887-17e4b8794601\") " pod="openstack/nova-cell0-cell-mapping-fbmj6"
Oct 09 13:48:34 crc kubenswrapper[4762]: I1009 13:48:34.675840 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/21e8bb8c-2024-4cc3-8887-17e4b8794601-scripts\") pod \"nova-cell0-cell-mapping-fbmj6\" (UID: \"21e8bb8c-2024-4cc3-8887-17e4b8794601\") " pod="openstack/nova-cell0-cell-mapping-fbmj6"
Oct 09 13:48:34 crc kubenswrapper[4762]: I1009 13:48:34.676588 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"]
Oct 09 13:48:34 crc kubenswrapper[4762]: I1009 13:48:34.678003 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Oct 09 13:48:34 crc kubenswrapper[4762]: I1009 13:48:34.680936 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data"
Oct 09 13:48:34 crc kubenswrapper[4762]: I1009 13:48:34.702047 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Oct 09 13:48:34 crc kubenswrapper[4762]: I1009 13:48:34.733182 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Oct 09 13:48:34 crc kubenswrapper[4762]: I1009 13:48:34.734473 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
Oct 09 13:48:34 crc kubenswrapper[4762]: I1009 13:48:34.744250 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data"
Oct 09 13:48:34 crc kubenswrapper[4762]: I1009 13:48:34.777471 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/21e8bb8c-2024-4cc3-8887-17e4b8794601-scripts\") pod \"nova-cell0-cell-mapping-fbmj6\" (UID: \"21e8bb8c-2024-4cc3-8887-17e4b8794601\") " pod="openstack/nova-cell0-cell-mapping-fbmj6"
Oct 09 13:48:34 crc kubenswrapper[4762]: I1009 13:48:34.777561 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8ngm4\" (UniqueName: \"kubernetes.io/projected/b2efe15b-237f-4bb4-a563-a2a488cf3ddb-kube-api-access-8ngm4\") pod \"nova-api-0\" (UID: \"b2efe15b-237f-4bb4-a563-a2a488cf3ddb\") " pod="openstack/nova-api-0"
Oct 09 13:48:34 crc kubenswrapper[4762]: I1009 13:48:34.777618 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wkcxd\" (UniqueName: \"kubernetes.io/projected/21e8bb8c-2024-4cc3-8887-17e4b8794601-kube-api-access-wkcxd\") pod \"nova-cell0-cell-mapping-fbmj6\" (UID: \"21e8bb8c-2024-4cc3-8887-17e4b8794601\") " pod="openstack/nova-cell0-cell-mapping-fbmj6"
Oct 09 13:48:34 crc kubenswrapper[4762]: I1009 13:48:34.777679 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/21e8bb8c-2024-4cc3-8887-17e4b8794601-config-data\") pod \"nova-cell0-cell-mapping-fbmj6\" (UID: \"21e8bb8c-2024-4cc3-8887-17e4b8794601\") " pod="openstack/nova-cell0-cell-mapping-fbmj6"
Oct 09 13:48:34 crc kubenswrapper[4762]: I1009 13:48:34.777729 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21e8bb8c-2024-4cc3-8887-17e4b8794601-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-fbmj6\" (UID: \"21e8bb8c-2024-4cc3-8887-17e4b8794601\") " pod="openstack/nova-cell0-cell-mapping-fbmj6"
Oct 09 13:48:34 crc kubenswrapper[4762]: I1009 13:48:34.777791 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b2efe15b-237f-4bb4-a563-a2a488cf3ddb-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"b2efe15b-237f-4bb4-a563-a2a488cf3ddb\") " pod="openstack/nova-api-0"
Oct 09 13:48:34 crc kubenswrapper[4762]: I1009 13:48:34.777852 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b2efe15b-237f-4bb4-a563-a2a488cf3ddb-logs\") pod \"nova-api-0\" (UID: \"b2efe15b-237f-4bb4-a563-a2a488cf3ddb\") " pod="openstack/nova-api-0"
Oct 09 13:48:34 crc kubenswrapper[4762]: I1009 13:48:34.777933 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b2efe15b-237f-4bb4-a563-a2a488cf3ddb-config-data\") pod \"nova-api-0\" (UID: \"b2efe15b-237f-4bb4-a563-a2a488cf3ddb\") " pod="openstack/nova-api-0"
Oct 09 13:48:34 crc kubenswrapper[4762]: I1009 13:48:34.791891 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Oct 09 13:48:34 crc kubenswrapper[4762]: I1009 13:48:34.794623 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21e8bb8c-2024-4cc3-8887-17e4b8794601-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-fbmj6\" (UID: \"21e8bb8c-2024-4cc3-8887-17e4b8794601\") " pod="openstack/nova-cell0-cell-mapping-fbmj6"
Oct 09 13:48:34 crc kubenswrapper[4762]: I1009 13:48:34.796345 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/21e8bb8c-2024-4cc3-8887-17e4b8794601-config-data\") pod \"nova-cell0-cell-mapping-fbmj6\" (UID: \"21e8bb8c-2024-4cc3-8887-17e4b8794601\") " pod="openstack/nova-cell0-cell-mapping-fbmj6"
Oct 09 13:48:34 crc kubenswrapper[4762]: I1009 13:48:34.806988 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/21e8bb8c-2024-4cc3-8887-17e4b8794601-scripts\") pod \"nova-cell0-cell-mapping-fbmj6\" (UID: \"21e8bb8c-2024-4cc3-8887-17e4b8794601\") " pod="openstack/nova-cell0-cell-mapping-fbmj6"
Oct 09 13:48:34 crc kubenswrapper[4762]: I1009 13:48:34.831451 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wkcxd\" (UniqueName: \"kubernetes.io/projected/21e8bb8c-2024-4cc3-8887-17e4b8794601-kube-api-access-wkcxd\") pod \"nova-cell0-cell-mapping-fbmj6\" (UID: \"21e8bb8c-2024-4cc3-8887-17e4b8794601\") " pod="openstack/nova-cell0-cell-mapping-fbmj6"
Oct 09 13:48:34 crc kubenswrapper[4762]: I1009 13:48:34.835983 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"]
Oct 09 13:48:34 crc kubenswrapper[4762]: I1009 13:48:34.839576 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Oct 09 13:48:34 crc kubenswrapper[4762]: I1009 13:48:34.852313 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data"
Oct 09 13:48:34 crc kubenswrapper[4762]: I1009 13:48:34.856934 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Oct 09 13:48:34 crc kubenswrapper[4762]: I1009 13:48:34.865565 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-fbmj6"
Oct 09 13:48:34 crc kubenswrapper[4762]: I1009 13:48:34.879543 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8ngm4\" (UniqueName: \"kubernetes.io/projected/b2efe15b-237f-4bb4-a563-a2a488cf3ddb-kube-api-access-8ngm4\") pod \"nova-api-0\" (UID: \"b2efe15b-237f-4bb4-a563-a2a488cf3ddb\") " pod="openstack/nova-api-0"
Oct 09 13:48:34 crc kubenswrapper[4762]: I1009 13:48:34.879880 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ssdjb\" (UniqueName: \"kubernetes.io/projected/f8ac6cff-2144-4d02-964f-8b7d866f0520-kube-api-access-ssdjb\") pod \"nova-cell1-novncproxy-0\" (UID: \"f8ac6cff-2144-4d02-964f-8b7d866f0520\") " pod="openstack/nova-cell1-novncproxy-0"
Oct 09 13:48:34 crc kubenswrapper[4762]: I1009 13:48:34.880194 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b2efe15b-237f-4bb4-a563-a2a488cf3ddb-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"b2efe15b-237f-4bb4-a563-a2a488cf3ddb\") " pod="openstack/nova-api-0"
Oct 09 13:48:34 crc kubenswrapper[4762]: I1009 13:48:34.880385 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b2efe15b-237f-4bb4-a563-a2a488cf3ddb-logs\") pod \"nova-api-0\" (UID: \"b2efe15b-237f-4bb4-a563-a2a488cf3ddb\") " pod="openstack/nova-api-0"
Oct 09 13:48:34 crc kubenswrapper[4762]: I1009 13:48:34.880500 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f8ac6cff-2144-4d02-964f-8b7d866f0520-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"f8ac6cff-2144-4d02-964f-8b7d866f0520\") " pod="openstack/nova-cell1-novncproxy-0"
Oct 09 13:48:34 crc kubenswrapper[4762]: I1009 13:48:34.882859 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f8ac6cff-2144-4d02-964f-8b7d866f0520-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"f8ac6cff-2144-4d02-964f-8b7d866f0520\") " pod="openstack/nova-cell1-novncproxy-0"
Oct 09 13:48:34 crc kubenswrapper[4762]: I1009 13:48:34.882969 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b2efe15b-237f-4bb4-a563-a2a488cf3ddb-config-data\") pod \"nova-api-0\" (UID: \"b2efe15b-237f-4bb4-a563-a2a488cf3ddb\") " pod="openstack/nova-api-0"
Oct 09 13:48:34 crc kubenswrapper[4762]: I1009 13:48:34.881147 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b2efe15b-237f-4bb4-a563-a2a488cf3ddb-logs\") pod \"nova-api-0\" (UID: \"b2efe15b-237f-4bb4-a563-a2a488cf3ddb\") " pod="openstack/nova-api-0"
Oct 09 13:48:34 crc kubenswrapper[4762]: I1009 13:48:34.897805 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b2efe15b-237f-4bb4-a563-a2a488cf3ddb-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"b2efe15b-237f-4bb4-a563-a2a488cf3ddb\") " pod="openstack/nova-api-0"
Oct 09 13:48:34 crc kubenswrapper[4762]: I1009 13:48:34.901333 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b2efe15b-237f-4bb4-a563-a2a488cf3ddb-config-data\") pod \"nova-api-0\" (UID: \"b2efe15b-237f-4bb4-a563-a2a488cf3ddb\") " pod="openstack/nova-api-0"
Oct 09 13:48:34 crc kubenswrapper[4762]: I1009 13:48:34.940455 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-865f5d856f-m7gs2"]
Oct 09 13:48:34 crc kubenswrapper[4762]: I1009 13:48:34.942086 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-865f5d856f-m7gs2"
Oct 09 13:48:34 crc kubenswrapper[4762]: I1009 13:48:34.942384 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8ngm4\" (UniqueName: \"kubernetes.io/projected/b2efe15b-237f-4bb4-a563-a2a488cf3ddb-kube-api-access-8ngm4\") pod \"nova-api-0\" (UID: \"b2efe15b-237f-4bb4-a563-a2a488cf3ddb\") " pod="openstack/nova-api-0"
Oct 09 13:48:35 crc kubenswrapper[4762]: I1009 13:48:34.992986 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2d928647-7b87-4be1-9b66-5b146f24f93a-config\") pod \"dnsmasq-dns-865f5d856f-m7gs2\" (UID: \"2d928647-7b87-4be1-9b66-5b146f24f93a\") " pod="openstack/dnsmasq-dns-865f5d856f-m7gs2"
Oct 09 13:48:35 crc kubenswrapper[4762]: I1009 13:48:34.993104 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c4ls7\" (UniqueName: \"kubernetes.io/projected/2d928647-7b87-4be1-9b66-5b146f24f93a-kube-api-access-c4ls7\") pod \"dnsmasq-dns-865f5d856f-m7gs2\" (UID: \"2d928647-7b87-4be1-9b66-5b146f24f93a\") " pod="openstack/dnsmasq-dns-865f5d856f-m7gs2"
Oct 09 13:48:35 crc kubenswrapper[4762]: I1009 13:48:34.993207 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2d928647-7b87-4be1-9b66-5b146f24f93a-dns-swift-storage-0\") pod \"dnsmasq-dns-865f5d856f-m7gs2\" (UID: \"2d928647-7b87-4be1-9b66-5b146f24f93a\") " pod="openstack/dnsmasq-dns-865f5d856f-m7gs2"
Oct 09 13:48:35 crc kubenswrapper[4762]: I1009 13:48:34.993249 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2d928647-7b87-4be1-9b66-5b146f24f93a-ovsdbserver-sb\") pod \"dnsmasq-dns-865f5d856f-m7gs2\" (UID: \"2d928647-7b87-4be1-9b66-5b146f24f93a\") " pod="openstack/dnsmasq-dns-865f5d856f-m7gs2"
Oct 09 13:48:35 crc kubenswrapper[4762]: I1009 13:48:34.993282 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2d928647-7b87-4be1-9b66-5b146f24f93a-dns-svc\") pod \"dnsmasq-dns-865f5d856f-m7gs2\" (UID: \"2d928647-7b87-4be1-9b66-5b146f24f93a\") " pod="openstack/dnsmasq-dns-865f5d856f-m7gs2"
Oct 09 13:48:35 crc kubenswrapper[4762]: I1009 13:48:34.993307 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e49941c7-e414-42cb-8616-c836bd164ef8-config-data\") pod \"nova-metadata-0\" (UID: \"e49941c7-e414-42cb-8616-c836bd164ef8\") " pod="openstack/nova-metadata-0"
Oct 09 13:48:35 crc kubenswrapper[4762]: I1009 13:48:34.993390 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f8ac6cff-2144-4d02-964f-8b7d866f0520-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"f8ac6cff-2144-4d02-964f-8b7d866f0520\") " pod="openstack/nova-cell1-novncproxy-0"
Oct 09 13:48:35 crc kubenswrapper[4762]: I1009 13:48:34.993461 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2d928647-7b87-4be1-9b66-5b146f24f93a-ovsdbserver-nb\") pod \"dnsmasq-dns-865f5d856f-m7gs2\" (UID: \"2d928647-7b87-4be1-9b66-5b146f24f93a\") " pod="openstack/dnsmasq-dns-865f5d856f-m7gs2"
Oct 09 13:48:35 crc kubenswrapper[4762]: I1009 13:48:34.993484 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e49941c7-e414-42cb-8616-c836bd164ef8-logs\") pod \"nova-metadata-0\" (UID: \"e49941c7-e414-42cb-8616-c836bd164ef8\") " pod="openstack/nova-metadata-0"
Oct 09 13:48:35 crc kubenswrapper[4762]: I1009 13:48:34.993508 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e49941c7-e414-42cb-8616-c836bd164ef8-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"e49941c7-e414-42cb-8616-c836bd164ef8\") " pod="openstack/nova-metadata-0"
Oct 09 13:48:35 crc kubenswrapper[4762]: I1009 13:48:34.993556 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f8ac6cff-2144-4d02-964f-8b7d866f0520-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"f8ac6cff-2144-4d02-964f-8b7d866f0520\") " pod="openstack/nova-cell1-novncproxy-0"
Oct 09 13:48:35 crc kubenswrapper[4762]: I1009 13:48:34.993707 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8cvkt\" (UniqueName: \"kubernetes.io/projected/e49941c7-e414-42cb-8616-c836bd164ef8-kube-api-access-8cvkt\") pod \"nova-metadata-0\" (UID: \"e49941c7-e414-42cb-8616-c836bd164ef8\") " pod="openstack/nova-metadata-0"
Oct 09 13:48:35 crc kubenswrapper[4762]: I1009 13:48:34.993797 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ssdjb\" (UniqueName: \"kubernetes.io/projected/f8ac6cff-2144-4d02-964f-8b7d866f0520-kube-api-access-ssdjb\") pod \"nova-cell1-novncproxy-0\" (UID: \"f8ac6cff-2144-4d02-964f-8b7d866f0520\") " pod="openstack/nova-cell1-novncproxy-0"
Oct 09 13:48:35 crc kubenswrapper[4762]: I1009 13:48:35.000317 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f8ac6cff-2144-4d02-964f-8b7d866f0520-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"f8ac6cff-2144-4d02-964f-8b7d866f0520\") " pod="openstack/nova-cell1-novncproxy-0"
Oct 09 13:48:35 crc kubenswrapper[4762]: I1009 13:48:35.000604 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Oct 09 13:48:35 crc kubenswrapper[4762]: I1009 13:48:35.023306 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f8ac6cff-2144-4d02-964f-8b7d866f0520-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"f8ac6cff-2144-4d02-964f-8b7d866f0520\") " pod="openstack/nova-cell1-novncproxy-0"
Oct 09 13:48:35 crc kubenswrapper[4762]: I1009 13:48:35.030669 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ssdjb\" (UniqueName: \"kubernetes.io/projected/f8ac6cff-2144-4d02-964f-8b7d866f0520-kube-api-access-ssdjb\") pod \"nova-cell1-novncproxy-0\" (UID: \"f8ac6cff-2144-4d02-964f-8b7d866f0520\") " pod="openstack/nova-cell1-novncproxy-0"
Oct 09 13:48:35 crc kubenswrapper[4762]: I1009 13:48:35.047700 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-865f5d856f-m7gs2"]
Oct 09 13:48:35 crc kubenswrapper[4762]: I1009 13:48:35.056315 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
Oct 09 13:48:35 crc kubenswrapper[4762]: I1009 13:48:35.058180 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"]
Oct 09 13:48:35 crc kubenswrapper[4762]: I1009 13:48:35.059704 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Oct 09 13:48:35 crc kubenswrapper[4762]: I1009 13:48:35.067742 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data"
Oct 09 13:48:35 crc kubenswrapper[4762]: I1009 13:48:35.093080 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Oct 09 13:48:35 crc kubenswrapper[4762]: I1009 13:48:35.095164 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2d928647-7b87-4be1-9b66-5b146f24f93a-ovsdbserver-nb\") pod \"dnsmasq-dns-865f5d856f-m7gs2\" (UID: \"2d928647-7b87-4be1-9b66-5b146f24f93a\") " pod="openstack/dnsmasq-dns-865f5d856f-m7gs2"
Oct 09 13:48:35 crc kubenswrapper[4762]: I1009 13:48:35.095192 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e49941c7-e414-42cb-8616-c836bd164ef8-logs\") pod \"nova-metadata-0\" (UID: \"e49941c7-e414-42cb-8616-c836bd164ef8\") " pod="openstack/nova-metadata-0"
Oct 09 13:48:35 crc kubenswrapper[4762]: I1009 13:48:35.095212 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e49941c7-e414-42cb-8616-c836bd164ef8-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"e49941c7-e414-42cb-8616-c836bd164ef8\") " pod="openstack/nova-metadata-0"
Oct 09 13:48:35 crc kubenswrapper[4762]: I1009 13:48:35.095277 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8cvkt\" (UniqueName: \"kubernetes.io/projected/e49941c7-e414-42cb-8616-c836bd164ef8-kube-api-access-8cvkt\") pod \"nova-metadata-0\" (UID: \"e49941c7-e414-42cb-8616-c836bd164ef8\") " pod="openstack/nova-metadata-0"
Oct 09 13:48:35 crc kubenswrapper[4762]: I1009 13:48:35.095330 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2d928647-7b87-4be1-9b66-5b146f24f93a-config\") pod \"dnsmasq-dns-865f5d856f-m7gs2\" (UID: \"2d928647-7b87-4be1-9b66-5b146f24f93a\") " pod="openstack/dnsmasq-dns-865f5d856f-m7gs2"
Oct 09 13:48:35 crc kubenswrapper[4762]: I1009 13:48:35.095363 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c4ls7\" (UniqueName: \"kubernetes.io/projected/2d928647-7b87-4be1-9b66-5b146f24f93a-kube-api-access-c4ls7\") pod \"dnsmasq-dns-865f5d856f-m7gs2\" (UID: \"2d928647-7b87-4be1-9b66-5b146f24f93a\") " pod="openstack/dnsmasq-dns-865f5d856f-m7gs2"
Oct 09 13:48:35 crc kubenswrapper[4762]: I1009 13:48:35.095396 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2d928647-7b87-4be1-9b66-5b146f24f93a-dns-swift-storage-0\") pod \"dnsmasq-dns-865f5d856f-m7gs2\" (UID: \"2d928647-7b87-4be1-9b66-5b146f24f93a\") " pod="openstack/dnsmasq-dns-865f5d856f-m7gs2"
Oct 09 13:48:35 crc kubenswrapper[4762]: I1009 13:48:35.095415 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2d928647-7b87-4be1-9b66-5b146f24f93a-ovsdbserver-sb\") pod \"dnsmasq-dns-865f5d856f-m7gs2\" (UID: \"2d928647-7b87-4be1-9b66-5b146f24f93a\") " pod="openstack/dnsmasq-dns-865f5d856f-m7gs2"
Oct 09 13:48:35 crc kubenswrapper[4762]: I1009 13:48:35.095431 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2d928647-7b87-4be1-9b66-5b146f24f93a-dns-svc\") pod \"dnsmasq-dns-865f5d856f-m7gs2\" (UID: \"2d928647-7b87-4be1-9b66-5b146f24f93a\") " pod="openstack/dnsmasq-dns-865f5d856f-m7gs2"
Oct 09 13:48:35 crc kubenswrapper[4762]: I1009 13:48:35.095446 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e49941c7-e414-42cb-8616-c836bd164ef8-config-data\") pod \"nova-metadata-0\" (UID: \"e49941c7-e414-42cb-8616-c836bd164ef8\") " pod="openstack/nova-metadata-0"
Oct 09 13:48:35 crc kubenswrapper[4762]: I1009 13:48:35.098219 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e49941c7-e414-42cb-8616-c836bd164ef8-logs\") pod \"nova-metadata-0\" (UID: \"e49941c7-e414-42cb-8616-c836bd164ef8\") " pod="openstack/nova-metadata-0"
Oct 09 13:48:35 crc kubenswrapper[4762]: I1009 13:48:35.098826 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2d928647-7b87-4be1-9b66-5b146f24f93a-dns-swift-storage-0\") pod \"dnsmasq-dns-865f5d856f-m7gs2\" (UID: \"2d928647-7b87-4be1-9b66-5b146f24f93a\") " pod="openstack/dnsmasq-dns-865f5d856f-m7gs2"
Oct 09 13:48:35 crc kubenswrapper[4762]: I1009 13:48:35.098844 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e49941c7-e414-42cb-8616-c836bd164ef8-config-data\") pod \"nova-metadata-0\" (UID: \"e49941c7-e414-42cb-8616-c836bd164ef8\") " pod="openstack/nova-metadata-0"
Oct 09 13:48:35 crc kubenswrapper[4762]: I1009 13:48:35.101046 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2d928647-7b87-4be1-9b66-5b146f24f93a-config\") pod \"dnsmasq-dns-865f5d856f-m7gs2\" (UID: \"2d928647-7b87-4be1-9b66-5b146f24f93a\") " pod="openstack/dnsmasq-dns-865f5d856f-m7gs2"
Oct 09 13:48:35 crc kubenswrapper[4762]: I1009 13:48:35.103111 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e49941c7-e414-42cb-8616-c836bd164ef8-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"e49941c7-e414-42cb-8616-c836bd164ef8\") " pod="openstack/nova-metadata-0"
Oct 09 13:48:35 crc kubenswrapper[4762]: I1009 13:48:35.103999 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2d928647-7b87-4be1-9b66-5b146f24f93a-ovsdbserver-nb\") pod \"dnsmasq-dns-865f5d856f-m7gs2\" (UID: \"2d928647-7b87-4be1-9b66-5b146f24f93a\") " pod="openstack/dnsmasq-dns-865f5d856f-m7gs2"
Oct 09 13:48:35 crc kubenswrapper[4762]: I1009 13:48:35.114125 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2d928647-7b87-4be1-9b66-5b146f24f93a-dns-svc\") pod \"dnsmasq-dns-865f5d856f-m7gs2\" (UID: \"2d928647-7b87-4be1-9b66-5b146f24f93a\") " pod="openstack/dnsmasq-dns-865f5d856f-m7gs2"
Oct 09 13:48:35 crc kubenswrapper[4762]: I1009 13:48:35.117435 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2d928647-7b87-4be1-9b66-5b146f24f93a-ovsdbserver-sb\") pod \"dnsmasq-dns-865f5d856f-m7gs2\" (UID: \"2d928647-7b87-4be1-9b66-5b146f24f93a\") " pod="openstack/dnsmasq-dns-865f5d856f-m7gs2"
Oct 09 13:48:35 crc kubenswrapper[4762]: I1009 13:48:35.121827 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c4ls7\" (UniqueName: \"kubernetes.io/projected/2d928647-7b87-4be1-9b66-5b146f24f93a-kube-api-access-c4ls7\") pod \"dnsmasq-dns-865f5d856f-m7gs2\" (UID: \"2d928647-7b87-4be1-9b66-5b146f24f93a\") " pod="openstack/dnsmasq-dns-865f5d856f-m7gs2"
Oct 09 13:48:35 crc kubenswrapper[4762]: I1009 13:48:35.122053 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8cvkt\" (UniqueName: \"kubernetes.io/projected/e49941c7-e414-42cb-8616-c836bd164ef8-kube-api-access-8cvkt\") pod \"nova-metadata-0\" (UID: \"e49941c7-e414-42cb-8616-c836bd164ef8\") " pod="openstack/nova-metadata-0"
Oct 09 13:48:35 crc kubenswrapper[4762]: I1009 13:48:35.197083 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9984c147-5425-4b69-ace5-52d351f46933-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"9984c147-5425-4b69-ace5-52d351f46933\") " pod="openstack/nova-scheduler-0"
Oct 09 13:48:35 crc kubenswrapper[4762]: I1009 13:48:35.197127 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9984c147-5425-4b69-ace5-52d351f46933-config-data\") pod \"nova-scheduler-0\" (UID: \"9984c147-5425-4b69-ace5-52d351f46933\") " pod="openstack/nova-scheduler-0"
Oct 09 13:48:35 crc kubenswrapper[4762]: I1009 13:48:35.197157 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wwzhf\" (UniqueName: \"kubernetes.io/projected/9984c147-5425-4b69-ace5-52d351f46933-kube-api-access-wwzhf\") pod \"nova-scheduler-0\" (UID: \"9984c147-5425-4b69-ace5-52d351f46933\") " pod="openstack/nova-scheduler-0"
Oct 09 13:48:35 crc kubenswrapper[4762]: I1009 13:48:35.298774 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9984c147-5425-4b69-ace5-52d351f46933-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"9984c147-5425-4b69-ace5-52d351f46933\") " pod="openstack/nova-scheduler-0"
Oct 09 13:48:35 crc kubenswrapper[4762]: I1009 13:48:35.298822 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9984c147-5425-4b69-ace5-52d351f46933-config-data\") pod \"nova-scheduler-0\" (UID: \"9984c147-5425-4b69-ace5-52d351f46933\") " pod="openstack/nova-scheduler-0"
Oct 09 13:48:35 crc kubenswrapper[4762]: I1009 13:48:35.298864 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wwzhf\" (UniqueName: \"kubernetes.io/projected/9984c147-5425-4b69-ace5-52d351f46933-kube-api-access-wwzhf\") pod \"nova-scheduler-0\" (UID: \"9984c147-5425-4b69-ace5-52d351f46933\") " pod="openstack/nova-scheduler-0"
Oct 09 13:48:35 crc kubenswrapper[4762]: I1009 13:48:35.304591 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Oct 09 13:48:35 crc kubenswrapper[4762]: I1009 13:48:35.305178 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9984c147-5425-4b69-ace5-52d351f46933-config-data\") pod \"nova-scheduler-0\" (UID: \"9984c147-5425-4b69-ace5-52d351f46933\") " pod="openstack/nova-scheduler-0"
Oct 09 13:48:35 crc kubenswrapper[4762]: I1009 13:48:35.320529 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wwzhf\" (UniqueName: \"kubernetes.io/projected/9984c147-5425-4b69-ace5-52d351f46933-kube-api-access-wwzhf\") pod \"nova-scheduler-0\" (UID: \"9984c147-5425-4b69-ace5-52d351f46933\") " pod="openstack/nova-scheduler-0"
Oct 09 13:48:35 crc kubenswrapper[4762]: I1009 13:48:35.339359 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9984c147-5425-4b69-ace5-52d351f46933-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"9984c147-5425-4b69-ace5-52d351f46933\") " pod="openstack/nova-scheduler-0"
Oct 09 13:48:35 crc kubenswrapper[4762]: I1009 13:48:35.368378 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-865f5d856f-m7gs2"
Oct 09 13:48:35 crc kubenswrapper[4762]: I1009 13:48:35.379160 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Oct 09 13:48:35 crc kubenswrapper[4762]: I1009 13:48:35.544810 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Oct 09 13:48:35 crc kubenswrapper[4762]: I1009 13:48:35.568838 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-fbmj6"]
Oct 09 13:48:35 crc kubenswrapper[4762]: I1009 13:48:35.638852 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-cx7gg"]
Oct 09 13:48:35 crc kubenswrapper[4762]: I1009 13:48:35.655097 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-cx7gg"
Oct 09 13:48:35 crc kubenswrapper[4762]: I1009 13:48:35.663240 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts"
Oct 09 13:48:35 crc kubenswrapper[4762]: I1009 13:48:35.664063 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data"
Oct 09 13:48:35 crc kubenswrapper[4762]: I1009 13:48:35.666620 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-cx7gg"]
Oct 09 13:48:35 crc kubenswrapper[4762]: I1009 13:48:35.709754 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a34ea94b-16a9-4de9-9179-a729ef91f7af-config-data\") pod \"nova-cell1-conductor-db-sync-cx7gg\" (UID: \"a34ea94b-16a9-4de9-9179-a729ef91f7af\") " pod="openstack/nova-cell1-conductor-db-sync-cx7gg"
Oct 09 13:48:35 crc kubenswrapper[4762]: I1009 13:48:35.709868 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-czfkp\" (UniqueName: \"kubernetes.io/projected/a34ea94b-16a9-4de9-9179-a729ef91f7af-kube-api-access-czfkp\") pod \"nova-cell1-conductor-db-sync-cx7gg\" (UID: \"a34ea94b-16a9-4de9-9179-a729ef91f7af\") " pod="openstack/nova-cell1-conductor-db-sync-cx7gg"
Oct 09 13:48:35 crc kubenswrapper[4762]: I1009 13:48:35.709949 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a34ea94b-16a9-4de9-9179-a729ef91f7af-scripts\") pod \"nova-cell1-conductor-db-sync-cx7gg\" (UID: \"a34ea94b-16a9-4de9-9179-a729ef91f7af\") " pod="openstack/nova-cell1-conductor-db-sync-cx7gg"
Oct 09 13:48:35 crc kubenswrapper[4762]: I1009 13:48:35.710015 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a34ea94b-16a9-4de9-9179-a729ef91f7af-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-cx7gg\" (UID: \"a34ea94b-16a9-4de9-9179-a729ef91f7af\") " pod="openstack/nova-cell1-conductor-db-sync-cx7gg"
Oct 09 13:48:35 crc kubenswrapper[4762]: I1009 13:48:35.721972 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b2efe15b-237f-4bb4-a563-a2a488cf3ddb","Type":"ContainerStarted","Data":"ae03b386896d48372c96fea9ad585e372bec05d3106723c7e741d4f8bb4f6734"}
Oct 09 13:48:35 crc kubenswrapper[4762]: I1009 13:48:35.724324 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-fbmj6" event={"ID":"21e8bb8c-2024-4cc3-8887-17e4b8794601","Type":"ContainerStarted","Data":"dca1e5fe7e2ae21e16b7a47c5a264c748cf9140a62b862df1012f2869bd7aa3a"}
Oct 09 13:48:35 crc kubenswrapper[4762]: I1009 13:48:35.812515 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-czfkp\" (UniqueName: \"kubernetes.io/projected/a34ea94b-16a9-4de9-9179-a729ef91f7af-kube-api-access-czfkp\") pod \"nova-cell1-conductor-db-sync-cx7gg\" (UID: \"a34ea94b-16a9-4de9-9179-a729ef91f7af\") " pod="openstack/nova-cell1-conductor-db-sync-cx7gg"
Oct 09 13:48:35 crc kubenswrapper[4762]: I1009 13:48:35.823521 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a34ea94b-16a9-4de9-9179-a729ef91f7af-scripts\") pod \"nova-cell1-conductor-db-sync-cx7gg\" (UID: \"a34ea94b-16a9-4de9-9179-a729ef91f7af\") " pod="openstack/nova-cell1-conductor-db-sync-cx7gg"
Oct 09 13:48:35 crc kubenswrapper[4762]: I1009 13:48:35.813833 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Oct 09 13:48:35 crc kubenswrapper[4762]: I1009 13:48:35.823763 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a34ea94b-16a9-4de9-9179-a729ef91f7af-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-cx7gg\" (UID: \"a34ea94b-16a9-4de9-9179-a729ef91f7af\") " pod="openstack/nova-cell1-conductor-db-sync-cx7gg"
Oct 09 13:48:35 crc kubenswrapper[4762]: W1009 13:48:35.823892 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf8ac6cff_2144_4d02_964f_8b7d866f0520.slice/crio-2d87ffbb4f07b05cfed04916e4e2e594810be2f948641437d165d358cfebc77a WatchSource:0}: Error finding container 2d87ffbb4f07b05cfed04916e4e2e594810be2f948641437d165d358cfebc77a: Status 404 returned error can't find the container with id 2d87ffbb4f07b05cfed04916e4e2e594810be2f948641437d165d358cfebc77a
Oct 09 13:48:35 crc kubenswrapper[4762]: I1009 13:48:35.824014 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a34ea94b-16a9-4de9-9179-a729ef91f7af-config-data\") pod \"nova-cell1-conductor-db-sync-cx7gg\" (UID: \"a34ea94b-16a9-4de9-9179-a729ef91f7af\") " pod="openstack/nova-cell1-conductor-db-sync-cx7gg"
Oct 09 13:48:35 crc kubenswrapper[4762]: I1009 13:48:35.840036 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a34ea94b-16a9-4de9-9179-a729ef91f7af-scripts\") pod \"nova-cell1-conductor-db-sync-cx7gg\" (UID: \"a34ea94b-16a9-4de9-9179-a729ef91f7af\") " pod="openstack/nova-cell1-conductor-db-sync-cx7gg"
Oct 09 13:48:35 crc kubenswrapper[4762]: I1009 13:48:35.842607 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a34ea94b-16a9-4de9-9179-a729ef91f7af-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-cx7gg\" (UID: \"a34ea94b-16a9-4de9-9179-a729ef91f7af\") " pod="openstack/nova-cell1-conductor-db-sync-cx7gg"
Oct 09 13:48:35 crc kubenswrapper[4762]: I1009 13:48:35.859949 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a34ea94b-16a9-4de9-9179-a729ef91f7af-config-data\") pod \"nova-cell1-conductor-db-sync-cx7gg\" (UID: \"a34ea94b-16a9-4de9-9179-a729ef91f7af\") " pod="openstack/nova-cell1-conductor-db-sync-cx7gg"
Oct 09 13:48:35 crc kubenswrapper[4762]: I1009 13:48:35.880857 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-czfkp\" (UniqueName: \"kubernetes.io/projected/a34ea94b-16a9-4de9-9179-a729ef91f7af-kube-api-access-czfkp\") pod \"nova-cell1-conductor-db-sync-cx7gg\" (UID: \"a34ea94b-16a9-4de9-9179-a729ef91f7af\") " pod="openstack/nova-cell1-conductor-db-sync-cx7gg"
Oct 09 13:48:36 crc kubenswrapper[4762]: I1009 13:48:36.000822 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-cx7gg"
Oct 09 13:48:36 crc kubenswrapper[4762]: I1009 13:48:36.132093 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-865f5d856f-m7gs2"]
Oct 09 13:48:36 crc kubenswrapper[4762]: I1009 13:48:36.153347 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Oct 09 13:48:36 crc kubenswrapper[4762]: I1009 13:48:36.164523 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Oct 09 13:48:36 crc kubenswrapper[4762]: I1009 13:48:36.530978 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-cx7gg"]
Oct 09 13:48:36 crc kubenswrapper[4762]: I1009 13:48:36.739890 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-fbmj6" event={"ID":"21e8bb8c-2024-4cc3-8887-17e4b8794601","Type":"ContainerStarted","Data":"1d3806fa170fdbcbf424aff078a59eb080a2c478ec242116e7bd89a246b96fe7"}
Oct 09 13:48:36 crc kubenswrapper[4762]: I1009 13:48:36.743009 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"e49941c7-e414-42cb-8616-c836bd164ef8","Type":"ContainerStarted","Data":"0ff15c4882149d69ec7406ec9aeb69f9026c9199d471e4eed2a0ca5d37a26c59"}
Oct 09 13:48:36 crc kubenswrapper[4762]: I1009 13:48:36.744693 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"9984c147-5425-4b69-ace5-52d351f46933","Type":"ContainerStarted","Data":"fb26cf7feba68ef50593aeb288bee79119699a34eceec7ad350920a3ad36d5ec"}
Oct 09 13:48:36 crc kubenswrapper[4762]: I1009 13:48:36.746202 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"f8ac6cff-2144-4d02-964f-8b7d866f0520","Type":"ContainerStarted","Data":"2d87ffbb4f07b05cfed04916e4e2e594810be2f948641437d165d358cfebc77a"}
Oct 09 13:48:36 crc kubenswrapper[4762]: I1009 13:48:36.747420 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-cx7gg" event={"ID":"a34ea94b-16a9-4de9-9179-a729ef91f7af","Type":"ContainerStarted","Data":"1bf070f962c09f175c6374a523ae959f9dbb1fa3cb26fce3014605704172c7e4"}
Oct 09 13:48:36 crc kubenswrapper[4762]: I1009 13:48:36.751078 4762 generic.go:334] "Generic (PLEG): container finished" podID="2d928647-7b87-4be1-9b66-5b146f24f93a" containerID="8746c82d460c9126dfc0924a012c219d9a3a24d874022a42eaf5f4cbca8ef1eb" exitCode=0
Oct 09 13:48:36 crc kubenswrapper[4762]: I1009 13:48:36.751135 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-865f5d856f-m7gs2" event={"ID":"2d928647-7b87-4be1-9b66-5b146f24f93a","Type":"ContainerDied","Data":"8746c82d460c9126dfc0924a012c219d9a3a24d874022a42eaf5f4cbca8ef1eb"}
Oct 09 13:48:36 crc kubenswrapper[4762]: I1009 13:48:36.751181 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-865f5d856f-m7gs2" event={"ID":"2d928647-7b87-4be1-9b66-5b146f24f93a","Type":"ContainerStarted","Data":"5af0e78373f3c6a974bca79b24219932580b2b263d3343abb32f73f3602ffc22"}
Oct 09 13:48:36 crc kubenswrapper[4762]: I1009 13:48:36.777814 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-fbmj6" podStartSLOduration=2.77778832 podStartE2EDuration="2.77778832s" podCreationTimestamp="2025-10-09 13:48:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC"
observedRunningTime="2025-10-09 13:48:36.759604526 +0000 UTC m=+1392.533395575" watchObservedRunningTime="2025-10-09 13:48:36.77778832 +0000 UTC m=+1392.551579359" Oct 09 13:48:37 crc kubenswrapper[4762]: I1009 13:48:37.766617 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-cx7gg" event={"ID":"a34ea94b-16a9-4de9-9179-a729ef91f7af","Type":"ContainerStarted","Data":"92507a238bded5661e7b90d68dece979b10a9b1d40801f032f668531f54c7d94"} Oct 09 13:48:37 crc kubenswrapper[4762]: I1009 13:48:37.793852 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-cx7gg" podStartSLOduration=2.793833688 podStartE2EDuration="2.793833688s" podCreationTimestamp="2025-10-09 13:48:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:48:37.78813105 +0000 UTC m=+1393.561922109" watchObservedRunningTime="2025-10-09 13:48:37.793833688 +0000 UTC m=+1393.567624727" Oct 09 13:48:38 crc kubenswrapper[4762]: I1009 13:48:38.507569 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 09 13:48:38 crc kubenswrapper[4762]: I1009 13:48:38.536710 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 09 13:48:39 crc kubenswrapper[4762]: I1009 13:48:39.787372 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"e49941c7-e414-42cb-8616-c836bd164ef8","Type":"ContainerStarted","Data":"ead7caa05a650d96846f40535d990f340d2fe69986d670d153105a07c3cf9866"} Oct 09 13:48:39 crc kubenswrapper[4762]: I1009 13:48:39.789354 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"9984c147-5425-4b69-ace5-52d351f46933","Type":"ContainerStarted","Data":"a75686bc8f9e45a3d2116d09b6a9000a3597a0e4c26b1aaa15cb3a1f81b44fa0"} Oct 09 13:48:39 crc kubenswrapper[4762]: I1009 13:48:39.795177 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"f8ac6cff-2144-4d02-964f-8b7d866f0520","Type":"ContainerStarted","Data":"fc250e3d796c373b1c56af7e1cae32a615f042162866abd13fd13f6d9adf108b"} Oct 09 13:48:39 crc kubenswrapper[4762]: I1009 13:48:39.795318 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="f8ac6cff-2144-4d02-964f-8b7d866f0520" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://fc250e3d796c373b1c56af7e1cae32a615f042162866abd13fd13f6d9adf108b" gracePeriod=30 Oct 09 13:48:39 crc kubenswrapper[4762]: I1009 13:48:39.798862 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-865f5d856f-m7gs2" event={"ID":"2d928647-7b87-4be1-9b66-5b146f24f93a","Type":"ContainerStarted","Data":"afb284ad3460ae4eab62636e991f111b8b6d1e2cba5e42e6c555c4873ea76395"} Oct 09 13:48:39 crc kubenswrapper[4762]: I1009 13:48:39.800003 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-865f5d856f-m7gs2" Oct 09 13:48:39 crc kubenswrapper[4762]: I1009 13:48:39.801646 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b2efe15b-237f-4bb4-a563-a2a488cf3ddb","Type":"ContainerStarted","Data":"ad2b4b149b35475bb1f29b764ad78f0b39597a13c5a6f4b3a965d4c95cede3a2"} Oct 09 13:48:39 crc kubenswrapper[4762]: I1009 13:48:39.814052 4762 pod_startup_latency_tracker.go:104] 
"Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.664163108 podStartE2EDuration="5.814030123s" podCreationTimestamp="2025-10-09 13:48:34 +0000 UTC" firstStartedPulling="2025-10-09 13:48:36.19087687 +0000 UTC m=+1391.964667909" lastFinishedPulling="2025-10-09 13:48:39.340743885 +0000 UTC m=+1395.114534924" observedRunningTime="2025-10-09 13:48:39.808177281 +0000 UTC m=+1395.581968320" watchObservedRunningTime="2025-10-09 13:48:39.814030123 +0000 UTC m=+1395.587821162" Oct 09 13:48:39 crc kubenswrapper[4762]: I1009 13:48:39.835975 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-865f5d856f-m7gs2" podStartSLOduration=5.835933585 podStartE2EDuration="5.835933585s" podCreationTimestamp="2025-10-09 13:48:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:48:39.83113355 +0000 UTC m=+1395.604924589" watchObservedRunningTime="2025-10-09 13:48:39.835933585 +0000 UTC m=+1395.609724624" Oct 09 13:48:39 crc kubenswrapper[4762]: I1009 13:48:39.854484 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.431561005 podStartE2EDuration="5.854461428s" podCreationTimestamp="2025-10-09 13:48:34 +0000 UTC" firstStartedPulling="2025-10-09 13:48:35.854968513 +0000 UTC m=+1391.628759552" lastFinishedPulling="2025-10-09 13:48:39.277868936 +0000 UTC m=+1395.051659975" observedRunningTime="2025-10-09 13:48:39.850308469 +0000 UTC m=+1395.624099518" watchObservedRunningTime="2025-10-09 13:48:39.854461428 +0000 UTC m=+1395.628252467" Oct 09 13:48:40 crc kubenswrapper[4762]: I1009 13:48:40.057268 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Oct 09 13:48:40 crc kubenswrapper[4762]: I1009 13:48:40.379898 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Oct 09 13:48:40 crc kubenswrapper[4762]: I1009 13:48:40.813263 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b2efe15b-237f-4bb4-a563-a2a488cf3ddb","Type":"ContainerStarted","Data":"da7e27c4a037e15c1fa6804a3a96ea9f69d178f01e75490562436ba0579c636e"} Oct 09 13:48:40 crc kubenswrapper[4762]: I1009 13:48:40.816424 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"e49941c7-e414-42cb-8616-c836bd164ef8","Type":"ContainerStarted","Data":"682f7e47aadd205131c95bfad65a6ec9317ae66bd76ba6e08e1ba4db3180319e"} Oct 09 13:48:40 crc kubenswrapper[4762]: I1009 13:48:40.816696 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="e49941c7-e414-42cb-8616-c836bd164ef8" containerName="nova-metadata-log" containerID="cri-o://ead7caa05a650d96846f40535d990f340d2fe69986d670d153105a07c3cf9866" gracePeriod=30 Oct 09 13:48:40 crc kubenswrapper[4762]: I1009 13:48:40.816863 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="e49941c7-e414-42cb-8616-c836bd164ef8" containerName="nova-metadata-metadata" containerID="cri-o://682f7e47aadd205131c95bfad65a6ec9317ae66bd76ba6e08e1ba4db3180319e" gracePeriod=30 Oct 09 13:48:40 crc kubenswrapper[4762]: I1009 13:48:40.843844 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=3.129197661 
podStartE2EDuration="6.84382711s" podCreationTimestamp="2025-10-09 13:48:34 +0000 UTC" firstStartedPulling="2025-10-09 13:48:35.564939152 +0000 UTC m=+1391.338730191" lastFinishedPulling="2025-10-09 13:48:39.279568601 +0000 UTC m=+1395.053359640" observedRunningTime="2025-10-09 13:48:40.836220072 +0000 UTC m=+1396.610011111" watchObservedRunningTime="2025-10-09 13:48:40.84382711 +0000 UTC m=+1396.617618149" Oct 09 13:48:40 crc kubenswrapper[4762]: I1009 13:48:40.864273 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=3.750988781 podStartE2EDuration="6.864249453s" podCreationTimestamp="2025-10-09 13:48:34 +0000 UTC" firstStartedPulling="2025-10-09 13:48:36.204590507 +0000 UTC m=+1391.978381546" lastFinishedPulling="2025-10-09 13:48:39.317851179 +0000 UTC m=+1395.091642218" observedRunningTime="2025-10-09 13:48:40.857341693 +0000 UTC m=+1396.631132742" watchObservedRunningTime="2025-10-09 13:48:40.864249453 +0000 UTC m=+1396.638040492" Oct 09 13:48:41 crc kubenswrapper[4762]: I1009 13:48:41.397125 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 09 13:48:41 crc kubenswrapper[4762]: I1009 13:48:41.447685 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e49941c7-e414-42cb-8616-c836bd164ef8-config-data\") pod \"e49941c7-e414-42cb-8616-c836bd164ef8\" (UID: \"e49941c7-e414-42cb-8616-c836bd164ef8\") " Oct 09 13:48:41 crc kubenswrapper[4762]: I1009 13:48:41.447791 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e49941c7-e414-42cb-8616-c836bd164ef8-combined-ca-bundle\") pod \"e49941c7-e414-42cb-8616-c836bd164ef8\" (UID: \"e49941c7-e414-42cb-8616-c836bd164ef8\") " Oct 09 13:48:41 crc kubenswrapper[4762]: I1009 13:48:41.448020 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e49941c7-e414-42cb-8616-c836bd164ef8-logs\") pod \"e49941c7-e414-42cb-8616-c836bd164ef8\" (UID: \"e49941c7-e414-42cb-8616-c836bd164ef8\") " Oct 09 13:48:41 crc kubenswrapper[4762]: I1009 13:48:41.448133 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8cvkt\" (UniqueName: \"kubernetes.io/projected/e49941c7-e414-42cb-8616-c836bd164ef8-kube-api-access-8cvkt\") pod \"e49941c7-e414-42cb-8616-c836bd164ef8\" (UID: \"e49941c7-e414-42cb-8616-c836bd164ef8\") " Oct 09 13:48:41 crc kubenswrapper[4762]: I1009 13:48:41.450270 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e49941c7-e414-42cb-8616-c836bd164ef8-logs" (OuterVolumeSpecName: "logs") pod "e49941c7-e414-42cb-8616-c836bd164ef8" (UID: "e49941c7-e414-42cb-8616-c836bd164ef8"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 13:48:41 crc kubenswrapper[4762]: I1009 13:48:41.458409 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e49941c7-e414-42cb-8616-c836bd164ef8-kube-api-access-8cvkt" (OuterVolumeSpecName: "kube-api-access-8cvkt") pod "e49941c7-e414-42cb-8616-c836bd164ef8" (UID: "e49941c7-e414-42cb-8616-c836bd164ef8"). InnerVolumeSpecName "kube-api-access-8cvkt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:48:41 crc kubenswrapper[4762]: I1009 13:48:41.484221 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e49941c7-e414-42cb-8616-c836bd164ef8-config-data" (OuterVolumeSpecName: "config-data") pod "e49941c7-e414-42cb-8616-c836bd164ef8" (UID: "e49941c7-e414-42cb-8616-c836bd164ef8"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:48:41 crc kubenswrapper[4762]: I1009 13:48:41.485134 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e49941c7-e414-42cb-8616-c836bd164ef8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e49941c7-e414-42cb-8616-c836bd164ef8" (UID: "e49941c7-e414-42cb-8616-c836bd164ef8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:48:41 crc kubenswrapper[4762]: I1009 13:48:41.551402 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8cvkt\" (UniqueName: \"kubernetes.io/projected/e49941c7-e414-42cb-8616-c836bd164ef8-kube-api-access-8cvkt\") on node \"crc\" DevicePath \"\"" Oct 09 13:48:41 crc kubenswrapper[4762]: I1009 13:48:41.551448 4762 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e49941c7-e414-42cb-8616-c836bd164ef8-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 13:48:41 crc kubenswrapper[4762]: I1009 13:48:41.551461 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e49941c7-e414-42cb-8616-c836bd164ef8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 13:48:41 crc kubenswrapper[4762]: I1009 13:48:41.551471 4762 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e49941c7-e414-42cb-8616-c836bd164ef8-logs\") on node \"crc\" DevicePath \"\"" Oct 09 13:48:41 crc kubenswrapper[4762]: I1009 13:48:41.847330 4762 generic.go:334] "Generic (PLEG): container finished" podID="e49941c7-e414-42cb-8616-c836bd164ef8" containerID="682f7e47aadd205131c95bfad65a6ec9317ae66bd76ba6e08e1ba4db3180319e" exitCode=0 Oct 09 13:48:41 crc kubenswrapper[4762]: I1009 13:48:41.847372 4762 generic.go:334] "Generic (PLEG): container finished" podID="e49941c7-e414-42cb-8616-c836bd164ef8" containerID="ead7caa05a650d96846f40535d990f340d2fe69986d670d153105a07c3cf9866" exitCode=143 Oct 09 13:48:41 crc kubenswrapper[4762]: I1009 13:48:41.848308 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 09 13:48:41 crc kubenswrapper[4762]: I1009 13:48:41.850745 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"e49941c7-e414-42cb-8616-c836bd164ef8","Type":"ContainerDied","Data":"682f7e47aadd205131c95bfad65a6ec9317ae66bd76ba6e08e1ba4db3180319e"} Oct 09 13:48:41 crc kubenswrapper[4762]: I1009 13:48:41.850788 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"e49941c7-e414-42cb-8616-c836bd164ef8","Type":"ContainerDied","Data":"ead7caa05a650d96846f40535d990f340d2fe69986d670d153105a07c3cf9866"} Oct 09 13:48:41 crc kubenswrapper[4762]: I1009 13:48:41.850802 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"e49941c7-e414-42cb-8616-c836bd164ef8","Type":"ContainerDied","Data":"0ff15c4882149d69ec7406ec9aeb69f9026c9199d471e4eed2a0ca5d37a26c59"} Oct 09 13:48:41 crc kubenswrapper[4762]: I1009 13:48:41.850818 4762 scope.go:117] "RemoveContainer" containerID="682f7e47aadd205131c95bfad65a6ec9317ae66bd76ba6e08e1ba4db3180319e" Oct 09 13:48:41 crc kubenswrapper[4762]: I1009 13:48:41.884449 4762 scope.go:117] "RemoveContainer" containerID="ead7caa05a650d96846f40535d990f340d2fe69986d670d153105a07c3cf9866" Oct 09 13:48:41 crc kubenswrapper[4762]: I1009 13:48:41.897315 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 09 13:48:41 crc kubenswrapper[4762]: I1009 13:48:41.910696 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Oct 09 13:48:41 crc kubenswrapper[4762]: I1009 13:48:41.910757 4762 scope.go:117] "RemoveContainer" containerID="682f7e47aadd205131c95bfad65a6ec9317ae66bd76ba6e08e1ba4db3180319e" Oct 09 13:48:41 crc kubenswrapper[4762]: E1009 13:48:41.912447 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"682f7e47aadd205131c95bfad65a6ec9317ae66bd76ba6e08e1ba4db3180319e\": container with ID starting with 682f7e47aadd205131c95bfad65a6ec9317ae66bd76ba6e08e1ba4db3180319e not found: ID does not exist" containerID="682f7e47aadd205131c95bfad65a6ec9317ae66bd76ba6e08e1ba4db3180319e" Oct 09 13:48:41 crc kubenswrapper[4762]: I1009 13:48:41.912578 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"682f7e47aadd205131c95bfad65a6ec9317ae66bd76ba6e08e1ba4db3180319e"} err="failed to get container status \"682f7e47aadd205131c95bfad65a6ec9317ae66bd76ba6e08e1ba4db3180319e\": rpc error: code = NotFound desc = could not find container \"682f7e47aadd205131c95bfad65a6ec9317ae66bd76ba6e08e1ba4db3180319e\": container with ID starting with 682f7e47aadd205131c95bfad65a6ec9317ae66bd76ba6e08e1ba4db3180319e not found: ID does not exist" Oct 09 13:48:41 crc kubenswrapper[4762]: I1009 13:48:41.912685 4762 scope.go:117] "RemoveContainer" containerID="ead7caa05a650d96846f40535d990f340d2fe69986d670d153105a07c3cf9866" Oct 09 13:48:41 crc kubenswrapper[4762]: E1009 13:48:41.913276 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ead7caa05a650d96846f40535d990f340d2fe69986d670d153105a07c3cf9866\": container with ID starting with ead7caa05a650d96846f40535d990f340d2fe69986d670d153105a07c3cf9866 not found: ID does not exist" containerID="ead7caa05a650d96846f40535d990f340d2fe69986d670d153105a07c3cf9866" Oct 09 13:48:41 crc kubenswrapper[4762]: I1009 
13:48:41.913330 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ead7caa05a650d96846f40535d990f340d2fe69986d670d153105a07c3cf9866"} err="failed to get container status \"ead7caa05a650d96846f40535d990f340d2fe69986d670d153105a07c3cf9866\": rpc error: code = NotFound desc = could not find container \"ead7caa05a650d96846f40535d990f340d2fe69986d670d153105a07c3cf9866\": container with ID starting with ead7caa05a650d96846f40535d990f340d2fe69986d670d153105a07c3cf9866 not found: ID does not exist" Oct 09 13:48:41 crc kubenswrapper[4762]: I1009 13:48:41.913363 4762 scope.go:117] "RemoveContainer" containerID="682f7e47aadd205131c95bfad65a6ec9317ae66bd76ba6e08e1ba4db3180319e" Oct 09 13:48:41 crc kubenswrapper[4762]: I1009 13:48:41.916598 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"682f7e47aadd205131c95bfad65a6ec9317ae66bd76ba6e08e1ba4db3180319e"} err="failed to get container status \"682f7e47aadd205131c95bfad65a6ec9317ae66bd76ba6e08e1ba4db3180319e\": rpc error: code = NotFound desc = could not find container \"682f7e47aadd205131c95bfad65a6ec9317ae66bd76ba6e08e1ba4db3180319e\": container with ID starting with 682f7e47aadd205131c95bfad65a6ec9317ae66bd76ba6e08e1ba4db3180319e not found: ID does not exist" Oct 09 13:48:41 crc kubenswrapper[4762]: I1009 13:48:41.916773 4762 scope.go:117] "RemoveContainer" containerID="ead7caa05a650d96846f40535d990f340d2fe69986d670d153105a07c3cf9866" Oct 09 13:48:41 crc kubenswrapper[4762]: I1009 13:48:41.918155 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ead7caa05a650d96846f40535d990f340d2fe69986d670d153105a07c3cf9866"} err="failed to get container status \"ead7caa05a650d96846f40535d990f340d2fe69986d670d153105a07c3cf9866\": rpc error: code = NotFound desc = could not find container \"ead7caa05a650d96846f40535d990f340d2fe69986d670d153105a07c3cf9866\": container with ID starting with ead7caa05a650d96846f40535d990f340d2fe69986d670d153105a07c3cf9866 not found: ID does not exist" Oct 09 13:48:41 crc kubenswrapper[4762]: I1009 13:48:41.928299 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Oct 09 13:48:41 crc kubenswrapper[4762]: E1009 13:48:41.928811 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e49941c7-e414-42cb-8616-c836bd164ef8" containerName="nova-metadata-metadata" Oct 09 13:48:41 crc kubenswrapper[4762]: I1009 13:48:41.928836 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="e49941c7-e414-42cb-8616-c836bd164ef8" containerName="nova-metadata-metadata" Oct 09 13:48:41 crc kubenswrapper[4762]: E1009 13:48:41.928864 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e49941c7-e414-42cb-8616-c836bd164ef8" containerName="nova-metadata-log" Oct 09 13:48:41 crc kubenswrapper[4762]: I1009 13:48:41.928872 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="e49941c7-e414-42cb-8616-c836bd164ef8" containerName="nova-metadata-log" Oct 09 13:48:41 crc kubenswrapper[4762]: I1009 13:48:41.929123 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="e49941c7-e414-42cb-8616-c836bd164ef8" containerName="nova-metadata-metadata" Oct 09 13:48:41 crc kubenswrapper[4762]: I1009 13:48:41.929144 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="e49941c7-e414-42cb-8616-c836bd164ef8" containerName="nova-metadata-log" Oct 09 13:48:41 crc kubenswrapper[4762]: I1009 13:48:41.930347 4762 util.go:30] "No 
sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 09 13:48:41 crc kubenswrapper[4762]: I1009 13:48:41.935479 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Oct 09 13:48:41 crc kubenswrapper[4762]: I1009 13:48:41.935917 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Oct 09 13:48:41 crc kubenswrapper[4762]: I1009 13:48:41.939817 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 09 13:48:41 crc kubenswrapper[4762]: I1009 13:48:41.959724 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bc9rz\" (UniqueName: \"kubernetes.io/projected/3e72fcaf-6d45-40c1-80c7-c285c13334dc-kube-api-access-bc9rz\") pod \"nova-metadata-0\" (UID: \"3e72fcaf-6d45-40c1-80c7-c285c13334dc\") " pod="openstack/nova-metadata-0" Oct 09 13:48:41 crc kubenswrapper[4762]: I1009 13:48:41.960657 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3e72fcaf-6d45-40c1-80c7-c285c13334dc-config-data\") pod \"nova-metadata-0\" (UID: \"3e72fcaf-6d45-40c1-80c7-c285c13334dc\") " pod="openstack/nova-metadata-0" Oct 09 13:48:41 crc kubenswrapper[4762]: I1009 13:48:41.960772 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/3e72fcaf-6d45-40c1-80c7-c285c13334dc-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"3e72fcaf-6d45-40c1-80c7-c285c13334dc\") " pod="openstack/nova-metadata-0" Oct 09 13:48:41 crc kubenswrapper[4762]: I1009 13:48:41.960839 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e72fcaf-6d45-40c1-80c7-c285c13334dc-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"3e72fcaf-6d45-40c1-80c7-c285c13334dc\") " pod="openstack/nova-metadata-0" Oct 09 13:48:41 crc kubenswrapper[4762]: I1009 13:48:41.961303 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3e72fcaf-6d45-40c1-80c7-c285c13334dc-logs\") pod \"nova-metadata-0\" (UID: \"3e72fcaf-6d45-40c1-80c7-c285c13334dc\") " pod="openstack/nova-metadata-0" Oct 09 13:48:42 crc kubenswrapper[4762]: I1009 13:48:42.063119 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3e72fcaf-6d45-40c1-80c7-c285c13334dc-config-data\") pod \"nova-metadata-0\" (UID: \"3e72fcaf-6d45-40c1-80c7-c285c13334dc\") " pod="openstack/nova-metadata-0" Oct 09 13:48:42 crc kubenswrapper[4762]: I1009 13:48:42.063228 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/3e72fcaf-6d45-40c1-80c7-c285c13334dc-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"3e72fcaf-6d45-40c1-80c7-c285c13334dc\") " pod="openstack/nova-metadata-0" Oct 09 13:48:42 crc kubenswrapper[4762]: I1009 13:48:42.063264 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e72fcaf-6d45-40c1-80c7-c285c13334dc-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: 
\"3e72fcaf-6d45-40c1-80c7-c285c13334dc\") " pod="openstack/nova-metadata-0" Oct 09 13:48:42 crc kubenswrapper[4762]: I1009 13:48:42.063298 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3e72fcaf-6d45-40c1-80c7-c285c13334dc-logs\") pod \"nova-metadata-0\" (UID: \"3e72fcaf-6d45-40c1-80c7-c285c13334dc\") " pod="openstack/nova-metadata-0" Oct 09 13:48:42 crc kubenswrapper[4762]: I1009 13:48:42.063332 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bc9rz\" (UniqueName: \"kubernetes.io/projected/3e72fcaf-6d45-40c1-80c7-c285c13334dc-kube-api-access-bc9rz\") pod \"nova-metadata-0\" (UID: \"3e72fcaf-6d45-40c1-80c7-c285c13334dc\") " pod="openstack/nova-metadata-0" Oct 09 13:48:42 crc kubenswrapper[4762]: I1009 13:48:42.064150 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3e72fcaf-6d45-40c1-80c7-c285c13334dc-logs\") pod \"nova-metadata-0\" (UID: \"3e72fcaf-6d45-40c1-80c7-c285c13334dc\") " pod="openstack/nova-metadata-0" Oct 09 13:48:42 crc kubenswrapper[4762]: I1009 13:48:42.068163 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/3e72fcaf-6d45-40c1-80c7-c285c13334dc-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"3e72fcaf-6d45-40c1-80c7-c285c13334dc\") " pod="openstack/nova-metadata-0" Oct 09 13:48:42 crc kubenswrapper[4762]: I1009 13:48:42.068211 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3e72fcaf-6d45-40c1-80c7-c285c13334dc-config-data\") pod \"nova-metadata-0\" (UID: \"3e72fcaf-6d45-40c1-80c7-c285c13334dc\") " pod="openstack/nova-metadata-0" Oct 09 13:48:42 crc kubenswrapper[4762]: I1009 13:48:42.068215 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e72fcaf-6d45-40c1-80c7-c285c13334dc-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"3e72fcaf-6d45-40c1-80c7-c285c13334dc\") " pod="openstack/nova-metadata-0" Oct 09 13:48:42 crc kubenswrapper[4762]: I1009 13:48:42.085169 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bc9rz\" (UniqueName: \"kubernetes.io/projected/3e72fcaf-6d45-40c1-80c7-c285c13334dc-kube-api-access-bc9rz\") pod \"nova-metadata-0\" (UID: \"3e72fcaf-6d45-40c1-80c7-c285c13334dc\") " pod="openstack/nova-metadata-0" Oct 09 13:48:42 crc kubenswrapper[4762]: I1009 13:48:42.259822 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 09 13:48:42 crc kubenswrapper[4762]: I1009 13:48:42.743387 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 09 13:48:42 crc kubenswrapper[4762]: W1009 13:48:42.745707 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3e72fcaf_6d45_40c1_80c7_c285c13334dc.slice/crio-f41111c17661dc4b5ab1e4b92561b116f62a4a99eae06e1f9b36d8610d63d9ee WatchSource:0}: Error finding container f41111c17661dc4b5ab1e4b92561b116f62a4a99eae06e1f9b36d8610d63d9ee: Status 404 returned error can't find the container with id f41111c17661dc4b5ab1e4b92561b116f62a4a99eae06e1f9b36d8610d63d9ee Oct 09 13:48:42 crc kubenswrapper[4762]: I1009 13:48:42.859211 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"3e72fcaf-6d45-40c1-80c7-c285c13334dc","Type":"ContainerStarted","Data":"f41111c17661dc4b5ab1e4b92561b116f62a4a99eae06e1f9b36d8610d63d9ee"} Oct 09 13:48:42 crc kubenswrapper[4762]: I1009 13:48:42.977423 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e49941c7-e414-42cb-8616-c836bd164ef8" path="/var/lib/kubelet/pods/e49941c7-e414-42cb-8616-c836bd164ef8/volumes" Oct 09 13:48:43 crc kubenswrapper[4762]: I1009 13:48:43.870513 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"3e72fcaf-6d45-40c1-80c7-c285c13334dc","Type":"ContainerStarted","Data":"7992ba3e60112ac0fa87bc532118f893cef5673ec2e01f3617e682f28e948756"} Oct 09 13:48:43 crc kubenswrapper[4762]: I1009 13:48:43.870940 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"3e72fcaf-6d45-40c1-80c7-c285c13334dc","Type":"ContainerStarted","Data":"29f0968905c1d644913d70e0c63551c36b8563785c2823e83346d0ec54790c36"} Oct 09 13:48:43 crc kubenswrapper[4762]: I1009 13:48:43.900918 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.9008950860000002 podStartE2EDuration="2.900895086s" podCreationTimestamp="2025-10-09 13:48:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:48:43.8933435 +0000 UTC m=+1399.667134539" watchObservedRunningTime="2025-10-09 13:48:43.900895086 +0000 UTC m=+1399.674686135" Oct 09 13:48:44 crc kubenswrapper[4762]: I1009 13:48:44.883099 4762 generic.go:334] "Generic (PLEG): container finished" podID="21e8bb8c-2024-4cc3-8887-17e4b8794601" containerID="1d3806fa170fdbcbf424aff078a59eb080a2c478ec242116e7bd89a246b96fe7" exitCode=0 Oct 09 13:48:44 crc kubenswrapper[4762]: I1009 13:48:44.883151 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-fbmj6" event={"ID":"21e8bb8c-2024-4cc3-8887-17e4b8794601","Type":"ContainerDied","Data":"1d3806fa170fdbcbf424aff078a59eb080a2c478ec242116e7bd89a246b96fe7"} Oct 09 13:48:44 crc kubenswrapper[4762]: I1009 13:48:44.885378 4762 generic.go:334] "Generic (PLEG): container finished" podID="a34ea94b-16a9-4de9-9179-a729ef91f7af" containerID="92507a238bded5661e7b90d68dece979b10a9b1d40801f032f668531f54c7d94" exitCode=0 Oct 09 13:48:44 crc kubenswrapper[4762]: I1009 13:48:44.885701 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-cx7gg" 
event={"ID":"a34ea94b-16a9-4de9-9179-a729ef91f7af","Type":"ContainerDied","Data":"92507a238bded5661e7b90d68dece979b10a9b1d40801f032f668531f54c7d94"} Oct 09 13:48:45 crc kubenswrapper[4762]: I1009 13:48:45.002771 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 09 13:48:45 crc kubenswrapper[4762]: I1009 13:48:45.002830 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 09 13:48:45 crc kubenswrapper[4762]: I1009 13:48:45.370610 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-865f5d856f-m7gs2" Oct 09 13:48:45 crc kubenswrapper[4762]: I1009 13:48:45.379951 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Oct 09 13:48:45 crc kubenswrapper[4762]: I1009 13:48:45.428470 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6bb4fc677f-xggsj"] Oct 09 13:48:45 crc kubenswrapper[4762]: I1009 13:48:45.428710 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6bb4fc677f-xggsj" podUID="c881159f-991c-4817-b1e5-9ca70a30d8eb" containerName="dnsmasq-dns" containerID="cri-o://bb7094500d79c0b318ce5fd00a0464df84913dcb767170c8cc4e6662117a51d7" gracePeriod=10 Oct 09 13:48:45 crc kubenswrapper[4762]: I1009 13:48:45.433263 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Oct 09 13:48:45 crc kubenswrapper[4762]: I1009 13:48:45.902589 4762 generic.go:334] "Generic (PLEG): container finished" podID="c881159f-991c-4817-b1e5-9ca70a30d8eb" containerID="bb7094500d79c0b318ce5fd00a0464df84913dcb767170c8cc4e6662117a51d7" exitCode=0 Oct 09 13:48:45 crc kubenswrapper[4762]: I1009 13:48:45.902685 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bb4fc677f-xggsj" event={"ID":"c881159f-991c-4817-b1e5-9ca70a30d8eb","Type":"ContainerDied","Data":"bb7094500d79c0b318ce5fd00a0464df84913dcb767170c8cc4e6662117a51d7"} Oct 09 13:48:45 crc kubenswrapper[4762]: I1009 13:48:45.903140 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bb4fc677f-xggsj" event={"ID":"c881159f-991c-4817-b1e5-9ca70a30d8eb","Type":"ContainerDied","Data":"feb0f500cd9c914a1a5f51b20192518cfafb75af694a32c25a62f4c57afc9b8f"} Oct 09 13:48:45 crc kubenswrapper[4762]: I1009 13:48:45.903167 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="feb0f500cd9c914a1a5f51b20192518cfafb75af694a32c25a62f4c57afc9b8f" Oct 09 13:48:45 crc kubenswrapper[4762]: I1009 13:48:45.955107 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6bb4fc677f-xggsj" Oct 09 13:48:45 crc kubenswrapper[4762]: I1009 13:48:45.955759 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Oct 09 13:48:46 crc kubenswrapper[4762]: I1009 13:48:46.041069 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c881159f-991c-4817-b1e5-9ca70a30d8eb-ovsdbserver-nb\") pod \"c881159f-991c-4817-b1e5-9ca70a30d8eb\" (UID: \"c881159f-991c-4817-b1e5-9ca70a30d8eb\") " Oct 09 13:48:46 crc kubenswrapper[4762]: I1009 13:48:46.041171 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8nd55\" (UniqueName: \"kubernetes.io/projected/c881159f-991c-4817-b1e5-9ca70a30d8eb-kube-api-access-8nd55\") pod \"c881159f-991c-4817-b1e5-9ca70a30d8eb\" (UID: \"c881159f-991c-4817-b1e5-9ca70a30d8eb\") " Oct 09 13:48:46 crc kubenswrapper[4762]: I1009 13:48:46.041219 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c881159f-991c-4817-b1e5-9ca70a30d8eb-dns-swift-storage-0\") pod \"c881159f-991c-4817-b1e5-9ca70a30d8eb\" (UID: \"c881159f-991c-4817-b1e5-9ca70a30d8eb\") " Oct 09 13:48:46 crc kubenswrapper[4762]: I1009 13:48:46.041378 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c881159f-991c-4817-b1e5-9ca70a30d8eb-dns-svc\") pod \"c881159f-991c-4817-b1e5-9ca70a30d8eb\" (UID: \"c881159f-991c-4817-b1e5-9ca70a30d8eb\") " Oct 09 13:48:46 crc kubenswrapper[4762]: I1009 13:48:46.041444 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c881159f-991c-4817-b1e5-9ca70a30d8eb-ovsdbserver-sb\") pod \"c881159f-991c-4817-b1e5-9ca70a30d8eb\" (UID: \"c881159f-991c-4817-b1e5-9ca70a30d8eb\") " Oct 09 13:48:46 crc kubenswrapper[4762]: I1009 13:48:46.041488 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c881159f-991c-4817-b1e5-9ca70a30d8eb-config\") pod \"c881159f-991c-4817-b1e5-9ca70a30d8eb\" (UID: \"c881159f-991c-4817-b1e5-9ca70a30d8eb\") " Oct 09 13:48:46 crc kubenswrapper[4762]: I1009 13:48:46.046224 4762 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="b2efe15b-237f-4bb4-a563-a2a488cf3ddb" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.182:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 09 13:48:46 crc kubenswrapper[4762]: I1009 13:48:46.055017 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c881159f-991c-4817-b1e5-9ca70a30d8eb-kube-api-access-8nd55" (OuterVolumeSpecName: "kube-api-access-8nd55") pod "c881159f-991c-4817-b1e5-9ca70a30d8eb" (UID: "c881159f-991c-4817-b1e5-9ca70a30d8eb"). InnerVolumeSpecName "kube-api-access-8nd55". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:48:46 crc kubenswrapper[4762]: I1009 13:48:46.089310 4762 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="b2efe15b-237f-4bb4-a563-a2a488cf3ddb" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.182:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 09 13:48:46 crc kubenswrapper[4762]: I1009 13:48:46.125073 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c881159f-991c-4817-b1e5-9ca70a30d8eb-config" (OuterVolumeSpecName: "config") pod "c881159f-991c-4817-b1e5-9ca70a30d8eb" (UID: "c881159f-991c-4817-b1e5-9ca70a30d8eb"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:48:46 crc kubenswrapper[4762]: I1009 13:48:46.154209 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c881159f-991c-4817-b1e5-9ca70a30d8eb-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "c881159f-991c-4817-b1e5-9ca70a30d8eb" (UID: "c881159f-991c-4817-b1e5-9ca70a30d8eb"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:48:46 crc kubenswrapper[4762]: I1009 13:48:46.154258 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8nd55\" (UniqueName: \"kubernetes.io/projected/c881159f-991c-4817-b1e5-9ca70a30d8eb-kube-api-access-8nd55\") on node \"crc\" DevicePath \"\"" Oct 09 13:48:46 crc kubenswrapper[4762]: I1009 13:48:46.154282 4762 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c881159f-991c-4817-b1e5-9ca70a30d8eb-config\") on node \"crc\" DevicePath \"\"" Oct 09 13:48:46 crc kubenswrapper[4762]: I1009 13:48:46.164586 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c881159f-991c-4817-b1e5-9ca70a30d8eb-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "c881159f-991c-4817-b1e5-9ca70a30d8eb" (UID: "c881159f-991c-4817-b1e5-9ca70a30d8eb"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:48:46 crc kubenswrapper[4762]: I1009 13:48:46.170410 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c881159f-991c-4817-b1e5-9ca70a30d8eb-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "c881159f-991c-4817-b1e5-9ca70a30d8eb" (UID: "c881159f-991c-4817-b1e5-9ca70a30d8eb"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:48:46 crc kubenswrapper[4762]: I1009 13:48:46.188885 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c881159f-991c-4817-b1e5-9ca70a30d8eb-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "c881159f-991c-4817-b1e5-9ca70a30d8eb" (UID: "c881159f-991c-4817-b1e5-9ca70a30d8eb"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:48:46 crc kubenswrapper[4762]: I1009 13:48:46.255835 4762 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c881159f-991c-4817-b1e5-9ca70a30d8eb-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 09 13:48:46 crc kubenswrapper[4762]: I1009 13:48:46.255878 4762 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c881159f-991c-4817-b1e5-9ca70a30d8eb-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 09 13:48:46 crc kubenswrapper[4762]: I1009 13:48:46.255894 4762 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c881159f-991c-4817-b1e5-9ca70a30d8eb-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 09 13:48:46 crc kubenswrapper[4762]: I1009 13:48:46.255904 4762 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c881159f-991c-4817-b1e5-9ca70a30d8eb-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 09 13:48:46 crc kubenswrapper[4762]: I1009 13:48:46.363950 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-cx7gg" Oct 09 13:48:46 crc kubenswrapper[4762]: I1009 13:48:46.459810 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a34ea94b-16a9-4de9-9179-a729ef91f7af-combined-ca-bundle\") pod \"a34ea94b-16a9-4de9-9179-a729ef91f7af\" (UID: \"a34ea94b-16a9-4de9-9179-a729ef91f7af\") " Oct 09 13:48:46 crc kubenswrapper[4762]: I1009 13:48:46.460185 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a34ea94b-16a9-4de9-9179-a729ef91f7af-scripts\") pod \"a34ea94b-16a9-4de9-9179-a729ef91f7af\" (UID: \"a34ea94b-16a9-4de9-9179-a729ef91f7af\") " Oct 09 13:48:46 crc kubenswrapper[4762]: I1009 13:48:46.460257 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a34ea94b-16a9-4de9-9179-a729ef91f7af-config-data\") pod \"a34ea94b-16a9-4de9-9179-a729ef91f7af\" (UID: \"a34ea94b-16a9-4de9-9179-a729ef91f7af\") " Oct 09 13:48:46 crc kubenswrapper[4762]: I1009 13:48:46.460350 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-czfkp\" (UniqueName: \"kubernetes.io/projected/a34ea94b-16a9-4de9-9179-a729ef91f7af-kube-api-access-czfkp\") pod \"a34ea94b-16a9-4de9-9179-a729ef91f7af\" (UID: \"a34ea94b-16a9-4de9-9179-a729ef91f7af\") " Oct 09 13:48:46 crc kubenswrapper[4762]: I1009 13:48:46.463297 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a34ea94b-16a9-4de9-9179-a729ef91f7af-scripts" (OuterVolumeSpecName: "scripts") pod "a34ea94b-16a9-4de9-9179-a729ef91f7af" (UID: "a34ea94b-16a9-4de9-9179-a729ef91f7af"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:48:46 crc kubenswrapper[4762]: I1009 13:48:46.466690 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a34ea94b-16a9-4de9-9179-a729ef91f7af-kube-api-access-czfkp" (OuterVolumeSpecName: "kube-api-access-czfkp") pod "a34ea94b-16a9-4de9-9179-a729ef91f7af" (UID: "a34ea94b-16a9-4de9-9179-a729ef91f7af"). InnerVolumeSpecName "kube-api-access-czfkp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:48:46 crc kubenswrapper[4762]: I1009 13:48:46.466770 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-fbmj6" Oct 09 13:48:46 crc kubenswrapper[4762]: I1009 13:48:46.491900 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a34ea94b-16a9-4de9-9179-a729ef91f7af-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a34ea94b-16a9-4de9-9179-a729ef91f7af" (UID: "a34ea94b-16a9-4de9-9179-a729ef91f7af"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:48:46 crc kubenswrapper[4762]: I1009 13:48:46.508023 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a34ea94b-16a9-4de9-9179-a729ef91f7af-config-data" (OuterVolumeSpecName: "config-data") pod "a34ea94b-16a9-4de9-9179-a729ef91f7af" (UID: "a34ea94b-16a9-4de9-9179-a729ef91f7af"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:48:46 crc kubenswrapper[4762]: I1009 13:48:46.561894 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/21e8bb8c-2024-4cc3-8887-17e4b8794601-config-data\") pod \"21e8bb8c-2024-4cc3-8887-17e4b8794601\" (UID: \"21e8bb8c-2024-4cc3-8887-17e4b8794601\") " Oct 09 13:48:46 crc kubenswrapper[4762]: I1009 13:48:46.562079 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21e8bb8c-2024-4cc3-8887-17e4b8794601-combined-ca-bundle\") pod \"21e8bb8c-2024-4cc3-8887-17e4b8794601\" (UID: \"21e8bb8c-2024-4cc3-8887-17e4b8794601\") " Oct 09 13:48:46 crc kubenswrapper[4762]: I1009 13:48:46.562115 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/21e8bb8c-2024-4cc3-8887-17e4b8794601-scripts\") pod \"21e8bb8c-2024-4cc3-8887-17e4b8794601\" (UID: \"21e8bb8c-2024-4cc3-8887-17e4b8794601\") " Oct 09 13:48:46 crc kubenswrapper[4762]: I1009 13:48:46.562208 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wkcxd\" (UniqueName: \"kubernetes.io/projected/21e8bb8c-2024-4cc3-8887-17e4b8794601-kube-api-access-wkcxd\") pod \"21e8bb8c-2024-4cc3-8887-17e4b8794601\" (UID: \"21e8bb8c-2024-4cc3-8887-17e4b8794601\") " Oct 09 13:48:46 crc kubenswrapper[4762]: I1009 13:48:46.562775 4762 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a34ea94b-16a9-4de9-9179-a729ef91f7af-scripts\") on node \"crc\" DevicePath \"\"" Oct 09 13:48:46 crc kubenswrapper[4762]: I1009 13:48:46.562803 4762 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a34ea94b-16a9-4de9-9179-a729ef91f7af-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 13:48:46 crc kubenswrapper[4762]: I1009 13:48:46.562818 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-czfkp\" (UniqueName: \"kubernetes.io/projected/a34ea94b-16a9-4de9-9179-a729ef91f7af-kube-api-access-czfkp\") on node \"crc\" DevicePath \"\"" Oct 09 13:48:46 crc kubenswrapper[4762]: I1009 13:48:46.562831 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/a34ea94b-16a9-4de9-9179-a729ef91f7af-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 13:48:46 crc kubenswrapper[4762]: I1009 13:48:46.566744 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/21e8bb8c-2024-4cc3-8887-17e4b8794601-kube-api-access-wkcxd" (OuterVolumeSpecName: "kube-api-access-wkcxd") pod "21e8bb8c-2024-4cc3-8887-17e4b8794601" (UID: "21e8bb8c-2024-4cc3-8887-17e4b8794601"). InnerVolumeSpecName "kube-api-access-wkcxd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:48:46 crc kubenswrapper[4762]: I1009 13:48:46.567813 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/21e8bb8c-2024-4cc3-8887-17e4b8794601-scripts" (OuterVolumeSpecName: "scripts") pod "21e8bb8c-2024-4cc3-8887-17e4b8794601" (UID: "21e8bb8c-2024-4cc3-8887-17e4b8794601"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:48:46 crc kubenswrapper[4762]: I1009 13:48:46.593736 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/21e8bb8c-2024-4cc3-8887-17e4b8794601-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "21e8bb8c-2024-4cc3-8887-17e4b8794601" (UID: "21e8bb8c-2024-4cc3-8887-17e4b8794601"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:48:46 crc kubenswrapper[4762]: I1009 13:48:46.606166 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/21e8bb8c-2024-4cc3-8887-17e4b8794601-config-data" (OuterVolumeSpecName: "config-data") pod "21e8bb8c-2024-4cc3-8887-17e4b8794601" (UID: "21e8bb8c-2024-4cc3-8887-17e4b8794601"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:48:46 crc kubenswrapper[4762]: I1009 13:48:46.664694 4762 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/21e8bb8c-2024-4cc3-8887-17e4b8794601-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 13:48:46 crc kubenswrapper[4762]: I1009 13:48:46.664739 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21e8bb8c-2024-4cc3-8887-17e4b8794601-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 13:48:46 crc kubenswrapper[4762]: I1009 13:48:46.664755 4762 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/21e8bb8c-2024-4cc3-8887-17e4b8794601-scripts\") on node \"crc\" DevicePath \"\"" Oct 09 13:48:46 crc kubenswrapper[4762]: I1009 13:48:46.664766 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wkcxd\" (UniqueName: \"kubernetes.io/projected/21e8bb8c-2024-4cc3-8887-17e4b8794601-kube-api-access-wkcxd\") on node \"crc\" DevicePath \"\"" Oct 09 13:48:46 crc kubenswrapper[4762]: I1009 13:48:46.917856 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-cx7gg" event={"ID":"a34ea94b-16a9-4de9-9179-a729ef91f7af","Type":"ContainerDied","Data":"1bf070f962c09f175c6374a523ae959f9dbb1fa3cb26fce3014605704172c7e4"} Oct 09 13:48:46 crc kubenswrapper[4762]: I1009 13:48:46.917901 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1bf070f962c09f175c6374a523ae959f9dbb1fa3cb26fce3014605704172c7e4" Oct 09 13:48:46 crc kubenswrapper[4762]: I1009 13:48:46.917965 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-cx7gg" Oct 09 13:48:46 crc kubenswrapper[4762]: I1009 13:48:46.920839 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6bb4fc677f-xggsj" Oct 09 13:48:46 crc kubenswrapper[4762]: I1009 13:48:46.921256 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-fbmj6" Oct 09 13:48:46 crc kubenswrapper[4762]: I1009 13:48:46.921771 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-fbmj6" event={"ID":"21e8bb8c-2024-4cc3-8887-17e4b8794601","Type":"ContainerDied","Data":"dca1e5fe7e2ae21e16b7a47c5a264c748cf9140a62b862df1012f2869bd7aa3a"} Oct 09 13:48:46 crc kubenswrapper[4762]: I1009 13:48:46.921829 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dca1e5fe7e2ae21e16b7a47c5a264c748cf9140a62b862df1012f2869bd7aa3a" Oct 09 13:48:47 crc kubenswrapper[4762]: I1009 13:48:47.016076 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6bb4fc677f-xggsj"] Oct 09 13:48:47 crc kubenswrapper[4762]: I1009 13:48:47.026739 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6bb4fc677f-xggsj"] Oct 09 13:48:47 crc kubenswrapper[4762]: I1009 13:48:47.035305 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Oct 09 13:48:47 crc kubenswrapper[4762]: E1009 13:48:47.035851 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c881159f-991c-4817-b1e5-9ca70a30d8eb" containerName="init" Oct 09 13:48:47 crc kubenswrapper[4762]: I1009 13:48:47.035866 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="c881159f-991c-4817-b1e5-9ca70a30d8eb" containerName="init" Oct 09 13:48:47 crc kubenswrapper[4762]: E1009 13:48:47.035888 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="21e8bb8c-2024-4cc3-8887-17e4b8794601" containerName="nova-manage" Oct 09 13:48:47 crc kubenswrapper[4762]: I1009 13:48:47.035896 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="21e8bb8c-2024-4cc3-8887-17e4b8794601" containerName="nova-manage" Oct 09 13:48:47 crc kubenswrapper[4762]: E1009 13:48:47.035907 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a34ea94b-16a9-4de9-9179-a729ef91f7af" containerName="nova-cell1-conductor-db-sync" Oct 09 13:48:47 crc kubenswrapper[4762]: I1009 13:48:47.035913 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="a34ea94b-16a9-4de9-9179-a729ef91f7af" containerName="nova-cell1-conductor-db-sync" Oct 09 13:48:47 crc kubenswrapper[4762]: E1009 13:48:47.035930 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c881159f-991c-4817-b1e5-9ca70a30d8eb" containerName="dnsmasq-dns" Oct 09 13:48:47 crc kubenswrapper[4762]: I1009 13:48:47.035936 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="c881159f-991c-4817-b1e5-9ca70a30d8eb" containerName="dnsmasq-dns" Oct 09 13:48:47 crc kubenswrapper[4762]: I1009 13:48:47.036169 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="a34ea94b-16a9-4de9-9179-a729ef91f7af" containerName="nova-cell1-conductor-db-sync" Oct 09 13:48:47 crc kubenswrapper[4762]: I1009 13:48:47.036182 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="21e8bb8c-2024-4cc3-8887-17e4b8794601" containerName="nova-manage" Oct 09 13:48:47 crc kubenswrapper[4762]: I1009 13:48:47.036206 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="c881159f-991c-4817-b1e5-9ca70a30d8eb" containerName="dnsmasq-dns" Oct 09 13:48:47 crc kubenswrapper[4762]: I1009 13:48:47.036977 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Oct 09 13:48:47 crc kubenswrapper[4762]: I1009 13:48:47.040874 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Oct 09 13:48:47 crc kubenswrapper[4762]: I1009 13:48:47.042493 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Oct 09 13:48:47 crc kubenswrapper[4762]: I1009 13:48:47.076067 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2b85dbb0-642b-4f4e-a616-7904624b2e5a-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"2b85dbb0-642b-4f4e-a616-7904624b2e5a\") " pod="openstack/nova-cell1-conductor-0" Oct 09 13:48:47 crc kubenswrapper[4762]: I1009 13:48:47.076308 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-674km\" (UniqueName: \"kubernetes.io/projected/2b85dbb0-642b-4f4e-a616-7904624b2e5a-kube-api-access-674km\") pod \"nova-cell1-conductor-0\" (UID: \"2b85dbb0-642b-4f4e-a616-7904624b2e5a\") " pod="openstack/nova-cell1-conductor-0" Oct 09 13:48:47 crc kubenswrapper[4762]: I1009 13:48:47.076347 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2b85dbb0-642b-4f4e-a616-7904624b2e5a-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"2b85dbb0-642b-4f4e-a616-7904624b2e5a\") " pod="openstack/nova-cell1-conductor-0" Oct 09 13:48:47 crc kubenswrapper[4762]: I1009 13:48:47.096987 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 09 13:48:47 crc kubenswrapper[4762]: I1009 13:48:47.097291 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="b2efe15b-237f-4bb4-a563-a2a488cf3ddb" containerName="nova-api-log" containerID="cri-o://ad2b4b149b35475bb1f29b764ad78f0b39597a13c5a6f4b3a965d4c95cede3a2" gracePeriod=30 Oct 09 13:48:47 crc kubenswrapper[4762]: I1009 13:48:47.097371 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="b2efe15b-237f-4bb4-a563-a2a488cf3ddb" containerName="nova-api-api" containerID="cri-o://da7e27c4a037e15c1fa6804a3a96ea9f69d178f01e75490562436ba0579c636e" gracePeriod=30 Oct 09 13:48:47 crc kubenswrapper[4762]: I1009 13:48:47.109467 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Oct 09 13:48:47 crc kubenswrapper[4762]: I1009 13:48:47.146956 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 09 13:48:47 crc kubenswrapper[4762]: I1009 13:48:47.147512 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="3e72fcaf-6d45-40c1-80c7-c285c13334dc" containerName="nova-metadata-log" containerID="cri-o://29f0968905c1d644913d70e0c63551c36b8563785c2823e83346d0ec54790c36" gracePeriod=30 Oct 09 13:48:47 crc kubenswrapper[4762]: I1009 13:48:47.147618 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="3e72fcaf-6d45-40c1-80c7-c285c13334dc" containerName="nova-metadata-metadata" containerID="cri-o://7992ba3e60112ac0fa87bc532118f893cef5673ec2e01f3617e682f28e948756" gracePeriod=30 Oct 09 13:48:47 crc kubenswrapper[4762]: I1009 13:48:47.177694 4762 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-674km\" (UniqueName: \"kubernetes.io/projected/2b85dbb0-642b-4f4e-a616-7904624b2e5a-kube-api-access-674km\") pod \"nova-cell1-conductor-0\" (UID: \"2b85dbb0-642b-4f4e-a616-7904624b2e5a\") " pod="openstack/nova-cell1-conductor-0" Oct 09 13:48:47 crc kubenswrapper[4762]: I1009 13:48:47.177756 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2b85dbb0-642b-4f4e-a616-7904624b2e5a-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"2b85dbb0-642b-4f4e-a616-7904624b2e5a\") " pod="openstack/nova-cell1-conductor-0" Oct 09 13:48:47 crc kubenswrapper[4762]: I1009 13:48:47.177831 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2b85dbb0-642b-4f4e-a616-7904624b2e5a-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"2b85dbb0-642b-4f4e-a616-7904624b2e5a\") " pod="openstack/nova-cell1-conductor-0" Oct 09 13:48:47 crc kubenswrapper[4762]: I1009 13:48:47.183396 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2b85dbb0-642b-4f4e-a616-7904624b2e5a-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"2b85dbb0-642b-4f4e-a616-7904624b2e5a\") " pod="openstack/nova-cell1-conductor-0" Oct 09 13:48:47 crc kubenswrapper[4762]: I1009 13:48:47.186076 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2b85dbb0-642b-4f4e-a616-7904624b2e5a-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"2b85dbb0-642b-4f4e-a616-7904624b2e5a\") " pod="openstack/nova-cell1-conductor-0" Oct 09 13:48:47 crc kubenswrapper[4762]: I1009 13:48:47.193959 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-674km\" (UniqueName: \"kubernetes.io/projected/2b85dbb0-642b-4f4e-a616-7904624b2e5a-kube-api-access-674km\") pod \"nova-cell1-conductor-0\" (UID: \"2b85dbb0-642b-4f4e-a616-7904624b2e5a\") " pod="openstack/nova-cell1-conductor-0" Oct 09 13:48:47 crc kubenswrapper[4762]: I1009 13:48:47.260054 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 09 13:48:47 crc kubenswrapper[4762]: I1009 13:48:47.260109 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 09 13:48:47 crc kubenswrapper[4762]: I1009 13:48:47.371192 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Oct 09 13:48:47 crc kubenswrapper[4762]: I1009 13:48:47.700790 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Oct 09 13:48:47 crc kubenswrapper[4762]: W1009 13:48:47.712522 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2b85dbb0_642b_4f4e_a616_7904624b2e5a.slice/crio-5ca63dcde2011037f2eeb735027aadd6310220f41466c6f1d06c51edad84d00f WatchSource:0}: Error finding container 5ca63dcde2011037f2eeb735027aadd6310220f41466c6f1d06c51edad84d00f: Status 404 returned error can't find the container with id 5ca63dcde2011037f2eeb735027aadd6310220f41466c6f1d06c51edad84d00f Oct 09 13:48:47 crc kubenswrapper[4762]: I1009 13:48:47.823194 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 09 13:48:47 crc kubenswrapper[4762]: I1009 13:48:47.892480 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3e72fcaf-6d45-40c1-80c7-c285c13334dc-logs\") pod \"3e72fcaf-6d45-40c1-80c7-c285c13334dc\" (UID: \"3e72fcaf-6d45-40c1-80c7-c285c13334dc\") " Oct 09 13:48:47 crc kubenswrapper[4762]: I1009 13:48:47.892596 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bc9rz\" (UniqueName: \"kubernetes.io/projected/3e72fcaf-6d45-40c1-80c7-c285c13334dc-kube-api-access-bc9rz\") pod \"3e72fcaf-6d45-40c1-80c7-c285c13334dc\" (UID: \"3e72fcaf-6d45-40c1-80c7-c285c13334dc\") " Oct 09 13:48:47 crc kubenswrapper[4762]: I1009 13:48:47.892673 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e72fcaf-6d45-40c1-80c7-c285c13334dc-combined-ca-bundle\") pod \"3e72fcaf-6d45-40c1-80c7-c285c13334dc\" (UID: \"3e72fcaf-6d45-40c1-80c7-c285c13334dc\") " Oct 09 13:48:47 crc kubenswrapper[4762]: I1009 13:48:47.892719 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/3e72fcaf-6d45-40c1-80c7-c285c13334dc-nova-metadata-tls-certs\") pod \"3e72fcaf-6d45-40c1-80c7-c285c13334dc\" (UID: \"3e72fcaf-6d45-40c1-80c7-c285c13334dc\") " Oct 09 13:48:47 crc kubenswrapper[4762]: I1009 13:48:47.892755 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3e72fcaf-6d45-40c1-80c7-c285c13334dc-config-data\") pod \"3e72fcaf-6d45-40c1-80c7-c285c13334dc\" (UID: \"3e72fcaf-6d45-40c1-80c7-c285c13334dc\") " Oct 09 13:48:47 crc kubenswrapper[4762]: I1009 13:48:47.893251 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3e72fcaf-6d45-40c1-80c7-c285c13334dc-logs" (OuterVolumeSpecName: "logs") pod "3e72fcaf-6d45-40c1-80c7-c285c13334dc" (UID: "3e72fcaf-6d45-40c1-80c7-c285c13334dc"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 13:48:47 crc kubenswrapper[4762]: I1009 13:48:47.899321 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3e72fcaf-6d45-40c1-80c7-c285c13334dc-kube-api-access-bc9rz" (OuterVolumeSpecName: "kube-api-access-bc9rz") pod "3e72fcaf-6d45-40c1-80c7-c285c13334dc" (UID: "3e72fcaf-6d45-40c1-80c7-c285c13334dc"). InnerVolumeSpecName "kube-api-access-bc9rz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:48:47 crc kubenswrapper[4762]: I1009 13:48:47.931735 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3e72fcaf-6d45-40c1-80c7-c285c13334dc-config-data" (OuterVolumeSpecName: "config-data") pod "3e72fcaf-6d45-40c1-80c7-c285c13334dc" (UID: "3e72fcaf-6d45-40c1-80c7-c285c13334dc"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:48:47 crc kubenswrapper[4762]: I1009 13:48:47.936000 4762 generic.go:334] "Generic (PLEG): container finished" podID="3e72fcaf-6d45-40c1-80c7-c285c13334dc" containerID="7992ba3e60112ac0fa87bc532118f893cef5673ec2e01f3617e682f28e948756" exitCode=0 Oct 09 13:48:47 crc kubenswrapper[4762]: I1009 13:48:47.936042 4762 generic.go:334] "Generic (PLEG): container finished" podID="3e72fcaf-6d45-40c1-80c7-c285c13334dc" containerID="29f0968905c1d644913d70e0c63551c36b8563785c2823e83346d0ec54790c36" exitCode=143 Oct 09 13:48:47 crc kubenswrapper[4762]: I1009 13:48:47.936115 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"3e72fcaf-6d45-40c1-80c7-c285c13334dc","Type":"ContainerDied","Data":"7992ba3e60112ac0fa87bc532118f893cef5673ec2e01f3617e682f28e948756"} Oct 09 13:48:47 crc kubenswrapper[4762]: I1009 13:48:47.936143 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"3e72fcaf-6d45-40c1-80c7-c285c13334dc","Type":"ContainerDied","Data":"29f0968905c1d644913d70e0c63551c36b8563785c2823e83346d0ec54790c36"} Oct 09 13:48:47 crc kubenswrapper[4762]: I1009 13:48:47.936163 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"3e72fcaf-6d45-40c1-80c7-c285c13334dc","Type":"ContainerDied","Data":"f41111c17661dc4b5ab1e4b92561b116f62a4a99eae06e1f9b36d8610d63d9ee"} Oct 09 13:48:47 crc kubenswrapper[4762]: I1009 13:48:47.936178 4762 scope.go:117] "RemoveContainer" containerID="7992ba3e60112ac0fa87bc532118f893cef5673ec2e01f3617e682f28e948756" Oct 09 13:48:47 crc kubenswrapper[4762]: I1009 13:48:47.936290 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 09 13:48:47 crc kubenswrapper[4762]: I1009 13:48:47.942362 4762 generic.go:334] "Generic (PLEG): container finished" podID="b2efe15b-237f-4bb4-a563-a2a488cf3ddb" containerID="ad2b4b149b35475bb1f29b764ad78f0b39597a13c5a6f4b3a965d4c95cede3a2" exitCode=143 Oct 09 13:48:47 crc kubenswrapper[4762]: I1009 13:48:47.942492 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b2efe15b-237f-4bb4-a563-a2a488cf3ddb","Type":"ContainerDied","Data":"ad2b4b149b35475bb1f29b764ad78f0b39597a13c5a6f4b3a965d4c95cede3a2"} Oct 09 13:48:47 crc kubenswrapper[4762]: I1009 13:48:47.946911 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3e72fcaf-6d45-40c1-80c7-c285c13334dc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3e72fcaf-6d45-40c1-80c7-c285c13334dc" (UID: "3e72fcaf-6d45-40c1-80c7-c285c13334dc"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:48:47 crc kubenswrapper[4762]: I1009 13:48:47.948231 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"2b85dbb0-642b-4f4e-a616-7904624b2e5a","Type":"ContainerStarted","Data":"5ca63dcde2011037f2eeb735027aadd6310220f41466c6f1d06c51edad84d00f"} Oct 09 13:48:47 crc kubenswrapper[4762]: I1009 13:48:47.948271 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="9984c147-5425-4b69-ace5-52d351f46933" containerName="nova-scheduler-scheduler" containerID="cri-o://a75686bc8f9e45a3d2116d09b6a9000a3597a0e4c26b1aaa15cb3a1f81b44fa0" gracePeriod=30 Oct 09 13:48:47 crc kubenswrapper[4762]: I1009 13:48:47.949039 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Oct 09 13:48:47 crc kubenswrapper[4762]: I1009 13:48:47.980939 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3e72fcaf-6d45-40c1-80c7-c285c13334dc-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "3e72fcaf-6d45-40c1-80c7-c285c13334dc" (UID: "3e72fcaf-6d45-40c1-80c7-c285c13334dc"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:48:47 crc kubenswrapper[4762]: I1009 13:48:47.983032 4762 scope.go:117] "RemoveContainer" containerID="29f0968905c1d644913d70e0c63551c36b8563785c2823e83346d0ec54790c36" Oct 09 13:48:47 crc kubenswrapper[4762]: I1009 13:48:47.989176 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=1.9891512059999998 podStartE2EDuration="1.989151206s" podCreationTimestamp="2025-10-09 13:48:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:48:47.967838141 +0000 UTC m=+1403.741629190" watchObservedRunningTime="2025-10-09 13:48:47.989151206 +0000 UTC m=+1403.762942255" Oct 09 13:48:47 crc kubenswrapper[4762]: I1009 13:48:47.994993 4762 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3e72fcaf-6d45-40c1-80c7-c285c13334dc-logs\") on node \"crc\" DevicePath \"\"" Oct 09 13:48:47 crc kubenswrapper[4762]: I1009 13:48:47.995020 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bc9rz\" (UniqueName: \"kubernetes.io/projected/3e72fcaf-6d45-40c1-80c7-c285c13334dc-kube-api-access-bc9rz\") on node \"crc\" DevicePath \"\"" Oct 09 13:48:47 crc kubenswrapper[4762]: I1009 13:48:47.995031 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e72fcaf-6d45-40c1-80c7-c285c13334dc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 13:48:47 crc kubenswrapper[4762]: I1009 13:48:47.995041 4762 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/3e72fcaf-6d45-40c1-80c7-c285c13334dc-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 09 13:48:47 crc kubenswrapper[4762]: I1009 13:48:47.995051 4762 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3e72fcaf-6d45-40c1-80c7-c285c13334dc-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 13:48:48 crc kubenswrapper[4762]: I1009 13:48:48.007909 4762 scope.go:117] "RemoveContainer" 
containerID="7992ba3e60112ac0fa87bc532118f893cef5673ec2e01f3617e682f28e948756" Oct 09 13:48:48 crc kubenswrapper[4762]: E1009 13:48:48.008392 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7992ba3e60112ac0fa87bc532118f893cef5673ec2e01f3617e682f28e948756\": container with ID starting with 7992ba3e60112ac0fa87bc532118f893cef5673ec2e01f3617e682f28e948756 not found: ID does not exist" containerID="7992ba3e60112ac0fa87bc532118f893cef5673ec2e01f3617e682f28e948756" Oct 09 13:48:48 crc kubenswrapper[4762]: I1009 13:48:48.008434 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7992ba3e60112ac0fa87bc532118f893cef5673ec2e01f3617e682f28e948756"} err="failed to get container status \"7992ba3e60112ac0fa87bc532118f893cef5673ec2e01f3617e682f28e948756\": rpc error: code = NotFound desc = could not find container \"7992ba3e60112ac0fa87bc532118f893cef5673ec2e01f3617e682f28e948756\": container with ID starting with 7992ba3e60112ac0fa87bc532118f893cef5673ec2e01f3617e682f28e948756 not found: ID does not exist" Oct 09 13:48:48 crc kubenswrapper[4762]: I1009 13:48:48.008461 4762 scope.go:117] "RemoveContainer" containerID="29f0968905c1d644913d70e0c63551c36b8563785c2823e83346d0ec54790c36" Oct 09 13:48:48 crc kubenswrapper[4762]: E1009 13:48:48.008738 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"29f0968905c1d644913d70e0c63551c36b8563785c2823e83346d0ec54790c36\": container with ID starting with 29f0968905c1d644913d70e0c63551c36b8563785c2823e83346d0ec54790c36 not found: ID does not exist" containerID="29f0968905c1d644913d70e0c63551c36b8563785c2823e83346d0ec54790c36" Oct 09 13:48:48 crc kubenswrapper[4762]: I1009 13:48:48.008758 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"29f0968905c1d644913d70e0c63551c36b8563785c2823e83346d0ec54790c36"} err="failed to get container status \"29f0968905c1d644913d70e0c63551c36b8563785c2823e83346d0ec54790c36\": rpc error: code = NotFound desc = could not find container \"29f0968905c1d644913d70e0c63551c36b8563785c2823e83346d0ec54790c36\": container with ID starting with 29f0968905c1d644913d70e0c63551c36b8563785c2823e83346d0ec54790c36 not found: ID does not exist" Oct 09 13:48:48 crc kubenswrapper[4762]: I1009 13:48:48.008770 4762 scope.go:117] "RemoveContainer" containerID="7992ba3e60112ac0fa87bc532118f893cef5673ec2e01f3617e682f28e948756" Oct 09 13:48:48 crc kubenswrapper[4762]: I1009 13:48:48.009103 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7992ba3e60112ac0fa87bc532118f893cef5673ec2e01f3617e682f28e948756"} err="failed to get container status \"7992ba3e60112ac0fa87bc532118f893cef5673ec2e01f3617e682f28e948756\": rpc error: code = NotFound desc = could not find container \"7992ba3e60112ac0fa87bc532118f893cef5673ec2e01f3617e682f28e948756\": container with ID starting with 7992ba3e60112ac0fa87bc532118f893cef5673ec2e01f3617e682f28e948756 not found: ID does not exist" Oct 09 13:48:48 crc kubenswrapper[4762]: I1009 13:48:48.009161 4762 scope.go:117] "RemoveContainer" containerID="29f0968905c1d644913d70e0c63551c36b8563785c2823e83346d0ec54790c36" Oct 09 13:48:48 crc kubenswrapper[4762]: I1009 13:48:48.009690 4762 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"29f0968905c1d644913d70e0c63551c36b8563785c2823e83346d0ec54790c36"} err="failed to get container status \"29f0968905c1d644913d70e0c63551c36b8563785c2823e83346d0ec54790c36\": rpc error: code = NotFound desc = could not find container \"29f0968905c1d644913d70e0c63551c36b8563785c2823e83346d0ec54790c36\": container with ID starting with 29f0968905c1d644913d70e0c63551c36b8563785c2823e83346d0ec54790c36 not found: ID does not exist" Oct 09 13:48:48 crc kubenswrapper[4762]: I1009 13:48:48.272967 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 09 13:48:48 crc kubenswrapper[4762]: I1009 13:48:48.291310 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Oct 09 13:48:48 crc kubenswrapper[4762]: I1009 13:48:48.317731 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Oct 09 13:48:48 crc kubenswrapper[4762]: E1009 13:48:48.318167 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e72fcaf-6d45-40c1-80c7-c285c13334dc" containerName="nova-metadata-log" Oct 09 13:48:48 crc kubenswrapper[4762]: I1009 13:48:48.318183 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e72fcaf-6d45-40c1-80c7-c285c13334dc" containerName="nova-metadata-log" Oct 09 13:48:48 crc kubenswrapper[4762]: E1009 13:48:48.318198 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e72fcaf-6d45-40c1-80c7-c285c13334dc" containerName="nova-metadata-metadata" Oct 09 13:48:48 crc kubenswrapper[4762]: I1009 13:48:48.318204 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e72fcaf-6d45-40c1-80c7-c285c13334dc" containerName="nova-metadata-metadata" Oct 09 13:48:48 crc kubenswrapper[4762]: I1009 13:48:48.318381 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="3e72fcaf-6d45-40c1-80c7-c285c13334dc" containerName="nova-metadata-log" Oct 09 13:48:48 crc kubenswrapper[4762]: I1009 13:48:48.318398 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="3e72fcaf-6d45-40c1-80c7-c285c13334dc" containerName="nova-metadata-metadata" Oct 09 13:48:48 crc kubenswrapper[4762]: I1009 13:48:48.319378 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 09 13:48:48 crc kubenswrapper[4762]: I1009 13:48:48.322721 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Oct 09 13:48:48 crc kubenswrapper[4762]: I1009 13:48:48.327751 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Oct 09 13:48:48 crc kubenswrapper[4762]: I1009 13:48:48.332251 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 09 13:48:48 crc kubenswrapper[4762]: I1009 13:48:48.407927 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fbddf4da-b50f-4ea5-9665-58211ecbeed3-config-data\") pod \"nova-metadata-0\" (UID: \"fbddf4da-b50f-4ea5-9665-58211ecbeed3\") " pod="openstack/nova-metadata-0" Oct 09 13:48:48 crc kubenswrapper[4762]: I1009 13:48:48.407974 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/fbddf4da-b50f-4ea5-9665-58211ecbeed3-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"fbddf4da-b50f-4ea5-9665-58211ecbeed3\") " pod="openstack/nova-metadata-0" Oct 09 13:48:48 crc kubenswrapper[4762]: I1009 13:48:48.408009 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fbddf4da-b50f-4ea5-9665-58211ecbeed3-logs\") pod \"nova-metadata-0\" (UID: \"fbddf4da-b50f-4ea5-9665-58211ecbeed3\") " pod="openstack/nova-metadata-0" Oct 09 13:48:48 crc kubenswrapper[4762]: I1009 13:48:48.408201 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8bkqp\" (UniqueName: \"kubernetes.io/projected/fbddf4da-b50f-4ea5-9665-58211ecbeed3-kube-api-access-8bkqp\") pod \"nova-metadata-0\" (UID: \"fbddf4da-b50f-4ea5-9665-58211ecbeed3\") " pod="openstack/nova-metadata-0" Oct 09 13:48:48 crc kubenswrapper[4762]: I1009 13:48:48.408397 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbddf4da-b50f-4ea5-9665-58211ecbeed3-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"fbddf4da-b50f-4ea5-9665-58211ecbeed3\") " pod="openstack/nova-metadata-0" Oct 09 13:48:48 crc kubenswrapper[4762]: I1009 13:48:48.509670 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fbddf4da-b50f-4ea5-9665-58211ecbeed3-config-data\") pod \"nova-metadata-0\" (UID: \"fbddf4da-b50f-4ea5-9665-58211ecbeed3\") " pod="openstack/nova-metadata-0" Oct 09 13:48:48 crc kubenswrapper[4762]: I1009 13:48:48.509858 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/fbddf4da-b50f-4ea5-9665-58211ecbeed3-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"fbddf4da-b50f-4ea5-9665-58211ecbeed3\") " pod="openstack/nova-metadata-0" Oct 09 13:48:48 crc kubenswrapper[4762]: I1009 13:48:48.509974 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fbddf4da-b50f-4ea5-9665-58211ecbeed3-logs\") pod \"nova-metadata-0\" (UID: \"fbddf4da-b50f-4ea5-9665-58211ecbeed3\") " pod="openstack/nova-metadata-0" Oct 09 
13:48:48 crc kubenswrapper[4762]: I1009 13:48:48.510159 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8bkqp\" (UniqueName: \"kubernetes.io/projected/fbddf4da-b50f-4ea5-9665-58211ecbeed3-kube-api-access-8bkqp\") pod \"nova-metadata-0\" (UID: \"fbddf4da-b50f-4ea5-9665-58211ecbeed3\") " pod="openstack/nova-metadata-0" Oct 09 13:48:48 crc kubenswrapper[4762]: I1009 13:48:48.510341 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbddf4da-b50f-4ea5-9665-58211ecbeed3-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"fbddf4da-b50f-4ea5-9665-58211ecbeed3\") " pod="openstack/nova-metadata-0" Oct 09 13:48:48 crc kubenswrapper[4762]: I1009 13:48:48.510448 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fbddf4da-b50f-4ea5-9665-58211ecbeed3-logs\") pod \"nova-metadata-0\" (UID: \"fbddf4da-b50f-4ea5-9665-58211ecbeed3\") " pod="openstack/nova-metadata-0" Oct 09 13:48:48 crc kubenswrapper[4762]: I1009 13:48:48.513833 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fbddf4da-b50f-4ea5-9665-58211ecbeed3-config-data\") pod \"nova-metadata-0\" (UID: \"fbddf4da-b50f-4ea5-9665-58211ecbeed3\") " pod="openstack/nova-metadata-0" Oct 09 13:48:48 crc kubenswrapper[4762]: I1009 13:48:48.514517 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbddf4da-b50f-4ea5-9665-58211ecbeed3-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"fbddf4da-b50f-4ea5-9665-58211ecbeed3\") " pod="openstack/nova-metadata-0" Oct 09 13:48:48 crc kubenswrapper[4762]: I1009 13:48:48.514614 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/fbddf4da-b50f-4ea5-9665-58211ecbeed3-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"fbddf4da-b50f-4ea5-9665-58211ecbeed3\") " pod="openstack/nova-metadata-0" Oct 09 13:48:48 crc kubenswrapper[4762]: I1009 13:48:48.526943 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8bkqp\" (UniqueName: \"kubernetes.io/projected/fbddf4da-b50f-4ea5-9665-58211ecbeed3-kube-api-access-8bkqp\") pod \"nova-metadata-0\" (UID: \"fbddf4da-b50f-4ea5-9665-58211ecbeed3\") " pod="openstack/nova-metadata-0" Oct 09 13:48:48 crc kubenswrapper[4762]: I1009 13:48:48.636118 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 09 13:48:48 crc kubenswrapper[4762]: I1009 13:48:48.961729 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"2b85dbb0-642b-4f4e-a616-7904624b2e5a","Type":"ContainerStarted","Data":"69b2f138f6eac1a1b6ef2395b7ac7ec1ed57835677595c9ca3b0ccd77b5d4343"} Oct 09 13:48:48 crc kubenswrapper[4762]: I1009 13:48:48.978908 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3e72fcaf-6d45-40c1-80c7-c285c13334dc" path="/var/lib/kubelet/pods/3e72fcaf-6d45-40c1-80c7-c285c13334dc/volumes" Oct 09 13:48:48 crc kubenswrapper[4762]: I1009 13:48:48.979700 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c881159f-991c-4817-b1e5-9ca70a30d8eb" path="/var/lib/kubelet/pods/c881159f-991c-4817-b1e5-9ca70a30d8eb/volumes" Oct 09 13:48:49 crc kubenswrapper[4762]: I1009 13:48:49.071360 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 09 13:48:49 crc kubenswrapper[4762]: I1009 13:48:49.973081 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"fbddf4da-b50f-4ea5-9665-58211ecbeed3","Type":"ContainerStarted","Data":"7ad2818a82234828b778f79e21e312848f34a84974fdcbcd51d66e394513b56a"} Oct 09 13:48:49 crc kubenswrapper[4762]: I1009 13:48:49.973758 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"fbddf4da-b50f-4ea5-9665-58211ecbeed3","Type":"ContainerStarted","Data":"d5e79170083593fdb808baed427cd06671b6a2833758056a5f1875436d946a46"} Oct 09 13:48:49 crc kubenswrapper[4762]: I1009 13:48:49.973773 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"fbddf4da-b50f-4ea5-9665-58211ecbeed3","Type":"ContainerStarted","Data":"33daca6339931d222d2c3256862ee48b0e4028e67aee3eb978c0a455ea9dd335"} Oct 09 13:48:49 crc kubenswrapper[4762]: I1009 13:48:49.995513 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=1.995497031 podStartE2EDuration="1.995497031s" podCreationTimestamp="2025-10-09 13:48:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:48:49.990961473 +0000 UTC m=+1405.764752532" watchObservedRunningTime="2025-10-09 13:48:49.995497031 +0000 UTC m=+1405.769288070" Oct 09 13:48:50 crc kubenswrapper[4762]: E1009 13:48:50.382004 4762 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="a75686bc8f9e45a3d2116d09b6a9000a3597a0e4c26b1aaa15cb3a1f81b44fa0" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Oct 09 13:48:50 crc kubenswrapper[4762]: E1009 13:48:50.383643 4762 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="a75686bc8f9e45a3d2116d09b6a9000a3597a0e4c26b1aaa15cb3a1f81b44fa0" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Oct 09 13:48:50 crc kubenswrapper[4762]: E1009 13:48:50.385042 4762 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" 
containerID="a75686bc8f9e45a3d2116d09b6a9000a3597a0e4c26b1aaa15cb3a1f81b44fa0" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Oct 09 13:48:50 crc kubenswrapper[4762]: E1009 13:48:50.385134 4762 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="9984c147-5425-4b69-ace5-52d351f46933" containerName="nova-scheduler-scheduler" Oct 09 13:48:51 crc kubenswrapper[4762]: I1009 13:48:51.904920 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 09 13:48:51 crc kubenswrapper[4762]: I1009 13:48:51.917032 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 09 13:48:51 crc kubenswrapper[4762]: I1009 13:48:51.939529 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Oct 09 13:48:51 crc kubenswrapper[4762]: I1009 13:48:51.978581 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b2efe15b-237f-4bb4-a563-a2a488cf3ddb-combined-ca-bundle\") pod \"b2efe15b-237f-4bb4-a563-a2a488cf3ddb\" (UID: \"b2efe15b-237f-4bb4-a563-a2a488cf3ddb\") " Oct 09 13:48:51 crc kubenswrapper[4762]: I1009 13:48:51.978729 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b2efe15b-237f-4bb4-a563-a2a488cf3ddb-config-data\") pod \"b2efe15b-237f-4bb4-a563-a2a488cf3ddb\" (UID: \"b2efe15b-237f-4bb4-a563-a2a488cf3ddb\") " Oct 09 13:48:51 crc kubenswrapper[4762]: I1009 13:48:51.978885 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wwzhf\" (UniqueName: \"kubernetes.io/projected/9984c147-5425-4b69-ace5-52d351f46933-kube-api-access-wwzhf\") pod \"9984c147-5425-4b69-ace5-52d351f46933\" (UID: \"9984c147-5425-4b69-ace5-52d351f46933\") " Oct 09 13:48:51 crc kubenswrapper[4762]: I1009 13:48:51.978973 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b2efe15b-237f-4bb4-a563-a2a488cf3ddb-logs\") pod \"b2efe15b-237f-4bb4-a563-a2a488cf3ddb\" (UID: \"b2efe15b-237f-4bb4-a563-a2a488cf3ddb\") " Oct 09 13:48:51 crc kubenswrapper[4762]: I1009 13:48:51.979043 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8ngm4\" (UniqueName: \"kubernetes.io/projected/b2efe15b-237f-4bb4-a563-a2a488cf3ddb-kube-api-access-8ngm4\") pod \"b2efe15b-237f-4bb4-a563-a2a488cf3ddb\" (UID: \"b2efe15b-237f-4bb4-a563-a2a488cf3ddb\") " Oct 09 13:48:51 crc kubenswrapper[4762]: I1009 13:48:51.979064 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9984c147-5425-4b69-ace5-52d351f46933-combined-ca-bundle\") pod \"9984c147-5425-4b69-ace5-52d351f46933\" (UID: \"9984c147-5425-4b69-ace5-52d351f46933\") " Oct 09 13:48:51 crc kubenswrapper[4762]: I1009 13:48:51.979123 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9984c147-5425-4b69-ace5-52d351f46933-config-data\") pod \"9984c147-5425-4b69-ace5-52d351f46933\" (UID: \"9984c147-5425-4b69-ace5-52d351f46933\") " Oct 09 13:48:51 crc kubenswrapper[4762]: I1009 
13:48:51.980494 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b2efe15b-237f-4bb4-a563-a2a488cf3ddb-logs" (OuterVolumeSpecName: "logs") pod "b2efe15b-237f-4bb4-a563-a2a488cf3ddb" (UID: "b2efe15b-237f-4bb4-a563-a2a488cf3ddb"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 13:48:51 crc kubenswrapper[4762]: I1009 13:48:51.985384 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b2efe15b-237f-4bb4-a563-a2a488cf3ddb-kube-api-access-8ngm4" (OuterVolumeSpecName: "kube-api-access-8ngm4") pod "b2efe15b-237f-4bb4-a563-a2a488cf3ddb" (UID: "b2efe15b-237f-4bb4-a563-a2a488cf3ddb"). InnerVolumeSpecName "kube-api-access-8ngm4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:48:51 crc kubenswrapper[4762]: I1009 13:48:51.987907 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9984c147-5425-4b69-ace5-52d351f46933-kube-api-access-wwzhf" (OuterVolumeSpecName: "kube-api-access-wwzhf") pod "9984c147-5425-4b69-ace5-52d351f46933" (UID: "9984c147-5425-4b69-ace5-52d351f46933"). InnerVolumeSpecName "kube-api-access-wwzhf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:48:52 crc kubenswrapper[4762]: I1009 13:48:52.003969 4762 generic.go:334] "Generic (PLEG): container finished" podID="9984c147-5425-4b69-ace5-52d351f46933" containerID="a75686bc8f9e45a3d2116d09b6a9000a3597a0e4c26b1aaa15cb3a1f81b44fa0" exitCode=0 Oct 09 13:48:52 crc kubenswrapper[4762]: I1009 13:48:52.004041 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"9984c147-5425-4b69-ace5-52d351f46933","Type":"ContainerDied","Data":"a75686bc8f9e45a3d2116d09b6a9000a3597a0e4c26b1aaa15cb3a1f81b44fa0"} Oct 09 13:48:52 crc kubenswrapper[4762]: I1009 13:48:52.004141 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"9984c147-5425-4b69-ace5-52d351f46933","Type":"ContainerDied","Data":"fb26cf7feba68ef50593aeb288bee79119699a34eceec7ad350920a3ad36d5ec"} Oct 09 13:48:52 crc kubenswrapper[4762]: I1009 13:48:52.004164 4762 scope.go:117] "RemoveContainer" containerID="a75686bc8f9e45a3d2116d09b6a9000a3597a0e4c26b1aaa15cb3a1f81b44fa0" Oct 09 13:48:52 crc kubenswrapper[4762]: I1009 13:48:52.004474 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 09 13:48:52 crc kubenswrapper[4762]: I1009 13:48:52.011542 4762 generic.go:334] "Generic (PLEG): container finished" podID="b2efe15b-237f-4bb4-a563-a2a488cf3ddb" containerID="da7e27c4a037e15c1fa6804a3a96ea9f69d178f01e75490562436ba0579c636e" exitCode=0 Oct 09 13:48:52 crc kubenswrapper[4762]: I1009 13:48:52.011582 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b2efe15b-237f-4bb4-a563-a2a488cf3ddb","Type":"ContainerDied","Data":"da7e27c4a037e15c1fa6804a3a96ea9f69d178f01e75490562436ba0579c636e"} Oct 09 13:48:52 crc kubenswrapper[4762]: I1009 13:48:52.011605 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b2efe15b-237f-4bb4-a563-a2a488cf3ddb","Type":"ContainerDied","Data":"ae03b386896d48372c96fea9ad585e372bec05d3106723c7e741d4f8bb4f6734"} Oct 09 13:48:52 crc kubenswrapper[4762]: I1009 13:48:52.011772 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 09 13:48:52 crc kubenswrapper[4762]: I1009 13:48:52.012900 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9984c147-5425-4b69-ace5-52d351f46933-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9984c147-5425-4b69-ace5-52d351f46933" (UID: "9984c147-5425-4b69-ace5-52d351f46933"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:48:52 crc kubenswrapper[4762]: I1009 13:48:52.019511 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b2efe15b-237f-4bb4-a563-a2a488cf3ddb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b2efe15b-237f-4bb4-a563-a2a488cf3ddb" (UID: "b2efe15b-237f-4bb4-a563-a2a488cf3ddb"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:48:52 crc kubenswrapper[4762]: I1009 13:48:52.024196 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9984c147-5425-4b69-ace5-52d351f46933-config-data" (OuterVolumeSpecName: "config-data") pod "9984c147-5425-4b69-ace5-52d351f46933" (UID: "9984c147-5425-4b69-ace5-52d351f46933"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:48:52 crc kubenswrapper[4762]: I1009 13:48:52.030896 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b2efe15b-237f-4bb4-a563-a2a488cf3ddb-config-data" (OuterVolumeSpecName: "config-data") pod "b2efe15b-237f-4bb4-a563-a2a488cf3ddb" (UID: "b2efe15b-237f-4bb4-a563-a2a488cf3ddb"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:48:52 crc kubenswrapper[4762]: I1009 13:48:52.039519 4762 scope.go:117] "RemoveContainer" containerID="a75686bc8f9e45a3d2116d09b6a9000a3597a0e4c26b1aaa15cb3a1f81b44fa0" Oct 09 13:48:52 crc kubenswrapper[4762]: E1009 13:48:52.040199 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a75686bc8f9e45a3d2116d09b6a9000a3597a0e4c26b1aaa15cb3a1f81b44fa0\": container with ID starting with a75686bc8f9e45a3d2116d09b6a9000a3597a0e4c26b1aaa15cb3a1f81b44fa0 not found: ID does not exist" containerID="a75686bc8f9e45a3d2116d09b6a9000a3597a0e4c26b1aaa15cb3a1f81b44fa0" Oct 09 13:48:52 crc kubenswrapper[4762]: I1009 13:48:52.040281 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a75686bc8f9e45a3d2116d09b6a9000a3597a0e4c26b1aaa15cb3a1f81b44fa0"} err="failed to get container status \"a75686bc8f9e45a3d2116d09b6a9000a3597a0e4c26b1aaa15cb3a1f81b44fa0\": rpc error: code = NotFound desc = could not find container \"a75686bc8f9e45a3d2116d09b6a9000a3597a0e4c26b1aaa15cb3a1f81b44fa0\": container with ID starting with a75686bc8f9e45a3d2116d09b6a9000a3597a0e4c26b1aaa15cb3a1f81b44fa0 not found: ID does not exist" Oct 09 13:48:52 crc kubenswrapper[4762]: I1009 13:48:52.040330 4762 scope.go:117] "RemoveContainer" containerID="da7e27c4a037e15c1fa6804a3a96ea9f69d178f01e75490562436ba0579c636e" Oct 09 13:48:52 crc kubenswrapper[4762]: I1009 13:48:52.062328 4762 scope.go:117] "RemoveContainer" containerID="ad2b4b149b35475bb1f29b764ad78f0b39597a13c5a6f4b3a965d4c95cede3a2" Oct 09 13:48:52 crc kubenswrapper[4762]: I1009 13:48:52.081144 4762 scope.go:117] "RemoveContainer" 
containerID="da7e27c4a037e15c1fa6804a3a96ea9f69d178f01e75490562436ba0579c636e" Oct 09 13:48:52 crc kubenswrapper[4762]: E1009 13:48:52.083261 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"da7e27c4a037e15c1fa6804a3a96ea9f69d178f01e75490562436ba0579c636e\": container with ID starting with da7e27c4a037e15c1fa6804a3a96ea9f69d178f01e75490562436ba0579c636e not found: ID does not exist" containerID="da7e27c4a037e15c1fa6804a3a96ea9f69d178f01e75490562436ba0579c636e" Oct 09 13:48:52 crc kubenswrapper[4762]: I1009 13:48:52.083353 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"da7e27c4a037e15c1fa6804a3a96ea9f69d178f01e75490562436ba0579c636e"} err="failed to get container status \"da7e27c4a037e15c1fa6804a3a96ea9f69d178f01e75490562436ba0579c636e\": rpc error: code = NotFound desc = could not find container \"da7e27c4a037e15c1fa6804a3a96ea9f69d178f01e75490562436ba0579c636e\": container with ID starting with da7e27c4a037e15c1fa6804a3a96ea9f69d178f01e75490562436ba0579c636e not found: ID does not exist" Oct 09 13:48:52 crc kubenswrapper[4762]: I1009 13:48:52.083430 4762 scope.go:117] "RemoveContainer" containerID="ad2b4b149b35475bb1f29b764ad78f0b39597a13c5a6f4b3a965d4c95cede3a2" Oct 09 13:48:52 crc kubenswrapper[4762]: E1009 13:48:52.083757 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ad2b4b149b35475bb1f29b764ad78f0b39597a13c5a6f4b3a965d4c95cede3a2\": container with ID starting with ad2b4b149b35475bb1f29b764ad78f0b39597a13c5a6f4b3a965d4c95cede3a2 not found: ID does not exist" containerID="ad2b4b149b35475bb1f29b764ad78f0b39597a13c5a6f4b3a965d4c95cede3a2" Oct 09 13:48:52 crc kubenswrapper[4762]: I1009 13:48:52.083790 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ad2b4b149b35475bb1f29b764ad78f0b39597a13c5a6f4b3a965d4c95cede3a2"} err="failed to get container status \"ad2b4b149b35475bb1f29b764ad78f0b39597a13c5a6f4b3a965d4c95cede3a2\": rpc error: code = NotFound desc = could not find container \"ad2b4b149b35475bb1f29b764ad78f0b39597a13c5a6f4b3a965d4c95cede3a2\": container with ID starting with ad2b4b149b35475bb1f29b764ad78f0b39597a13c5a6f4b3a965d4c95cede3a2 not found: ID does not exist" Oct 09 13:48:52 crc kubenswrapper[4762]: I1009 13:48:52.085092 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wwzhf\" (UniqueName: \"kubernetes.io/projected/9984c147-5425-4b69-ace5-52d351f46933-kube-api-access-wwzhf\") on node \"crc\" DevicePath \"\"" Oct 09 13:48:52 crc kubenswrapper[4762]: I1009 13:48:52.085120 4762 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b2efe15b-237f-4bb4-a563-a2a488cf3ddb-logs\") on node \"crc\" DevicePath \"\"" Oct 09 13:48:52 crc kubenswrapper[4762]: I1009 13:48:52.085133 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8ngm4\" (UniqueName: \"kubernetes.io/projected/b2efe15b-237f-4bb4-a563-a2a488cf3ddb-kube-api-access-8ngm4\") on node \"crc\" DevicePath \"\"" Oct 09 13:48:52 crc kubenswrapper[4762]: I1009 13:48:52.085142 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9984c147-5425-4b69-ace5-52d351f46933-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 13:48:52 crc kubenswrapper[4762]: I1009 13:48:52.085151 4762 reconciler_common.go:293] 
"Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9984c147-5425-4b69-ace5-52d351f46933-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 13:48:52 crc kubenswrapper[4762]: I1009 13:48:52.085160 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b2efe15b-237f-4bb4-a563-a2a488cf3ddb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 13:48:52 crc kubenswrapper[4762]: I1009 13:48:52.085170 4762 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b2efe15b-237f-4bb4-a563-a2a488cf3ddb-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 13:48:52 crc kubenswrapper[4762]: I1009 13:48:52.346139 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Oct 09 13:48:52 crc kubenswrapper[4762]: I1009 13:48:52.357757 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Oct 09 13:48:52 crc kubenswrapper[4762]: I1009 13:48:52.368486 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 09 13:48:52 crc kubenswrapper[4762]: I1009 13:48:52.385743 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Oct 09 13:48:52 crc kubenswrapper[4762]: I1009 13:48:52.403229 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Oct 09 13:48:52 crc kubenswrapper[4762]: E1009 13:48:52.403862 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9984c147-5425-4b69-ace5-52d351f46933" containerName="nova-scheduler-scheduler" Oct 09 13:48:52 crc kubenswrapper[4762]: I1009 13:48:52.403886 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="9984c147-5425-4b69-ace5-52d351f46933" containerName="nova-scheduler-scheduler" Oct 09 13:48:52 crc kubenswrapper[4762]: E1009 13:48:52.403909 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b2efe15b-237f-4bb4-a563-a2a488cf3ddb" containerName="nova-api-log" Oct 09 13:48:52 crc kubenswrapper[4762]: I1009 13:48:52.403918 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="b2efe15b-237f-4bb4-a563-a2a488cf3ddb" containerName="nova-api-log" Oct 09 13:48:52 crc kubenswrapper[4762]: E1009 13:48:52.403963 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b2efe15b-237f-4bb4-a563-a2a488cf3ddb" containerName="nova-api-api" Oct 09 13:48:52 crc kubenswrapper[4762]: I1009 13:48:52.403973 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="b2efe15b-237f-4bb4-a563-a2a488cf3ddb" containerName="nova-api-api" Oct 09 13:48:52 crc kubenswrapper[4762]: I1009 13:48:52.404189 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="b2efe15b-237f-4bb4-a563-a2a488cf3ddb" containerName="nova-api-api" Oct 09 13:48:52 crc kubenswrapper[4762]: I1009 13:48:52.404262 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="9984c147-5425-4b69-ace5-52d351f46933" containerName="nova-scheduler-scheduler" Oct 09 13:48:52 crc kubenswrapper[4762]: I1009 13:48:52.404282 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="b2efe15b-237f-4bb4-a563-a2a488cf3ddb" containerName="nova-api-log" Oct 09 13:48:52 crc kubenswrapper[4762]: I1009 13:48:52.405192 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Oct 09 13:48:52 crc kubenswrapper[4762]: I1009 13:48:52.409419 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Oct 09 13:48:52 crc kubenswrapper[4762]: I1009 13:48:52.414787 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 09 13:48:52 crc kubenswrapper[4762]: I1009 13:48:52.415582 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Oct 09 13:48:52 crc kubenswrapper[4762]: I1009 13:48:52.424051 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Oct 09 13:48:52 crc kubenswrapper[4762]: I1009 13:48:52.428416 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 09 13:48:52 crc kubenswrapper[4762]: I1009 13:48:52.439069 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 09 13:48:52 crc kubenswrapper[4762]: I1009 13:48:52.440193 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Oct 09 13:48:52 crc kubenswrapper[4762]: I1009 13:48:52.493499 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/637a6b30-9f8c-4ed9-b55f-43d0a04f54f6-logs\") pod \"nova-api-0\" (UID: \"637a6b30-9f8c-4ed9-b55f-43d0a04f54f6\") " pod="openstack/nova-api-0" Oct 09 13:48:52 crc kubenswrapper[4762]: I1009 13:48:52.493564 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/637a6b30-9f8c-4ed9-b55f-43d0a04f54f6-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"637a6b30-9f8c-4ed9-b55f-43d0a04f54f6\") " pod="openstack/nova-api-0" Oct 09 13:48:52 crc kubenswrapper[4762]: I1009 13:48:52.493755 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3c824403-f051-4f6f-8fd3-947fed9754a2-config-data\") pod \"nova-scheduler-0\" (UID: \"3c824403-f051-4f6f-8fd3-947fed9754a2\") " pod="openstack/nova-scheduler-0" Oct 09 13:48:52 crc kubenswrapper[4762]: I1009 13:48:52.493851 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8rncb\" (UniqueName: \"kubernetes.io/projected/637a6b30-9f8c-4ed9-b55f-43d0a04f54f6-kube-api-access-8rncb\") pod \"nova-api-0\" (UID: \"637a6b30-9f8c-4ed9-b55f-43d0a04f54f6\") " pod="openstack/nova-api-0" Oct 09 13:48:52 crc kubenswrapper[4762]: I1009 13:48:52.493898 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2s2s7\" (UniqueName: \"kubernetes.io/projected/3c824403-f051-4f6f-8fd3-947fed9754a2-kube-api-access-2s2s7\") pod \"nova-scheduler-0\" (UID: \"3c824403-f051-4f6f-8fd3-947fed9754a2\") " pod="openstack/nova-scheduler-0" Oct 09 13:48:52 crc kubenswrapper[4762]: I1009 13:48:52.493944 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/637a6b30-9f8c-4ed9-b55f-43d0a04f54f6-config-data\") pod \"nova-api-0\" (UID: \"637a6b30-9f8c-4ed9-b55f-43d0a04f54f6\") " pod="openstack/nova-api-0" Oct 09 13:48:52 crc kubenswrapper[4762]: I1009 13:48:52.494103 4762 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c824403-f051-4f6f-8fd3-947fed9754a2-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"3c824403-f051-4f6f-8fd3-947fed9754a2\") " pod="openstack/nova-scheduler-0" Oct 09 13:48:52 crc kubenswrapper[4762]: I1009 13:48:52.595585 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8rncb\" (UniqueName: \"kubernetes.io/projected/637a6b30-9f8c-4ed9-b55f-43d0a04f54f6-kube-api-access-8rncb\") pod \"nova-api-0\" (UID: \"637a6b30-9f8c-4ed9-b55f-43d0a04f54f6\") " pod="openstack/nova-api-0" Oct 09 13:48:52 crc kubenswrapper[4762]: I1009 13:48:52.595670 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2s2s7\" (UniqueName: \"kubernetes.io/projected/3c824403-f051-4f6f-8fd3-947fed9754a2-kube-api-access-2s2s7\") pod \"nova-scheduler-0\" (UID: \"3c824403-f051-4f6f-8fd3-947fed9754a2\") " pod="openstack/nova-scheduler-0" Oct 09 13:48:52 crc kubenswrapper[4762]: I1009 13:48:52.595717 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/637a6b30-9f8c-4ed9-b55f-43d0a04f54f6-config-data\") pod \"nova-api-0\" (UID: \"637a6b30-9f8c-4ed9-b55f-43d0a04f54f6\") " pod="openstack/nova-api-0" Oct 09 13:48:52 crc kubenswrapper[4762]: I1009 13:48:52.595763 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c824403-f051-4f6f-8fd3-947fed9754a2-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"3c824403-f051-4f6f-8fd3-947fed9754a2\") " pod="openstack/nova-scheduler-0" Oct 09 13:48:52 crc kubenswrapper[4762]: I1009 13:48:52.595826 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/637a6b30-9f8c-4ed9-b55f-43d0a04f54f6-logs\") pod \"nova-api-0\" (UID: \"637a6b30-9f8c-4ed9-b55f-43d0a04f54f6\") " pod="openstack/nova-api-0" Oct 09 13:48:52 crc kubenswrapper[4762]: I1009 13:48:52.595850 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/637a6b30-9f8c-4ed9-b55f-43d0a04f54f6-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"637a6b30-9f8c-4ed9-b55f-43d0a04f54f6\") " pod="openstack/nova-api-0" Oct 09 13:48:52 crc kubenswrapper[4762]: I1009 13:48:52.595883 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3c824403-f051-4f6f-8fd3-947fed9754a2-config-data\") pod \"nova-scheduler-0\" (UID: \"3c824403-f051-4f6f-8fd3-947fed9754a2\") " pod="openstack/nova-scheduler-0" Oct 09 13:48:52 crc kubenswrapper[4762]: I1009 13:48:52.596537 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/637a6b30-9f8c-4ed9-b55f-43d0a04f54f6-logs\") pod \"nova-api-0\" (UID: \"637a6b30-9f8c-4ed9-b55f-43d0a04f54f6\") " pod="openstack/nova-api-0" Oct 09 13:48:52 crc kubenswrapper[4762]: I1009 13:48:52.601789 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3c824403-f051-4f6f-8fd3-947fed9754a2-config-data\") pod \"nova-scheduler-0\" (UID: \"3c824403-f051-4f6f-8fd3-947fed9754a2\") " pod="openstack/nova-scheduler-0" Oct 09 13:48:52 crc kubenswrapper[4762]: I1009 13:48:52.602899 4762 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/637a6b30-9f8c-4ed9-b55f-43d0a04f54f6-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"637a6b30-9f8c-4ed9-b55f-43d0a04f54f6\") " pod="openstack/nova-api-0" Oct 09 13:48:52 crc kubenswrapper[4762]: I1009 13:48:52.604080 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/637a6b30-9f8c-4ed9-b55f-43d0a04f54f6-config-data\") pod \"nova-api-0\" (UID: \"637a6b30-9f8c-4ed9-b55f-43d0a04f54f6\") " pod="openstack/nova-api-0" Oct 09 13:48:52 crc kubenswrapper[4762]: I1009 13:48:52.607748 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c824403-f051-4f6f-8fd3-947fed9754a2-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"3c824403-f051-4f6f-8fd3-947fed9754a2\") " pod="openstack/nova-scheduler-0" Oct 09 13:48:52 crc kubenswrapper[4762]: I1009 13:48:52.616121 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2s2s7\" (UniqueName: \"kubernetes.io/projected/3c824403-f051-4f6f-8fd3-947fed9754a2-kube-api-access-2s2s7\") pod \"nova-scheduler-0\" (UID: \"3c824403-f051-4f6f-8fd3-947fed9754a2\") " pod="openstack/nova-scheduler-0" Oct 09 13:48:52 crc kubenswrapper[4762]: I1009 13:48:52.616868 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8rncb\" (UniqueName: \"kubernetes.io/projected/637a6b30-9f8c-4ed9-b55f-43d0a04f54f6-kube-api-access-8rncb\") pod \"nova-api-0\" (UID: \"637a6b30-9f8c-4ed9-b55f-43d0a04f54f6\") " pod="openstack/nova-api-0" Oct 09 13:48:52 crc kubenswrapper[4762]: I1009 13:48:52.723998 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 09 13:48:52 crc kubenswrapper[4762]: I1009 13:48:52.747239 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 09 13:48:52 crc kubenswrapper[4762]: I1009 13:48:52.976348 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9984c147-5425-4b69-ace5-52d351f46933" path="/var/lib/kubelet/pods/9984c147-5425-4b69-ace5-52d351f46933/volumes" Oct 09 13:48:52 crc kubenswrapper[4762]: I1009 13:48:52.977244 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b2efe15b-237f-4bb4-a563-a2a488cf3ddb" path="/var/lib/kubelet/pods/b2efe15b-237f-4bb4-a563-a2a488cf3ddb/volumes" Oct 09 13:48:53 crc kubenswrapper[4762]: W1009 13:48:53.203727 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3c824403_f051_4f6f_8fd3_947fed9754a2.slice/crio-b527e95ab075d2d902ff0db08e0407521cbd4bb8f8d4ce18d1ee6c7856b7498b WatchSource:0}: Error finding container b527e95ab075d2d902ff0db08e0407521cbd4bb8f8d4ce18d1ee6c7856b7498b: Status 404 returned error can't find the container with id b527e95ab075d2d902ff0db08e0407521cbd4bb8f8d4ce18d1ee6c7856b7498b Oct 09 13:48:53 crc kubenswrapper[4762]: I1009 13:48:53.209068 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 09 13:48:53 crc kubenswrapper[4762]: I1009 13:48:53.285826 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 09 13:48:53 crc kubenswrapper[4762]: W1009 13:48:53.286471 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod637a6b30_9f8c_4ed9_b55f_43d0a04f54f6.slice/crio-b567443d0771fcc56ed891151ea261b17ed3f7f1604d2435e3e18225defa8453 WatchSource:0}: Error finding container b567443d0771fcc56ed891151ea261b17ed3f7f1604d2435e3e18225defa8453: Status 404 returned error can't find the container with id b567443d0771fcc56ed891151ea261b17ed3f7f1604d2435e3e18225defa8453 Oct 09 13:48:53 crc kubenswrapper[4762]: I1009 13:48:53.636567 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 09 13:48:53 crc kubenswrapper[4762]: I1009 13:48:53.636817 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 09 13:48:54 crc kubenswrapper[4762]: I1009 13:48:54.039872 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"637a6b30-9f8c-4ed9-b55f-43d0a04f54f6","Type":"ContainerStarted","Data":"c47eb940d52ce275fb583df2b0493170537ac491c37313c4b6bab7dcc2822722"} Oct 09 13:48:54 crc kubenswrapper[4762]: I1009 13:48:54.040279 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"637a6b30-9f8c-4ed9-b55f-43d0a04f54f6","Type":"ContainerStarted","Data":"54076708122d199cd43af8b0e038b37186d01b046e14e3e4fc13275f0a827c70"} Oct 09 13:48:54 crc kubenswrapper[4762]: I1009 13:48:54.040334 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"637a6b30-9f8c-4ed9-b55f-43d0a04f54f6","Type":"ContainerStarted","Data":"b567443d0771fcc56ed891151ea261b17ed3f7f1604d2435e3e18225defa8453"} Oct 09 13:48:54 crc kubenswrapper[4762]: I1009 13:48:54.042650 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"3c824403-f051-4f6f-8fd3-947fed9754a2","Type":"ContainerStarted","Data":"20d98a746ba255dae07bc52fa6cc410f47ffe7d26f5ecd0e0bc7866a47097251"} Oct 09 13:48:54 crc kubenswrapper[4762]: I1009 13:48:54.042680 4762 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"3c824403-f051-4f6f-8fd3-947fed9754a2","Type":"ContainerStarted","Data":"b527e95ab075d2d902ff0db08e0407521cbd4bb8f8d4ce18d1ee6c7856b7498b"} Oct 09 13:48:54 crc kubenswrapper[4762]: I1009 13:48:54.070419 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.070397882 podStartE2EDuration="2.070397882s" podCreationTimestamp="2025-10-09 13:48:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:48:54.056995403 +0000 UTC m=+1409.830786442" watchObservedRunningTime="2025-10-09 13:48:54.070397882 +0000 UTC m=+1409.844188921" Oct 09 13:48:54 crc kubenswrapper[4762]: I1009 13:48:54.076539 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.076523572 podStartE2EDuration="2.076523572s" podCreationTimestamp="2025-10-09 13:48:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:48:54.073905473 +0000 UTC m=+1409.847696512" watchObservedRunningTime="2025-10-09 13:48:54.076523572 +0000 UTC m=+1409.850314611" Oct 09 13:48:55 crc kubenswrapper[4762]: I1009 13:48:55.713628 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 09 13:48:55 crc kubenswrapper[4762]: I1009 13:48:55.714092 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="ef0320a3-ddc0-42cb-9042-33b4b77994ff" containerName="kube-state-metrics" containerID="cri-o://5b7d21cfa348f2da8a838bfcb268895e7e0fb5e13834121d2230a5e12208a9f4" gracePeriod=30 Oct 09 13:48:56 crc kubenswrapper[4762]: I1009 13:48:56.086173 4762 generic.go:334] "Generic (PLEG): container finished" podID="ef0320a3-ddc0-42cb-9042-33b4b77994ff" containerID="5b7d21cfa348f2da8a838bfcb268895e7e0fb5e13834121d2230a5e12208a9f4" exitCode=2 Oct 09 13:48:56 crc kubenswrapper[4762]: I1009 13:48:56.086231 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"ef0320a3-ddc0-42cb-9042-33b4b77994ff","Type":"ContainerDied","Data":"5b7d21cfa348f2da8a838bfcb268895e7e0fb5e13834121d2230a5e12208a9f4"} Oct 09 13:48:56 crc kubenswrapper[4762]: I1009 13:48:56.208063 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 09 13:48:56 crc kubenswrapper[4762]: I1009 13:48:56.263016 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r5lq4\" (UniqueName: \"kubernetes.io/projected/ef0320a3-ddc0-42cb-9042-33b4b77994ff-kube-api-access-r5lq4\") pod \"ef0320a3-ddc0-42cb-9042-33b4b77994ff\" (UID: \"ef0320a3-ddc0-42cb-9042-33b4b77994ff\") " Oct 09 13:48:56 crc kubenswrapper[4762]: I1009 13:48:56.269477 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ef0320a3-ddc0-42cb-9042-33b4b77994ff-kube-api-access-r5lq4" (OuterVolumeSpecName: "kube-api-access-r5lq4") pod "ef0320a3-ddc0-42cb-9042-33b4b77994ff" (UID: "ef0320a3-ddc0-42cb-9042-33b4b77994ff"). InnerVolumeSpecName "kube-api-access-r5lq4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:48:56 crc kubenswrapper[4762]: I1009 13:48:56.365386 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r5lq4\" (UniqueName: \"kubernetes.io/projected/ef0320a3-ddc0-42cb-9042-33b4b77994ff-kube-api-access-r5lq4\") on node \"crc\" DevicePath \"\"" Oct 09 13:48:57 crc kubenswrapper[4762]: I1009 13:48:57.095865 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"ef0320a3-ddc0-42cb-9042-33b4b77994ff","Type":"ContainerDied","Data":"d5cc3e0ce1a759c00b21f89bcbc61168b82aa52e8425afa7df6ed9b8da450f04"} Oct 09 13:48:57 crc kubenswrapper[4762]: I1009 13:48:57.095912 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 09 13:48:57 crc kubenswrapper[4762]: I1009 13:48:57.096199 4762 scope.go:117] "RemoveContainer" containerID="5b7d21cfa348f2da8a838bfcb268895e7e0fb5e13834121d2230a5e12208a9f4" Oct 09 13:48:57 crc kubenswrapper[4762]: I1009 13:48:57.124832 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 09 13:48:57 crc kubenswrapper[4762]: I1009 13:48:57.135565 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 09 13:48:57 crc kubenswrapper[4762]: I1009 13:48:57.148453 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Oct 09 13:48:57 crc kubenswrapper[4762]: E1009 13:48:57.149999 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ef0320a3-ddc0-42cb-9042-33b4b77994ff" containerName="kube-state-metrics" Oct 09 13:48:57 crc kubenswrapper[4762]: I1009 13:48:57.150041 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef0320a3-ddc0-42cb-9042-33b4b77994ff" containerName="kube-state-metrics" Oct 09 13:48:57 crc kubenswrapper[4762]: I1009 13:48:57.150312 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="ef0320a3-ddc0-42cb-9042-33b4b77994ff" containerName="kube-state-metrics" Oct 09 13:48:57 crc kubenswrapper[4762]: I1009 13:48:57.151366 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 09 13:48:57 crc kubenswrapper[4762]: I1009 13:48:57.153181 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"kube-state-metrics-tls-config" Oct 09 13:48:57 crc kubenswrapper[4762]: I1009 13:48:57.153383 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-kube-state-metrics-svc" Oct 09 13:48:57 crc kubenswrapper[4762]: I1009 13:48:57.160501 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 09 13:48:57 crc kubenswrapper[4762]: I1009 13:48:57.181390 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tbngj\" (UniqueName: \"kubernetes.io/projected/7c325595-23e7-465e-a533-181eef8ba528-kube-api-access-tbngj\") pod \"kube-state-metrics-0\" (UID: \"7c325595-23e7-465e-a533-181eef8ba528\") " pod="openstack/kube-state-metrics-0" Oct 09 13:48:57 crc kubenswrapper[4762]: I1009 13:48:57.181455 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/7c325595-23e7-465e-a533-181eef8ba528-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"7c325595-23e7-465e-a533-181eef8ba528\") " pod="openstack/kube-state-metrics-0" Oct 09 13:48:57 crc kubenswrapper[4762]: I1009 13:48:57.181531 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c325595-23e7-465e-a533-181eef8ba528-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"7c325595-23e7-465e-a533-181eef8ba528\") " pod="openstack/kube-state-metrics-0" Oct 09 13:48:57 crc kubenswrapper[4762]: I1009 13:48:57.181578 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/7c325595-23e7-465e-a533-181eef8ba528-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"7c325595-23e7-465e-a533-181eef8ba528\") " pod="openstack/kube-state-metrics-0" Oct 09 13:48:57 crc kubenswrapper[4762]: I1009 13:48:57.283679 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tbngj\" (UniqueName: \"kubernetes.io/projected/7c325595-23e7-465e-a533-181eef8ba528-kube-api-access-tbngj\") pod \"kube-state-metrics-0\" (UID: \"7c325595-23e7-465e-a533-181eef8ba528\") " pod="openstack/kube-state-metrics-0" Oct 09 13:48:57 crc kubenswrapper[4762]: I1009 13:48:57.283755 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/7c325595-23e7-465e-a533-181eef8ba528-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"7c325595-23e7-465e-a533-181eef8ba528\") " pod="openstack/kube-state-metrics-0" Oct 09 13:48:57 crc kubenswrapper[4762]: I1009 13:48:57.283809 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c325595-23e7-465e-a533-181eef8ba528-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"7c325595-23e7-465e-a533-181eef8ba528\") " pod="openstack/kube-state-metrics-0" Oct 09 13:48:57 crc kubenswrapper[4762]: I1009 13:48:57.283836 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" 
(UniqueName: \"kubernetes.io/secret/7c325595-23e7-465e-a533-181eef8ba528-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"7c325595-23e7-465e-a533-181eef8ba528\") " pod="openstack/kube-state-metrics-0" Oct 09 13:48:57 crc kubenswrapper[4762]: I1009 13:48:57.289580 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/7c325595-23e7-465e-a533-181eef8ba528-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"7c325595-23e7-465e-a533-181eef8ba528\") " pod="openstack/kube-state-metrics-0" Oct 09 13:48:57 crc kubenswrapper[4762]: I1009 13:48:57.289807 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c325595-23e7-465e-a533-181eef8ba528-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"7c325595-23e7-465e-a533-181eef8ba528\") " pod="openstack/kube-state-metrics-0" Oct 09 13:48:57 crc kubenswrapper[4762]: I1009 13:48:57.291107 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/7c325595-23e7-465e-a533-181eef8ba528-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"7c325595-23e7-465e-a533-181eef8ba528\") " pod="openstack/kube-state-metrics-0" Oct 09 13:48:57 crc kubenswrapper[4762]: I1009 13:48:57.300828 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tbngj\" (UniqueName: \"kubernetes.io/projected/7c325595-23e7-465e-a533-181eef8ba528-kube-api-access-tbngj\") pod \"kube-state-metrics-0\" (UID: \"7c325595-23e7-465e-a533-181eef8ba528\") " pod="openstack/kube-state-metrics-0" Oct 09 13:48:57 crc kubenswrapper[4762]: I1009 13:48:57.480626 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 09 13:48:57 crc kubenswrapper[4762]: I1009 13:48:57.579434 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 09 13:48:57 crc kubenswrapper[4762]: I1009 13:48:57.580022 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f22f1383-fdcf-4018-8a3d-a28c32c0c816" containerName="ceilometer-central-agent" containerID="cri-o://ded23220fc58a5b74270cccae60e80e61f61a7352f87c7b55bac7f027c6fd843" gracePeriod=30 Oct 09 13:48:57 crc kubenswrapper[4762]: I1009 13:48:57.580484 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f22f1383-fdcf-4018-8a3d-a28c32c0c816" containerName="proxy-httpd" containerID="cri-o://cc703fa7263b50d7eb08c5c4f871a1a3194f30fcacedc96a95b33de13c8fcb82" gracePeriod=30 Oct 09 13:48:57 crc kubenswrapper[4762]: I1009 13:48:57.580538 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f22f1383-fdcf-4018-8a3d-a28c32c0c816" containerName="sg-core" containerID="cri-o://06ad52967d34b9ea32fc5f3c7c7e479bcb7cb8cc39f8342d66ee809550e9024b" gracePeriod=30 Oct 09 13:48:57 crc kubenswrapper[4762]: I1009 13:48:57.580579 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f22f1383-fdcf-4018-8a3d-a28c32c0c816" containerName="ceilometer-notification-agent" containerID="cri-o://e754d109af3252f9935f63e8a2ddadcdf2ca9788b6c2d4ad3eb33eef9084d86e" gracePeriod=30 Oct 09 13:48:57 crc kubenswrapper[4762]: I1009 13:48:57.732286 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Oct 09 13:48:57 crc kubenswrapper[4762]: I1009 13:48:57.959980 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 09 13:48:57 crc kubenswrapper[4762]: W1009 13:48:57.966855 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7c325595_23e7_465e_a533_181eef8ba528.slice/crio-dad60dc8c73eb80969a9f41b0f35da6a3b68c7bd367028567a588081aebcba82 WatchSource:0}: Error finding container dad60dc8c73eb80969a9f41b0f35da6a3b68c7bd367028567a588081aebcba82: Status 404 returned error can't find the container with id dad60dc8c73eb80969a9f41b0f35da6a3b68c7bd367028567a588081aebcba82 Oct 09 13:48:57 crc kubenswrapper[4762]: I1009 13:48:57.969435 4762 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 09 13:48:58 crc kubenswrapper[4762]: I1009 13:48:58.104772 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"7c325595-23e7-465e-a533-181eef8ba528","Type":"ContainerStarted","Data":"dad60dc8c73eb80969a9f41b0f35da6a3b68c7bd367028567a588081aebcba82"} Oct 09 13:48:58 crc kubenswrapper[4762]: I1009 13:48:58.107088 4762 generic.go:334] "Generic (PLEG): container finished" podID="f22f1383-fdcf-4018-8a3d-a28c32c0c816" containerID="cc703fa7263b50d7eb08c5c4f871a1a3194f30fcacedc96a95b33de13c8fcb82" exitCode=0 Oct 09 13:48:58 crc kubenswrapper[4762]: I1009 13:48:58.107123 4762 generic.go:334] "Generic (PLEG): container finished" podID="f22f1383-fdcf-4018-8a3d-a28c32c0c816" containerID="06ad52967d34b9ea32fc5f3c7c7e479bcb7cb8cc39f8342d66ee809550e9024b" exitCode=2 Oct 09 13:48:58 crc kubenswrapper[4762]: I1009 13:48:58.107131 4762 generic.go:334] "Generic (PLEG): 
container finished" podID="f22f1383-fdcf-4018-8a3d-a28c32c0c816" containerID="ded23220fc58a5b74270cccae60e80e61f61a7352f87c7b55bac7f027c6fd843" exitCode=0 Oct 09 13:48:58 crc kubenswrapper[4762]: I1009 13:48:58.107138 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f22f1383-fdcf-4018-8a3d-a28c32c0c816","Type":"ContainerDied","Data":"cc703fa7263b50d7eb08c5c4f871a1a3194f30fcacedc96a95b33de13c8fcb82"} Oct 09 13:48:58 crc kubenswrapper[4762]: I1009 13:48:58.107171 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f22f1383-fdcf-4018-8a3d-a28c32c0c816","Type":"ContainerDied","Data":"06ad52967d34b9ea32fc5f3c7c7e479bcb7cb8cc39f8342d66ee809550e9024b"} Oct 09 13:48:58 crc kubenswrapper[4762]: I1009 13:48:58.107183 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f22f1383-fdcf-4018-8a3d-a28c32c0c816","Type":"ContainerDied","Data":"ded23220fc58a5b74270cccae60e80e61f61a7352f87c7b55bac7f027c6fd843"} Oct 09 13:48:58 crc kubenswrapper[4762]: I1009 13:48:58.636375 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Oct 09 13:48:58 crc kubenswrapper[4762]: I1009 13:48:58.636761 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Oct 09 13:48:58 crc kubenswrapper[4762]: I1009 13:48:58.985566 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ef0320a3-ddc0-42cb-9042-33b4b77994ff" path="/var/lib/kubelet/pods/ef0320a3-ddc0-42cb-9042-33b4b77994ff/volumes" Oct 09 13:48:59 crc kubenswrapper[4762]: I1009 13:48:59.119006 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"7c325595-23e7-465e-a533-181eef8ba528","Type":"ContainerStarted","Data":"85c1dff79b65efbb1d0077d4017bfcb9b84c3b899d4399ade4316429900aab80"} Oct 09 13:48:59 crc kubenswrapper[4762]: I1009 13:48:59.119344 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Oct 09 13:48:59 crc kubenswrapper[4762]: I1009 13:48:59.141318 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=1.757157844 podStartE2EDuration="2.141297748s" podCreationTimestamp="2025-10-09 13:48:57 +0000 UTC" firstStartedPulling="2025-10-09 13:48:57.969225693 +0000 UTC m=+1413.743016732" lastFinishedPulling="2025-10-09 13:48:58.353365597 +0000 UTC m=+1414.127156636" observedRunningTime="2025-10-09 13:48:59.139058251 +0000 UTC m=+1414.912849300" watchObservedRunningTime="2025-10-09 13:48:59.141297748 +0000 UTC m=+1414.915088787" Oct 09 13:48:59 crc kubenswrapper[4762]: I1009 13:48:59.649873 4762 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="fbddf4da-b50f-4ea5-9665-58211ecbeed3" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.190:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 09 13:48:59 crc kubenswrapper[4762]: I1009 13:48:59.649897 4762 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="fbddf4da-b50f-4ea5-9665-58211ecbeed3" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.190:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 09 13:49:00 crc kubenswrapper[4762]: I1009 13:49:00.604036 4762 
util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 09 13:49:00 crc kubenswrapper[4762]: I1009 13:49:00.646611 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f22f1383-fdcf-4018-8a3d-a28c32c0c816-sg-core-conf-yaml\") pod \"f22f1383-fdcf-4018-8a3d-a28c32c0c816\" (UID: \"f22f1383-fdcf-4018-8a3d-a28c32c0c816\") " Oct 09 13:49:00 crc kubenswrapper[4762]: I1009 13:49:00.646676 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f22f1383-fdcf-4018-8a3d-a28c32c0c816-log-httpd\") pod \"f22f1383-fdcf-4018-8a3d-a28c32c0c816\" (UID: \"f22f1383-fdcf-4018-8a3d-a28c32c0c816\") " Oct 09 13:49:00 crc kubenswrapper[4762]: I1009 13:49:00.646700 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f22f1383-fdcf-4018-8a3d-a28c32c0c816-config-data\") pod \"f22f1383-fdcf-4018-8a3d-a28c32c0c816\" (UID: \"f22f1383-fdcf-4018-8a3d-a28c32c0c816\") " Oct 09 13:49:00 crc kubenswrapper[4762]: I1009 13:49:00.646720 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f22f1383-fdcf-4018-8a3d-a28c32c0c816-run-httpd\") pod \"f22f1383-fdcf-4018-8a3d-a28c32c0c816\" (UID: \"f22f1383-fdcf-4018-8a3d-a28c32c0c816\") " Oct 09 13:49:00 crc kubenswrapper[4762]: I1009 13:49:00.646784 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f22f1383-fdcf-4018-8a3d-a28c32c0c816-scripts\") pod \"f22f1383-fdcf-4018-8a3d-a28c32c0c816\" (UID: \"f22f1383-fdcf-4018-8a3d-a28c32c0c816\") " Oct 09 13:49:00 crc kubenswrapper[4762]: I1009 13:49:00.646831 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f22f1383-fdcf-4018-8a3d-a28c32c0c816-combined-ca-bundle\") pod \"f22f1383-fdcf-4018-8a3d-a28c32c0c816\" (UID: \"f22f1383-fdcf-4018-8a3d-a28c32c0c816\") " Oct 09 13:49:00 crc kubenswrapper[4762]: I1009 13:49:00.646904 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vlt92\" (UniqueName: \"kubernetes.io/projected/f22f1383-fdcf-4018-8a3d-a28c32c0c816-kube-api-access-vlt92\") pod \"f22f1383-fdcf-4018-8a3d-a28c32c0c816\" (UID: \"f22f1383-fdcf-4018-8a3d-a28c32c0c816\") " Oct 09 13:49:00 crc kubenswrapper[4762]: I1009 13:49:00.647342 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f22f1383-fdcf-4018-8a3d-a28c32c0c816-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "f22f1383-fdcf-4018-8a3d-a28c32c0c816" (UID: "f22f1383-fdcf-4018-8a3d-a28c32c0c816"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 13:49:00 crc kubenswrapper[4762]: I1009 13:49:00.647899 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f22f1383-fdcf-4018-8a3d-a28c32c0c816-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "f22f1383-fdcf-4018-8a3d-a28c32c0c816" (UID: "f22f1383-fdcf-4018-8a3d-a28c32c0c816"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 13:49:00 crc kubenswrapper[4762]: I1009 13:49:00.654139 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f22f1383-fdcf-4018-8a3d-a28c32c0c816-kube-api-access-vlt92" (OuterVolumeSpecName: "kube-api-access-vlt92") pod "f22f1383-fdcf-4018-8a3d-a28c32c0c816" (UID: "f22f1383-fdcf-4018-8a3d-a28c32c0c816"). InnerVolumeSpecName "kube-api-access-vlt92". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:49:00 crc kubenswrapper[4762]: I1009 13:49:00.659043 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f22f1383-fdcf-4018-8a3d-a28c32c0c816-scripts" (OuterVolumeSpecName: "scripts") pod "f22f1383-fdcf-4018-8a3d-a28c32c0c816" (UID: "f22f1383-fdcf-4018-8a3d-a28c32c0c816"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:49:00 crc kubenswrapper[4762]: I1009 13:49:00.685903 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f22f1383-fdcf-4018-8a3d-a28c32c0c816-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "f22f1383-fdcf-4018-8a3d-a28c32c0c816" (UID: "f22f1383-fdcf-4018-8a3d-a28c32c0c816"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:49:00 crc kubenswrapper[4762]: I1009 13:49:00.736238 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f22f1383-fdcf-4018-8a3d-a28c32c0c816-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f22f1383-fdcf-4018-8a3d-a28c32c0c816" (UID: "f22f1383-fdcf-4018-8a3d-a28c32c0c816"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:49:00 crc kubenswrapper[4762]: I1009 13:49:00.748666 4762 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f22f1383-fdcf-4018-8a3d-a28c32c0c816-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 09 13:49:00 crc kubenswrapper[4762]: I1009 13:49:00.748706 4762 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f22f1383-fdcf-4018-8a3d-a28c32c0c816-scripts\") on node \"crc\" DevicePath \"\"" Oct 09 13:49:00 crc kubenswrapper[4762]: I1009 13:49:00.748719 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f22f1383-fdcf-4018-8a3d-a28c32c0c816-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 13:49:00 crc kubenswrapper[4762]: I1009 13:49:00.748734 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vlt92\" (UniqueName: \"kubernetes.io/projected/f22f1383-fdcf-4018-8a3d-a28c32c0c816-kube-api-access-vlt92\") on node \"crc\" DevicePath \"\"" Oct 09 13:49:00 crc kubenswrapper[4762]: I1009 13:49:00.748746 4762 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f22f1383-fdcf-4018-8a3d-a28c32c0c816-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 09 13:49:00 crc kubenswrapper[4762]: I1009 13:49:00.748757 4762 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f22f1383-fdcf-4018-8a3d-a28c32c0c816-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 09 13:49:00 crc kubenswrapper[4762]: I1009 13:49:00.758301 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for 
volume "kubernetes.io/secret/f22f1383-fdcf-4018-8a3d-a28c32c0c816-config-data" (OuterVolumeSpecName: "config-data") pod "f22f1383-fdcf-4018-8a3d-a28c32c0c816" (UID: "f22f1383-fdcf-4018-8a3d-a28c32c0c816"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:49:00 crc kubenswrapper[4762]: I1009 13:49:00.849993 4762 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f22f1383-fdcf-4018-8a3d-a28c32c0c816-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 13:49:01 crc kubenswrapper[4762]: I1009 13:49:01.139626 4762 generic.go:334] "Generic (PLEG): container finished" podID="f22f1383-fdcf-4018-8a3d-a28c32c0c816" containerID="e754d109af3252f9935f63e8a2ddadcdf2ca9788b6c2d4ad3eb33eef9084d86e" exitCode=0 Oct 09 13:49:01 crc kubenswrapper[4762]: I1009 13:49:01.139700 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f22f1383-fdcf-4018-8a3d-a28c32c0c816","Type":"ContainerDied","Data":"e754d109af3252f9935f63e8a2ddadcdf2ca9788b6c2d4ad3eb33eef9084d86e"} Oct 09 13:49:01 crc kubenswrapper[4762]: I1009 13:49:01.139737 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 09 13:49:01 crc kubenswrapper[4762]: I1009 13:49:01.139767 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f22f1383-fdcf-4018-8a3d-a28c32c0c816","Type":"ContainerDied","Data":"f1d100458e3fd065ef92133ab63468e8bb6e93fb02a77b97a78f5df84682869b"} Oct 09 13:49:01 crc kubenswrapper[4762]: I1009 13:49:01.139791 4762 scope.go:117] "RemoveContainer" containerID="cc703fa7263b50d7eb08c5c4f871a1a3194f30fcacedc96a95b33de13c8fcb82" Oct 09 13:49:01 crc kubenswrapper[4762]: I1009 13:49:01.164980 4762 scope.go:117] "RemoveContainer" containerID="06ad52967d34b9ea32fc5f3c7c7e479bcb7cb8cc39f8342d66ee809550e9024b" Oct 09 13:49:01 crc kubenswrapper[4762]: I1009 13:49:01.168719 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 09 13:49:01 crc kubenswrapper[4762]: I1009 13:49:01.184344 4762 scope.go:117] "RemoveContainer" containerID="e754d109af3252f9935f63e8a2ddadcdf2ca9788b6c2d4ad3eb33eef9084d86e" Oct 09 13:49:01 crc kubenswrapper[4762]: I1009 13:49:01.190846 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 09 13:49:01 crc kubenswrapper[4762]: I1009 13:49:01.207200 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 09 13:49:01 crc kubenswrapper[4762]: E1009 13:49:01.207726 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f22f1383-fdcf-4018-8a3d-a28c32c0c816" containerName="sg-core" Oct 09 13:49:01 crc kubenswrapper[4762]: I1009 13:49:01.207752 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="f22f1383-fdcf-4018-8a3d-a28c32c0c816" containerName="sg-core" Oct 09 13:49:01 crc kubenswrapper[4762]: E1009 13:49:01.207769 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f22f1383-fdcf-4018-8a3d-a28c32c0c816" containerName="ceilometer-central-agent" Oct 09 13:49:01 crc kubenswrapper[4762]: I1009 13:49:01.207777 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="f22f1383-fdcf-4018-8a3d-a28c32c0c816" containerName="ceilometer-central-agent" Oct 09 13:49:01 crc kubenswrapper[4762]: E1009 13:49:01.207795 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f22f1383-fdcf-4018-8a3d-a28c32c0c816" containerName="proxy-httpd" Oct 
09 13:49:01 crc kubenswrapper[4762]: I1009 13:49:01.207803 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="f22f1383-fdcf-4018-8a3d-a28c32c0c816" containerName="proxy-httpd" Oct 09 13:49:01 crc kubenswrapper[4762]: E1009 13:49:01.207828 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f22f1383-fdcf-4018-8a3d-a28c32c0c816" containerName="ceilometer-notification-agent" Oct 09 13:49:01 crc kubenswrapper[4762]: I1009 13:49:01.207836 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="f22f1383-fdcf-4018-8a3d-a28c32c0c816" containerName="ceilometer-notification-agent" Oct 09 13:49:01 crc kubenswrapper[4762]: I1009 13:49:01.208062 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="f22f1383-fdcf-4018-8a3d-a28c32c0c816" containerName="sg-core" Oct 09 13:49:01 crc kubenswrapper[4762]: I1009 13:49:01.208079 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="f22f1383-fdcf-4018-8a3d-a28c32c0c816" containerName="ceilometer-central-agent" Oct 09 13:49:01 crc kubenswrapper[4762]: I1009 13:49:01.208099 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="f22f1383-fdcf-4018-8a3d-a28c32c0c816" containerName="proxy-httpd" Oct 09 13:49:01 crc kubenswrapper[4762]: I1009 13:49:01.208124 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="f22f1383-fdcf-4018-8a3d-a28c32c0c816" containerName="ceilometer-notification-agent" Oct 09 13:49:01 crc kubenswrapper[4762]: I1009 13:49:01.210110 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 09 13:49:01 crc kubenswrapper[4762]: I1009 13:49:01.217351 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 09 13:49:01 crc kubenswrapper[4762]: I1009 13:49:01.217676 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Oct 09 13:49:01 crc kubenswrapper[4762]: I1009 13:49:01.217819 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 09 13:49:01 crc kubenswrapper[4762]: I1009 13:49:01.217989 4762 scope.go:117] "RemoveContainer" containerID="ded23220fc58a5b74270cccae60e80e61f61a7352f87c7b55bac7f027c6fd843" Oct 09 13:49:01 crc kubenswrapper[4762]: I1009 13:49:01.232085 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 09 13:49:01 crc kubenswrapper[4762]: I1009 13:49:01.259241 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/33dcd38f-e126-4635-9c44-48fd9fa3b51d-config-data\") pod \"ceilometer-0\" (UID: \"33dcd38f-e126-4635-9c44-48fd9fa3b51d\") " pod="openstack/ceilometer-0" Oct 09 13:49:01 crc kubenswrapper[4762]: I1009 13:49:01.259368 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/33dcd38f-e126-4635-9c44-48fd9fa3b51d-run-httpd\") pod \"ceilometer-0\" (UID: \"33dcd38f-e126-4635-9c44-48fd9fa3b51d\") " pod="openstack/ceilometer-0" Oct 09 13:49:01 crc kubenswrapper[4762]: I1009 13:49:01.259399 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/33dcd38f-e126-4635-9c44-48fd9fa3b51d-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"33dcd38f-e126-4635-9c44-48fd9fa3b51d\") " pod="openstack/ceilometer-0" Oct 09 
13:49:01 crc kubenswrapper[4762]: I1009 13:49:01.259419 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/33dcd38f-e126-4635-9c44-48fd9fa3b51d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"33dcd38f-e126-4635-9c44-48fd9fa3b51d\") " pod="openstack/ceilometer-0" Oct 09 13:49:01 crc kubenswrapper[4762]: I1009 13:49:01.259542 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/33dcd38f-e126-4635-9c44-48fd9fa3b51d-scripts\") pod \"ceilometer-0\" (UID: \"33dcd38f-e126-4635-9c44-48fd9fa3b51d\") " pod="openstack/ceilometer-0" Oct 09 13:49:01 crc kubenswrapper[4762]: I1009 13:49:01.259560 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/33dcd38f-e126-4635-9c44-48fd9fa3b51d-log-httpd\") pod \"ceilometer-0\" (UID: \"33dcd38f-e126-4635-9c44-48fd9fa3b51d\") " pod="openstack/ceilometer-0" Oct 09 13:49:01 crc kubenswrapper[4762]: I1009 13:49:01.259593 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7s6ls\" (UniqueName: \"kubernetes.io/projected/33dcd38f-e126-4635-9c44-48fd9fa3b51d-kube-api-access-7s6ls\") pod \"ceilometer-0\" (UID: \"33dcd38f-e126-4635-9c44-48fd9fa3b51d\") " pod="openstack/ceilometer-0" Oct 09 13:49:01 crc kubenswrapper[4762]: I1009 13:49:01.259619 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/33dcd38f-e126-4635-9c44-48fd9fa3b51d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"33dcd38f-e126-4635-9c44-48fd9fa3b51d\") " pod="openstack/ceilometer-0" Oct 09 13:49:01 crc kubenswrapper[4762]: I1009 13:49:01.278016 4762 scope.go:117] "RemoveContainer" containerID="cc703fa7263b50d7eb08c5c4f871a1a3194f30fcacedc96a95b33de13c8fcb82" Oct 09 13:49:01 crc kubenswrapper[4762]: E1009 13:49:01.278448 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cc703fa7263b50d7eb08c5c4f871a1a3194f30fcacedc96a95b33de13c8fcb82\": container with ID starting with cc703fa7263b50d7eb08c5c4f871a1a3194f30fcacedc96a95b33de13c8fcb82 not found: ID does not exist" containerID="cc703fa7263b50d7eb08c5c4f871a1a3194f30fcacedc96a95b33de13c8fcb82" Oct 09 13:49:01 crc kubenswrapper[4762]: I1009 13:49:01.278477 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cc703fa7263b50d7eb08c5c4f871a1a3194f30fcacedc96a95b33de13c8fcb82"} err="failed to get container status \"cc703fa7263b50d7eb08c5c4f871a1a3194f30fcacedc96a95b33de13c8fcb82\": rpc error: code = NotFound desc = could not find container \"cc703fa7263b50d7eb08c5c4f871a1a3194f30fcacedc96a95b33de13c8fcb82\": container with ID starting with cc703fa7263b50d7eb08c5c4f871a1a3194f30fcacedc96a95b33de13c8fcb82 not found: ID does not exist" Oct 09 13:49:01 crc kubenswrapper[4762]: I1009 13:49:01.278497 4762 scope.go:117] "RemoveContainer" containerID="06ad52967d34b9ea32fc5f3c7c7e479bcb7cb8cc39f8342d66ee809550e9024b" Oct 09 13:49:01 crc kubenswrapper[4762]: E1009 13:49:01.279202 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"06ad52967d34b9ea32fc5f3c7c7e479bcb7cb8cc39f8342d66ee809550e9024b\": container 
with ID starting with 06ad52967d34b9ea32fc5f3c7c7e479bcb7cb8cc39f8342d66ee809550e9024b not found: ID does not exist" containerID="06ad52967d34b9ea32fc5f3c7c7e479bcb7cb8cc39f8342d66ee809550e9024b" Oct 09 13:49:01 crc kubenswrapper[4762]: I1009 13:49:01.279248 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"06ad52967d34b9ea32fc5f3c7c7e479bcb7cb8cc39f8342d66ee809550e9024b"} err="failed to get container status \"06ad52967d34b9ea32fc5f3c7c7e479bcb7cb8cc39f8342d66ee809550e9024b\": rpc error: code = NotFound desc = could not find container \"06ad52967d34b9ea32fc5f3c7c7e479bcb7cb8cc39f8342d66ee809550e9024b\": container with ID starting with 06ad52967d34b9ea32fc5f3c7c7e479bcb7cb8cc39f8342d66ee809550e9024b not found: ID does not exist" Oct 09 13:49:01 crc kubenswrapper[4762]: I1009 13:49:01.279283 4762 scope.go:117] "RemoveContainer" containerID="e754d109af3252f9935f63e8a2ddadcdf2ca9788b6c2d4ad3eb33eef9084d86e" Oct 09 13:49:01 crc kubenswrapper[4762]: E1009 13:49:01.279933 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e754d109af3252f9935f63e8a2ddadcdf2ca9788b6c2d4ad3eb33eef9084d86e\": container with ID starting with e754d109af3252f9935f63e8a2ddadcdf2ca9788b6c2d4ad3eb33eef9084d86e not found: ID does not exist" containerID="e754d109af3252f9935f63e8a2ddadcdf2ca9788b6c2d4ad3eb33eef9084d86e" Oct 09 13:49:01 crc kubenswrapper[4762]: I1009 13:49:01.280026 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e754d109af3252f9935f63e8a2ddadcdf2ca9788b6c2d4ad3eb33eef9084d86e"} err="failed to get container status \"e754d109af3252f9935f63e8a2ddadcdf2ca9788b6c2d4ad3eb33eef9084d86e\": rpc error: code = NotFound desc = could not find container \"e754d109af3252f9935f63e8a2ddadcdf2ca9788b6c2d4ad3eb33eef9084d86e\": container with ID starting with e754d109af3252f9935f63e8a2ddadcdf2ca9788b6c2d4ad3eb33eef9084d86e not found: ID does not exist" Oct 09 13:49:01 crc kubenswrapper[4762]: I1009 13:49:01.280080 4762 scope.go:117] "RemoveContainer" containerID="ded23220fc58a5b74270cccae60e80e61f61a7352f87c7b55bac7f027c6fd843" Oct 09 13:49:01 crc kubenswrapper[4762]: E1009 13:49:01.280490 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ded23220fc58a5b74270cccae60e80e61f61a7352f87c7b55bac7f027c6fd843\": container with ID starting with ded23220fc58a5b74270cccae60e80e61f61a7352f87c7b55bac7f027c6fd843 not found: ID does not exist" containerID="ded23220fc58a5b74270cccae60e80e61f61a7352f87c7b55bac7f027c6fd843" Oct 09 13:49:01 crc kubenswrapper[4762]: I1009 13:49:01.280524 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ded23220fc58a5b74270cccae60e80e61f61a7352f87c7b55bac7f027c6fd843"} err="failed to get container status \"ded23220fc58a5b74270cccae60e80e61f61a7352f87c7b55bac7f027c6fd843\": rpc error: code = NotFound desc = could not find container \"ded23220fc58a5b74270cccae60e80e61f61a7352f87c7b55bac7f027c6fd843\": container with ID starting with ded23220fc58a5b74270cccae60e80e61f61a7352f87c7b55bac7f027c6fd843 not found: ID does not exist" Oct 09 13:49:01 crc kubenswrapper[4762]: I1009 13:49:01.361236 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/33dcd38f-e126-4635-9c44-48fd9fa3b51d-scripts\") pod \"ceilometer-0\" (UID: 
\"33dcd38f-e126-4635-9c44-48fd9fa3b51d\") " pod="openstack/ceilometer-0" Oct 09 13:49:01 crc kubenswrapper[4762]: I1009 13:49:01.361286 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/33dcd38f-e126-4635-9c44-48fd9fa3b51d-log-httpd\") pod \"ceilometer-0\" (UID: \"33dcd38f-e126-4635-9c44-48fd9fa3b51d\") " pod="openstack/ceilometer-0" Oct 09 13:49:01 crc kubenswrapper[4762]: I1009 13:49:01.361329 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7s6ls\" (UniqueName: \"kubernetes.io/projected/33dcd38f-e126-4635-9c44-48fd9fa3b51d-kube-api-access-7s6ls\") pod \"ceilometer-0\" (UID: \"33dcd38f-e126-4635-9c44-48fd9fa3b51d\") " pod="openstack/ceilometer-0" Oct 09 13:49:01 crc kubenswrapper[4762]: I1009 13:49:01.361366 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/33dcd38f-e126-4635-9c44-48fd9fa3b51d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"33dcd38f-e126-4635-9c44-48fd9fa3b51d\") " pod="openstack/ceilometer-0" Oct 09 13:49:01 crc kubenswrapper[4762]: I1009 13:49:01.361426 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/33dcd38f-e126-4635-9c44-48fd9fa3b51d-config-data\") pod \"ceilometer-0\" (UID: \"33dcd38f-e126-4635-9c44-48fd9fa3b51d\") " pod="openstack/ceilometer-0" Oct 09 13:49:01 crc kubenswrapper[4762]: I1009 13:49:01.361497 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/33dcd38f-e126-4635-9c44-48fd9fa3b51d-run-httpd\") pod \"ceilometer-0\" (UID: \"33dcd38f-e126-4635-9c44-48fd9fa3b51d\") " pod="openstack/ceilometer-0" Oct 09 13:49:01 crc kubenswrapper[4762]: I1009 13:49:01.361533 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/33dcd38f-e126-4635-9c44-48fd9fa3b51d-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"33dcd38f-e126-4635-9c44-48fd9fa3b51d\") " pod="openstack/ceilometer-0" Oct 09 13:49:01 crc kubenswrapper[4762]: I1009 13:49:01.361566 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/33dcd38f-e126-4635-9c44-48fd9fa3b51d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"33dcd38f-e126-4635-9c44-48fd9fa3b51d\") " pod="openstack/ceilometer-0" Oct 09 13:49:01 crc kubenswrapper[4762]: I1009 13:49:01.365150 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/33dcd38f-e126-4635-9c44-48fd9fa3b51d-log-httpd\") pod \"ceilometer-0\" (UID: \"33dcd38f-e126-4635-9c44-48fd9fa3b51d\") " pod="openstack/ceilometer-0" Oct 09 13:49:01 crc kubenswrapper[4762]: I1009 13:49:01.365170 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/33dcd38f-e126-4635-9c44-48fd9fa3b51d-run-httpd\") pod \"ceilometer-0\" (UID: \"33dcd38f-e126-4635-9c44-48fd9fa3b51d\") " pod="openstack/ceilometer-0" Oct 09 13:49:01 crc kubenswrapper[4762]: I1009 13:49:01.367569 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/33dcd38f-e126-4635-9c44-48fd9fa3b51d-scripts\") pod \"ceilometer-0\" (UID: \"33dcd38f-e126-4635-9c44-48fd9fa3b51d\") " 
pod="openstack/ceilometer-0" Oct 09 13:49:01 crc kubenswrapper[4762]: I1009 13:49:01.367569 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/33dcd38f-e126-4635-9c44-48fd9fa3b51d-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"33dcd38f-e126-4635-9c44-48fd9fa3b51d\") " pod="openstack/ceilometer-0" Oct 09 13:49:01 crc kubenswrapper[4762]: I1009 13:49:01.368811 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/33dcd38f-e126-4635-9c44-48fd9fa3b51d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"33dcd38f-e126-4635-9c44-48fd9fa3b51d\") " pod="openstack/ceilometer-0" Oct 09 13:49:01 crc kubenswrapper[4762]: I1009 13:49:01.370181 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/33dcd38f-e126-4635-9c44-48fd9fa3b51d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"33dcd38f-e126-4635-9c44-48fd9fa3b51d\") " pod="openstack/ceilometer-0" Oct 09 13:49:01 crc kubenswrapper[4762]: I1009 13:49:01.379033 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/33dcd38f-e126-4635-9c44-48fd9fa3b51d-config-data\") pod \"ceilometer-0\" (UID: \"33dcd38f-e126-4635-9c44-48fd9fa3b51d\") " pod="openstack/ceilometer-0" Oct 09 13:49:01 crc kubenswrapper[4762]: I1009 13:49:01.379355 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7s6ls\" (UniqueName: \"kubernetes.io/projected/33dcd38f-e126-4635-9c44-48fd9fa3b51d-kube-api-access-7s6ls\") pod \"ceilometer-0\" (UID: \"33dcd38f-e126-4635-9c44-48fd9fa3b51d\") " pod="openstack/ceilometer-0" Oct 09 13:49:01 crc kubenswrapper[4762]: I1009 13:49:01.577092 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 09 13:49:02 crc kubenswrapper[4762]: W1009 13:49:02.000499 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod33dcd38f_e126_4635_9c44_48fd9fa3b51d.slice/crio-d6e0946d0759af9a90b7848e11a15fe59bf8711d7a62658c069f7cc5f7a83399 WatchSource:0}: Error finding container d6e0946d0759af9a90b7848e11a15fe59bf8711d7a62658c069f7cc5f7a83399: Status 404 returned error can't find the container with id d6e0946d0759af9a90b7848e11a15fe59bf8711d7a62658c069f7cc5f7a83399 Oct 09 13:49:02 crc kubenswrapper[4762]: I1009 13:49:02.001963 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 09 13:49:02 crc kubenswrapper[4762]: I1009 13:49:02.150180 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"33dcd38f-e126-4635-9c44-48fd9fa3b51d","Type":"ContainerStarted","Data":"d6e0946d0759af9a90b7848e11a15fe59bf8711d7a62658c069f7cc5f7a83399"} Oct 09 13:49:02 crc kubenswrapper[4762]: I1009 13:49:02.724725 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Oct 09 13:49:02 crc kubenswrapper[4762]: I1009 13:49:02.750057 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 09 13:49:02 crc kubenswrapper[4762]: I1009 13:49:02.750181 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 09 13:49:02 crc kubenswrapper[4762]: I1009 13:49:02.772463 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Oct 09 13:49:02 crc kubenswrapper[4762]: I1009 13:49:02.979769 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f22f1383-fdcf-4018-8a3d-a28c32c0c816" path="/var/lib/kubelet/pods/f22f1383-fdcf-4018-8a3d-a28c32c0c816/volumes" Oct 09 13:49:03 crc kubenswrapper[4762]: I1009 13:49:03.163390 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"33dcd38f-e126-4635-9c44-48fd9fa3b51d","Type":"ContainerStarted","Data":"cc0e42a6c336b10ff7529604845ef18643bb7d98153abd1d9097ec3edf329e27"} Oct 09 13:49:03 crc kubenswrapper[4762]: I1009 13:49:03.198271 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Oct 09 13:49:03 crc kubenswrapper[4762]: I1009 13:49:03.835238 4762 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="637a6b30-9f8c-4ed9-b55f-43d0a04f54f6" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.192:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 09 13:49:03 crc kubenswrapper[4762]: I1009 13:49:03.835283 4762 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="637a6b30-9f8c-4ed9-b55f-43d0a04f54f6" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.192:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 09 13:49:04 crc kubenswrapper[4762]: I1009 13:49:04.174996 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"33dcd38f-e126-4635-9c44-48fd9fa3b51d","Type":"ContainerStarted","Data":"8219c83ad4176eae6a5174e7300dd566b876cc48f2dc59e23db541d1c378661e"} Oct 09 13:49:05 crc kubenswrapper[4762]: I1009 13:49:05.186537 4762 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"33dcd38f-e126-4635-9c44-48fd9fa3b51d","Type":"ContainerStarted","Data":"53c325ed77d005dde26df2012fba7a09c94759752c598eedc58826390e72dff8"} Oct 09 13:49:07 crc kubenswrapper[4762]: I1009 13:49:07.211710 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"33dcd38f-e126-4635-9c44-48fd9fa3b51d","Type":"ContainerStarted","Data":"6c1ee18e92572ccf0146bb918898a9df5a6ec024b74a9907a8fef583409d0281"} Oct 09 13:49:07 crc kubenswrapper[4762]: I1009 13:49:07.212659 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 09 13:49:07 crc kubenswrapper[4762]: I1009 13:49:07.489533 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Oct 09 13:49:07 crc kubenswrapper[4762]: I1009 13:49:07.506066 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.889603375 podStartE2EDuration="6.506046044s" podCreationTimestamp="2025-10-09 13:49:01 +0000 UTC" firstStartedPulling="2025-10-09 13:49:02.003264859 +0000 UTC m=+1417.777055898" lastFinishedPulling="2025-10-09 13:49:06.619707528 +0000 UTC m=+1422.393498567" observedRunningTime="2025-10-09 13:49:07.240226245 +0000 UTC m=+1423.014017284" watchObservedRunningTime="2025-10-09 13:49:07.506046044 +0000 UTC m=+1423.279837083" Oct 09 13:49:08 crc kubenswrapper[4762]: I1009 13:49:08.643120 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Oct 09 13:49:08 crc kubenswrapper[4762]: I1009 13:49:08.644778 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Oct 09 13:49:08 crc kubenswrapper[4762]: I1009 13:49:08.648882 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Oct 09 13:49:09 crc kubenswrapper[4762]: I1009 13:49:09.235588 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Oct 09 13:49:10 crc kubenswrapper[4762]: I1009 13:49:10.241666 4762 generic.go:334] "Generic (PLEG): container finished" podID="f8ac6cff-2144-4d02-964f-8b7d866f0520" containerID="fc250e3d796c373b1c56af7e1cae32a615f042162866abd13fd13f6d9adf108b" exitCode=137 Oct 09 13:49:10 crc kubenswrapper[4762]: I1009 13:49:10.241770 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"f8ac6cff-2144-4d02-964f-8b7d866f0520","Type":"ContainerDied","Data":"fc250e3d796c373b1c56af7e1cae32a615f042162866abd13fd13f6d9adf108b"} Oct 09 13:49:10 crc kubenswrapper[4762]: I1009 13:49:10.243469 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"f8ac6cff-2144-4d02-964f-8b7d866f0520","Type":"ContainerDied","Data":"2d87ffbb4f07b05cfed04916e4e2e594810be2f948641437d165d358cfebc77a"} Oct 09 13:49:10 crc kubenswrapper[4762]: I1009 13:49:10.243520 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2d87ffbb4f07b05cfed04916e4e2e594810be2f948641437d165d358cfebc77a" Oct 09 13:49:10 crc kubenswrapper[4762]: I1009 13:49:10.286340 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 09 13:49:10 crc kubenswrapper[4762]: I1009 13:49:10.349776 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f8ac6cff-2144-4d02-964f-8b7d866f0520-config-data\") pod \"f8ac6cff-2144-4d02-964f-8b7d866f0520\" (UID: \"f8ac6cff-2144-4d02-964f-8b7d866f0520\") " Oct 09 13:49:10 crc kubenswrapper[4762]: I1009 13:49:10.349892 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ssdjb\" (UniqueName: \"kubernetes.io/projected/f8ac6cff-2144-4d02-964f-8b7d866f0520-kube-api-access-ssdjb\") pod \"f8ac6cff-2144-4d02-964f-8b7d866f0520\" (UID: \"f8ac6cff-2144-4d02-964f-8b7d866f0520\") " Oct 09 13:49:10 crc kubenswrapper[4762]: I1009 13:49:10.350959 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f8ac6cff-2144-4d02-964f-8b7d866f0520-combined-ca-bundle\") pod \"f8ac6cff-2144-4d02-964f-8b7d866f0520\" (UID: \"f8ac6cff-2144-4d02-964f-8b7d866f0520\") " Oct 09 13:49:10 crc kubenswrapper[4762]: I1009 13:49:10.356953 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f8ac6cff-2144-4d02-964f-8b7d866f0520-kube-api-access-ssdjb" (OuterVolumeSpecName: "kube-api-access-ssdjb") pod "f8ac6cff-2144-4d02-964f-8b7d866f0520" (UID: "f8ac6cff-2144-4d02-964f-8b7d866f0520"). InnerVolumeSpecName "kube-api-access-ssdjb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:49:10 crc kubenswrapper[4762]: I1009 13:49:10.384094 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f8ac6cff-2144-4d02-964f-8b7d866f0520-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f8ac6cff-2144-4d02-964f-8b7d866f0520" (UID: "f8ac6cff-2144-4d02-964f-8b7d866f0520"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:49:10 crc kubenswrapper[4762]: I1009 13:49:10.388068 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f8ac6cff-2144-4d02-964f-8b7d866f0520-config-data" (OuterVolumeSpecName: "config-data") pod "f8ac6cff-2144-4d02-964f-8b7d866f0520" (UID: "f8ac6cff-2144-4d02-964f-8b7d866f0520"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:49:10 crc kubenswrapper[4762]: I1009 13:49:10.452983 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f8ac6cff-2144-4d02-964f-8b7d866f0520-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 13:49:10 crc kubenswrapper[4762]: I1009 13:49:10.453032 4762 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f8ac6cff-2144-4d02-964f-8b7d866f0520-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 13:49:10 crc kubenswrapper[4762]: I1009 13:49:10.453048 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ssdjb\" (UniqueName: \"kubernetes.io/projected/f8ac6cff-2144-4d02-964f-8b7d866f0520-kube-api-access-ssdjb\") on node \"crc\" DevicePath \"\"" Oct 09 13:49:11 crc kubenswrapper[4762]: I1009 13:49:11.251593 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 09 13:49:11 crc kubenswrapper[4762]: I1009 13:49:11.287669 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 09 13:49:11 crc kubenswrapper[4762]: I1009 13:49:11.306022 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 09 13:49:11 crc kubenswrapper[4762]: I1009 13:49:11.314515 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 09 13:49:11 crc kubenswrapper[4762]: E1009 13:49:11.315112 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f8ac6cff-2144-4d02-964f-8b7d866f0520" containerName="nova-cell1-novncproxy-novncproxy" Oct 09 13:49:11 crc kubenswrapper[4762]: I1009 13:49:11.315137 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="f8ac6cff-2144-4d02-964f-8b7d866f0520" containerName="nova-cell1-novncproxy-novncproxy" Oct 09 13:49:11 crc kubenswrapper[4762]: I1009 13:49:11.315361 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="f8ac6cff-2144-4d02-964f-8b7d866f0520" containerName="nova-cell1-novncproxy-novncproxy" Oct 09 13:49:11 crc kubenswrapper[4762]: I1009 13:49:11.316219 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 09 13:49:11 crc kubenswrapper[4762]: I1009 13:49:11.319121 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Oct 09 13:49:11 crc kubenswrapper[4762]: I1009 13:49:11.319322 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-public-svc" Oct 09 13:49:11 crc kubenswrapper[4762]: I1009 13:49:11.319623 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-vencrypt" Oct 09 13:49:11 crc kubenswrapper[4762]: I1009 13:49:11.324300 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 09 13:49:11 crc kubenswrapper[4762]: I1009 13:49:11.377261 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9xn27\" (UniqueName: \"kubernetes.io/projected/aa137ef5-e1eb-4e54-a8e3-4a312e167837-kube-api-access-9xn27\") pod \"nova-cell1-novncproxy-0\" (UID: \"aa137ef5-e1eb-4e54-a8e3-4a312e167837\") " pod="openstack/nova-cell1-novncproxy-0" Oct 09 13:49:11 crc kubenswrapper[4762]: I1009 13:49:11.377319 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/aa137ef5-e1eb-4e54-a8e3-4a312e167837-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"aa137ef5-e1eb-4e54-a8e3-4a312e167837\") " pod="openstack/nova-cell1-novncproxy-0" Oct 09 13:49:11 crc kubenswrapper[4762]: I1009 13:49:11.377545 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa137ef5-e1eb-4e54-a8e3-4a312e167837-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"aa137ef5-e1eb-4e54-a8e3-4a312e167837\") " pod="openstack/nova-cell1-novncproxy-0" Oct 09 13:49:11 crc kubenswrapper[4762]: I1009 13:49:11.377770 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/aa137ef5-e1eb-4e54-a8e3-4a312e167837-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"aa137ef5-e1eb-4e54-a8e3-4a312e167837\") " pod="openstack/nova-cell1-novncproxy-0" Oct 09 13:49:11 crc kubenswrapper[4762]: I1009 13:49:11.377960 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa137ef5-e1eb-4e54-a8e3-4a312e167837-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"aa137ef5-e1eb-4e54-a8e3-4a312e167837\") " pod="openstack/nova-cell1-novncproxy-0" Oct 09 13:49:11 crc kubenswrapper[4762]: I1009 13:49:11.480252 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa137ef5-e1eb-4e54-a8e3-4a312e167837-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"aa137ef5-e1eb-4e54-a8e3-4a312e167837\") " pod="openstack/nova-cell1-novncproxy-0" Oct 09 13:49:11 crc kubenswrapper[4762]: I1009 13:49:11.480383 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/aa137ef5-e1eb-4e54-a8e3-4a312e167837-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"aa137ef5-e1eb-4e54-a8e3-4a312e167837\") " pod="openstack/nova-cell1-novncproxy-0" Oct 09 13:49:11 crc kubenswrapper[4762]: I1009 13:49:11.480468 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa137ef5-e1eb-4e54-a8e3-4a312e167837-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"aa137ef5-e1eb-4e54-a8e3-4a312e167837\") " pod="openstack/nova-cell1-novncproxy-0" Oct 09 13:49:11 crc kubenswrapper[4762]: I1009 13:49:11.480509 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9xn27\" (UniqueName: \"kubernetes.io/projected/aa137ef5-e1eb-4e54-a8e3-4a312e167837-kube-api-access-9xn27\") pod \"nova-cell1-novncproxy-0\" (UID: \"aa137ef5-e1eb-4e54-a8e3-4a312e167837\") " pod="openstack/nova-cell1-novncproxy-0" Oct 09 13:49:11 crc kubenswrapper[4762]: I1009 13:49:11.480539 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/aa137ef5-e1eb-4e54-a8e3-4a312e167837-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"aa137ef5-e1eb-4e54-a8e3-4a312e167837\") " pod="openstack/nova-cell1-novncproxy-0" Oct 09 13:49:11 crc kubenswrapper[4762]: I1009 13:49:11.485321 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa137ef5-e1eb-4e54-a8e3-4a312e167837-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"aa137ef5-e1eb-4e54-a8e3-4a312e167837\") " pod="openstack/nova-cell1-novncproxy-0" Oct 09 13:49:11 crc kubenswrapper[4762]: I1009 13:49:11.485893 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/aa137ef5-e1eb-4e54-a8e3-4a312e167837-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"aa137ef5-e1eb-4e54-a8e3-4a312e167837\") " pod="openstack/nova-cell1-novncproxy-0" Oct 09 13:49:11 crc kubenswrapper[4762]: I1009 13:49:11.486678 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa137ef5-e1eb-4e54-a8e3-4a312e167837-combined-ca-bundle\") pod 
\"nova-cell1-novncproxy-0\" (UID: \"aa137ef5-e1eb-4e54-a8e3-4a312e167837\") " pod="openstack/nova-cell1-novncproxy-0" Oct 09 13:49:11 crc kubenswrapper[4762]: I1009 13:49:11.487125 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/aa137ef5-e1eb-4e54-a8e3-4a312e167837-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"aa137ef5-e1eb-4e54-a8e3-4a312e167837\") " pod="openstack/nova-cell1-novncproxy-0" Oct 09 13:49:11 crc kubenswrapper[4762]: I1009 13:49:11.498838 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9xn27\" (UniqueName: \"kubernetes.io/projected/aa137ef5-e1eb-4e54-a8e3-4a312e167837-kube-api-access-9xn27\") pod \"nova-cell1-novncproxy-0\" (UID: \"aa137ef5-e1eb-4e54-a8e3-4a312e167837\") " pod="openstack/nova-cell1-novncproxy-0" Oct 09 13:49:11 crc kubenswrapper[4762]: I1009 13:49:11.642078 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 09 13:49:11 crc kubenswrapper[4762]: I1009 13:49:11.969307 4762 patch_prober.go:28] interesting pod/machine-config-daemon-5v6hv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 13:49:11 crc kubenswrapper[4762]: I1009 13:49:11.969358 4762 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 13:49:12 crc kubenswrapper[4762]: I1009 13:49:12.095996 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 09 13:49:12 crc kubenswrapper[4762]: I1009 13:49:12.261108 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"aa137ef5-e1eb-4e54-a8e3-4a312e167837","Type":"ContainerStarted","Data":"dc2d9907e7697a0cfb8df77856e5e1d951b85a3b4800983699c815ff36fc1ac7"} Oct 09 13:49:12 crc kubenswrapper[4762]: I1009 13:49:12.752361 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Oct 09 13:49:12 crc kubenswrapper[4762]: I1009 13:49:12.752854 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Oct 09 13:49:12 crc kubenswrapper[4762]: I1009 13:49:12.756002 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Oct 09 13:49:12 crc kubenswrapper[4762]: I1009 13:49:12.759537 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Oct 09 13:49:12 crc kubenswrapper[4762]: I1009 13:49:12.977015 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f8ac6cff-2144-4d02-964f-8b7d866f0520" path="/var/lib/kubelet/pods/f8ac6cff-2144-4d02-964f-8b7d866f0520/volumes" Oct 09 13:49:13 crc kubenswrapper[4762]: I1009 13:49:13.274189 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"aa137ef5-e1eb-4e54-a8e3-4a312e167837","Type":"ContainerStarted","Data":"280e1070b26b6e461abd511c142f62c2fe13a17c995043bb5a94a0b15d693e00"} Oct 09 13:49:13 crc kubenswrapper[4762]: I1009 
13:49:13.274227 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Oct 09 13:49:13 crc kubenswrapper[4762]: I1009 13:49:13.287334 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Oct 09 13:49:13 crc kubenswrapper[4762]: I1009 13:49:13.293359 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.293339626 podStartE2EDuration="2.293339626s" podCreationTimestamp="2025-10-09 13:49:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:49:13.291357974 +0000 UTC m=+1429.065149033" watchObservedRunningTime="2025-10-09 13:49:13.293339626 +0000 UTC m=+1429.067130665" Oct 09 13:49:13 crc kubenswrapper[4762]: I1009 13:49:13.487817 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5c7b6c5df9-77jpt"] Oct 09 13:49:13 crc kubenswrapper[4762]: I1009 13:49:13.491617 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c7b6c5df9-77jpt" Oct 09 13:49:13 crc kubenswrapper[4762]: I1009 13:49:13.530419 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2fc7e86f-4e77-45a4-a90c-6b06d4907ca7-ovsdbserver-nb\") pod \"dnsmasq-dns-5c7b6c5df9-77jpt\" (UID: \"2fc7e86f-4e77-45a4-a90c-6b06d4907ca7\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-77jpt" Oct 09 13:49:13 crc kubenswrapper[4762]: I1009 13:49:13.530489 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2fc7e86f-4e77-45a4-a90c-6b06d4907ca7-dns-swift-storage-0\") pod \"dnsmasq-dns-5c7b6c5df9-77jpt\" (UID: \"2fc7e86f-4e77-45a4-a90c-6b06d4907ca7\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-77jpt" Oct 09 13:49:13 crc kubenswrapper[4762]: I1009 13:49:13.530505 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2fc7e86f-4e77-45a4-a90c-6b06d4907ca7-dns-svc\") pod \"dnsmasq-dns-5c7b6c5df9-77jpt\" (UID: \"2fc7e86f-4e77-45a4-a90c-6b06d4907ca7\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-77jpt" Oct 09 13:49:13 crc kubenswrapper[4762]: I1009 13:49:13.530556 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2fc7e86f-4e77-45a4-a90c-6b06d4907ca7-ovsdbserver-sb\") pod \"dnsmasq-dns-5c7b6c5df9-77jpt\" (UID: \"2fc7e86f-4e77-45a4-a90c-6b06d4907ca7\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-77jpt" Oct 09 13:49:13 crc kubenswrapper[4762]: I1009 13:49:13.530588 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2fc7e86f-4e77-45a4-a90c-6b06d4907ca7-config\") pod \"dnsmasq-dns-5c7b6c5df9-77jpt\" (UID: \"2fc7e86f-4e77-45a4-a90c-6b06d4907ca7\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-77jpt" Oct 09 13:49:13 crc kubenswrapper[4762]: I1009 13:49:13.530824 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jnsjf\" (UniqueName: \"kubernetes.io/projected/2fc7e86f-4e77-45a4-a90c-6b06d4907ca7-kube-api-access-jnsjf\") pod \"dnsmasq-dns-5c7b6c5df9-77jpt\" (UID: 
\"2fc7e86f-4e77-45a4-a90c-6b06d4907ca7\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-77jpt" Oct 09 13:49:13 crc kubenswrapper[4762]: I1009 13:49:13.534105 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c7b6c5df9-77jpt"] Oct 09 13:49:13 crc kubenswrapper[4762]: I1009 13:49:13.632587 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jnsjf\" (UniqueName: \"kubernetes.io/projected/2fc7e86f-4e77-45a4-a90c-6b06d4907ca7-kube-api-access-jnsjf\") pod \"dnsmasq-dns-5c7b6c5df9-77jpt\" (UID: \"2fc7e86f-4e77-45a4-a90c-6b06d4907ca7\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-77jpt" Oct 09 13:49:13 crc kubenswrapper[4762]: I1009 13:49:13.632785 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2fc7e86f-4e77-45a4-a90c-6b06d4907ca7-ovsdbserver-nb\") pod \"dnsmasq-dns-5c7b6c5df9-77jpt\" (UID: \"2fc7e86f-4e77-45a4-a90c-6b06d4907ca7\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-77jpt" Oct 09 13:49:13 crc kubenswrapper[4762]: I1009 13:49:13.632847 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2fc7e86f-4e77-45a4-a90c-6b06d4907ca7-dns-swift-storage-0\") pod \"dnsmasq-dns-5c7b6c5df9-77jpt\" (UID: \"2fc7e86f-4e77-45a4-a90c-6b06d4907ca7\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-77jpt" Oct 09 13:49:13 crc kubenswrapper[4762]: I1009 13:49:13.632871 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2fc7e86f-4e77-45a4-a90c-6b06d4907ca7-dns-svc\") pod \"dnsmasq-dns-5c7b6c5df9-77jpt\" (UID: \"2fc7e86f-4e77-45a4-a90c-6b06d4907ca7\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-77jpt" Oct 09 13:49:13 crc kubenswrapper[4762]: I1009 13:49:13.632941 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2fc7e86f-4e77-45a4-a90c-6b06d4907ca7-ovsdbserver-sb\") pod \"dnsmasq-dns-5c7b6c5df9-77jpt\" (UID: \"2fc7e86f-4e77-45a4-a90c-6b06d4907ca7\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-77jpt" Oct 09 13:49:13 crc kubenswrapper[4762]: I1009 13:49:13.632989 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2fc7e86f-4e77-45a4-a90c-6b06d4907ca7-config\") pod \"dnsmasq-dns-5c7b6c5df9-77jpt\" (UID: \"2fc7e86f-4e77-45a4-a90c-6b06d4907ca7\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-77jpt" Oct 09 13:49:13 crc kubenswrapper[4762]: I1009 13:49:13.634186 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2fc7e86f-4e77-45a4-a90c-6b06d4907ca7-config\") pod \"dnsmasq-dns-5c7b6c5df9-77jpt\" (UID: \"2fc7e86f-4e77-45a4-a90c-6b06d4907ca7\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-77jpt" Oct 09 13:49:13 crc kubenswrapper[4762]: I1009 13:49:13.634333 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2fc7e86f-4e77-45a4-a90c-6b06d4907ca7-dns-swift-storage-0\") pod \"dnsmasq-dns-5c7b6c5df9-77jpt\" (UID: \"2fc7e86f-4e77-45a4-a90c-6b06d4907ca7\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-77jpt" Oct 09 13:49:13 crc kubenswrapper[4762]: I1009 13:49:13.634803 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: 
\"kubernetes.io/configmap/2fc7e86f-4e77-45a4-a90c-6b06d4907ca7-ovsdbserver-sb\") pod \"dnsmasq-dns-5c7b6c5df9-77jpt\" (UID: \"2fc7e86f-4e77-45a4-a90c-6b06d4907ca7\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-77jpt" Oct 09 13:49:13 crc kubenswrapper[4762]: I1009 13:49:13.634917 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2fc7e86f-4e77-45a4-a90c-6b06d4907ca7-ovsdbserver-nb\") pod \"dnsmasq-dns-5c7b6c5df9-77jpt\" (UID: \"2fc7e86f-4e77-45a4-a90c-6b06d4907ca7\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-77jpt" Oct 09 13:49:13 crc kubenswrapper[4762]: I1009 13:49:13.635265 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2fc7e86f-4e77-45a4-a90c-6b06d4907ca7-dns-svc\") pod \"dnsmasq-dns-5c7b6c5df9-77jpt\" (UID: \"2fc7e86f-4e77-45a4-a90c-6b06d4907ca7\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-77jpt" Oct 09 13:49:13 crc kubenswrapper[4762]: I1009 13:49:13.663106 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jnsjf\" (UniqueName: \"kubernetes.io/projected/2fc7e86f-4e77-45a4-a90c-6b06d4907ca7-kube-api-access-jnsjf\") pod \"dnsmasq-dns-5c7b6c5df9-77jpt\" (UID: \"2fc7e86f-4e77-45a4-a90c-6b06d4907ca7\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-77jpt" Oct 09 13:49:13 crc kubenswrapper[4762]: I1009 13:49:13.819074 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c7b6c5df9-77jpt" Oct 09 13:49:14 crc kubenswrapper[4762]: W1009 13:49:14.340241 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2fc7e86f_4e77_45a4_a90c_6b06d4907ca7.slice/crio-8d20e820d1db03907c6e36d4df28f97f67b91d5a9d6fc71befa90aefcfc61daa WatchSource:0}: Error finding container 8d20e820d1db03907c6e36d4df28f97f67b91d5a9d6fc71befa90aefcfc61daa: Status 404 returned error can't find the container with id 8d20e820d1db03907c6e36d4df28f97f67b91d5a9d6fc71befa90aefcfc61daa Oct 09 13:49:14 crc kubenswrapper[4762]: I1009 13:49:14.346671 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c7b6c5df9-77jpt"] Oct 09 13:49:15 crc kubenswrapper[4762]: I1009 13:49:15.307366 4762 generic.go:334] "Generic (PLEG): container finished" podID="2fc7e86f-4e77-45a4-a90c-6b06d4907ca7" containerID="17dc8fd70f9ef6100e3f92a81351eebf8591a1104c77b2be68fae39f183e2e0c" exitCode=0 Oct 09 13:49:15 crc kubenswrapper[4762]: I1009 13:49:15.307587 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c7b6c5df9-77jpt" event={"ID":"2fc7e86f-4e77-45a4-a90c-6b06d4907ca7","Type":"ContainerDied","Data":"17dc8fd70f9ef6100e3f92a81351eebf8591a1104c77b2be68fae39f183e2e0c"} Oct 09 13:49:15 crc kubenswrapper[4762]: I1009 13:49:15.307760 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c7b6c5df9-77jpt" event={"ID":"2fc7e86f-4e77-45a4-a90c-6b06d4907ca7","Type":"ContainerStarted","Data":"8d20e820d1db03907c6e36d4df28f97f67b91d5a9d6fc71befa90aefcfc61daa"} Oct 09 13:49:15 crc kubenswrapper[4762]: I1009 13:49:15.373105 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 09 13:49:15 crc kubenswrapper[4762]: I1009 13:49:15.373604 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="33dcd38f-e126-4635-9c44-48fd9fa3b51d" containerName="ceilometer-central-agent" 
containerID="cri-o://cc0e42a6c336b10ff7529604845ef18643bb7d98153abd1d9097ec3edf329e27" gracePeriod=30 Oct 09 13:49:15 crc kubenswrapper[4762]: I1009 13:49:15.373833 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="33dcd38f-e126-4635-9c44-48fd9fa3b51d" containerName="ceilometer-notification-agent" containerID="cri-o://8219c83ad4176eae6a5174e7300dd566b876cc48f2dc59e23db541d1c378661e" gracePeriod=30 Oct 09 13:49:15 crc kubenswrapper[4762]: I1009 13:49:15.373892 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="33dcd38f-e126-4635-9c44-48fd9fa3b51d" containerName="sg-core" containerID="cri-o://53c325ed77d005dde26df2012fba7a09c94759752c598eedc58826390e72dff8" gracePeriod=30 Oct 09 13:49:15 crc kubenswrapper[4762]: I1009 13:49:15.373908 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="33dcd38f-e126-4635-9c44-48fd9fa3b51d" containerName="proxy-httpd" containerID="cri-o://6c1ee18e92572ccf0146bb918898a9df5a6ec024b74a9907a8fef583409d0281" gracePeriod=30 Oct 09 13:49:15 crc kubenswrapper[4762]: I1009 13:49:15.970558 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 09 13:49:16 crc kubenswrapper[4762]: I1009 13:49:16.319303 4762 generic.go:334] "Generic (PLEG): container finished" podID="33dcd38f-e126-4635-9c44-48fd9fa3b51d" containerID="6c1ee18e92572ccf0146bb918898a9df5a6ec024b74a9907a8fef583409d0281" exitCode=0 Oct 09 13:49:16 crc kubenswrapper[4762]: I1009 13:49:16.319340 4762 generic.go:334] "Generic (PLEG): container finished" podID="33dcd38f-e126-4635-9c44-48fd9fa3b51d" containerID="53c325ed77d005dde26df2012fba7a09c94759752c598eedc58826390e72dff8" exitCode=2 Oct 09 13:49:16 crc kubenswrapper[4762]: I1009 13:49:16.319353 4762 generic.go:334] "Generic (PLEG): container finished" podID="33dcd38f-e126-4635-9c44-48fd9fa3b51d" containerID="cc0e42a6c336b10ff7529604845ef18643bb7d98153abd1d9097ec3edf329e27" exitCode=0 Oct 09 13:49:16 crc kubenswrapper[4762]: I1009 13:49:16.319410 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"33dcd38f-e126-4635-9c44-48fd9fa3b51d","Type":"ContainerDied","Data":"6c1ee18e92572ccf0146bb918898a9df5a6ec024b74a9907a8fef583409d0281"} Oct 09 13:49:16 crc kubenswrapper[4762]: I1009 13:49:16.319436 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"33dcd38f-e126-4635-9c44-48fd9fa3b51d","Type":"ContainerDied","Data":"53c325ed77d005dde26df2012fba7a09c94759752c598eedc58826390e72dff8"} Oct 09 13:49:16 crc kubenswrapper[4762]: I1009 13:49:16.319447 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"33dcd38f-e126-4635-9c44-48fd9fa3b51d","Type":"ContainerDied","Data":"cc0e42a6c336b10ff7529604845ef18643bb7d98153abd1d9097ec3edf329e27"} Oct 09 13:49:16 crc kubenswrapper[4762]: I1009 13:49:16.322353 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c7b6c5df9-77jpt" event={"ID":"2fc7e86f-4e77-45a4-a90c-6b06d4907ca7","Type":"ContainerStarted","Data":"9402fbdb8687b707721113b87e240a55575312969cee38eb4fd38e01f06c6a7b"} Oct 09 13:49:16 crc kubenswrapper[4762]: I1009 13:49:16.322376 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="637a6b30-9f8c-4ed9-b55f-43d0a04f54f6" containerName="nova-api-log" 
containerID="cri-o://54076708122d199cd43af8b0e038b37186d01b046e14e3e4fc13275f0a827c70" gracePeriod=30 Oct 09 13:49:16 crc kubenswrapper[4762]: I1009 13:49:16.322665 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="637a6b30-9f8c-4ed9-b55f-43d0a04f54f6" containerName="nova-api-api" containerID="cri-o://c47eb940d52ce275fb583df2b0493170537ac491c37313c4b6bab7dcc2822722" gracePeriod=30 Oct 09 13:49:16 crc kubenswrapper[4762]: I1009 13:49:16.347688 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5c7b6c5df9-77jpt" podStartSLOduration=3.347665612 podStartE2EDuration="3.347665612s" podCreationTimestamp="2025-10-09 13:49:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:49:16.345010612 +0000 UTC m=+1432.118801651" watchObservedRunningTime="2025-10-09 13:49:16.347665612 +0000 UTC m=+1432.121456681" Oct 09 13:49:16 crc kubenswrapper[4762]: I1009 13:49:16.643077 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Oct 09 13:49:17 crc kubenswrapper[4762]: I1009 13:49:17.188551 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 09 13:49:17 crc kubenswrapper[4762]: I1009 13:49:17.244510 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/33dcd38f-e126-4635-9c44-48fd9fa3b51d-run-httpd\") pod \"33dcd38f-e126-4635-9c44-48fd9fa3b51d\" (UID: \"33dcd38f-e126-4635-9c44-48fd9fa3b51d\") " Oct 09 13:49:17 crc kubenswrapper[4762]: I1009 13:49:17.244654 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/33dcd38f-e126-4635-9c44-48fd9fa3b51d-scripts\") pod \"33dcd38f-e126-4635-9c44-48fd9fa3b51d\" (UID: \"33dcd38f-e126-4635-9c44-48fd9fa3b51d\") " Oct 09 13:49:17 crc kubenswrapper[4762]: I1009 13:49:17.244691 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/33dcd38f-e126-4635-9c44-48fd9fa3b51d-log-httpd\") pod \"33dcd38f-e126-4635-9c44-48fd9fa3b51d\" (UID: \"33dcd38f-e126-4635-9c44-48fd9fa3b51d\") " Oct 09 13:49:17 crc kubenswrapper[4762]: I1009 13:49:17.244725 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/33dcd38f-e126-4635-9c44-48fd9fa3b51d-config-data\") pod \"33dcd38f-e126-4635-9c44-48fd9fa3b51d\" (UID: \"33dcd38f-e126-4635-9c44-48fd9fa3b51d\") " Oct 09 13:49:17 crc kubenswrapper[4762]: I1009 13:49:17.244878 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/33dcd38f-e126-4635-9c44-48fd9fa3b51d-combined-ca-bundle\") pod \"33dcd38f-e126-4635-9c44-48fd9fa3b51d\" (UID: \"33dcd38f-e126-4635-9c44-48fd9fa3b51d\") " Oct 09 13:49:17 crc kubenswrapper[4762]: I1009 13:49:17.244924 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7s6ls\" (UniqueName: \"kubernetes.io/projected/33dcd38f-e126-4635-9c44-48fd9fa3b51d-kube-api-access-7s6ls\") pod \"33dcd38f-e126-4635-9c44-48fd9fa3b51d\" (UID: \"33dcd38f-e126-4635-9c44-48fd9fa3b51d\") " Oct 09 13:49:17 crc kubenswrapper[4762]: I1009 13:49:17.245048 4762 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/33dcd38f-e126-4635-9c44-48fd9fa3b51d-sg-core-conf-yaml\") pod \"33dcd38f-e126-4635-9c44-48fd9fa3b51d\" (UID: \"33dcd38f-e126-4635-9c44-48fd9fa3b51d\") " Oct 09 13:49:17 crc kubenswrapper[4762]: I1009 13:49:17.245103 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/33dcd38f-e126-4635-9c44-48fd9fa3b51d-ceilometer-tls-certs\") pod \"33dcd38f-e126-4635-9c44-48fd9fa3b51d\" (UID: \"33dcd38f-e126-4635-9c44-48fd9fa3b51d\") " Oct 09 13:49:17 crc kubenswrapper[4762]: I1009 13:49:17.244914 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/33dcd38f-e126-4635-9c44-48fd9fa3b51d-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "33dcd38f-e126-4635-9c44-48fd9fa3b51d" (UID: "33dcd38f-e126-4635-9c44-48fd9fa3b51d"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 13:49:17 crc kubenswrapper[4762]: I1009 13:49:17.246444 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/33dcd38f-e126-4635-9c44-48fd9fa3b51d-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "33dcd38f-e126-4635-9c44-48fd9fa3b51d" (UID: "33dcd38f-e126-4635-9c44-48fd9fa3b51d"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 13:49:17 crc kubenswrapper[4762]: I1009 13:49:17.251685 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/33dcd38f-e126-4635-9c44-48fd9fa3b51d-scripts" (OuterVolumeSpecName: "scripts") pod "33dcd38f-e126-4635-9c44-48fd9fa3b51d" (UID: "33dcd38f-e126-4635-9c44-48fd9fa3b51d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:49:17 crc kubenswrapper[4762]: I1009 13:49:17.263379 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/33dcd38f-e126-4635-9c44-48fd9fa3b51d-kube-api-access-7s6ls" (OuterVolumeSpecName: "kube-api-access-7s6ls") pod "33dcd38f-e126-4635-9c44-48fd9fa3b51d" (UID: "33dcd38f-e126-4635-9c44-48fd9fa3b51d"). InnerVolumeSpecName "kube-api-access-7s6ls". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:49:17 crc kubenswrapper[4762]: I1009 13:49:17.283824 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/33dcd38f-e126-4635-9c44-48fd9fa3b51d-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "33dcd38f-e126-4635-9c44-48fd9fa3b51d" (UID: "33dcd38f-e126-4635-9c44-48fd9fa3b51d"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:49:17 crc kubenswrapper[4762]: I1009 13:49:17.332397 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/33dcd38f-e126-4635-9c44-48fd9fa3b51d-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "33dcd38f-e126-4635-9c44-48fd9fa3b51d" (UID: "33dcd38f-e126-4635-9c44-48fd9fa3b51d"). InnerVolumeSpecName "ceilometer-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:49:17 crc kubenswrapper[4762]: I1009 13:49:17.337844 4762 generic.go:334] "Generic (PLEG): container finished" podID="637a6b30-9f8c-4ed9-b55f-43d0a04f54f6" containerID="54076708122d199cd43af8b0e038b37186d01b046e14e3e4fc13275f0a827c70" exitCode=143 Oct 09 13:49:17 crc kubenswrapper[4762]: I1009 13:49:17.338013 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"637a6b30-9f8c-4ed9-b55f-43d0a04f54f6","Type":"ContainerDied","Data":"54076708122d199cd43af8b0e038b37186d01b046e14e3e4fc13275f0a827c70"} Oct 09 13:49:17 crc kubenswrapper[4762]: I1009 13:49:17.341249 4762 generic.go:334] "Generic (PLEG): container finished" podID="33dcd38f-e126-4635-9c44-48fd9fa3b51d" containerID="8219c83ad4176eae6a5174e7300dd566b876cc48f2dc59e23db541d1c378661e" exitCode=0 Oct 09 13:49:17 crc kubenswrapper[4762]: I1009 13:49:17.341370 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"33dcd38f-e126-4635-9c44-48fd9fa3b51d","Type":"ContainerDied","Data":"8219c83ad4176eae6a5174e7300dd566b876cc48f2dc59e23db541d1c378661e"} Oct 09 13:49:17 crc kubenswrapper[4762]: I1009 13:49:17.341402 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"33dcd38f-e126-4635-9c44-48fd9fa3b51d","Type":"ContainerDied","Data":"d6e0946d0759af9a90b7848e11a15fe59bf8711d7a62658c069f7cc5f7a83399"} Oct 09 13:49:17 crc kubenswrapper[4762]: I1009 13:49:17.341442 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5c7b6c5df9-77jpt" Oct 09 13:49:17 crc kubenswrapper[4762]: I1009 13:49:17.341463 4762 scope.go:117] "RemoveContainer" containerID="6c1ee18e92572ccf0146bb918898a9df5a6ec024b74a9907a8fef583409d0281" Oct 09 13:49:17 crc kubenswrapper[4762]: I1009 13:49:17.341661 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 09 13:49:17 crc kubenswrapper[4762]: I1009 13:49:17.346950 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7s6ls\" (UniqueName: \"kubernetes.io/projected/33dcd38f-e126-4635-9c44-48fd9fa3b51d-kube-api-access-7s6ls\") on node \"crc\" DevicePath \"\"" Oct 09 13:49:17 crc kubenswrapper[4762]: I1009 13:49:17.347144 4762 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/33dcd38f-e126-4635-9c44-48fd9fa3b51d-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 09 13:49:17 crc kubenswrapper[4762]: I1009 13:49:17.347243 4762 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/33dcd38f-e126-4635-9c44-48fd9fa3b51d-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 09 13:49:17 crc kubenswrapper[4762]: I1009 13:49:17.347359 4762 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/33dcd38f-e126-4635-9c44-48fd9fa3b51d-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 09 13:49:17 crc kubenswrapper[4762]: I1009 13:49:17.347415 4762 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/33dcd38f-e126-4635-9c44-48fd9fa3b51d-scripts\") on node \"crc\" DevicePath \"\"" Oct 09 13:49:17 crc kubenswrapper[4762]: I1009 13:49:17.347475 4762 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/33dcd38f-e126-4635-9c44-48fd9fa3b51d-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 09 13:49:17 crc kubenswrapper[4762]: I1009 13:49:17.362912 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/33dcd38f-e126-4635-9c44-48fd9fa3b51d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "33dcd38f-e126-4635-9c44-48fd9fa3b51d" (UID: "33dcd38f-e126-4635-9c44-48fd9fa3b51d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:49:17 crc kubenswrapper[4762]: I1009 13:49:17.376302 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/33dcd38f-e126-4635-9c44-48fd9fa3b51d-config-data" (OuterVolumeSpecName: "config-data") pod "33dcd38f-e126-4635-9c44-48fd9fa3b51d" (UID: "33dcd38f-e126-4635-9c44-48fd9fa3b51d"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:49:17 crc kubenswrapper[4762]: I1009 13:49:17.380035 4762 scope.go:117] "RemoveContainer" containerID="53c325ed77d005dde26df2012fba7a09c94759752c598eedc58826390e72dff8" Oct 09 13:49:17 crc kubenswrapper[4762]: I1009 13:49:17.406968 4762 scope.go:117] "RemoveContainer" containerID="8219c83ad4176eae6a5174e7300dd566b876cc48f2dc59e23db541d1c378661e" Oct 09 13:49:17 crc kubenswrapper[4762]: I1009 13:49:17.427940 4762 scope.go:117] "RemoveContainer" containerID="cc0e42a6c336b10ff7529604845ef18643bb7d98153abd1d9097ec3edf329e27" Oct 09 13:49:17 crc kubenswrapper[4762]: I1009 13:49:17.447072 4762 scope.go:117] "RemoveContainer" containerID="6c1ee18e92572ccf0146bb918898a9df5a6ec024b74a9907a8fef583409d0281" Oct 09 13:49:17 crc kubenswrapper[4762]: E1009 13:49:17.447473 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6c1ee18e92572ccf0146bb918898a9df5a6ec024b74a9907a8fef583409d0281\": container with ID starting with 6c1ee18e92572ccf0146bb918898a9df5a6ec024b74a9907a8fef583409d0281 not found: ID does not exist" containerID="6c1ee18e92572ccf0146bb918898a9df5a6ec024b74a9907a8fef583409d0281" Oct 09 13:49:17 crc kubenswrapper[4762]: I1009 13:49:17.447513 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6c1ee18e92572ccf0146bb918898a9df5a6ec024b74a9907a8fef583409d0281"} err="failed to get container status \"6c1ee18e92572ccf0146bb918898a9df5a6ec024b74a9907a8fef583409d0281\": rpc error: code = NotFound desc = could not find container \"6c1ee18e92572ccf0146bb918898a9df5a6ec024b74a9907a8fef583409d0281\": container with ID starting with 6c1ee18e92572ccf0146bb918898a9df5a6ec024b74a9907a8fef583409d0281 not found: ID does not exist" Oct 09 13:49:17 crc kubenswrapper[4762]: I1009 13:49:17.447542 4762 scope.go:117] "RemoveContainer" containerID="53c325ed77d005dde26df2012fba7a09c94759752c598eedc58826390e72dff8" Oct 09 13:49:17 crc kubenswrapper[4762]: E1009 13:49:17.447830 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"53c325ed77d005dde26df2012fba7a09c94759752c598eedc58826390e72dff8\": container with ID starting with 53c325ed77d005dde26df2012fba7a09c94759752c598eedc58826390e72dff8 not found: ID does not exist" containerID="53c325ed77d005dde26df2012fba7a09c94759752c598eedc58826390e72dff8" Oct 09 13:49:17 crc kubenswrapper[4762]: I1009 13:49:17.447917 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"53c325ed77d005dde26df2012fba7a09c94759752c598eedc58826390e72dff8"} err="failed to get container status \"53c325ed77d005dde26df2012fba7a09c94759752c598eedc58826390e72dff8\": rpc error: code = NotFound desc = could not find container \"53c325ed77d005dde26df2012fba7a09c94759752c598eedc58826390e72dff8\": container with ID starting with 53c325ed77d005dde26df2012fba7a09c94759752c598eedc58826390e72dff8 not found: ID does not exist" Oct 09 13:49:17 crc kubenswrapper[4762]: I1009 13:49:17.448007 4762 scope.go:117] "RemoveContainer" containerID="8219c83ad4176eae6a5174e7300dd566b876cc48f2dc59e23db541d1c378661e" Oct 09 13:49:17 crc kubenswrapper[4762]: E1009 13:49:17.448406 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8219c83ad4176eae6a5174e7300dd566b876cc48f2dc59e23db541d1c378661e\": container with ID starting with 
8219c83ad4176eae6a5174e7300dd566b876cc48f2dc59e23db541d1c378661e not found: ID does not exist" containerID="8219c83ad4176eae6a5174e7300dd566b876cc48f2dc59e23db541d1c378661e" Oct 09 13:49:17 crc kubenswrapper[4762]: I1009 13:49:17.448455 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8219c83ad4176eae6a5174e7300dd566b876cc48f2dc59e23db541d1c378661e"} err="failed to get container status \"8219c83ad4176eae6a5174e7300dd566b876cc48f2dc59e23db541d1c378661e\": rpc error: code = NotFound desc = could not find container \"8219c83ad4176eae6a5174e7300dd566b876cc48f2dc59e23db541d1c378661e\": container with ID starting with 8219c83ad4176eae6a5174e7300dd566b876cc48f2dc59e23db541d1c378661e not found: ID does not exist" Oct 09 13:49:17 crc kubenswrapper[4762]: I1009 13:49:17.448487 4762 scope.go:117] "RemoveContainer" containerID="cc0e42a6c336b10ff7529604845ef18643bb7d98153abd1d9097ec3edf329e27" Oct 09 13:49:17 crc kubenswrapper[4762]: E1009 13:49:17.448971 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cc0e42a6c336b10ff7529604845ef18643bb7d98153abd1d9097ec3edf329e27\": container with ID starting with cc0e42a6c336b10ff7529604845ef18643bb7d98153abd1d9097ec3edf329e27 not found: ID does not exist" containerID="cc0e42a6c336b10ff7529604845ef18643bb7d98153abd1d9097ec3edf329e27" Oct 09 13:49:17 crc kubenswrapper[4762]: I1009 13:49:17.449002 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cc0e42a6c336b10ff7529604845ef18643bb7d98153abd1d9097ec3edf329e27"} err="failed to get container status \"cc0e42a6c336b10ff7529604845ef18643bb7d98153abd1d9097ec3edf329e27\": rpc error: code = NotFound desc = could not find container \"cc0e42a6c336b10ff7529604845ef18643bb7d98153abd1d9097ec3edf329e27\": container with ID starting with cc0e42a6c336b10ff7529604845ef18643bb7d98153abd1d9097ec3edf329e27 not found: ID does not exist" Oct 09 13:49:17 crc kubenswrapper[4762]: I1009 13:49:17.449150 4762 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/33dcd38f-e126-4635-9c44-48fd9fa3b51d-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 13:49:17 crc kubenswrapper[4762]: I1009 13:49:17.449174 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/33dcd38f-e126-4635-9c44-48fd9fa3b51d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 13:49:17 crc kubenswrapper[4762]: I1009 13:49:17.675838 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 09 13:49:17 crc kubenswrapper[4762]: I1009 13:49:17.685792 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 09 13:49:17 crc kubenswrapper[4762]: I1009 13:49:17.704738 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 09 13:49:17 crc kubenswrapper[4762]: E1009 13:49:17.705444 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="33dcd38f-e126-4635-9c44-48fd9fa3b51d" containerName="proxy-httpd" Oct 09 13:49:17 crc kubenswrapper[4762]: I1009 13:49:17.705529 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="33dcd38f-e126-4635-9c44-48fd9fa3b51d" containerName="proxy-httpd" Oct 09 13:49:17 crc kubenswrapper[4762]: E1009 13:49:17.705611 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="33dcd38f-e126-4635-9c44-48fd9fa3b51d" 
containerName="ceilometer-central-agent" Oct 09 13:49:17 crc kubenswrapper[4762]: I1009 13:49:17.705697 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="33dcd38f-e126-4635-9c44-48fd9fa3b51d" containerName="ceilometer-central-agent" Oct 09 13:49:17 crc kubenswrapper[4762]: E1009 13:49:17.705773 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="33dcd38f-e126-4635-9c44-48fd9fa3b51d" containerName="ceilometer-notification-agent" Oct 09 13:49:17 crc kubenswrapper[4762]: I1009 13:49:17.705826 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="33dcd38f-e126-4635-9c44-48fd9fa3b51d" containerName="ceilometer-notification-agent" Oct 09 13:49:17 crc kubenswrapper[4762]: E1009 13:49:17.705885 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="33dcd38f-e126-4635-9c44-48fd9fa3b51d" containerName="sg-core" Oct 09 13:49:17 crc kubenswrapper[4762]: I1009 13:49:17.705966 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="33dcd38f-e126-4635-9c44-48fd9fa3b51d" containerName="sg-core" Oct 09 13:49:17 crc kubenswrapper[4762]: I1009 13:49:17.706181 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="33dcd38f-e126-4635-9c44-48fd9fa3b51d" containerName="proxy-httpd" Oct 09 13:49:17 crc kubenswrapper[4762]: I1009 13:49:17.706263 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="33dcd38f-e126-4635-9c44-48fd9fa3b51d" containerName="ceilometer-central-agent" Oct 09 13:49:17 crc kubenswrapper[4762]: I1009 13:49:17.706353 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="33dcd38f-e126-4635-9c44-48fd9fa3b51d" containerName="sg-core" Oct 09 13:49:17 crc kubenswrapper[4762]: I1009 13:49:17.706425 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="33dcd38f-e126-4635-9c44-48fd9fa3b51d" containerName="ceilometer-notification-agent" Oct 09 13:49:17 crc kubenswrapper[4762]: I1009 13:49:17.708278 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 09 13:49:17 crc kubenswrapper[4762]: I1009 13:49:17.710855 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 09 13:49:17 crc kubenswrapper[4762]: I1009 13:49:17.711593 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Oct 09 13:49:17 crc kubenswrapper[4762]: I1009 13:49:17.711714 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 09 13:49:17 crc kubenswrapper[4762]: I1009 13:49:17.718362 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 09 13:49:17 crc kubenswrapper[4762]: I1009 13:49:17.866335 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3-scripts\") pod \"ceilometer-0\" (UID: \"96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3\") " pod="openstack/ceilometer-0" Oct 09 13:49:17 crc kubenswrapper[4762]: I1009 13:49:17.866775 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3-log-httpd\") pod \"ceilometer-0\" (UID: \"96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3\") " pod="openstack/ceilometer-0" Oct 09 13:49:17 crc kubenswrapper[4762]: I1009 13:49:17.866858 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3-run-httpd\") pod \"ceilometer-0\" (UID: \"96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3\") " pod="openstack/ceilometer-0" Oct 09 13:49:17 crc kubenswrapper[4762]: I1009 13:49:17.866885 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-stqtb\" (UniqueName: \"kubernetes.io/projected/96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3-kube-api-access-stqtb\") pod \"ceilometer-0\" (UID: \"96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3\") " pod="openstack/ceilometer-0" Oct 09 13:49:17 crc kubenswrapper[4762]: I1009 13:49:17.866914 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3\") " pod="openstack/ceilometer-0" Oct 09 13:49:17 crc kubenswrapper[4762]: I1009 13:49:17.866935 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3\") " pod="openstack/ceilometer-0" Oct 09 13:49:17 crc kubenswrapper[4762]: I1009 13:49:17.867096 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3-config-data\") pod \"ceilometer-0\" (UID: \"96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3\") " pod="openstack/ceilometer-0" Oct 09 13:49:17 crc kubenswrapper[4762]: I1009 13:49:17.867118 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: 
\"kubernetes.io/secret/96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3\") " pod="openstack/ceilometer-0" Oct 09 13:49:17 crc kubenswrapper[4762]: I1009 13:49:17.968137 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3-log-httpd\") pod \"ceilometer-0\" (UID: \"96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3\") " pod="openstack/ceilometer-0" Oct 09 13:49:17 crc kubenswrapper[4762]: I1009 13:49:17.968230 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3-run-httpd\") pod \"ceilometer-0\" (UID: \"96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3\") " pod="openstack/ceilometer-0" Oct 09 13:49:17 crc kubenswrapper[4762]: I1009 13:49:17.968274 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-stqtb\" (UniqueName: \"kubernetes.io/projected/96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3-kube-api-access-stqtb\") pod \"ceilometer-0\" (UID: \"96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3\") " pod="openstack/ceilometer-0" Oct 09 13:49:17 crc kubenswrapper[4762]: I1009 13:49:17.968301 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3\") " pod="openstack/ceilometer-0" Oct 09 13:49:17 crc kubenswrapper[4762]: I1009 13:49:17.968326 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3\") " pod="openstack/ceilometer-0" Oct 09 13:49:17 crc kubenswrapper[4762]: I1009 13:49:17.968395 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3-config-data\") pod \"ceilometer-0\" (UID: \"96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3\") " pod="openstack/ceilometer-0" Oct 09 13:49:17 crc kubenswrapper[4762]: I1009 13:49:17.968418 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3\") " pod="openstack/ceilometer-0" Oct 09 13:49:17 crc kubenswrapper[4762]: I1009 13:49:17.968473 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3-scripts\") pod \"ceilometer-0\" (UID: \"96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3\") " pod="openstack/ceilometer-0" Oct 09 13:49:17 crc kubenswrapper[4762]: I1009 13:49:17.968599 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3-log-httpd\") pod \"ceilometer-0\" (UID: \"96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3\") " pod="openstack/ceilometer-0" Oct 09 13:49:17 crc kubenswrapper[4762]: I1009 13:49:17.968979 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3-run-httpd\") pod \"ceilometer-0\" (UID: \"96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3\") " pod="openstack/ceilometer-0" Oct 09 13:49:17 crc kubenswrapper[4762]: I1009 13:49:17.972374 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3\") " pod="openstack/ceilometer-0" Oct 09 13:49:17 crc kubenswrapper[4762]: I1009 13:49:17.972427 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3\") " pod="openstack/ceilometer-0" Oct 09 13:49:17 crc kubenswrapper[4762]: I1009 13:49:17.979066 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3-scripts\") pod \"ceilometer-0\" (UID: \"96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3\") " pod="openstack/ceilometer-0" Oct 09 13:49:17 crc kubenswrapper[4762]: I1009 13:49:17.982521 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3\") " pod="openstack/ceilometer-0" Oct 09 13:49:17 crc kubenswrapper[4762]: I1009 13:49:17.986388 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3-config-data\") pod \"ceilometer-0\" (UID: \"96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3\") " pod="openstack/ceilometer-0" Oct 09 13:49:17 crc kubenswrapper[4762]: I1009 13:49:17.988913 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-stqtb\" (UniqueName: \"kubernetes.io/projected/96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3-kube-api-access-stqtb\") pod \"ceilometer-0\" (UID: \"96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3\") " pod="openstack/ceilometer-0" Oct 09 13:49:18 crc kubenswrapper[4762]: I1009 13:49:18.075954 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 09 13:49:18 crc kubenswrapper[4762]: W1009 13:49:18.546906 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod96ee7a8b_0aa4_42d4_9dd3_5e0350f40ce3.slice/crio-1452c60a3d5804de36f19f619606454c96fc01ec101e07cf4d46e717d3181ff7 WatchSource:0}: Error finding container 1452c60a3d5804de36f19f619606454c96fc01ec101e07cf4d46e717d3181ff7: Status 404 returned error can't find the container with id 1452c60a3d5804de36f19f619606454c96fc01ec101e07cf4d46e717d3181ff7 Oct 09 13:49:18 crc kubenswrapper[4762]: I1009 13:49:18.547887 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 09 13:49:18 crc kubenswrapper[4762]: I1009 13:49:18.984908 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="33dcd38f-e126-4635-9c44-48fd9fa3b51d" path="/var/lib/kubelet/pods/33dcd38f-e126-4635-9c44-48fd9fa3b51d/volumes" Oct 09 13:49:19 crc kubenswrapper[4762]: I1009 13:49:19.363704 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3","Type":"ContainerStarted","Data":"5a2c557d48c0ddb55e75b1d77d66c330f8602fb5590b5f44bfac8794c89b9776"} Oct 09 13:49:19 crc kubenswrapper[4762]: I1009 13:49:19.364056 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3","Type":"ContainerStarted","Data":"1452c60a3d5804de36f19f619606454c96fc01ec101e07cf4d46e717d3181ff7"} Oct 09 13:49:19 crc kubenswrapper[4762]: I1009 13:49:19.929334 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 09 13:49:20 crc kubenswrapper[4762]: I1009 13:49:20.023084 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/637a6b30-9f8c-4ed9-b55f-43d0a04f54f6-logs\") pod \"637a6b30-9f8c-4ed9-b55f-43d0a04f54f6\" (UID: \"637a6b30-9f8c-4ed9-b55f-43d0a04f54f6\") " Oct 09 13:49:20 crc kubenswrapper[4762]: I1009 13:49:20.025296 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/637a6b30-9f8c-4ed9-b55f-43d0a04f54f6-combined-ca-bundle\") pod \"637a6b30-9f8c-4ed9-b55f-43d0a04f54f6\" (UID: \"637a6b30-9f8c-4ed9-b55f-43d0a04f54f6\") " Oct 09 13:49:20 crc kubenswrapper[4762]: I1009 13:49:20.025504 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/637a6b30-9f8c-4ed9-b55f-43d0a04f54f6-config-data\") pod \"637a6b30-9f8c-4ed9-b55f-43d0a04f54f6\" (UID: \"637a6b30-9f8c-4ed9-b55f-43d0a04f54f6\") " Oct 09 13:49:20 crc kubenswrapper[4762]: I1009 13:49:20.025548 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8rncb\" (UniqueName: \"kubernetes.io/projected/637a6b30-9f8c-4ed9-b55f-43d0a04f54f6-kube-api-access-8rncb\") pod \"637a6b30-9f8c-4ed9-b55f-43d0a04f54f6\" (UID: \"637a6b30-9f8c-4ed9-b55f-43d0a04f54f6\") " Oct 09 13:49:20 crc kubenswrapper[4762]: I1009 13:49:20.024413 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/637a6b30-9f8c-4ed9-b55f-43d0a04f54f6-logs" (OuterVolumeSpecName: "logs") pod "637a6b30-9f8c-4ed9-b55f-43d0a04f54f6" (UID: "637a6b30-9f8c-4ed9-b55f-43d0a04f54f6"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 13:49:20 crc kubenswrapper[4762]: I1009 13:49:20.046690 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/637a6b30-9f8c-4ed9-b55f-43d0a04f54f6-kube-api-access-8rncb" (OuterVolumeSpecName: "kube-api-access-8rncb") pod "637a6b30-9f8c-4ed9-b55f-43d0a04f54f6" (UID: "637a6b30-9f8c-4ed9-b55f-43d0a04f54f6"). InnerVolumeSpecName "kube-api-access-8rncb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:49:20 crc kubenswrapper[4762]: I1009 13:49:20.075354 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/637a6b30-9f8c-4ed9-b55f-43d0a04f54f6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "637a6b30-9f8c-4ed9-b55f-43d0a04f54f6" (UID: "637a6b30-9f8c-4ed9-b55f-43d0a04f54f6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:49:20 crc kubenswrapper[4762]: I1009 13:49:20.099620 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/637a6b30-9f8c-4ed9-b55f-43d0a04f54f6-config-data" (OuterVolumeSpecName: "config-data") pod "637a6b30-9f8c-4ed9-b55f-43d0a04f54f6" (UID: "637a6b30-9f8c-4ed9-b55f-43d0a04f54f6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:49:20 crc kubenswrapper[4762]: I1009 13:49:20.128394 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/637a6b30-9f8c-4ed9-b55f-43d0a04f54f6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 13:49:20 crc kubenswrapper[4762]: I1009 13:49:20.128441 4762 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/637a6b30-9f8c-4ed9-b55f-43d0a04f54f6-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 13:49:20 crc kubenswrapper[4762]: I1009 13:49:20.128458 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8rncb\" (UniqueName: \"kubernetes.io/projected/637a6b30-9f8c-4ed9-b55f-43d0a04f54f6-kube-api-access-8rncb\") on node \"crc\" DevicePath \"\"" Oct 09 13:49:20 crc kubenswrapper[4762]: I1009 13:49:20.128471 4762 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/637a6b30-9f8c-4ed9-b55f-43d0a04f54f6-logs\") on node \"crc\" DevicePath \"\"" Oct 09 13:49:20 crc kubenswrapper[4762]: I1009 13:49:20.377286 4762 generic.go:334] "Generic (PLEG): container finished" podID="637a6b30-9f8c-4ed9-b55f-43d0a04f54f6" containerID="c47eb940d52ce275fb583df2b0493170537ac491c37313c4b6bab7dcc2822722" exitCode=0 Oct 09 13:49:20 crc kubenswrapper[4762]: I1009 13:49:20.377554 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"637a6b30-9f8c-4ed9-b55f-43d0a04f54f6","Type":"ContainerDied","Data":"c47eb940d52ce275fb583df2b0493170537ac491c37313c4b6bab7dcc2822722"} Oct 09 13:49:20 crc kubenswrapper[4762]: I1009 13:49:20.377586 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"637a6b30-9f8c-4ed9-b55f-43d0a04f54f6","Type":"ContainerDied","Data":"b567443d0771fcc56ed891151ea261b17ed3f7f1604d2435e3e18225defa8453"} Oct 09 13:49:20 crc kubenswrapper[4762]: I1009 13:49:20.377602 4762 scope.go:117] "RemoveContainer" containerID="c47eb940d52ce275fb583df2b0493170537ac491c37313c4b6bab7dcc2822722" Oct 09 13:49:20 crc kubenswrapper[4762]: I1009 13:49:20.377675 4762 
util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 09 13:49:20 crc kubenswrapper[4762]: I1009 13:49:20.384351 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3","Type":"ContainerStarted","Data":"f4d38c90d6503eb2b13feadda565a6997a41189134eb578e55d77d77cecf5418"} Oct 09 13:49:20 crc kubenswrapper[4762]: I1009 13:49:20.384405 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3","Type":"ContainerStarted","Data":"feaef4de6cce1bbb2d4303079a7194f8a547f09def8c966e05793d81fbd5d515"} Oct 09 13:49:20 crc kubenswrapper[4762]: I1009 13:49:20.412272 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 09 13:49:20 crc kubenswrapper[4762]: I1009 13:49:20.425040 4762 scope.go:117] "RemoveContainer" containerID="54076708122d199cd43af8b0e038b37186d01b046e14e3e4fc13275f0a827c70" Oct 09 13:49:20 crc kubenswrapper[4762]: I1009 13:49:20.426182 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Oct 09 13:49:20 crc kubenswrapper[4762]: I1009 13:49:20.435556 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Oct 09 13:49:20 crc kubenswrapper[4762]: E1009 13:49:20.436006 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="637a6b30-9f8c-4ed9-b55f-43d0a04f54f6" containerName="nova-api-api" Oct 09 13:49:20 crc kubenswrapper[4762]: I1009 13:49:20.436024 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="637a6b30-9f8c-4ed9-b55f-43d0a04f54f6" containerName="nova-api-api" Oct 09 13:49:20 crc kubenswrapper[4762]: E1009 13:49:20.436044 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="637a6b30-9f8c-4ed9-b55f-43d0a04f54f6" containerName="nova-api-log" Oct 09 13:49:20 crc kubenswrapper[4762]: I1009 13:49:20.436051 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="637a6b30-9f8c-4ed9-b55f-43d0a04f54f6" containerName="nova-api-log" Oct 09 13:49:20 crc kubenswrapper[4762]: I1009 13:49:20.436221 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="637a6b30-9f8c-4ed9-b55f-43d0a04f54f6" containerName="nova-api-api" Oct 09 13:49:20 crc kubenswrapper[4762]: I1009 13:49:20.436241 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="637a6b30-9f8c-4ed9-b55f-43d0a04f54f6" containerName="nova-api-log" Oct 09 13:49:20 crc kubenswrapper[4762]: I1009 13:49:20.437190 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 09 13:49:20 crc kubenswrapper[4762]: I1009 13:49:20.441044 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Oct 09 13:49:20 crc kubenswrapper[4762]: I1009 13:49:20.441063 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Oct 09 13:49:20 crc kubenswrapper[4762]: I1009 13:49:20.443200 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Oct 09 13:49:20 crc kubenswrapper[4762]: I1009 13:49:20.454891 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 09 13:49:20 crc kubenswrapper[4762]: I1009 13:49:20.473288 4762 scope.go:117] "RemoveContainer" containerID="c47eb940d52ce275fb583df2b0493170537ac491c37313c4b6bab7dcc2822722" Oct 09 13:49:20 crc kubenswrapper[4762]: E1009 13:49:20.476785 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c47eb940d52ce275fb583df2b0493170537ac491c37313c4b6bab7dcc2822722\": container with ID starting with c47eb940d52ce275fb583df2b0493170537ac491c37313c4b6bab7dcc2822722 not found: ID does not exist" containerID="c47eb940d52ce275fb583df2b0493170537ac491c37313c4b6bab7dcc2822722" Oct 09 13:49:20 crc kubenswrapper[4762]: I1009 13:49:20.476827 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c47eb940d52ce275fb583df2b0493170537ac491c37313c4b6bab7dcc2822722"} err="failed to get container status \"c47eb940d52ce275fb583df2b0493170537ac491c37313c4b6bab7dcc2822722\": rpc error: code = NotFound desc = could not find container \"c47eb940d52ce275fb583df2b0493170537ac491c37313c4b6bab7dcc2822722\": container with ID starting with c47eb940d52ce275fb583df2b0493170537ac491c37313c4b6bab7dcc2822722 not found: ID does not exist" Oct 09 13:49:20 crc kubenswrapper[4762]: I1009 13:49:20.476856 4762 scope.go:117] "RemoveContainer" containerID="54076708122d199cd43af8b0e038b37186d01b046e14e3e4fc13275f0a827c70" Oct 09 13:49:20 crc kubenswrapper[4762]: E1009 13:49:20.478940 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"54076708122d199cd43af8b0e038b37186d01b046e14e3e4fc13275f0a827c70\": container with ID starting with 54076708122d199cd43af8b0e038b37186d01b046e14e3e4fc13275f0a827c70 not found: ID does not exist" containerID="54076708122d199cd43af8b0e038b37186d01b046e14e3e4fc13275f0a827c70" Oct 09 13:49:20 crc kubenswrapper[4762]: I1009 13:49:20.479003 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"54076708122d199cd43af8b0e038b37186d01b046e14e3e4fc13275f0a827c70"} err="failed to get container status \"54076708122d199cd43af8b0e038b37186d01b046e14e3e4fc13275f0a827c70\": rpc error: code = NotFound desc = could not find container \"54076708122d199cd43af8b0e038b37186d01b046e14e3e4fc13275f0a827c70\": container with ID starting with 54076708122d199cd43af8b0e038b37186d01b046e14e3e4fc13275f0a827c70 not found: ID does not exist" Oct 09 13:49:20 crc kubenswrapper[4762]: I1009 13:49:20.537057 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/81d29bcf-d612-49b9-987d-07d7ea0643f2-config-data\") pod \"nova-api-0\" (UID: \"81d29bcf-d612-49b9-987d-07d7ea0643f2\") " pod="openstack/nova-api-0" Oct 09 13:49:20 crc 
kubenswrapper[4762]: I1009 13:49:20.537156 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lmgn2\" (UniqueName: \"kubernetes.io/projected/81d29bcf-d612-49b9-987d-07d7ea0643f2-kube-api-access-lmgn2\") pod \"nova-api-0\" (UID: \"81d29bcf-d612-49b9-987d-07d7ea0643f2\") " pod="openstack/nova-api-0" Oct 09 13:49:20 crc kubenswrapper[4762]: I1009 13:49:20.537251 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/81d29bcf-d612-49b9-987d-07d7ea0643f2-internal-tls-certs\") pod \"nova-api-0\" (UID: \"81d29bcf-d612-49b9-987d-07d7ea0643f2\") " pod="openstack/nova-api-0" Oct 09 13:49:20 crc kubenswrapper[4762]: I1009 13:49:20.537278 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/81d29bcf-d612-49b9-987d-07d7ea0643f2-logs\") pod \"nova-api-0\" (UID: \"81d29bcf-d612-49b9-987d-07d7ea0643f2\") " pod="openstack/nova-api-0" Oct 09 13:49:20 crc kubenswrapper[4762]: I1009 13:49:20.537333 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/81d29bcf-d612-49b9-987d-07d7ea0643f2-public-tls-certs\") pod \"nova-api-0\" (UID: \"81d29bcf-d612-49b9-987d-07d7ea0643f2\") " pod="openstack/nova-api-0" Oct 09 13:49:20 crc kubenswrapper[4762]: I1009 13:49:20.537389 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/81d29bcf-d612-49b9-987d-07d7ea0643f2-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"81d29bcf-d612-49b9-987d-07d7ea0643f2\") " pod="openstack/nova-api-0" Oct 09 13:49:20 crc kubenswrapper[4762]: I1009 13:49:20.639462 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/81d29bcf-d612-49b9-987d-07d7ea0643f2-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"81d29bcf-d612-49b9-987d-07d7ea0643f2\") " pod="openstack/nova-api-0" Oct 09 13:49:20 crc kubenswrapper[4762]: I1009 13:49:20.639534 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/81d29bcf-d612-49b9-987d-07d7ea0643f2-config-data\") pod \"nova-api-0\" (UID: \"81d29bcf-d612-49b9-987d-07d7ea0643f2\") " pod="openstack/nova-api-0" Oct 09 13:49:20 crc kubenswrapper[4762]: I1009 13:49:20.639583 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lmgn2\" (UniqueName: \"kubernetes.io/projected/81d29bcf-d612-49b9-987d-07d7ea0643f2-kube-api-access-lmgn2\") pod \"nova-api-0\" (UID: \"81d29bcf-d612-49b9-987d-07d7ea0643f2\") " pod="openstack/nova-api-0" Oct 09 13:49:20 crc kubenswrapper[4762]: I1009 13:49:20.639698 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/81d29bcf-d612-49b9-987d-07d7ea0643f2-internal-tls-certs\") pod \"nova-api-0\" (UID: \"81d29bcf-d612-49b9-987d-07d7ea0643f2\") " pod="openstack/nova-api-0" Oct 09 13:49:20 crc kubenswrapper[4762]: I1009 13:49:20.639720 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/81d29bcf-d612-49b9-987d-07d7ea0643f2-logs\") pod \"nova-api-0\" (UID: 
\"81d29bcf-d612-49b9-987d-07d7ea0643f2\") " pod="openstack/nova-api-0" Oct 09 13:49:20 crc kubenswrapper[4762]: I1009 13:49:20.639753 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/81d29bcf-d612-49b9-987d-07d7ea0643f2-public-tls-certs\") pod \"nova-api-0\" (UID: \"81d29bcf-d612-49b9-987d-07d7ea0643f2\") " pod="openstack/nova-api-0" Oct 09 13:49:20 crc kubenswrapper[4762]: I1009 13:49:20.640471 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/81d29bcf-d612-49b9-987d-07d7ea0643f2-logs\") pod \"nova-api-0\" (UID: \"81d29bcf-d612-49b9-987d-07d7ea0643f2\") " pod="openstack/nova-api-0" Oct 09 13:49:20 crc kubenswrapper[4762]: I1009 13:49:20.644832 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/81d29bcf-d612-49b9-987d-07d7ea0643f2-internal-tls-certs\") pod \"nova-api-0\" (UID: \"81d29bcf-d612-49b9-987d-07d7ea0643f2\") " pod="openstack/nova-api-0" Oct 09 13:49:20 crc kubenswrapper[4762]: I1009 13:49:20.644892 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/81d29bcf-d612-49b9-987d-07d7ea0643f2-public-tls-certs\") pod \"nova-api-0\" (UID: \"81d29bcf-d612-49b9-987d-07d7ea0643f2\") " pod="openstack/nova-api-0" Oct 09 13:49:20 crc kubenswrapper[4762]: I1009 13:49:20.645999 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/81d29bcf-d612-49b9-987d-07d7ea0643f2-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"81d29bcf-d612-49b9-987d-07d7ea0643f2\") " pod="openstack/nova-api-0" Oct 09 13:49:20 crc kubenswrapper[4762]: I1009 13:49:20.647207 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/81d29bcf-d612-49b9-987d-07d7ea0643f2-config-data\") pod \"nova-api-0\" (UID: \"81d29bcf-d612-49b9-987d-07d7ea0643f2\") " pod="openstack/nova-api-0" Oct 09 13:49:20 crc kubenswrapper[4762]: I1009 13:49:20.661019 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lmgn2\" (UniqueName: \"kubernetes.io/projected/81d29bcf-d612-49b9-987d-07d7ea0643f2-kube-api-access-lmgn2\") pod \"nova-api-0\" (UID: \"81d29bcf-d612-49b9-987d-07d7ea0643f2\") " pod="openstack/nova-api-0" Oct 09 13:49:20 crc kubenswrapper[4762]: I1009 13:49:20.760245 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 09 13:49:20 crc kubenswrapper[4762]: I1009 13:49:20.978134 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="637a6b30-9f8c-4ed9-b55f-43d0a04f54f6" path="/var/lib/kubelet/pods/637a6b30-9f8c-4ed9-b55f-43d0a04f54f6/volumes" Oct 09 13:49:21 crc kubenswrapper[4762]: I1009 13:49:21.209551 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 09 13:49:21 crc kubenswrapper[4762]: W1009 13:49:21.214491 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod81d29bcf_d612_49b9_987d_07d7ea0643f2.slice/crio-8bf01f2c7d7b275cd8529e4fb34a7a30ede8e9c5689b54999ec07ab8981a9c7f WatchSource:0}: Error finding container 8bf01f2c7d7b275cd8529e4fb34a7a30ede8e9c5689b54999ec07ab8981a9c7f: Status 404 returned error can't find the container with id 8bf01f2c7d7b275cd8529e4fb34a7a30ede8e9c5689b54999ec07ab8981a9c7f Oct 09 13:49:21 crc kubenswrapper[4762]: I1009 13:49:21.402935 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"81d29bcf-d612-49b9-987d-07d7ea0643f2","Type":"ContainerStarted","Data":"8bf01f2c7d7b275cd8529e4fb34a7a30ede8e9c5689b54999ec07ab8981a9c7f"} Oct 09 13:49:21 crc kubenswrapper[4762]: I1009 13:49:21.642702 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Oct 09 13:49:21 crc kubenswrapper[4762]: I1009 13:49:21.663833 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Oct 09 13:49:22 crc kubenswrapper[4762]: I1009 13:49:22.449698 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"81d29bcf-d612-49b9-987d-07d7ea0643f2","Type":"ContainerStarted","Data":"bc3086d9271063c14323cf54223e084102c1c6a4ab7576b8816ba6b32262178f"} Oct 09 13:49:22 crc kubenswrapper[4762]: I1009 13:49:22.450068 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"81d29bcf-d612-49b9-987d-07d7ea0643f2","Type":"ContainerStarted","Data":"65a85270b622416f6e1fbc788a734bd6f74e0092a41ab3ec1010b56d4d946dfb"} Oct 09 13:49:22 crc kubenswrapper[4762]: I1009 13:49:22.460031 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3","Type":"ContainerStarted","Data":"eb7446c781a3fc26e94e1a5d7268c99a91fefd167fad020d62497a265f46becd"} Oct 09 13:49:22 crc kubenswrapper[4762]: I1009 13:49:22.460542 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 09 13:49:22 crc kubenswrapper[4762]: I1009 13:49:22.480088 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.480067178 podStartE2EDuration="2.480067178s" podCreationTimestamp="2025-10-09 13:49:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:49:22.473537316 +0000 UTC m=+1438.247328365" watchObservedRunningTime="2025-10-09 13:49:22.480067178 +0000 UTC m=+1438.253858217" Oct 09 13:49:22 crc kubenswrapper[4762]: I1009 13:49:22.484816 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Oct 09 13:49:22 crc kubenswrapper[4762]: I1009 13:49:22.512590 4762 pod_startup_latency_tracker.go:104] "Observed pod startup 
duration" pod="openstack/ceilometer-0" podStartSLOduration=1.987689756 podStartE2EDuration="5.5125673s" podCreationTimestamp="2025-10-09 13:49:17 +0000 UTC" firstStartedPulling="2025-10-09 13:49:18.549981481 +0000 UTC m=+1434.323772520" lastFinishedPulling="2025-10-09 13:49:22.074859025 +0000 UTC m=+1437.848650064" observedRunningTime="2025-10-09 13:49:22.493368907 +0000 UTC m=+1438.267159946" watchObservedRunningTime="2025-10-09 13:49:22.5125673 +0000 UTC m=+1438.286358339" Oct 09 13:49:22 crc kubenswrapper[4762]: I1009 13:49:22.667368 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-7q5p9"] Oct 09 13:49:22 crc kubenswrapper[4762]: I1009 13:49:22.669216 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-7q5p9" Oct 09 13:49:22 crc kubenswrapper[4762]: I1009 13:49:22.675126 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Oct 09 13:49:22 crc kubenswrapper[4762]: I1009 13:49:22.675333 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Oct 09 13:49:22 crc kubenswrapper[4762]: I1009 13:49:22.681987 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-7q5p9"] Oct 09 13:49:22 crc kubenswrapper[4762]: I1009 13:49:22.778754 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5734e640-439a-45ff-8126-48eba63b8d4d-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-7q5p9\" (UID: \"5734e640-439a-45ff-8126-48eba63b8d4d\") " pod="openstack/nova-cell1-cell-mapping-7q5p9" Oct 09 13:49:22 crc kubenswrapper[4762]: I1009 13:49:22.778845 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5734e640-439a-45ff-8126-48eba63b8d4d-config-data\") pod \"nova-cell1-cell-mapping-7q5p9\" (UID: \"5734e640-439a-45ff-8126-48eba63b8d4d\") " pod="openstack/nova-cell1-cell-mapping-7q5p9" Oct 09 13:49:22 crc kubenswrapper[4762]: I1009 13:49:22.778965 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dcwkv\" (UniqueName: \"kubernetes.io/projected/5734e640-439a-45ff-8126-48eba63b8d4d-kube-api-access-dcwkv\") pod \"nova-cell1-cell-mapping-7q5p9\" (UID: \"5734e640-439a-45ff-8126-48eba63b8d4d\") " pod="openstack/nova-cell1-cell-mapping-7q5p9" Oct 09 13:49:22 crc kubenswrapper[4762]: I1009 13:49:22.779017 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5734e640-439a-45ff-8126-48eba63b8d4d-scripts\") pod \"nova-cell1-cell-mapping-7q5p9\" (UID: \"5734e640-439a-45ff-8126-48eba63b8d4d\") " pod="openstack/nova-cell1-cell-mapping-7q5p9" Oct 09 13:49:22 crc kubenswrapper[4762]: I1009 13:49:22.881090 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5734e640-439a-45ff-8126-48eba63b8d4d-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-7q5p9\" (UID: \"5734e640-439a-45ff-8126-48eba63b8d4d\") " pod="openstack/nova-cell1-cell-mapping-7q5p9" Oct 09 13:49:22 crc kubenswrapper[4762]: I1009 13:49:22.881493 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/5734e640-439a-45ff-8126-48eba63b8d4d-config-data\") pod \"nova-cell1-cell-mapping-7q5p9\" (UID: \"5734e640-439a-45ff-8126-48eba63b8d4d\") " pod="openstack/nova-cell1-cell-mapping-7q5p9" Oct 09 13:49:22 crc kubenswrapper[4762]: I1009 13:49:22.881556 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dcwkv\" (UniqueName: \"kubernetes.io/projected/5734e640-439a-45ff-8126-48eba63b8d4d-kube-api-access-dcwkv\") pod \"nova-cell1-cell-mapping-7q5p9\" (UID: \"5734e640-439a-45ff-8126-48eba63b8d4d\") " pod="openstack/nova-cell1-cell-mapping-7q5p9" Oct 09 13:49:22 crc kubenswrapper[4762]: I1009 13:49:22.881588 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5734e640-439a-45ff-8126-48eba63b8d4d-scripts\") pod \"nova-cell1-cell-mapping-7q5p9\" (UID: \"5734e640-439a-45ff-8126-48eba63b8d4d\") " pod="openstack/nova-cell1-cell-mapping-7q5p9" Oct 09 13:49:22 crc kubenswrapper[4762]: I1009 13:49:22.887496 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5734e640-439a-45ff-8126-48eba63b8d4d-scripts\") pod \"nova-cell1-cell-mapping-7q5p9\" (UID: \"5734e640-439a-45ff-8126-48eba63b8d4d\") " pod="openstack/nova-cell1-cell-mapping-7q5p9" Oct 09 13:49:22 crc kubenswrapper[4762]: I1009 13:49:22.887786 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5734e640-439a-45ff-8126-48eba63b8d4d-config-data\") pod \"nova-cell1-cell-mapping-7q5p9\" (UID: \"5734e640-439a-45ff-8126-48eba63b8d4d\") " pod="openstack/nova-cell1-cell-mapping-7q5p9" Oct 09 13:49:22 crc kubenswrapper[4762]: I1009 13:49:22.888813 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5734e640-439a-45ff-8126-48eba63b8d4d-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-7q5p9\" (UID: \"5734e640-439a-45ff-8126-48eba63b8d4d\") " pod="openstack/nova-cell1-cell-mapping-7q5p9" Oct 09 13:49:22 crc kubenswrapper[4762]: I1009 13:49:22.916613 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dcwkv\" (UniqueName: \"kubernetes.io/projected/5734e640-439a-45ff-8126-48eba63b8d4d-kube-api-access-dcwkv\") pod \"nova-cell1-cell-mapping-7q5p9\" (UID: \"5734e640-439a-45ff-8126-48eba63b8d4d\") " pod="openstack/nova-cell1-cell-mapping-7q5p9" Oct 09 13:49:22 crc kubenswrapper[4762]: I1009 13:49:22.991161 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-7q5p9" Oct 09 13:49:23 crc kubenswrapper[4762]: I1009 13:49:23.531123 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-7q5p9"] Oct 09 13:49:23 crc kubenswrapper[4762]: I1009 13:49:23.820796 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5c7b6c5df9-77jpt" Oct 09 13:49:23 crc kubenswrapper[4762]: I1009 13:49:23.884590 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-865f5d856f-m7gs2"] Oct 09 13:49:23 crc kubenswrapper[4762]: I1009 13:49:23.884884 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-865f5d856f-m7gs2" podUID="2d928647-7b87-4be1-9b66-5b146f24f93a" containerName="dnsmasq-dns" containerID="cri-o://afb284ad3460ae4eab62636e991f111b8b6d1e2cba5e42e6c555c4873ea76395" gracePeriod=10 Oct 09 13:49:24 crc kubenswrapper[4762]: I1009 13:49:24.458112 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-865f5d856f-m7gs2" Oct 09 13:49:24 crc kubenswrapper[4762]: I1009 13:49:24.477156 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-7q5p9" event={"ID":"5734e640-439a-45ff-8126-48eba63b8d4d","Type":"ContainerStarted","Data":"71077d84bc79017d2c9e27c25dd96dee36c7b4da89cb52d241107b28fbe63c61"} Oct 09 13:49:24 crc kubenswrapper[4762]: I1009 13:49:24.477210 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-7q5p9" event={"ID":"5734e640-439a-45ff-8126-48eba63b8d4d","Type":"ContainerStarted","Data":"a61e6e2d38e293dd7cff7474d0b72959a4d7cd64ed89d0f71e08f327408d43ff"} Oct 09 13:49:24 crc kubenswrapper[4762]: I1009 13:49:24.481362 4762 generic.go:334] "Generic (PLEG): container finished" podID="2d928647-7b87-4be1-9b66-5b146f24f93a" containerID="afb284ad3460ae4eab62636e991f111b8b6d1e2cba5e42e6c555c4873ea76395" exitCode=0 Oct 09 13:49:24 crc kubenswrapper[4762]: I1009 13:49:24.481408 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-865f5d856f-m7gs2" event={"ID":"2d928647-7b87-4be1-9b66-5b146f24f93a","Type":"ContainerDied","Data":"afb284ad3460ae4eab62636e991f111b8b6d1e2cba5e42e6c555c4873ea76395"} Oct 09 13:49:24 crc kubenswrapper[4762]: I1009 13:49:24.481435 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-865f5d856f-m7gs2" event={"ID":"2d928647-7b87-4be1-9b66-5b146f24f93a","Type":"ContainerDied","Data":"5af0e78373f3c6a974bca79b24219932580b2b263d3343abb32f73f3602ffc22"} Oct 09 13:49:24 crc kubenswrapper[4762]: I1009 13:49:24.481458 4762 scope.go:117] "RemoveContainer" containerID="afb284ad3460ae4eab62636e991f111b8b6d1e2cba5e42e6c555c4873ea76395" Oct 09 13:49:24 crc kubenswrapper[4762]: I1009 13:49:24.481583 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-865f5d856f-m7gs2" Oct 09 13:49:24 crc kubenswrapper[4762]: I1009 13:49:24.501175 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-7q5p9" podStartSLOduration=2.501160412 podStartE2EDuration="2.501160412s" podCreationTimestamp="2025-10-09 13:49:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:49:24.498757498 +0000 UTC m=+1440.272548537" watchObservedRunningTime="2025-10-09 13:49:24.501160412 +0000 UTC m=+1440.274951451" Oct 09 13:49:24 crc kubenswrapper[4762]: I1009 13:49:24.517401 4762 scope.go:117] "RemoveContainer" containerID="8746c82d460c9126dfc0924a012c219d9a3a24d874022a42eaf5f4cbca8ef1eb" Oct 09 13:49:24 crc kubenswrapper[4762]: I1009 13:49:24.529281 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2d928647-7b87-4be1-9b66-5b146f24f93a-config\") pod \"2d928647-7b87-4be1-9b66-5b146f24f93a\" (UID: \"2d928647-7b87-4be1-9b66-5b146f24f93a\") " Oct 09 13:49:24 crc kubenswrapper[4762]: I1009 13:49:24.529342 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c4ls7\" (UniqueName: \"kubernetes.io/projected/2d928647-7b87-4be1-9b66-5b146f24f93a-kube-api-access-c4ls7\") pod \"2d928647-7b87-4be1-9b66-5b146f24f93a\" (UID: \"2d928647-7b87-4be1-9b66-5b146f24f93a\") " Oct 09 13:49:24 crc kubenswrapper[4762]: I1009 13:49:24.529407 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2d928647-7b87-4be1-9b66-5b146f24f93a-dns-svc\") pod \"2d928647-7b87-4be1-9b66-5b146f24f93a\" (UID: \"2d928647-7b87-4be1-9b66-5b146f24f93a\") " Oct 09 13:49:24 crc kubenswrapper[4762]: I1009 13:49:24.529435 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2d928647-7b87-4be1-9b66-5b146f24f93a-ovsdbserver-nb\") pod \"2d928647-7b87-4be1-9b66-5b146f24f93a\" (UID: \"2d928647-7b87-4be1-9b66-5b146f24f93a\") " Oct 09 13:49:24 crc kubenswrapper[4762]: I1009 13:49:24.529554 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2d928647-7b87-4be1-9b66-5b146f24f93a-ovsdbserver-sb\") pod \"2d928647-7b87-4be1-9b66-5b146f24f93a\" (UID: \"2d928647-7b87-4be1-9b66-5b146f24f93a\") " Oct 09 13:49:24 crc kubenswrapper[4762]: I1009 13:49:24.529646 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2d928647-7b87-4be1-9b66-5b146f24f93a-dns-swift-storage-0\") pod \"2d928647-7b87-4be1-9b66-5b146f24f93a\" (UID: \"2d928647-7b87-4be1-9b66-5b146f24f93a\") " Oct 09 13:49:24 crc kubenswrapper[4762]: I1009 13:49:24.536348 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2d928647-7b87-4be1-9b66-5b146f24f93a-kube-api-access-c4ls7" (OuterVolumeSpecName: "kube-api-access-c4ls7") pod "2d928647-7b87-4be1-9b66-5b146f24f93a" (UID: "2d928647-7b87-4be1-9b66-5b146f24f93a"). InnerVolumeSpecName "kube-api-access-c4ls7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:49:24 crc kubenswrapper[4762]: I1009 13:49:24.551908 4762 scope.go:117] "RemoveContainer" containerID="afb284ad3460ae4eab62636e991f111b8b6d1e2cba5e42e6c555c4873ea76395" Oct 09 13:49:24 crc kubenswrapper[4762]: E1009 13:49:24.552417 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"afb284ad3460ae4eab62636e991f111b8b6d1e2cba5e42e6c555c4873ea76395\": container with ID starting with afb284ad3460ae4eab62636e991f111b8b6d1e2cba5e42e6c555c4873ea76395 not found: ID does not exist" containerID="afb284ad3460ae4eab62636e991f111b8b6d1e2cba5e42e6c555c4873ea76395" Oct 09 13:49:24 crc kubenswrapper[4762]: I1009 13:49:24.552454 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"afb284ad3460ae4eab62636e991f111b8b6d1e2cba5e42e6c555c4873ea76395"} err="failed to get container status \"afb284ad3460ae4eab62636e991f111b8b6d1e2cba5e42e6c555c4873ea76395\": rpc error: code = NotFound desc = could not find container \"afb284ad3460ae4eab62636e991f111b8b6d1e2cba5e42e6c555c4873ea76395\": container with ID starting with afb284ad3460ae4eab62636e991f111b8b6d1e2cba5e42e6c555c4873ea76395 not found: ID does not exist" Oct 09 13:49:24 crc kubenswrapper[4762]: I1009 13:49:24.552481 4762 scope.go:117] "RemoveContainer" containerID="8746c82d460c9126dfc0924a012c219d9a3a24d874022a42eaf5f4cbca8ef1eb" Oct 09 13:49:24 crc kubenswrapper[4762]: E1009 13:49:24.552789 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8746c82d460c9126dfc0924a012c219d9a3a24d874022a42eaf5f4cbca8ef1eb\": container with ID starting with 8746c82d460c9126dfc0924a012c219d9a3a24d874022a42eaf5f4cbca8ef1eb not found: ID does not exist" containerID="8746c82d460c9126dfc0924a012c219d9a3a24d874022a42eaf5f4cbca8ef1eb" Oct 09 13:49:24 crc kubenswrapper[4762]: I1009 13:49:24.552814 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8746c82d460c9126dfc0924a012c219d9a3a24d874022a42eaf5f4cbca8ef1eb"} err="failed to get container status \"8746c82d460c9126dfc0924a012c219d9a3a24d874022a42eaf5f4cbca8ef1eb\": rpc error: code = NotFound desc = could not find container \"8746c82d460c9126dfc0924a012c219d9a3a24d874022a42eaf5f4cbca8ef1eb\": container with ID starting with 8746c82d460c9126dfc0924a012c219d9a3a24d874022a42eaf5f4cbca8ef1eb not found: ID does not exist" Oct 09 13:49:24 crc kubenswrapper[4762]: I1009 13:49:24.604467 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2d928647-7b87-4be1-9b66-5b146f24f93a-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "2d928647-7b87-4be1-9b66-5b146f24f93a" (UID: "2d928647-7b87-4be1-9b66-5b146f24f93a"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:49:24 crc kubenswrapper[4762]: I1009 13:49:24.609066 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2d928647-7b87-4be1-9b66-5b146f24f93a-config" (OuterVolumeSpecName: "config") pod "2d928647-7b87-4be1-9b66-5b146f24f93a" (UID: "2d928647-7b87-4be1-9b66-5b146f24f93a"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:49:24 crc kubenswrapper[4762]: I1009 13:49:24.619184 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2d928647-7b87-4be1-9b66-5b146f24f93a-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "2d928647-7b87-4be1-9b66-5b146f24f93a" (UID: "2d928647-7b87-4be1-9b66-5b146f24f93a"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:49:24 crc kubenswrapper[4762]: I1009 13:49:24.624318 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2d928647-7b87-4be1-9b66-5b146f24f93a-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "2d928647-7b87-4be1-9b66-5b146f24f93a" (UID: "2d928647-7b87-4be1-9b66-5b146f24f93a"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:49:24 crc kubenswrapper[4762]: I1009 13:49:24.632398 4762 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2d928647-7b87-4be1-9b66-5b146f24f93a-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 09 13:49:24 crc kubenswrapper[4762]: I1009 13:49:24.632429 4762 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2d928647-7b87-4be1-9b66-5b146f24f93a-config\") on node \"crc\" DevicePath \"\"" Oct 09 13:49:24 crc kubenswrapper[4762]: I1009 13:49:24.632440 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c4ls7\" (UniqueName: \"kubernetes.io/projected/2d928647-7b87-4be1-9b66-5b146f24f93a-kube-api-access-c4ls7\") on node \"crc\" DevicePath \"\"" Oct 09 13:49:24 crc kubenswrapper[4762]: I1009 13:49:24.632453 4762 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2d928647-7b87-4be1-9b66-5b146f24f93a-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 09 13:49:24 crc kubenswrapper[4762]: I1009 13:49:24.632462 4762 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2d928647-7b87-4be1-9b66-5b146f24f93a-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 09 13:49:24 crc kubenswrapper[4762]: I1009 13:49:24.633371 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2d928647-7b87-4be1-9b66-5b146f24f93a-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "2d928647-7b87-4be1-9b66-5b146f24f93a" (UID: "2d928647-7b87-4be1-9b66-5b146f24f93a"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:49:24 crc kubenswrapper[4762]: I1009 13:49:24.734256 4762 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2d928647-7b87-4be1-9b66-5b146f24f93a-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 09 13:49:24 crc kubenswrapper[4762]: I1009 13:49:24.812435 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-865f5d856f-m7gs2"] Oct 09 13:49:24 crc kubenswrapper[4762]: I1009 13:49:24.819309 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-865f5d856f-m7gs2"] Oct 09 13:49:24 crc kubenswrapper[4762]: I1009 13:49:24.976214 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2d928647-7b87-4be1-9b66-5b146f24f93a" path="/var/lib/kubelet/pods/2d928647-7b87-4be1-9b66-5b146f24f93a/volumes" Oct 09 13:49:29 crc kubenswrapper[4762]: I1009 13:49:29.526841 4762 generic.go:334] "Generic (PLEG): container finished" podID="5734e640-439a-45ff-8126-48eba63b8d4d" containerID="71077d84bc79017d2c9e27c25dd96dee36c7b4da89cb52d241107b28fbe63c61" exitCode=0 Oct 09 13:49:29 crc kubenswrapper[4762]: I1009 13:49:29.526951 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-7q5p9" event={"ID":"5734e640-439a-45ff-8126-48eba63b8d4d","Type":"ContainerDied","Data":"71077d84bc79017d2c9e27c25dd96dee36c7b4da89cb52d241107b28fbe63c61"} Oct 09 13:49:30 crc kubenswrapper[4762]: I1009 13:49:30.760875 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 09 13:49:30 crc kubenswrapper[4762]: I1009 13:49:30.761239 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 09 13:49:30 crc kubenswrapper[4762]: I1009 13:49:30.917164 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-7q5p9" Oct 09 13:49:30 crc kubenswrapper[4762]: I1009 13:49:30.955347 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5734e640-439a-45ff-8126-48eba63b8d4d-config-data\") pod \"5734e640-439a-45ff-8126-48eba63b8d4d\" (UID: \"5734e640-439a-45ff-8126-48eba63b8d4d\") " Oct 09 13:49:30 crc kubenswrapper[4762]: I1009 13:49:30.955413 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dcwkv\" (UniqueName: \"kubernetes.io/projected/5734e640-439a-45ff-8126-48eba63b8d4d-kube-api-access-dcwkv\") pod \"5734e640-439a-45ff-8126-48eba63b8d4d\" (UID: \"5734e640-439a-45ff-8126-48eba63b8d4d\") " Oct 09 13:49:30 crc kubenswrapper[4762]: I1009 13:49:30.955444 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5734e640-439a-45ff-8126-48eba63b8d4d-scripts\") pod \"5734e640-439a-45ff-8126-48eba63b8d4d\" (UID: \"5734e640-439a-45ff-8126-48eba63b8d4d\") " Oct 09 13:49:30 crc kubenswrapper[4762]: I1009 13:49:30.955485 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5734e640-439a-45ff-8126-48eba63b8d4d-combined-ca-bundle\") pod \"5734e640-439a-45ff-8126-48eba63b8d4d\" (UID: \"5734e640-439a-45ff-8126-48eba63b8d4d\") " Oct 09 13:49:30 crc kubenswrapper[4762]: I1009 13:49:30.983768 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5734e640-439a-45ff-8126-48eba63b8d4d-scripts" (OuterVolumeSpecName: "scripts") pod "5734e640-439a-45ff-8126-48eba63b8d4d" (UID: "5734e640-439a-45ff-8126-48eba63b8d4d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:49:30 crc kubenswrapper[4762]: I1009 13:49:30.988800 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5734e640-439a-45ff-8126-48eba63b8d4d-kube-api-access-dcwkv" (OuterVolumeSpecName: "kube-api-access-dcwkv") pod "5734e640-439a-45ff-8126-48eba63b8d4d" (UID: "5734e640-439a-45ff-8126-48eba63b8d4d"). InnerVolumeSpecName "kube-api-access-dcwkv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:49:31 crc kubenswrapper[4762]: I1009 13:49:31.001726 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5734e640-439a-45ff-8126-48eba63b8d4d-config-data" (OuterVolumeSpecName: "config-data") pod "5734e640-439a-45ff-8126-48eba63b8d4d" (UID: "5734e640-439a-45ff-8126-48eba63b8d4d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:49:31 crc kubenswrapper[4762]: I1009 13:49:31.014034 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5734e640-439a-45ff-8126-48eba63b8d4d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5734e640-439a-45ff-8126-48eba63b8d4d" (UID: "5734e640-439a-45ff-8126-48eba63b8d4d"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:49:31 crc kubenswrapper[4762]: I1009 13:49:31.058121 4762 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5734e640-439a-45ff-8126-48eba63b8d4d-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 13:49:31 crc kubenswrapper[4762]: I1009 13:49:31.058485 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dcwkv\" (UniqueName: \"kubernetes.io/projected/5734e640-439a-45ff-8126-48eba63b8d4d-kube-api-access-dcwkv\") on node \"crc\" DevicePath \"\"" Oct 09 13:49:31 crc kubenswrapper[4762]: I1009 13:49:31.058526 4762 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5734e640-439a-45ff-8126-48eba63b8d4d-scripts\") on node \"crc\" DevicePath \"\"" Oct 09 13:49:31 crc kubenswrapper[4762]: I1009 13:49:31.058534 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5734e640-439a-45ff-8126-48eba63b8d4d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 13:49:31 crc kubenswrapper[4762]: I1009 13:49:31.546855 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-7q5p9" event={"ID":"5734e640-439a-45ff-8126-48eba63b8d4d","Type":"ContainerDied","Data":"a61e6e2d38e293dd7cff7474d0b72959a4d7cd64ed89d0f71e08f327408d43ff"} Oct 09 13:49:31 crc kubenswrapper[4762]: I1009 13:49:31.546892 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a61e6e2d38e293dd7cff7474d0b72959a4d7cd64ed89d0f71e08f327408d43ff" Oct 09 13:49:31 crc kubenswrapper[4762]: I1009 13:49:31.546941 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-7q5p9" Oct 09 13:49:31 crc kubenswrapper[4762]: I1009 13:49:31.721261 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 09 13:49:31 crc kubenswrapper[4762]: I1009 13:49:31.721924 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="81d29bcf-d612-49b9-987d-07d7ea0643f2" containerName="nova-api-log" containerID="cri-o://65a85270b622416f6e1fbc788a734bd6f74e0092a41ab3ec1010b56d4d946dfb" gracePeriod=30 Oct 09 13:49:31 crc kubenswrapper[4762]: I1009 13:49:31.722342 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="81d29bcf-d612-49b9-987d-07d7ea0643f2" containerName="nova-api-api" containerID="cri-o://bc3086d9271063c14323cf54223e084102c1c6a4ab7576b8816ba6b32262178f" gracePeriod=30 Oct 09 13:49:31 crc kubenswrapper[4762]: I1009 13:49:31.732113 4762 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="81d29bcf-d612-49b9-987d-07d7ea0643f2" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.198:8774/\": EOF" Oct 09 13:49:31 crc kubenswrapper[4762]: I1009 13:49:31.732248 4762 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="81d29bcf-d612-49b9-987d-07d7ea0643f2" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.198:8774/\": EOF" Oct 09 13:49:31 crc kubenswrapper[4762]: I1009 13:49:31.747308 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 09 13:49:31 crc kubenswrapper[4762]: I1009 13:49:31.747551 4762 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openstack/nova-metadata-0" podUID="fbddf4da-b50f-4ea5-9665-58211ecbeed3" containerName="nova-metadata-log" containerID="cri-o://d5e79170083593fdb808baed427cd06671b6a2833758056a5f1875436d946a46" gracePeriod=30 Oct 09 13:49:31 crc kubenswrapper[4762]: I1009 13:49:31.747992 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="fbddf4da-b50f-4ea5-9665-58211ecbeed3" containerName="nova-metadata-metadata" containerID="cri-o://7ad2818a82234828b778f79e21e312848f34a84974fdcbcd51d66e394513b56a" gracePeriod=30 Oct 09 13:49:31 crc kubenswrapper[4762]: I1009 13:49:31.769262 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Oct 09 13:49:31 crc kubenswrapper[4762]: I1009 13:49:31.769477 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="3c824403-f051-4f6f-8fd3-947fed9754a2" containerName="nova-scheduler-scheduler" containerID="cri-o://20d98a746ba255dae07bc52fa6cc410f47ffe7d26f5ecd0e0bc7866a47097251" gracePeriod=30 Oct 09 13:49:32 crc kubenswrapper[4762]: I1009 13:49:32.557258 4762 generic.go:334] "Generic (PLEG): container finished" podID="81d29bcf-d612-49b9-987d-07d7ea0643f2" containerID="65a85270b622416f6e1fbc788a734bd6f74e0092a41ab3ec1010b56d4d946dfb" exitCode=143 Oct 09 13:49:32 crc kubenswrapper[4762]: I1009 13:49:32.557348 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"81d29bcf-d612-49b9-987d-07d7ea0643f2","Type":"ContainerDied","Data":"65a85270b622416f6e1fbc788a734bd6f74e0092a41ab3ec1010b56d4d946dfb"} Oct 09 13:49:32 crc kubenswrapper[4762]: I1009 13:49:32.559966 4762 generic.go:334] "Generic (PLEG): container finished" podID="fbddf4da-b50f-4ea5-9665-58211ecbeed3" containerID="d5e79170083593fdb808baed427cd06671b6a2833758056a5f1875436d946a46" exitCode=143 Oct 09 13:49:32 crc kubenswrapper[4762]: I1009 13:49:32.560014 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"fbddf4da-b50f-4ea5-9665-58211ecbeed3","Type":"ContainerDied","Data":"d5e79170083593fdb808baed427cd06671b6a2833758056a5f1875436d946a46"} Oct 09 13:49:32 crc kubenswrapper[4762]: E1009 13:49:32.726507 4762 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="20d98a746ba255dae07bc52fa6cc410f47ffe7d26f5ecd0e0bc7866a47097251" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Oct 09 13:49:32 crc kubenswrapper[4762]: E1009 13:49:32.728654 4762 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="20d98a746ba255dae07bc52fa6cc410f47ffe7d26f5ecd0e0bc7866a47097251" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Oct 09 13:49:32 crc kubenswrapper[4762]: E1009 13:49:32.730233 4762 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="20d98a746ba255dae07bc52fa6cc410f47ffe7d26f5ecd0e0bc7866a47097251" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Oct 09 13:49:32 crc kubenswrapper[4762]: E1009 13:49:32.730269 4762 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: 
container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="3c824403-f051-4f6f-8fd3-947fed9754a2" containerName="nova-scheduler-scheduler" Oct 09 13:49:34 crc kubenswrapper[4762]: I1009 13:49:34.988750 4762 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="fbddf4da-b50f-4ea5-9665-58211ecbeed3" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.190:8775/\": read tcp 10.217.0.2:51580->10.217.0.190:8775: read: connection reset by peer" Oct 09 13:49:34 crc kubenswrapper[4762]: I1009 13:49:34.988751 4762 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="fbddf4da-b50f-4ea5-9665-58211ecbeed3" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.190:8775/\": read tcp 10.217.0.2:51576->10.217.0.190:8775: read: connection reset by peer" Oct 09 13:49:35 crc kubenswrapper[4762]: I1009 13:49:35.461701 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 09 13:49:35 crc kubenswrapper[4762]: I1009 13:49:35.553465 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8bkqp\" (UniqueName: \"kubernetes.io/projected/fbddf4da-b50f-4ea5-9665-58211ecbeed3-kube-api-access-8bkqp\") pod \"fbddf4da-b50f-4ea5-9665-58211ecbeed3\" (UID: \"fbddf4da-b50f-4ea5-9665-58211ecbeed3\") " Oct 09 13:49:35 crc kubenswrapper[4762]: I1009 13:49:35.554698 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbddf4da-b50f-4ea5-9665-58211ecbeed3-combined-ca-bundle\") pod \"fbddf4da-b50f-4ea5-9665-58211ecbeed3\" (UID: \"fbddf4da-b50f-4ea5-9665-58211ecbeed3\") " Oct 09 13:49:35 crc kubenswrapper[4762]: I1009 13:49:35.554736 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fbddf4da-b50f-4ea5-9665-58211ecbeed3-logs\") pod \"fbddf4da-b50f-4ea5-9665-58211ecbeed3\" (UID: \"fbddf4da-b50f-4ea5-9665-58211ecbeed3\") " Oct 09 13:49:35 crc kubenswrapper[4762]: I1009 13:49:35.554778 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fbddf4da-b50f-4ea5-9665-58211ecbeed3-config-data\") pod \"fbddf4da-b50f-4ea5-9665-58211ecbeed3\" (UID: \"fbddf4da-b50f-4ea5-9665-58211ecbeed3\") " Oct 09 13:49:35 crc kubenswrapper[4762]: I1009 13:49:35.554811 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/fbddf4da-b50f-4ea5-9665-58211ecbeed3-nova-metadata-tls-certs\") pod \"fbddf4da-b50f-4ea5-9665-58211ecbeed3\" (UID: \"fbddf4da-b50f-4ea5-9665-58211ecbeed3\") " Oct 09 13:49:35 crc kubenswrapper[4762]: I1009 13:49:35.555179 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fbddf4da-b50f-4ea5-9665-58211ecbeed3-logs" (OuterVolumeSpecName: "logs") pod "fbddf4da-b50f-4ea5-9665-58211ecbeed3" (UID: "fbddf4da-b50f-4ea5-9665-58211ecbeed3"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 13:49:35 crc kubenswrapper[4762]: I1009 13:49:35.555680 4762 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fbddf4da-b50f-4ea5-9665-58211ecbeed3-logs\") on node \"crc\" DevicePath \"\"" Oct 09 13:49:35 crc kubenswrapper[4762]: I1009 13:49:35.565177 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fbddf4da-b50f-4ea5-9665-58211ecbeed3-kube-api-access-8bkqp" (OuterVolumeSpecName: "kube-api-access-8bkqp") pod "fbddf4da-b50f-4ea5-9665-58211ecbeed3" (UID: "fbddf4da-b50f-4ea5-9665-58211ecbeed3"). InnerVolumeSpecName "kube-api-access-8bkqp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:49:35 crc kubenswrapper[4762]: I1009 13:49:35.586772 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fbddf4da-b50f-4ea5-9665-58211ecbeed3-config-data" (OuterVolumeSpecName: "config-data") pod "fbddf4da-b50f-4ea5-9665-58211ecbeed3" (UID: "fbddf4da-b50f-4ea5-9665-58211ecbeed3"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:49:35 crc kubenswrapper[4762]: I1009 13:49:35.590786 4762 generic.go:334] "Generic (PLEG): container finished" podID="fbddf4da-b50f-4ea5-9665-58211ecbeed3" containerID="7ad2818a82234828b778f79e21e312848f34a84974fdcbcd51d66e394513b56a" exitCode=0 Oct 09 13:49:35 crc kubenswrapper[4762]: I1009 13:49:35.590827 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 09 13:49:35 crc kubenswrapper[4762]: I1009 13:49:35.590887 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"fbddf4da-b50f-4ea5-9665-58211ecbeed3","Type":"ContainerDied","Data":"7ad2818a82234828b778f79e21e312848f34a84974fdcbcd51d66e394513b56a"} Oct 09 13:49:35 crc kubenswrapper[4762]: I1009 13:49:35.591030 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"fbddf4da-b50f-4ea5-9665-58211ecbeed3","Type":"ContainerDied","Data":"33daca6339931d222d2c3256862ee48b0e4028e67aee3eb978c0a455ea9dd335"} Oct 09 13:49:35 crc kubenswrapper[4762]: I1009 13:49:35.591087 4762 scope.go:117] "RemoveContainer" containerID="7ad2818a82234828b778f79e21e312848f34a84974fdcbcd51d66e394513b56a" Oct 09 13:49:35 crc kubenswrapper[4762]: I1009 13:49:35.591573 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fbddf4da-b50f-4ea5-9665-58211ecbeed3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fbddf4da-b50f-4ea5-9665-58211ecbeed3" (UID: "fbddf4da-b50f-4ea5-9665-58211ecbeed3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:49:35 crc kubenswrapper[4762]: I1009 13:49:35.609308 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fbddf4da-b50f-4ea5-9665-58211ecbeed3-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "fbddf4da-b50f-4ea5-9665-58211ecbeed3" (UID: "fbddf4da-b50f-4ea5-9665-58211ecbeed3"). InnerVolumeSpecName "nova-metadata-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:49:35 crc kubenswrapper[4762]: I1009 13:49:35.657806 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8bkqp\" (UniqueName: \"kubernetes.io/projected/fbddf4da-b50f-4ea5-9665-58211ecbeed3-kube-api-access-8bkqp\") on node \"crc\" DevicePath \"\"" Oct 09 13:49:35 crc kubenswrapper[4762]: I1009 13:49:35.657846 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbddf4da-b50f-4ea5-9665-58211ecbeed3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 13:49:35 crc kubenswrapper[4762]: I1009 13:49:35.657859 4762 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fbddf4da-b50f-4ea5-9665-58211ecbeed3-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 13:49:35 crc kubenswrapper[4762]: I1009 13:49:35.657875 4762 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/fbddf4da-b50f-4ea5-9665-58211ecbeed3-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 09 13:49:35 crc kubenswrapper[4762]: I1009 13:49:35.668156 4762 scope.go:117] "RemoveContainer" containerID="d5e79170083593fdb808baed427cd06671b6a2833758056a5f1875436d946a46" Oct 09 13:49:35 crc kubenswrapper[4762]: I1009 13:49:35.690282 4762 scope.go:117] "RemoveContainer" containerID="7ad2818a82234828b778f79e21e312848f34a84974fdcbcd51d66e394513b56a" Oct 09 13:49:35 crc kubenswrapper[4762]: E1009 13:49:35.690844 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7ad2818a82234828b778f79e21e312848f34a84974fdcbcd51d66e394513b56a\": container with ID starting with 7ad2818a82234828b778f79e21e312848f34a84974fdcbcd51d66e394513b56a not found: ID does not exist" containerID="7ad2818a82234828b778f79e21e312848f34a84974fdcbcd51d66e394513b56a" Oct 09 13:49:35 crc kubenswrapper[4762]: I1009 13:49:35.690884 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7ad2818a82234828b778f79e21e312848f34a84974fdcbcd51d66e394513b56a"} err="failed to get container status \"7ad2818a82234828b778f79e21e312848f34a84974fdcbcd51d66e394513b56a\": rpc error: code = NotFound desc = could not find container \"7ad2818a82234828b778f79e21e312848f34a84974fdcbcd51d66e394513b56a\": container with ID starting with 7ad2818a82234828b778f79e21e312848f34a84974fdcbcd51d66e394513b56a not found: ID does not exist" Oct 09 13:49:35 crc kubenswrapper[4762]: I1009 13:49:35.690913 4762 scope.go:117] "RemoveContainer" containerID="d5e79170083593fdb808baed427cd06671b6a2833758056a5f1875436d946a46" Oct 09 13:49:35 crc kubenswrapper[4762]: E1009 13:49:35.692166 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d5e79170083593fdb808baed427cd06671b6a2833758056a5f1875436d946a46\": container with ID starting with d5e79170083593fdb808baed427cd06671b6a2833758056a5f1875436d946a46 not found: ID does not exist" containerID="d5e79170083593fdb808baed427cd06671b6a2833758056a5f1875436d946a46" Oct 09 13:49:35 crc kubenswrapper[4762]: I1009 13:49:35.692192 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d5e79170083593fdb808baed427cd06671b6a2833758056a5f1875436d946a46"} err="failed to get container status \"d5e79170083593fdb808baed427cd06671b6a2833758056a5f1875436d946a46\": rpc 
error: code = NotFound desc = could not find container \"d5e79170083593fdb808baed427cd06671b6a2833758056a5f1875436d946a46\": container with ID starting with d5e79170083593fdb808baed427cd06671b6a2833758056a5f1875436d946a46 not found: ID does not exist" Oct 09 13:49:35 crc kubenswrapper[4762]: I1009 13:49:35.935692 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 09 13:49:35 crc kubenswrapper[4762]: I1009 13:49:35.947499 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Oct 09 13:49:35 crc kubenswrapper[4762]: I1009 13:49:35.966059 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Oct 09 13:49:35 crc kubenswrapper[4762]: E1009 13:49:35.967845 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5734e640-439a-45ff-8126-48eba63b8d4d" containerName="nova-manage" Oct 09 13:49:35 crc kubenswrapper[4762]: I1009 13:49:35.967879 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="5734e640-439a-45ff-8126-48eba63b8d4d" containerName="nova-manage" Oct 09 13:49:35 crc kubenswrapper[4762]: E1009 13:49:35.967932 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d928647-7b87-4be1-9b66-5b146f24f93a" containerName="dnsmasq-dns" Oct 09 13:49:35 crc kubenswrapper[4762]: I1009 13:49:35.967943 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d928647-7b87-4be1-9b66-5b146f24f93a" containerName="dnsmasq-dns" Oct 09 13:49:35 crc kubenswrapper[4762]: E1009 13:49:35.967954 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fbddf4da-b50f-4ea5-9665-58211ecbeed3" containerName="nova-metadata-log" Oct 09 13:49:35 crc kubenswrapper[4762]: I1009 13:49:35.967965 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="fbddf4da-b50f-4ea5-9665-58211ecbeed3" containerName="nova-metadata-log" Oct 09 13:49:35 crc kubenswrapper[4762]: E1009 13:49:35.967982 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fbddf4da-b50f-4ea5-9665-58211ecbeed3" containerName="nova-metadata-metadata" Oct 09 13:49:35 crc kubenswrapper[4762]: I1009 13:49:35.967994 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="fbddf4da-b50f-4ea5-9665-58211ecbeed3" containerName="nova-metadata-metadata" Oct 09 13:49:35 crc kubenswrapper[4762]: E1009 13:49:35.968006 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d928647-7b87-4be1-9b66-5b146f24f93a" containerName="init" Oct 09 13:49:35 crc kubenswrapper[4762]: I1009 13:49:35.968018 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d928647-7b87-4be1-9b66-5b146f24f93a" containerName="init" Oct 09 13:49:35 crc kubenswrapper[4762]: I1009 13:49:35.968355 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="fbddf4da-b50f-4ea5-9665-58211ecbeed3" containerName="nova-metadata-metadata" Oct 09 13:49:35 crc kubenswrapper[4762]: I1009 13:49:35.968390 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="5734e640-439a-45ff-8126-48eba63b8d4d" containerName="nova-manage" Oct 09 13:49:35 crc kubenswrapper[4762]: I1009 13:49:35.968405 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="2d928647-7b87-4be1-9b66-5b146f24f93a" containerName="dnsmasq-dns" Oct 09 13:49:35 crc kubenswrapper[4762]: I1009 13:49:35.968419 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="fbddf4da-b50f-4ea5-9665-58211ecbeed3" containerName="nova-metadata-log" Oct 09 13:49:35 crc kubenswrapper[4762]: I1009 13:49:35.970050 4762 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 09 13:49:35 crc kubenswrapper[4762]: I1009 13:49:35.971987 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Oct 09 13:49:35 crc kubenswrapper[4762]: I1009 13:49:35.976591 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Oct 09 13:49:35 crc kubenswrapper[4762]: I1009 13:49:35.978063 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 09 13:49:36 crc kubenswrapper[4762]: I1009 13:49:36.072985 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d09898b1-4f64-490a-bf9f-eb03d7219d8d-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"d09898b1-4f64-490a-bf9f-eb03d7219d8d\") " pod="openstack/nova-metadata-0" Oct 09 13:49:36 crc kubenswrapper[4762]: I1009 13:49:36.073026 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d09898b1-4f64-490a-bf9f-eb03d7219d8d-logs\") pod \"nova-metadata-0\" (UID: \"d09898b1-4f64-490a-bf9f-eb03d7219d8d\") " pod="openstack/nova-metadata-0" Oct 09 13:49:36 crc kubenswrapper[4762]: I1009 13:49:36.073067 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/d09898b1-4f64-490a-bf9f-eb03d7219d8d-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"d09898b1-4f64-490a-bf9f-eb03d7219d8d\") " pod="openstack/nova-metadata-0" Oct 09 13:49:36 crc kubenswrapper[4762]: I1009 13:49:36.073245 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d09898b1-4f64-490a-bf9f-eb03d7219d8d-config-data\") pod \"nova-metadata-0\" (UID: \"d09898b1-4f64-490a-bf9f-eb03d7219d8d\") " pod="openstack/nova-metadata-0" Oct 09 13:49:36 crc kubenswrapper[4762]: I1009 13:49:36.073370 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sn5lb\" (UniqueName: \"kubernetes.io/projected/d09898b1-4f64-490a-bf9f-eb03d7219d8d-kube-api-access-sn5lb\") pod \"nova-metadata-0\" (UID: \"d09898b1-4f64-490a-bf9f-eb03d7219d8d\") " pod="openstack/nova-metadata-0" Oct 09 13:49:36 crc kubenswrapper[4762]: I1009 13:49:36.175242 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d09898b1-4f64-490a-bf9f-eb03d7219d8d-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"d09898b1-4f64-490a-bf9f-eb03d7219d8d\") " pod="openstack/nova-metadata-0" Oct 09 13:49:36 crc kubenswrapper[4762]: I1009 13:49:36.175298 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d09898b1-4f64-490a-bf9f-eb03d7219d8d-logs\") pod \"nova-metadata-0\" (UID: \"d09898b1-4f64-490a-bf9f-eb03d7219d8d\") " pod="openstack/nova-metadata-0" Oct 09 13:49:36 crc kubenswrapper[4762]: I1009 13:49:36.175362 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/d09898b1-4f64-490a-bf9f-eb03d7219d8d-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: 
\"d09898b1-4f64-490a-bf9f-eb03d7219d8d\") " pod="openstack/nova-metadata-0" Oct 09 13:49:36 crc kubenswrapper[4762]: I1009 13:49:36.175425 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d09898b1-4f64-490a-bf9f-eb03d7219d8d-config-data\") pod \"nova-metadata-0\" (UID: \"d09898b1-4f64-490a-bf9f-eb03d7219d8d\") " pod="openstack/nova-metadata-0" Oct 09 13:49:36 crc kubenswrapper[4762]: I1009 13:49:36.175479 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sn5lb\" (UniqueName: \"kubernetes.io/projected/d09898b1-4f64-490a-bf9f-eb03d7219d8d-kube-api-access-sn5lb\") pod \"nova-metadata-0\" (UID: \"d09898b1-4f64-490a-bf9f-eb03d7219d8d\") " pod="openstack/nova-metadata-0" Oct 09 13:49:36 crc kubenswrapper[4762]: I1009 13:49:36.175975 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d09898b1-4f64-490a-bf9f-eb03d7219d8d-logs\") pod \"nova-metadata-0\" (UID: \"d09898b1-4f64-490a-bf9f-eb03d7219d8d\") " pod="openstack/nova-metadata-0" Oct 09 13:49:36 crc kubenswrapper[4762]: I1009 13:49:36.179466 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d09898b1-4f64-490a-bf9f-eb03d7219d8d-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"d09898b1-4f64-490a-bf9f-eb03d7219d8d\") " pod="openstack/nova-metadata-0" Oct 09 13:49:36 crc kubenswrapper[4762]: I1009 13:49:36.179478 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d09898b1-4f64-490a-bf9f-eb03d7219d8d-config-data\") pod \"nova-metadata-0\" (UID: \"d09898b1-4f64-490a-bf9f-eb03d7219d8d\") " pod="openstack/nova-metadata-0" Oct 09 13:49:36 crc kubenswrapper[4762]: I1009 13:49:36.180278 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/d09898b1-4f64-490a-bf9f-eb03d7219d8d-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"d09898b1-4f64-490a-bf9f-eb03d7219d8d\") " pod="openstack/nova-metadata-0" Oct 09 13:49:36 crc kubenswrapper[4762]: I1009 13:49:36.195299 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sn5lb\" (UniqueName: \"kubernetes.io/projected/d09898b1-4f64-490a-bf9f-eb03d7219d8d-kube-api-access-sn5lb\") pod \"nova-metadata-0\" (UID: \"d09898b1-4f64-490a-bf9f-eb03d7219d8d\") " pod="openstack/nova-metadata-0" Oct 09 13:49:36 crc kubenswrapper[4762]: I1009 13:49:36.297911 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 09 13:49:36 crc kubenswrapper[4762]: I1009 13:49:36.912249 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 09 13:49:36 crc kubenswrapper[4762]: W1009 13:49:36.920475 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd09898b1_4f64_490a_bf9f_eb03d7219d8d.slice/crio-100378ea26cc2c72c178357623c0cf17bf727d084b5de0f6189592f9045d3744 WatchSource:0}: Error finding container 100378ea26cc2c72c178357623c0cf17bf727d084b5de0f6189592f9045d3744: Status 404 returned error can't find the container with id 100378ea26cc2c72c178357623c0cf17bf727d084b5de0f6189592f9045d3744 Oct 09 13:49:36 crc kubenswrapper[4762]: I1009 13:49:36.994149 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fbddf4da-b50f-4ea5-9665-58211ecbeed3" path="/var/lib/kubelet/pods/fbddf4da-b50f-4ea5-9665-58211ecbeed3/volumes" Oct 09 13:49:37 crc kubenswrapper[4762]: I1009 13:49:37.055141 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 09 13:49:37 crc kubenswrapper[4762]: I1009 13:49:37.094274 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2s2s7\" (UniqueName: \"kubernetes.io/projected/3c824403-f051-4f6f-8fd3-947fed9754a2-kube-api-access-2s2s7\") pod \"3c824403-f051-4f6f-8fd3-947fed9754a2\" (UID: \"3c824403-f051-4f6f-8fd3-947fed9754a2\") " Oct 09 13:49:37 crc kubenswrapper[4762]: I1009 13:49:37.094512 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3c824403-f051-4f6f-8fd3-947fed9754a2-config-data\") pod \"3c824403-f051-4f6f-8fd3-947fed9754a2\" (UID: \"3c824403-f051-4f6f-8fd3-947fed9754a2\") " Oct 09 13:49:37 crc kubenswrapper[4762]: I1009 13:49:37.094594 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c824403-f051-4f6f-8fd3-947fed9754a2-combined-ca-bundle\") pod \"3c824403-f051-4f6f-8fd3-947fed9754a2\" (UID: \"3c824403-f051-4f6f-8fd3-947fed9754a2\") " Oct 09 13:49:37 crc kubenswrapper[4762]: I1009 13:49:37.115536 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3c824403-f051-4f6f-8fd3-947fed9754a2-kube-api-access-2s2s7" (OuterVolumeSpecName: "kube-api-access-2s2s7") pod "3c824403-f051-4f6f-8fd3-947fed9754a2" (UID: "3c824403-f051-4f6f-8fd3-947fed9754a2"). InnerVolumeSpecName "kube-api-access-2s2s7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:49:37 crc kubenswrapper[4762]: I1009 13:49:37.153855 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3c824403-f051-4f6f-8fd3-947fed9754a2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3c824403-f051-4f6f-8fd3-947fed9754a2" (UID: "3c824403-f051-4f6f-8fd3-947fed9754a2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:49:37 crc kubenswrapper[4762]: I1009 13:49:37.159806 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3c824403-f051-4f6f-8fd3-947fed9754a2-config-data" (OuterVolumeSpecName: "config-data") pod "3c824403-f051-4f6f-8fd3-947fed9754a2" (UID: "3c824403-f051-4f6f-8fd3-947fed9754a2"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:49:37 crc kubenswrapper[4762]: I1009 13:49:37.197749 4762 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3c824403-f051-4f6f-8fd3-947fed9754a2-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 13:49:37 crc kubenswrapper[4762]: I1009 13:49:37.197781 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c824403-f051-4f6f-8fd3-947fed9754a2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 13:49:37 crc kubenswrapper[4762]: I1009 13:49:37.197793 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2s2s7\" (UniqueName: \"kubernetes.io/projected/3c824403-f051-4f6f-8fd3-947fed9754a2-kube-api-access-2s2s7\") on node \"crc\" DevicePath \"\"" Oct 09 13:49:37 crc kubenswrapper[4762]: I1009 13:49:37.527575 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 09 13:49:37 crc kubenswrapper[4762]: I1009 13:49:37.607180 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/81d29bcf-d612-49b9-987d-07d7ea0643f2-internal-tls-certs\") pod \"81d29bcf-d612-49b9-987d-07d7ea0643f2\" (UID: \"81d29bcf-d612-49b9-987d-07d7ea0643f2\") " Oct 09 13:49:37 crc kubenswrapper[4762]: I1009 13:49:37.607287 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/81d29bcf-d612-49b9-987d-07d7ea0643f2-combined-ca-bundle\") pod \"81d29bcf-d612-49b9-987d-07d7ea0643f2\" (UID: \"81d29bcf-d612-49b9-987d-07d7ea0643f2\") " Oct 09 13:49:37 crc kubenswrapper[4762]: I1009 13:49:37.607404 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lmgn2\" (UniqueName: \"kubernetes.io/projected/81d29bcf-d612-49b9-987d-07d7ea0643f2-kube-api-access-lmgn2\") pod \"81d29bcf-d612-49b9-987d-07d7ea0643f2\" (UID: \"81d29bcf-d612-49b9-987d-07d7ea0643f2\") " Oct 09 13:49:37 crc kubenswrapper[4762]: I1009 13:49:37.607558 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/81d29bcf-d612-49b9-987d-07d7ea0643f2-logs\") pod \"81d29bcf-d612-49b9-987d-07d7ea0643f2\" (UID: \"81d29bcf-d612-49b9-987d-07d7ea0643f2\") " Oct 09 13:49:37 crc kubenswrapper[4762]: I1009 13:49:37.607596 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/81d29bcf-d612-49b9-987d-07d7ea0643f2-config-data\") pod \"81d29bcf-d612-49b9-987d-07d7ea0643f2\" (UID: \"81d29bcf-d612-49b9-987d-07d7ea0643f2\") " Oct 09 13:49:37 crc kubenswrapper[4762]: I1009 13:49:37.607656 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/81d29bcf-d612-49b9-987d-07d7ea0643f2-public-tls-certs\") pod \"81d29bcf-d612-49b9-987d-07d7ea0643f2\" (UID: \"81d29bcf-d612-49b9-987d-07d7ea0643f2\") " Oct 09 13:49:37 crc kubenswrapper[4762]: I1009 13:49:37.608179 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/81d29bcf-d612-49b9-987d-07d7ea0643f2-logs" (OuterVolumeSpecName: "logs") pod "81d29bcf-d612-49b9-987d-07d7ea0643f2" (UID: "81d29bcf-d612-49b9-987d-07d7ea0643f2"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 13:49:37 crc kubenswrapper[4762]: I1009 13:49:37.622284 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/81d29bcf-d612-49b9-987d-07d7ea0643f2-kube-api-access-lmgn2" (OuterVolumeSpecName: "kube-api-access-lmgn2") pod "81d29bcf-d612-49b9-987d-07d7ea0643f2" (UID: "81d29bcf-d612-49b9-987d-07d7ea0643f2"). InnerVolumeSpecName "kube-api-access-lmgn2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:49:37 crc kubenswrapper[4762]: I1009 13:49:37.625022 4762 generic.go:334] "Generic (PLEG): container finished" podID="81d29bcf-d612-49b9-987d-07d7ea0643f2" containerID="bc3086d9271063c14323cf54223e084102c1c6a4ab7576b8816ba6b32262178f" exitCode=0 Oct 09 13:49:37 crc kubenswrapper[4762]: I1009 13:49:37.625078 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"81d29bcf-d612-49b9-987d-07d7ea0643f2","Type":"ContainerDied","Data":"bc3086d9271063c14323cf54223e084102c1c6a4ab7576b8816ba6b32262178f"} Oct 09 13:49:37 crc kubenswrapper[4762]: I1009 13:49:37.625104 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"81d29bcf-d612-49b9-987d-07d7ea0643f2","Type":"ContainerDied","Data":"8bf01f2c7d7b275cd8529e4fb34a7a30ede8e9c5689b54999ec07ab8981a9c7f"} Oct 09 13:49:37 crc kubenswrapper[4762]: I1009 13:49:37.625119 4762 scope.go:117] "RemoveContainer" containerID="bc3086d9271063c14323cf54223e084102c1c6a4ab7576b8816ba6b32262178f" Oct 09 13:49:37 crc kubenswrapper[4762]: I1009 13:49:37.625227 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 09 13:49:37 crc kubenswrapper[4762]: I1009 13:49:37.627157 4762 generic.go:334] "Generic (PLEG): container finished" podID="3c824403-f051-4f6f-8fd3-947fed9754a2" containerID="20d98a746ba255dae07bc52fa6cc410f47ffe7d26f5ecd0e0bc7866a47097251" exitCode=0 Oct 09 13:49:37 crc kubenswrapper[4762]: I1009 13:49:37.627234 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Oct 09 13:49:37 crc kubenswrapper[4762]: I1009 13:49:37.627392 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"3c824403-f051-4f6f-8fd3-947fed9754a2","Type":"ContainerDied","Data":"20d98a746ba255dae07bc52fa6cc410f47ffe7d26f5ecd0e0bc7866a47097251"} Oct 09 13:49:37 crc kubenswrapper[4762]: I1009 13:49:37.627499 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"3c824403-f051-4f6f-8fd3-947fed9754a2","Type":"ContainerDied","Data":"b527e95ab075d2d902ff0db08e0407521cbd4bb8f8d4ce18d1ee6c7856b7498b"} Oct 09 13:49:37 crc kubenswrapper[4762]: I1009 13:49:37.637719 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d09898b1-4f64-490a-bf9f-eb03d7219d8d","Type":"ContainerStarted","Data":"e126396d1ff84e0d194527868fb00994e52bf6887b97b8770716230bf623f284"} Oct 09 13:49:37 crc kubenswrapper[4762]: I1009 13:49:37.637768 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d09898b1-4f64-490a-bf9f-eb03d7219d8d","Type":"ContainerStarted","Data":"c97286ccff2b823606d77c2401198e8d96d31e2569c5791d38c297fc823abe2b"} Oct 09 13:49:37 crc kubenswrapper[4762]: I1009 13:49:37.637780 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d09898b1-4f64-490a-bf9f-eb03d7219d8d","Type":"ContainerStarted","Data":"100378ea26cc2c72c178357623c0cf17bf727d084b5de0f6189592f9045d3744"} Oct 09 13:49:37 crc kubenswrapper[4762]: I1009 13:49:37.641929 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/81d29bcf-d612-49b9-987d-07d7ea0643f2-config-data" (OuterVolumeSpecName: "config-data") pod "81d29bcf-d612-49b9-987d-07d7ea0643f2" (UID: "81d29bcf-d612-49b9-987d-07d7ea0643f2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:49:37 crc kubenswrapper[4762]: I1009 13:49:37.643034 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/81d29bcf-d612-49b9-987d-07d7ea0643f2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "81d29bcf-d612-49b9-987d-07d7ea0643f2" (UID: "81d29bcf-d612-49b9-987d-07d7ea0643f2"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:49:37 crc kubenswrapper[4762]: I1009 13:49:37.653857 4762 scope.go:117] "RemoveContainer" containerID="65a85270b622416f6e1fbc788a734bd6f74e0092a41ab3ec1010b56d4d946dfb" Oct 09 13:49:37 crc kubenswrapper[4762]: I1009 13:49:37.658600 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.658581784 podStartE2EDuration="2.658581784s" podCreationTimestamp="2025-10-09 13:49:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:49:37.655139743 +0000 UTC m=+1453.428930812" watchObservedRunningTime="2025-10-09 13:49:37.658581784 +0000 UTC m=+1453.432372823" Oct 09 13:49:37 crc kubenswrapper[4762]: I1009 13:49:37.688685 4762 scope.go:117] "RemoveContainer" containerID="bc3086d9271063c14323cf54223e084102c1c6a4ab7576b8816ba6b32262178f" Oct 09 13:49:37 crc kubenswrapper[4762]: E1009 13:49:37.694710 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bc3086d9271063c14323cf54223e084102c1c6a4ab7576b8816ba6b32262178f\": container with ID starting with bc3086d9271063c14323cf54223e084102c1c6a4ab7576b8816ba6b32262178f not found: ID does not exist" containerID="bc3086d9271063c14323cf54223e084102c1c6a4ab7576b8816ba6b32262178f" Oct 09 13:49:37 crc kubenswrapper[4762]: I1009 13:49:37.694809 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bc3086d9271063c14323cf54223e084102c1c6a4ab7576b8816ba6b32262178f"} err="failed to get container status \"bc3086d9271063c14323cf54223e084102c1c6a4ab7576b8816ba6b32262178f\": rpc error: code = NotFound desc = could not find container \"bc3086d9271063c14323cf54223e084102c1c6a4ab7576b8816ba6b32262178f\": container with ID starting with bc3086d9271063c14323cf54223e084102c1c6a4ab7576b8816ba6b32262178f not found: ID does not exist" Oct 09 13:49:37 crc kubenswrapper[4762]: I1009 13:49:37.694896 4762 scope.go:117] "RemoveContainer" containerID="65a85270b622416f6e1fbc788a734bd6f74e0092a41ab3ec1010b56d4d946dfb" Oct 09 13:49:37 crc kubenswrapper[4762]: I1009 13:49:37.696905 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/81d29bcf-d612-49b9-987d-07d7ea0643f2-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "81d29bcf-d612-49b9-987d-07d7ea0643f2" (UID: "81d29bcf-d612-49b9-987d-07d7ea0643f2"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:49:37 crc kubenswrapper[4762]: I1009 13:49:37.697527 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Oct 09 13:49:37 crc kubenswrapper[4762]: E1009 13:49:37.697568 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"65a85270b622416f6e1fbc788a734bd6f74e0092a41ab3ec1010b56d4d946dfb\": container with ID starting with 65a85270b622416f6e1fbc788a734bd6f74e0092a41ab3ec1010b56d4d946dfb not found: ID does not exist" containerID="65a85270b622416f6e1fbc788a734bd6f74e0092a41ab3ec1010b56d4d946dfb" Oct 09 13:49:37 crc kubenswrapper[4762]: I1009 13:49:37.697603 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"65a85270b622416f6e1fbc788a734bd6f74e0092a41ab3ec1010b56d4d946dfb"} err="failed to get container status \"65a85270b622416f6e1fbc788a734bd6f74e0092a41ab3ec1010b56d4d946dfb\": rpc error: code = NotFound desc = could not find container \"65a85270b622416f6e1fbc788a734bd6f74e0092a41ab3ec1010b56d4d946dfb\": container with ID starting with 65a85270b622416f6e1fbc788a734bd6f74e0092a41ab3ec1010b56d4d946dfb not found: ID does not exist" Oct 09 13:49:37 crc kubenswrapper[4762]: I1009 13:49:37.697628 4762 scope.go:117] "RemoveContainer" containerID="20d98a746ba255dae07bc52fa6cc410f47ffe7d26f5ecd0e0bc7866a47097251" Oct 09 13:49:37 crc kubenswrapper[4762]: I1009 13:49:37.700132 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/81d29bcf-d612-49b9-987d-07d7ea0643f2-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "81d29bcf-d612-49b9-987d-07d7ea0643f2" (UID: "81d29bcf-d612-49b9-987d-07d7ea0643f2"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:49:37 crc kubenswrapper[4762]: I1009 13:49:37.708921 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Oct 09 13:49:37 crc kubenswrapper[4762]: I1009 13:49:37.710590 4762 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/81d29bcf-d612-49b9-987d-07d7ea0643f2-public-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 09 13:49:37 crc kubenswrapper[4762]: I1009 13:49:37.712220 4762 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/81d29bcf-d612-49b9-987d-07d7ea0643f2-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 09 13:49:37 crc kubenswrapper[4762]: I1009 13:49:37.712295 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/81d29bcf-d612-49b9-987d-07d7ea0643f2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 13:49:37 crc kubenswrapper[4762]: I1009 13:49:37.712349 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lmgn2\" (UniqueName: \"kubernetes.io/projected/81d29bcf-d612-49b9-987d-07d7ea0643f2-kube-api-access-lmgn2\") on node \"crc\" DevicePath \"\"" Oct 09 13:49:37 crc kubenswrapper[4762]: I1009 13:49:37.712441 4762 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/81d29bcf-d612-49b9-987d-07d7ea0643f2-logs\") on node \"crc\" DevicePath \"\"" Oct 09 13:49:37 crc kubenswrapper[4762]: I1009 13:49:37.712501 4762 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/81d29bcf-d612-49b9-987d-07d7ea0643f2-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 13:49:37 crc kubenswrapper[4762]: I1009 13:49:37.721999 4762 scope.go:117] "RemoveContainer" containerID="20d98a746ba255dae07bc52fa6cc410f47ffe7d26f5ecd0e0bc7866a47097251" Oct 09 13:49:37 crc kubenswrapper[4762]: E1009 13:49:37.722750 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"20d98a746ba255dae07bc52fa6cc410f47ffe7d26f5ecd0e0bc7866a47097251\": container with ID starting with 20d98a746ba255dae07bc52fa6cc410f47ffe7d26f5ecd0e0bc7866a47097251 not found: ID does not exist" containerID="20d98a746ba255dae07bc52fa6cc410f47ffe7d26f5ecd0e0bc7866a47097251" Oct 09 13:49:37 crc kubenswrapper[4762]: I1009 13:49:37.722779 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"20d98a746ba255dae07bc52fa6cc410f47ffe7d26f5ecd0e0bc7866a47097251"} err="failed to get container status \"20d98a746ba255dae07bc52fa6cc410f47ffe7d26f5ecd0e0bc7866a47097251\": rpc error: code = NotFound desc = could not find container \"20d98a746ba255dae07bc52fa6cc410f47ffe7d26f5ecd0e0bc7866a47097251\": container with ID starting with 20d98a746ba255dae07bc52fa6cc410f47ffe7d26f5ecd0e0bc7866a47097251 not found: ID does not exist" Oct 09 13:49:37 crc kubenswrapper[4762]: I1009 13:49:37.723665 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Oct 09 13:49:37 crc kubenswrapper[4762]: E1009 13:49:37.724062 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81d29bcf-d612-49b9-987d-07d7ea0643f2" containerName="nova-api-log" Oct 09 13:49:37 crc kubenswrapper[4762]: I1009 13:49:37.724080 4762 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="81d29bcf-d612-49b9-987d-07d7ea0643f2" containerName="nova-api-log" Oct 09 13:49:37 crc kubenswrapper[4762]: E1009 13:49:37.724095 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81d29bcf-d612-49b9-987d-07d7ea0643f2" containerName="nova-api-api" Oct 09 13:49:37 crc kubenswrapper[4762]: I1009 13:49:37.724101 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="81d29bcf-d612-49b9-987d-07d7ea0643f2" containerName="nova-api-api" Oct 09 13:49:37 crc kubenswrapper[4762]: E1009 13:49:37.724141 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3c824403-f051-4f6f-8fd3-947fed9754a2" containerName="nova-scheduler-scheduler" Oct 09 13:49:37 crc kubenswrapper[4762]: I1009 13:49:37.724148 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="3c824403-f051-4f6f-8fd3-947fed9754a2" containerName="nova-scheduler-scheduler" Oct 09 13:49:37 crc kubenswrapper[4762]: I1009 13:49:37.724306 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="81d29bcf-d612-49b9-987d-07d7ea0643f2" containerName="nova-api-api" Oct 09 13:49:37 crc kubenswrapper[4762]: I1009 13:49:37.724320 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="3c824403-f051-4f6f-8fd3-947fed9754a2" containerName="nova-scheduler-scheduler" Oct 09 13:49:37 crc kubenswrapper[4762]: I1009 13:49:37.724333 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="81d29bcf-d612-49b9-987d-07d7ea0643f2" containerName="nova-api-log" Oct 09 13:49:37 crc kubenswrapper[4762]: I1009 13:49:37.724959 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 09 13:49:37 crc kubenswrapper[4762]: I1009 13:49:37.729002 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Oct 09 13:49:37 crc kubenswrapper[4762]: I1009 13:49:37.738654 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 09 13:49:37 crc kubenswrapper[4762]: I1009 13:49:37.814731 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e56a40fb-847a-4e59-981b-369559466cb1-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"e56a40fb-847a-4e59-981b-369559466cb1\") " pod="openstack/nova-scheduler-0" Oct 09 13:49:37 crc kubenswrapper[4762]: I1009 13:49:37.814996 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-74m8v\" (UniqueName: \"kubernetes.io/projected/e56a40fb-847a-4e59-981b-369559466cb1-kube-api-access-74m8v\") pod \"nova-scheduler-0\" (UID: \"e56a40fb-847a-4e59-981b-369559466cb1\") " pod="openstack/nova-scheduler-0" Oct 09 13:49:37 crc kubenswrapper[4762]: I1009 13:49:37.815149 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e56a40fb-847a-4e59-981b-369559466cb1-config-data\") pod \"nova-scheduler-0\" (UID: \"e56a40fb-847a-4e59-981b-369559466cb1\") " pod="openstack/nova-scheduler-0" Oct 09 13:49:37 crc kubenswrapper[4762]: E1009 13:49:37.842419 4762 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3c824403_f051_4f6f_8fd3_947fed9754a2.slice\": RecentStats: unable to find data in memory cache], 
[\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3c824403_f051_4f6f_8fd3_947fed9754a2.slice/crio-b527e95ab075d2d902ff0db08e0407521cbd4bb8f8d4ce18d1ee6c7856b7498b\": RecentStats: unable to find data in memory cache]" Oct 09 13:49:37 crc kubenswrapper[4762]: I1009 13:49:37.916947 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e56a40fb-847a-4e59-981b-369559466cb1-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"e56a40fb-847a-4e59-981b-369559466cb1\") " pod="openstack/nova-scheduler-0" Oct 09 13:49:37 crc kubenswrapper[4762]: I1009 13:49:37.917321 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-74m8v\" (UniqueName: \"kubernetes.io/projected/e56a40fb-847a-4e59-981b-369559466cb1-kube-api-access-74m8v\") pod \"nova-scheduler-0\" (UID: \"e56a40fb-847a-4e59-981b-369559466cb1\") " pod="openstack/nova-scheduler-0" Oct 09 13:49:37 crc kubenswrapper[4762]: I1009 13:49:37.917375 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e56a40fb-847a-4e59-981b-369559466cb1-config-data\") pod \"nova-scheduler-0\" (UID: \"e56a40fb-847a-4e59-981b-369559466cb1\") " pod="openstack/nova-scheduler-0" Oct 09 13:49:37 crc kubenswrapper[4762]: I1009 13:49:37.921219 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e56a40fb-847a-4e59-981b-369559466cb1-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"e56a40fb-847a-4e59-981b-369559466cb1\") " pod="openstack/nova-scheduler-0" Oct 09 13:49:37 crc kubenswrapper[4762]: I1009 13:49:37.921442 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e56a40fb-847a-4e59-981b-369559466cb1-config-data\") pod \"nova-scheduler-0\" (UID: \"e56a40fb-847a-4e59-981b-369559466cb1\") " pod="openstack/nova-scheduler-0" Oct 09 13:49:37 crc kubenswrapper[4762]: I1009 13:49:37.942076 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-74m8v\" (UniqueName: \"kubernetes.io/projected/e56a40fb-847a-4e59-981b-369559466cb1-kube-api-access-74m8v\") pod \"nova-scheduler-0\" (UID: \"e56a40fb-847a-4e59-981b-369559466cb1\") " pod="openstack/nova-scheduler-0" Oct 09 13:49:37 crc kubenswrapper[4762]: I1009 13:49:37.972797 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 09 13:49:37 crc kubenswrapper[4762]: I1009 13:49:37.991856 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Oct 09 13:49:38 crc kubenswrapper[4762]: I1009 13:49:38.002452 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Oct 09 13:49:38 crc kubenswrapper[4762]: I1009 13:49:38.004557 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 09 13:49:38 crc kubenswrapper[4762]: I1009 13:49:38.007047 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Oct 09 13:49:38 crc kubenswrapper[4762]: I1009 13:49:38.007413 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Oct 09 13:49:38 crc kubenswrapper[4762]: I1009 13:49:38.007616 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Oct 09 13:49:38 crc kubenswrapper[4762]: I1009 13:49:38.013970 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 09 13:49:38 crc kubenswrapper[4762]: I1009 13:49:38.049878 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 09 13:49:38 crc kubenswrapper[4762]: I1009 13:49:38.121820 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f5b14562-fc3d-48fc-b83d-c12e5c9617c6-config-data\") pod \"nova-api-0\" (UID: \"f5b14562-fc3d-48fc-b83d-c12e5c9617c6\") " pod="openstack/nova-api-0" Oct 09 13:49:38 crc kubenswrapper[4762]: I1009 13:49:38.121916 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f5b14562-fc3d-48fc-b83d-c12e5c9617c6-public-tls-certs\") pod \"nova-api-0\" (UID: \"f5b14562-fc3d-48fc-b83d-c12e5c9617c6\") " pod="openstack/nova-api-0" Oct 09 13:49:38 crc kubenswrapper[4762]: I1009 13:49:38.121948 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f5b14562-fc3d-48fc-b83d-c12e5c9617c6-internal-tls-certs\") pod \"nova-api-0\" (UID: \"f5b14562-fc3d-48fc-b83d-c12e5c9617c6\") " pod="openstack/nova-api-0" Oct 09 13:49:38 crc kubenswrapper[4762]: I1009 13:49:38.121991 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5b14562-fc3d-48fc-b83d-c12e5c9617c6-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"f5b14562-fc3d-48fc-b83d-c12e5c9617c6\") " pod="openstack/nova-api-0" Oct 09 13:49:38 crc kubenswrapper[4762]: I1009 13:49:38.122036 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f5b14562-fc3d-48fc-b83d-c12e5c9617c6-logs\") pod \"nova-api-0\" (UID: \"f5b14562-fc3d-48fc-b83d-c12e5c9617c6\") " pod="openstack/nova-api-0" Oct 09 13:49:38 crc kubenswrapper[4762]: I1009 13:49:38.122087 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mcvsn\" (UniqueName: \"kubernetes.io/projected/f5b14562-fc3d-48fc-b83d-c12e5c9617c6-kube-api-access-mcvsn\") pod \"nova-api-0\" (UID: \"f5b14562-fc3d-48fc-b83d-c12e5c9617c6\") " pod="openstack/nova-api-0" Oct 09 13:49:38 crc kubenswrapper[4762]: I1009 13:49:38.224029 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mcvsn\" (UniqueName: \"kubernetes.io/projected/f5b14562-fc3d-48fc-b83d-c12e5c9617c6-kube-api-access-mcvsn\") pod \"nova-api-0\" (UID: \"f5b14562-fc3d-48fc-b83d-c12e5c9617c6\") " pod="openstack/nova-api-0" Oct 09 13:49:38 crc kubenswrapper[4762]: I1009 13:49:38.224399 4762 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f5b14562-fc3d-48fc-b83d-c12e5c9617c6-config-data\") pod \"nova-api-0\" (UID: \"f5b14562-fc3d-48fc-b83d-c12e5c9617c6\") " pod="openstack/nova-api-0" Oct 09 13:49:38 crc kubenswrapper[4762]: I1009 13:49:38.224449 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f5b14562-fc3d-48fc-b83d-c12e5c9617c6-public-tls-certs\") pod \"nova-api-0\" (UID: \"f5b14562-fc3d-48fc-b83d-c12e5c9617c6\") " pod="openstack/nova-api-0" Oct 09 13:49:38 crc kubenswrapper[4762]: I1009 13:49:38.224472 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f5b14562-fc3d-48fc-b83d-c12e5c9617c6-internal-tls-certs\") pod \"nova-api-0\" (UID: \"f5b14562-fc3d-48fc-b83d-c12e5c9617c6\") " pod="openstack/nova-api-0" Oct 09 13:49:38 crc kubenswrapper[4762]: I1009 13:49:38.224504 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5b14562-fc3d-48fc-b83d-c12e5c9617c6-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"f5b14562-fc3d-48fc-b83d-c12e5c9617c6\") " pod="openstack/nova-api-0" Oct 09 13:49:38 crc kubenswrapper[4762]: I1009 13:49:38.225958 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f5b14562-fc3d-48fc-b83d-c12e5c9617c6-logs\") pod \"nova-api-0\" (UID: \"f5b14562-fc3d-48fc-b83d-c12e5c9617c6\") " pod="openstack/nova-api-0" Oct 09 13:49:38 crc kubenswrapper[4762]: I1009 13:49:38.226451 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f5b14562-fc3d-48fc-b83d-c12e5c9617c6-logs\") pod \"nova-api-0\" (UID: \"f5b14562-fc3d-48fc-b83d-c12e5c9617c6\") " pod="openstack/nova-api-0" Oct 09 13:49:38 crc kubenswrapper[4762]: I1009 13:49:38.229552 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f5b14562-fc3d-48fc-b83d-c12e5c9617c6-internal-tls-certs\") pod \"nova-api-0\" (UID: \"f5b14562-fc3d-48fc-b83d-c12e5c9617c6\") " pod="openstack/nova-api-0" Oct 09 13:49:38 crc kubenswrapper[4762]: I1009 13:49:38.231798 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5b14562-fc3d-48fc-b83d-c12e5c9617c6-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"f5b14562-fc3d-48fc-b83d-c12e5c9617c6\") " pod="openstack/nova-api-0" Oct 09 13:49:38 crc kubenswrapper[4762]: I1009 13:49:38.232155 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f5b14562-fc3d-48fc-b83d-c12e5c9617c6-public-tls-certs\") pod \"nova-api-0\" (UID: \"f5b14562-fc3d-48fc-b83d-c12e5c9617c6\") " pod="openstack/nova-api-0" Oct 09 13:49:38 crc kubenswrapper[4762]: I1009 13:49:38.232332 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f5b14562-fc3d-48fc-b83d-c12e5c9617c6-config-data\") pod \"nova-api-0\" (UID: \"f5b14562-fc3d-48fc-b83d-c12e5c9617c6\") " pod="openstack/nova-api-0" Oct 09 13:49:38 crc kubenswrapper[4762]: I1009 13:49:38.247243 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mcvsn\" 
(UniqueName: \"kubernetes.io/projected/f5b14562-fc3d-48fc-b83d-c12e5c9617c6-kube-api-access-mcvsn\") pod \"nova-api-0\" (UID: \"f5b14562-fc3d-48fc-b83d-c12e5c9617c6\") " pod="openstack/nova-api-0" Oct 09 13:49:38 crc kubenswrapper[4762]: I1009 13:49:38.330265 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 09 13:49:38 crc kubenswrapper[4762]: I1009 13:49:38.507825 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 09 13:49:38 crc kubenswrapper[4762]: I1009 13:49:38.649230 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"e56a40fb-847a-4e59-981b-369559466cb1","Type":"ContainerStarted","Data":"6696d15f88c8972c2b6bf5003a4c023a5e46e3892f11e4168f0b3b03660ceba5"} Oct 09 13:49:38 crc kubenswrapper[4762]: I1009 13:49:38.777222 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 09 13:49:38 crc kubenswrapper[4762]: W1009 13:49:38.778971 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf5b14562_fc3d_48fc_b83d_c12e5c9617c6.slice/crio-eaa40289f1512941f40bf67704dcf5137c2ba018f41348ba8a91a74da288f78d WatchSource:0}: Error finding container eaa40289f1512941f40bf67704dcf5137c2ba018f41348ba8a91a74da288f78d: Status 404 returned error can't find the container with id eaa40289f1512941f40bf67704dcf5137c2ba018f41348ba8a91a74da288f78d Oct 09 13:49:38 crc kubenswrapper[4762]: I1009 13:49:38.979750 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3c824403-f051-4f6f-8fd3-947fed9754a2" path="/var/lib/kubelet/pods/3c824403-f051-4f6f-8fd3-947fed9754a2/volumes" Oct 09 13:49:38 crc kubenswrapper[4762]: I1009 13:49:38.980330 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="81d29bcf-d612-49b9-987d-07d7ea0643f2" path="/var/lib/kubelet/pods/81d29bcf-d612-49b9-987d-07d7ea0643f2/volumes" Oct 09 13:49:39 crc kubenswrapper[4762]: I1009 13:49:39.662581 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"e56a40fb-847a-4e59-981b-369559466cb1","Type":"ContainerStarted","Data":"20697adc678f9236f713b97e2f91788f65671a0be29a90e8b44e60c50b457ffb"} Oct 09 13:49:39 crc kubenswrapper[4762]: I1009 13:49:39.665059 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"f5b14562-fc3d-48fc-b83d-c12e5c9617c6","Type":"ContainerStarted","Data":"42df5b43b4196e95eec2dbdedd25a25e5ecb0e0d6a17528ae0a9b5af3b08a452"} Oct 09 13:49:39 crc kubenswrapper[4762]: I1009 13:49:39.665119 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"f5b14562-fc3d-48fc-b83d-c12e5c9617c6","Type":"ContainerStarted","Data":"2d7aba0bb4082b2fef3387ae71a6137c2f97c32898d88c293c1602e2961e85cb"} Oct 09 13:49:39 crc kubenswrapper[4762]: I1009 13:49:39.665134 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"f5b14562-fc3d-48fc-b83d-c12e5c9617c6","Type":"ContainerStarted","Data":"eaa40289f1512941f40bf67704dcf5137c2ba018f41348ba8a91a74da288f78d"} Oct 09 13:49:39 crc kubenswrapper[4762]: I1009 13:49:39.689771 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.689741711 podStartE2EDuration="2.689741711s" podCreationTimestamp="2025-10-09 13:49:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" 
lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:49:39.681334771 +0000 UTC m=+1455.455125810" watchObservedRunningTime="2025-10-09 13:49:39.689741711 +0000 UTC m=+1455.463532750" Oct 09 13:49:39 crc kubenswrapper[4762]: I1009 13:49:39.710014 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.709991923 podStartE2EDuration="2.709991923s" podCreationTimestamp="2025-10-09 13:49:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:49:39.699450396 +0000 UTC m=+1455.473241445" watchObservedRunningTime="2025-10-09 13:49:39.709991923 +0000 UTC m=+1455.483782962" Oct 09 13:49:41 crc kubenswrapper[4762]: I1009 13:49:41.298155 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 09 13:49:41 crc kubenswrapper[4762]: I1009 13:49:41.298232 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 09 13:49:41 crc kubenswrapper[4762]: I1009 13:49:41.969849 4762 patch_prober.go:28] interesting pod/machine-config-daemon-5v6hv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 13:49:41 crc kubenswrapper[4762]: I1009 13:49:41.970064 4762 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 13:49:43 crc kubenswrapper[4762]: I1009 13:49:43.051091 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Oct 09 13:49:46 crc kubenswrapper[4762]: I1009 13:49:46.298262 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Oct 09 13:49:46 crc kubenswrapper[4762]: I1009 13:49:46.298909 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Oct 09 13:49:47 crc kubenswrapper[4762]: I1009 13:49:47.309948 4762 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="d09898b1-4f64-490a-bf9f-eb03d7219d8d" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.200:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 09 13:49:47 crc kubenswrapper[4762]: I1009 13:49:47.309952 4762 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="d09898b1-4f64-490a-bf9f-eb03d7219d8d" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.200:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 09 13:49:48 crc kubenswrapper[4762]: I1009 13:49:48.056164 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Oct 09 13:49:48 crc kubenswrapper[4762]: I1009 13:49:48.094765 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Oct 09 13:49:48 crc kubenswrapper[4762]: I1009 13:49:48.100602 4762 kubelet.go:2542] "SyncLoop (probe)" 
probe="startup" status="started" pod="openstack/nova-scheduler-0" Oct 09 13:49:48 crc kubenswrapper[4762]: I1009 13:49:48.331473 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 09 13:49:48 crc kubenswrapper[4762]: I1009 13:49:48.331524 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 09 13:49:48 crc kubenswrapper[4762]: I1009 13:49:48.770530 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Oct 09 13:49:49 crc kubenswrapper[4762]: I1009 13:49:49.343822 4762 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="f5b14562-fc3d-48fc-b83d-c12e5c9617c6" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.202:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 09 13:49:49 crc kubenswrapper[4762]: I1009 13:49:49.343861 4762 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="f5b14562-fc3d-48fc-b83d-c12e5c9617c6" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.202:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 09 13:49:56 crc kubenswrapper[4762]: I1009 13:49:56.303247 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Oct 09 13:49:56 crc kubenswrapper[4762]: I1009 13:49:56.303913 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Oct 09 13:49:56 crc kubenswrapper[4762]: I1009 13:49:56.313847 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Oct 09 13:49:56 crc kubenswrapper[4762]: I1009 13:49:56.317111 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Oct 09 13:49:58 crc kubenswrapper[4762]: I1009 13:49:58.340315 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Oct 09 13:49:58 crc kubenswrapper[4762]: I1009 13:49:58.341569 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Oct 09 13:49:58 crc kubenswrapper[4762]: I1009 13:49:58.342567 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Oct 09 13:49:58 crc kubenswrapper[4762]: I1009 13:49:58.352670 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Oct 09 13:49:58 crc kubenswrapper[4762]: I1009 13:49:58.838607 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Oct 09 13:49:58 crc kubenswrapper[4762]: I1009 13:49:58.845422 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Oct 09 13:50:11 crc kubenswrapper[4762]: I1009 13:50:11.970799 4762 patch_prober.go:28] interesting pod/machine-config-daemon-5v6hv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 13:50:11 crc kubenswrapper[4762]: I1009 13:50:11.971260 4762 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" 
containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 13:50:11 crc kubenswrapper[4762]: I1009 13:50:11.971296 4762 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" Oct 09 13:50:11 crc kubenswrapper[4762]: I1009 13:50:11.971986 4762 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"d81bf816f9df0a4608111c018f22c6c88355ce8d892f536686af61e013f264fc"} pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 09 13:50:11 crc kubenswrapper[4762]: I1009 13:50:11.972035 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" containerID="cri-o://d81bf816f9df0a4608111c018f22c6c88355ce8d892f536686af61e013f264fc" gracePeriod=600 Oct 09 13:50:12 crc kubenswrapper[4762]: I1009 13:50:12.986265 4762 generic.go:334] "Generic (PLEG): container finished" podID="366049a3-acf6-488c-9f93-4557528d6d14" containerID="d81bf816f9df0a4608111c018f22c6c88355ce8d892f536686af61e013f264fc" exitCode=0 Oct 09 13:50:12 crc kubenswrapper[4762]: I1009 13:50:12.986346 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" event={"ID":"366049a3-acf6-488c-9f93-4557528d6d14","Type":"ContainerDied","Data":"d81bf816f9df0a4608111c018f22c6c88355ce8d892f536686af61e013f264fc"} Oct 09 13:50:12 crc kubenswrapper[4762]: I1009 13:50:12.986910 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" event={"ID":"366049a3-acf6-488c-9f93-4557528d6d14","Type":"ContainerStarted","Data":"a2afb536cc344719717b53d10099260b84d6f0ea47ea651fc9cf7b2fe8a2b014"} Oct 09 13:50:12 crc kubenswrapper[4762]: I1009 13:50:12.986946 4762 scope.go:117] "RemoveContainer" containerID="b2aad5d4c295d0a00a0ffcfb5183a47f48def84cfba6c0072cb314e437157ce7" Oct 09 13:50:16 crc kubenswrapper[4762]: I1009 13:50:16.946969 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-5d57d9d56c-xrhgn"] Oct 09 13:50:16 crc kubenswrapper[4762]: I1009 13:50:16.959540 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-worker-5d57d9d56c-xrhgn" Oct 09 13:50:17 crc kubenswrapper[4762]: I1009 13:50:17.009901 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-5d57d9d56c-xrhgn"] Oct 09 13:50:17 crc kubenswrapper[4762]: I1009 13:50:17.114080 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 09 13:50:17 crc kubenswrapper[4762]: I1009 13:50:17.114385 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="46b3e9f8-479c-45b8-afc9-4c8344da0797" containerName="cinder-scheduler" containerID="cri-o://62e66f771f074a66fee0a95196e2984ab7d958da1caa8b50ceaee3f6fd946317" gracePeriod=30 Oct 09 13:50:17 crc kubenswrapper[4762]: I1009 13:50:17.114829 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="46b3e9f8-479c-45b8-afc9-4c8344da0797" containerName="probe" containerID="cri-o://be1b69b00d7fde9a938acf59f47549f46af90d5b80b3f6b68e63608f6eeeefc0" gracePeriod=30 Oct 09 13:50:17 crc kubenswrapper[4762]: I1009 13:50:17.140983 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rcv2m\" (UniqueName: \"kubernetes.io/projected/655c116e-99bf-4f34-a050-bbc1aa3e9b62-kube-api-access-rcv2m\") pod \"barbican-worker-5d57d9d56c-xrhgn\" (UID: \"655c116e-99bf-4f34-a050-bbc1aa3e9b62\") " pod="openstack/barbican-worker-5d57d9d56c-xrhgn" Oct 09 13:50:17 crc kubenswrapper[4762]: I1009 13:50:17.141065 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/655c116e-99bf-4f34-a050-bbc1aa3e9b62-config-data-custom\") pod \"barbican-worker-5d57d9d56c-xrhgn\" (UID: \"655c116e-99bf-4f34-a050-bbc1aa3e9b62\") " pod="openstack/barbican-worker-5d57d9d56c-xrhgn" Oct 09 13:50:17 crc kubenswrapper[4762]: I1009 13:50:17.141102 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/655c116e-99bf-4f34-a050-bbc1aa3e9b62-logs\") pod \"barbican-worker-5d57d9d56c-xrhgn\" (UID: \"655c116e-99bf-4f34-a050-bbc1aa3e9b62\") " pod="openstack/barbican-worker-5d57d9d56c-xrhgn" Oct 09 13:50:17 crc kubenswrapper[4762]: I1009 13:50:17.141207 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/655c116e-99bf-4f34-a050-bbc1aa3e9b62-config-data\") pod \"barbican-worker-5d57d9d56c-xrhgn\" (UID: \"655c116e-99bf-4f34-a050-bbc1aa3e9b62\") " pod="openstack/barbican-worker-5d57d9d56c-xrhgn" Oct 09 13:50:17 crc kubenswrapper[4762]: I1009 13:50:17.141260 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/655c116e-99bf-4f34-a050-bbc1aa3e9b62-combined-ca-bundle\") pod \"barbican-worker-5d57d9d56c-xrhgn\" (UID: \"655c116e-99bf-4f34-a050-bbc1aa3e9b62\") " pod="openstack/barbican-worker-5d57d9d56c-xrhgn" Oct 09 13:50:17 crc kubenswrapper[4762]: I1009 13:50:17.157920 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstackclient"] Oct 09 13:50:17 crc kubenswrapper[4762]: I1009 13:50:17.158153 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/openstackclient" podUID="1862d6d3-5d91-47cf-8b78-c0298569ee90" 
containerName="openstackclient" containerID="cri-o://4df5c5e992cba5a503875e5ca7fa91f0471d8c3722c04f695117bfa01ff8843e" gracePeriod=2 Oct 09 13:50:17 crc kubenswrapper[4762]: I1009 13:50:17.175614 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstackclient"] Oct 09 13:50:17 crc kubenswrapper[4762]: I1009 13:50:17.220607 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 09 13:50:17 crc kubenswrapper[4762]: I1009 13:50:17.238924 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-577955c56b-wlqdm"] Oct 09 13:50:17 crc kubenswrapper[4762]: E1009 13:50:17.239355 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1862d6d3-5d91-47cf-8b78-c0298569ee90" containerName="openstackclient" Oct 09 13:50:17 crc kubenswrapper[4762]: I1009 13:50:17.239372 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="1862d6d3-5d91-47cf-8b78-c0298569ee90" containerName="openstackclient" Oct 09 13:50:17 crc kubenswrapper[4762]: I1009 13:50:17.239683 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="1862d6d3-5d91-47cf-8b78-c0298569ee90" containerName="openstackclient" Oct 09 13:50:17 crc kubenswrapper[4762]: I1009 13:50:17.240710 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-577955c56b-wlqdm" Oct 09 13:50:17 crc kubenswrapper[4762]: I1009 13:50:17.242279 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/655c116e-99bf-4f34-a050-bbc1aa3e9b62-config-data\") pod \"barbican-worker-5d57d9d56c-xrhgn\" (UID: \"655c116e-99bf-4f34-a050-bbc1aa3e9b62\") " pod="openstack/barbican-worker-5d57d9d56c-xrhgn" Oct 09 13:50:17 crc kubenswrapper[4762]: I1009 13:50:17.242333 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/655c116e-99bf-4f34-a050-bbc1aa3e9b62-combined-ca-bundle\") pod \"barbican-worker-5d57d9d56c-xrhgn\" (UID: \"655c116e-99bf-4f34-a050-bbc1aa3e9b62\") " pod="openstack/barbican-worker-5d57d9d56c-xrhgn" Oct 09 13:50:17 crc kubenswrapper[4762]: I1009 13:50:17.242399 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rcv2m\" (UniqueName: \"kubernetes.io/projected/655c116e-99bf-4f34-a050-bbc1aa3e9b62-kube-api-access-rcv2m\") pod \"barbican-worker-5d57d9d56c-xrhgn\" (UID: \"655c116e-99bf-4f34-a050-bbc1aa3e9b62\") " pod="openstack/barbican-worker-5d57d9d56c-xrhgn" Oct 09 13:50:17 crc kubenswrapper[4762]: I1009 13:50:17.242441 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/655c116e-99bf-4f34-a050-bbc1aa3e9b62-config-data-custom\") pod \"barbican-worker-5d57d9d56c-xrhgn\" (UID: \"655c116e-99bf-4f34-a050-bbc1aa3e9b62\") " pod="openstack/barbican-worker-5d57d9d56c-xrhgn" Oct 09 13:50:17 crc kubenswrapper[4762]: I1009 13:50:17.242471 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/655c116e-99bf-4f34-a050-bbc1aa3e9b62-logs\") pod \"barbican-worker-5d57d9d56c-xrhgn\" (UID: \"655c116e-99bf-4f34-a050-bbc1aa3e9b62\") " pod="openstack/barbican-worker-5d57d9d56c-xrhgn" Oct 09 13:50:17 crc kubenswrapper[4762]: I1009 13:50:17.242915 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/655c116e-99bf-4f34-a050-bbc1aa3e9b62-logs\") pod \"barbican-worker-5d57d9d56c-xrhgn\" (UID: \"655c116e-99bf-4f34-a050-bbc1aa3e9b62\") " pod="openstack/barbican-worker-5d57d9d56c-xrhgn" Oct 09 13:50:17 crc kubenswrapper[4762]: I1009 13:50:17.254895 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-6cd5b6946b-r2c8l"] Oct 09 13:50:17 crc kubenswrapper[4762]: I1009 13:50:17.257270 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-6cd5b6946b-r2c8l" Oct 09 13:50:17 crc kubenswrapper[4762]: I1009 13:50:17.262955 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/655c116e-99bf-4f34-a050-bbc1aa3e9b62-config-data-custom\") pod \"barbican-worker-5d57d9d56c-xrhgn\" (UID: \"655c116e-99bf-4f34-a050-bbc1aa3e9b62\") " pod="openstack/barbican-worker-5d57d9d56c-xrhgn" Oct 09 13:50:17 crc kubenswrapper[4762]: I1009 13:50:17.263058 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/655c116e-99bf-4f34-a050-bbc1aa3e9b62-config-data\") pod \"barbican-worker-5d57d9d56c-xrhgn\" (UID: \"655c116e-99bf-4f34-a050-bbc1aa3e9b62\") " pod="openstack/barbican-worker-5d57d9d56c-xrhgn" Oct 09 13:50:17 crc kubenswrapper[4762]: I1009 13:50:17.274487 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/655c116e-99bf-4f34-a050-bbc1aa3e9b62-combined-ca-bundle\") pod \"barbican-worker-5d57d9d56c-xrhgn\" (UID: \"655c116e-99bf-4f34-a050-bbc1aa3e9b62\") " pod="openstack/barbican-worker-5d57d9d56c-xrhgn" Oct 09 13:50:17 crc kubenswrapper[4762]: I1009 13:50:17.276437 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-577955c56b-wlqdm"] Oct 09 13:50:17 crc kubenswrapper[4762]: I1009 13:50:17.297730 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-6cd5b6946b-r2c8l"] Oct 09 13:50:17 crc kubenswrapper[4762]: I1009 13:50:17.304445 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rcv2m\" (UniqueName: \"kubernetes.io/projected/655c116e-99bf-4f34-a050-bbc1aa3e9b62-kube-api-access-rcv2m\") pod \"barbican-worker-5d57d9d56c-xrhgn\" (UID: \"655c116e-99bf-4f34-a050-bbc1aa3e9b62\") " pod="openstack/barbican-worker-5d57d9d56c-xrhgn" Oct 09 13:50:17 crc kubenswrapper[4762]: I1009 13:50:17.321612 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-worker-5d57d9d56c-xrhgn" Oct 09 13:50:17 crc kubenswrapper[4762]: I1009 13:50:17.341764 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Oct 09 13:50:17 crc kubenswrapper[4762]: I1009 13:50:17.342061 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4" containerName="cinder-api-log" containerID="cri-o://5acb9f40d37976787a3176932ab61dc744bb85f63d3756a35a7aa17489a36b17" gracePeriod=30 Oct 09 13:50:17 crc kubenswrapper[4762]: I1009 13:50:17.342188 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4" containerName="cinder-api" containerID="cri-o://c1be68916a3f81c7b6abc9ef288db0ad9f260fbe6b28ab389555e8e399229389" gracePeriod=30 Oct 09 13:50:17 crc kubenswrapper[4762]: E1009 13:50:17.362731 4762 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Oct 09 13:50:17 crc kubenswrapper[4762]: E1009 13:50:17.362789 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/0ca3d4c1-b9e5-4443-8102-7739602cbd2f-config-data podName:0ca3d4c1-b9e5-4443-8102-7739602cbd2f nodeName:}" failed. No retries permitted until 2025-10-09 13:50:17.86277398 +0000 UTC m=+1493.636565019 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/0ca3d4c1-b9e5-4443-8102-7739602cbd2f-config-data") pod "rabbitmq-server-0" (UID: "0ca3d4c1-b9e5-4443-8102-7739602cbd2f") : configmap "rabbitmq-config-data" not found Oct 09 13:50:17 crc kubenswrapper[4762]: I1009 13:50:17.426956 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutronb30a-account-delete-hlx4d"] Oct 09 13:50:17 crc kubenswrapper[4762]: I1009 13:50:17.428274 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutronb30a-account-delete-hlx4d" Oct 09 13:50:17 crc kubenswrapper[4762]: I1009 13:50:17.455434 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutronb30a-account-delete-hlx4d"] Oct 09 13:50:17 crc kubenswrapper[4762]: I1009 13:50:17.472189 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b797ab80-d2f5-427c-b305-0aaed4e06e83-config-data\") pod \"barbican-api-6cd5b6946b-r2c8l\" (UID: \"b797ab80-d2f5-427c-b305-0aaed4e06e83\") " pod="openstack/barbican-api-6cd5b6946b-r2c8l" Oct 09 13:50:17 crc kubenswrapper[4762]: I1009 13:50:17.472242 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zf8xq\" (UniqueName: \"kubernetes.io/projected/8d8907fd-9553-4758-8e14-a7f607ed4f34-kube-api-access-zf8xq\") pod \"barbican-keystone-listener-577955c56b-wlqdm\" (UID: \"8d8907fd-9553-4758-8e14-a7f607ed4f34\") " pod="openstack/barbican-keystone-listener-577955c56b-wlqdm" Oct 09 13:50:17 crc kubenswrapper[4762]: I1009 13:50:17.472312 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b797ab80-d2f5-427c-b305-0aaed4e06e83-internal-tls-certs\") pod \"barbican-api-6cd5b6946b-r2c8l\" (UID: \"b797ab80-d2f5-427c-b305-0aaed4e06e83\") " pod="openstack/barbican-api-6cd5b6946b-r2c8l" Oct 09 13:50:17 crc kubenswrapper[4762]: I1009 13:50:17.472356 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d8907fd-9553-4758-8e14-a7f607ed4f34-config-data\") pod \"barbican-keystone-listener-577955c56b-wlqdm\" (UID: \"8d8907fd-9553-4758-8e14-a7f607ed4f34\") " pod="openstack/barbican-keystone-listener-577955c56b-wlqdm" Oct 09 13:50:17 crc kubenswrapper[4762]: I1009 13:50:17.472432 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b797ab80-d2f5-427c-b305-0aaed4e06e83-config-data-custom\") pod \"barbican-api-6cd5b6946b-r2c8l\" (UID: \"b797ab80-d2f5-427c-b305-0aaed4e06e83\") " pod="openstack/barbican-api-6cd5b6946b-r2c8l" Oct 09 13:50:17 crc kubenswrapper[4762]: I1009 13:50:17.472471 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b797ab80-d2f5-427c-b305-0aaed4e06e83-public-tls-certs\") pod \"barbican-api-6cd5b6946b-r2c8l\" (UID: \"b797ab80-d2f5-427c-b305-0aaed4e06e83\") " pod="openstack/barbican-api-6cd5b6946b-r2c8l" Oct 09 13:50:17 crc kubenswrapper[4762]: I1009 13:50:17.472503 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d8907fd-9553-4758-8e14-a7f607ed4f34-combined-ca-bundle\") pod \"barbican-keystone-listener-577955c56b-wlqdm\" (UID: \"8d8907fd-9553-4758-8e14-a7f607ed4f34\") " pod="openstack/barbican-keystone-listener-577955c56b-wlqdm" Oct 09 13:50:17 crc kubenswrapper[4762]: I1009 13:50:17.472532 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8d8907fd-9553-4758-8e14-a7f607ed4f34-config-data-custom\") pod \"barbican-keystone-listener-577955c56b-wlqdm\" (UID: 
\"8d8907fd-9553-4758-8e14-a7f607ed4f34\") " pod="openstack/barbican-keystone-listener-577955c56b-wlqdm" Oct 09 13:50:17 crc kubenswrapper[4762]: I1009 13:50:17.472548 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b797ab80-d2f5-427c-b305-0aaed4e06e83-combined-ca-bundle\") pod \"barbican-api-6cd5b6946b-r2c8l\" (UID: \"b797ab80-d2f5-427c-b305-0aaed4e06e83\") " pod="openstack/barbican-api-6cd5b6946b-r2c8l" Oct 09 13:50:17 crc kubenswrapper[4762]: I1009 13:50:17.472571 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8d8907fd-9553-4758-8e14-a7f607ed4f34-logs\") pod \"barbican-keystone-listener-577955c56b-wlqdm\" (UID: \"8d8907fd-9553-4758-8e14-a7f607ed4f34\") " pod="openstack/barbican-keystone-listener-577955c56b-wlqdm" Oct 09 13:50:17 crc kubenswrapper[4762]: I1009 13:50:17.472594 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b797ab80-d2f5-427c-b305-0aaed4e06e83-logs\") pod \"barbican-api-6cd5b6946b-r2c8l\" (UID: \"b797ab80-d2f5-427c-b305-0aaed4e06e83\") " pod="openstack/barbican-api-6cd5b6946b-r2c8l" Oct 09 13:50:17 crc kubenswrapper[4762]: I1009 13:50:17.472608 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j9sqm\" (UniqueName: \"kubernetes.io/projected/b797ab80-d2f5-427c-b305-0aaed4e06e83-kube-api-access-j9sqm\") pod \"barbican-api-6cd5b6946b-r2c8l\" (UID: \"b797ab80-d2f5-427c-b305-0aaed4e06e83\") " pod="openstack/barbican-api-6cd5b6946b-r2c8l" Oct 09 13:50:17 crc kubenswrapper[4762]: I1009 13:50:17.481253 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-metrics-df85s"] Oct 09 13:50:17 crc kubenswrapper[4762]: I1009 13:50:17.481539 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-controller-metrics-df85s" podUID="39780bc9-4ec7-4578-b64e-40c2a1bba06c" containerName="openstack-network-exporter" containerID="cri-o://832e14666bf13cb6c348c0af2b44c1f63087d249a79e7a68cc967f3268d56c21" gracePeriod=30 Oct 09 13:50:17 crc kubenswrapper[4762]: I1009 13:50:17.542694 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-nknqr"] Oct 09 13:50:17 crc kubenswrapper[4762]: I1009 13:50:17.565609 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-ovs-4rsdz"] Oct 09 13:50:17 crc kubenswrapper[4762]: I1009 13:50:17.623681 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b797ab80-d2f5-427c-b305-0aaed4e06e83-internal-tls-certs\") pod \"barbican-api-6cd5b6946b-r2c8l\" (UID: \"b797ab80-d2f5-427c-b305-0aaed4e06e83\") " pod="openstack/barbican-api-6cd5b6946b-r2c8l" Oct 09 13:50:17 crc kubenswrapper[4762]: I1009 13:50:17.623815 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gqsk6\" (UniqueName: \"kubernetes.io/projected/b8e2afde-2991-4771-ae61-dc363b0d5f04-kube-api-access-gqsk6\") pod \"neutronb30a-account-delete-hlx4d\" (UID: \"b8e2afde-2991-4771-ae61-dc363b0d5f04\") " pod="openstack/neutronb30a-account-delete-hlx4d" Oct 09 13:50:17 crc kubenswrapper[4762]: I1009 13:50:17.623853 4762 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d8907fd-9553-4758-8e14-a7f607ed4f34-config-data\") pod \"barbican-keystone-listener-577955c56b-wlqdm\" (UID: \"8d8907fd-9553-4758-8e14-a7f607ed4f34\") " pod="openstack/barbican-keystone-listener-577955c56b-wlqdm" Oct 09 13:50:17 crc kubenswrapper[4762]: I1009 13:50:17.624061 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b797ab80-d2f5-427c-b305-0aaed4e06e83-config-data-custom\") pod \"barbican-api-6cd5b6946b-r2c8l\" (UID: \"b797ab80-d2f5-427c-b305-0aaed4e06e83\") " pod="openstack/barbican-api-6cd5b6946b-r2c8l" Oct 09 13:50:17 crc kubenswrapper[4762]: I1009 13:50:17.624097 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b797ab80-d2f5-427c-b305-0aaed4e06e83-public-tls-certs\") pod \"barbican-api-6cd5b6946b-r2c8l\" (UID: \"b797ab80-d2f5-427c-b305-0aaed4e06e83\") " pod="openstack/barbican-api-6cd5b6946b-r2c8l" Oct 09 13:50:17 crc kubenswrapper[4762]: I1009 13:50:17.624343 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d8907fd-9553-4758-8e14-a7f607ed4f34-combined-ca-bundle\") pod \"barbican-keystone-listener-577955c56b-wlqdm\" (UID: \"8d8907fd-9553-4758-8e14-a7f607ed4f34\") " pod="openstack/barbican-keystone-listener-577955c56b-wlqdm" Oct 09 13:50:17 crc kubenswrapper[4762]: I1009 13:50:17.624559 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8d8907fd-9553-4758-8e14-a7f607ed4f34-config-data-custom\") pod \"barbican-keystone-listener-577955c56b-wlqdm\" (UID: \"8d8907fd-9553-4758-8e14-a7f607ed4f34\") " pod="openstack/barbican-keystone-listener-577955c56b-wlqdm" Oct 09 13:50:17 crc kubenswrapper[4762]: I1009 13:50:17.624600 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b797ab80-d2f5-427c-b305-0aaed4e06e83-combined-ca-bundle\") pod \"barbican-api-6cd5b6946b-r2c8l\" (UID: \"b797ab80-d2f5-427c-b305-0aaed4e06e83\") " pod="openstack/barbican-api-6cd5b6946b-r2c8l" Oct 09 13:50:17 crc kubenswrapper[4762]: I1009 13:50:17.624844 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8d8907fd-9553-4758-8e14-a7f607ed4f34-logs\") pod \"barbican-keystone-listener-577955c56b-wlqdm\" (UID: \"8d8907fd-9553-4758-8e14-a7f607ed4f34\") " pod="openstack/barbican-keystone-listener-577955c56b-wlqdm" Oct 09 13:50:17 crc kubenswrapper[4762]: I1009 13:50:17.624892 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b797ab80-d2f5-427c-b305-0aaed4e06e83-logs\") pod \"barbican-api-6cd5b6946b-r2c8l\" (UID: \"b797ab80-d2f5-427c-b305-0aaed4e06e83\") " pod="openstack/barbican-api-6cd5b6946b-r2c8l" Oct 09 13:50:17 crc kubenswrapper[4762]: I1009 13:50:17.625025 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j9sqm\" (UniqueName: \"kubernetes.io/projected/b797ab80-d2f5-427c-b305-0aaed4e06e83-kube-api-access-j9sqm\") pod \"barbican-api-6cd5b6946b-r2c8l\" (UID: \"b797ab80-d2f5-427c-b305-0aaed4e06e83\") " pod="openstack/barbican-api-6cd5b6946b-r2c8l" Oct 09 13:50:17 crc 
kubenswrapper[4762]: I1009 13:50:17.625077 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b797ab80-d2f5-427c-b305-0aaed4e06e83-config-data\") pod \"barbican-api-6cd5b6946b-r2c8l\" (UID: \"b797ab80-d2f5-427c-b305-0aaed4e06e83\") " pod="openstack/barbican-api-6cd5b6946b-r2c8l" Oct 09 13:50:17 crc kubenswrapper[4762]: I1009 13:50:17.625276 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zf8xq\" (UniqueName: \"kubernetes.io/projected/8d8907fd-9553-4758-8e14-a7f607ed4f34-kube-api-access-zf8xq\") pod \"barbican-keystone-listener-577955c56b-wlqdm\" (UID: \"8d8907fd-9553-4758-8e14-a7f607ed4f34\") " pod="openstack/barbican-keystone-listener-577955c56b-wlqdm" Oct 09 13:50:17 crc kubenswrapper[4762]: E1009 13:50:17.627618 4762 secret.go:188] Couldn't get secret openstack/barbican-config-data: secret "barbican-config-data" not found Oct 09 13:50:17 crc kubenswrapper[4762]: E1009 13:50:17.627743 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8d8907fd-9553-4758-8e14-a7f607ed4f34-config-data podName:8d8907fd-9553-4758-8e14-a7f607ed4f34 nodeName:}" failed. No retries permitted until 2025-10-09 13:50:18.12771775 +0000 UTC m=+1493.901508789 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/8d8907fd-9553-4758-8e14-a7f607ed4f34-config-data") pod "barbican-keystone-listener-577955c56b-wlqdm" (UID: "8d8907fd-9553-4758-8e14-a7f607ed4f34") : secret "barbican-config-data" not found Oct 09 13:50:17 crc kubenswrapper[4762]: I1009 13:50:17.639023 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b797ab80-d2f5-427c-b305-0aaed4e06e83-logs\") pod \"barbican-api-6cd5b6946b-r2c8l\" (UID: \"b797ab80-d2f5-427c-b305-0aaed4e06e83\") " pod="openstack/barbican-api-6cd5b6946b-r2c8l" Oct 09 13:50:17 crc kubenswrapper[4762]: I1009 13:50:17.641048 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8d8907fd-9553-4758-8e14-a7f607ed4f34-logs\") pod \"barbican-keystone-listener-577955c56b-wlqdm\" (UID: \"8d8907fd-9553-4758-8e14-a7f607ed4f34\") " pod="openstack/barbican-keystone-listener-577955c56b-wlqdm" Oct 09 13:50:17 crc kubenswrapper[4762]: I1009 13:50:17.641980 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b797ab80-d2f5-427c-b305-0aaed4e06e83-combined-ca-bundle\") pod \"barbican-api-6cd5b6946b-r2c8l\" (UID: \"b797ab80-d2f5-427c-b305-0aaed4e06e83\") " pod="openstack/barbican-api-6cd5b6946b-r2c8l" Oct 09 13:50:17 crc kubenswrapper[4762]: E1009 13:50:17.654243 4762 projected.go:194] Error preparing data for projected volume kube-api-access-zf8xq for pod openstack/barbican-keystone-listener-577955c56b-wlqdm: failed to fetch token: serviceaccounts "barbican-barbican" not found Oct 09 13:50:17 crc kubenswrapper[4762]: E1009 13:50:17.654320 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/8d8907fd-9553-4758-8e14-a7f607ed4f34-kube-api-access-zf8xq podName:8d8907fd-9553-4758-8e14-a7f607ed4f34 nodeName:}" failed. No retries permitted until 2025-10-09 13:50:18.154302006 +0000 UTC m=+1493.928093045 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-zf8xq" (UniqueName: "kubernetes.io/projected/8d8907fd-9553-4758-8e14-a7f607ed4f34-kube-api-access-zf8xq") pod "barbican-keystone-listener-577955c56b-wlqdm" (UID: "8d8907fd-9553-4758-8e14-a7f607ed4f34") : failed to fetch token: serviceaccounts "barbican-barbican" not found Oct 09 13:50:17 crc kubenswrapper[4762]: E1009 13:50:17.655830 4762 secret.go:188] Couldn't get secret openstack/barbican-config-data: secret "barbican-config-data" not found Oct 09 13:50:17 crc kubenswrapper[4762]: E1009 13:50:17.655922 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b797ab80-d2f5-427c-b305-0aaed4e06e83-config-data podName:b797ab80-d2f5-427c-b305-0aaed4e06e83 nodeName:}" failed. No retries permitted until 2025-10-09 13:50:18.155901328 +0000 UTC m=+1493.929692367 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/b797ab80-d2f5-427c-b305-0aaed4e06e83-config-data") pod "barbican-api-6cd5b6946b-r2c8l" (UID: "b797ab80-d2f5-427c-b305-0aaed4e06e83") : secret "barbican-config-data" not found Oct 09 13:50:17 crc kubenswrapper[4762]: E1009 13:50:17.666970 4762 projected.go:194] Error preparing data for projected volume kube-api-access-j9sqm for pod openstack/barbican-api-6cd5b6946b-r2c8l: failed to fetch token: serviceaccounts "barbican-barbican" not found Oct 09 13:50:17 crc kubenswrapper[4762]: I1009 13:50:17.667912 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance7662-account-delete-9tfgj"] Oct 09 13:50:17 crc kubenswrapper[4762]: E1009 13:50:17.669319 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/b797ab80-d2f5-427c-b305-0aaed4e06e83-kube-api-access-j9sqm podName:b797ab80-d2f5-427c-b305-0aaed4e06e83 nodeName:}" failed. No retries permitted until 2025-10-09 13:50:18.169278059 +0000 UTC m=+1493.943069098 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-j9sqm" (UniqueName: "kubernetes.io/projected/b797ab80-d2f5-427c-b305-0aaed4e06e83-kube-api-access-j9sqm") pod "barbican-api-6cd5b6946b-r2c8l" (UID: "b797ab80-d2f5-427c-b305-0aaed4e06e83") : failed to fetch token: serviceaccounts "barbican-barbican" not found Oct 09 13:50:17 crc kubenswrapper[4762]: I1009 13:50:17.703500 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance7662-account-delete-9tfgj" Oct 09 13:50:17 crc kubenswrapper[4762]: I1009 13:50:17.710850 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b797ab80-d2f5-427c-b305-0aaed4e06e83-config-data-custom\") pod \"barbican-api-6cd5b6946b-r2c8l\" (UID: \"b797ab80-d2f5-427c-b305-0aaed4e06e83\") " pod="openstack/barbican-api-6cd5b6946b-r2c8l" Oct 09 13:50:17 crc kubenswrapper[4762]: I1009 13:50:17.714624 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d8907fd-9553-4758-8e14-a7f607ed4f34-combined-ca-bundle\") pod \"barbican-keystone-listener-577955c56b-wlqdm\" (UID: \"8d8907fd-9553-4758-8e14-a7f607ed4f34\") " pod="openstack/barbican-keystone-listener-577955c56b-wlqdm" Oct 09 13:50:17 crc kubenswrapper[4762]: I1009 13:50:17.733548 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b797ab80-d2f5-427c-b305-0aaed4e06e83-internal-tls-certs\") pod \"barbican-api-6cd5b6946b-r2c8l\" (UID: \"b797ab80-d2f5-427c-b305-0aaed4e06e83\") " pod="openstack/barbican-api-6cd5b6946b-r2c8l" Oct 09 13:50:17 crc kubenswrapper[4762]: I1009 13:50:17.733589 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b797ab80-d2f5-427c-b305-0aaed4e06e83-public-tls-certs\") pod \"barbican-api-6cd5b6946b-r2c8l\" (UID: \"b797ab80-d2f5-427c-b305-0aaed4e06e83\") " pod="openstack/barbican-api-6cd5b6946b-r2c8l" Oct 09 13:50:17 crc kubenswrapper[4762]: I1009 13:50:17.734040 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8d8907fd-9553-4758-8e14-a7f607ed4f34-config-data-custom\") pod \"barbican-keystone-listener-577955c56b-wlqdm\" (UID: \"8d8907fd-9553-4758-8e14-a7f607ed4f34\") " pod="openstack/barbican-keystone-listener-577955c56b-wlqdm" Oct 09 13:50:17 crc kubenswrapper[4762]: I1009 13:50:17.737080 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gqsk6\" (UniqueName: \"kubernetes.io/projected/b8e2afde-2991-4771-ae61-dc363b0d5f04-kube-api-access-gqsk6\") pod \"neutronb30a-account-delete-hlx4d\" (UID: \"b8e2afde-2991-4771-ae61-dc363b0d5f04\") " pod="openstack/neutronb30a-account-delete-hlx4d" Oct 09 13:50:17 crc kubenswrapper[4762]: I1009 13:50:17.789896 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance7662-account-delete-9tfgj"] Oct 09 13:50:17 crc kubenswrapper[4762]: I1009 13:50:17.813490 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gqsk6\" (UniqueName: \"kubernetes.io/projected/b8e2afde-2991-4771-ae61-dc363b0d5f04-kube-api-access-gqsk6\") pod \"neutronb30a-account-delete-hlx4d\" (UID: \"b8e2afde-2991-4771-ae61-dc363b0d5f04\") " pod="openstack/neutronb30a-account-delete-hlx4d" Oct 09 13:50:17 crc kubenswrapper[4762]: I1009 13:50:17.828054 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-northd-0"] Oct 09 13:50:17 crc kubenswrapper[4762]: I1009 13:50:17.828314 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-northd-0" podUID="39888a9b-c1cd-496e-b44e-a27212faac74" containerName="ovn-northd" containerID="cri-o://4162553b0af34403f13cf39cadfd4b05aeaf6a4b793292efa6af66dcae48120e" gracePeriod=30 Oct 09 13:50:17 
crc kubenswrapper[4762]: I1009 13:50:17.828796 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-northd-0" podUID="39888a9b-c1cd-496e-b44e-a27212faac74" containerName="openstack-network-exporter" containerID="cri-o://5b0ad70d278446d3548567be96b4c62877cd198d62cd13385055a63295deb65a" gracePeriod=30 Oct 09 13:50:17 crc kubenswrapper[4762]: I1009 13:50:17.840206 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tmw7j\" (UniqueName: \"kubernetes.io/projected/80456c7c-9571-4520-989a-53654daad82c-kube-api-access-tmw7j\") pod \"glance7662-account-delete-9tfgj\" (UID: \"80456c7c-9571-4520-989a-53654daad82c\") " pod="openstack/glance7662-account-delete-9tfgj" Oct 09 13:50:17 crc kubenswrapper[4762]: I1009 13:50:17.859663 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-m6k5r"] Oct 09 13:50:17 crc kubenswrapper[4762]: I1009 13:50:17.900863 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-m6k5r"] Oct 09 13:50:17 crc kubenswrapper[4762]: I1009 13:50:17.938991 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-hl7q6"] Oct 09 13:50:17 crc kubenswrapper[4762]: I1009 13:50:17.943049 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tmw7j\" (UniqueName: \"kubernetes.io/projected/80456c7c-9571-4520-989a-53654daad82c-kube-api-access-tmw7j\") pod \"glance7662-account-delete-9tfgj\" (UID: \"80456c7c-9571-4520-989a-53654daad82c\") " pod="openstack/glance7662-account-delete-9tfgj" Oct 09 13:50:17 crc kubenswrapper[4762]: E1009 13:50:17.943456 4762 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Oct 09 13:50:17 crc kubenswrapper[4762]: E1009 13:50:17.943503 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/0ca3d4c1-b9e5-4443-8102-7739602cbd2f-config-data podName:0ca3d4c1-b9e5-4443-8102-7739602cbd2f nodeName:}" failed. No retries permitted until 2025-10-09 13:50:18.943488292 +0000 UTC m=+1494.717279331 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/0ca3d4c1-b9e5-4443-8102-7739602cbd2f-config-data") pod "rabbitmq-server-0" (UID: "0ca3d4c1-b9e5-4443-8102-7739602cbd2f") : configmap "rabbitmq-config-data" not found Oct 09 13:50:18 crc kubenswrapper[4762]: I1009 13:50:18.019692 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-hl7q6"] Oct 09 13:50:18 crc kubenswrapper[4762]: I1009 13:50:18.067113 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tmw7j\" (UniqueName: \"kubernetes.io/projected/80456c7c-9571-4520-989a-53654daad82c-kube-api-access-tmw7j\") pod \"glance7662-account-delete-9tfgj\" (UID: \"80456c7c-9571-4520-989a-53654daad82c\") " pod="openstack/glance7662-account-delete-9tfgj" Oct 09 13:50:18 crc kubenswrapper[4762]: I1009 13:50:18.077532 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-vxzqs"] Oct 09 13:50:18 crc kubenswrapper[4762]: I1009 13:50:18.107698 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutronb30a-account-delete-hlx4d" Oct 09 13:50:18 crc kubenswrapper[4762]: I1009 13:50:18.123655 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-vxzqs"] Oct 09 13:50:18 crc kubenswrapper[4762]: I1009 13:50:18.133865 4762 generic.go:334] "Generic (PLEG): container finished" podID="a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4" containerID="5acb9f40d37976787a3176932ab61dc744bb85f63d3756a35a7aa17489a36b17" exitCode=143 Oct 09 13:50:18 crc kubenswrapper[4762]: I1009 13:50:18.133977 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4","Type":"ContainerDied","Data":"5acb9f40d37976787a3176932ab61dc744bb85f63d3756a35a7aa17489a36b17"} Oct 09 13:50:18 crc kubenswrapper[4762]: I1009 13:50:18.157016 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance7662-account-delete-9tfgj" Oct 09 13:50:18 crc kubenswrapper[4762]: I1009 13:50:18.159852 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovsdbserver-sb-0"] Oct 09 13:50:18 crc kubenswrapper[4762]: I1009 13:50:18.157651 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b797ab80-d2f5-427c-b305-0aaed4e06e83-config-data\") pod \"barbican-api-6cd5b6946b-r2c8l\" (UID: \"b797ab80-d2f5-427c-b305-0aaed4e06e83\") " pod="openstack/barbican-api-6cd5b6946b-r2c8l" Oct 09 13:50:18 crc kubenswrapper[4762]: I1009 13:50:18.160270 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zf8xq\" (UniqueName: \"kubernetes.io/projected/8d8907fd-9553-4758-8e14-a7f607ed4f34-kube-api-access-zf8xq\") pod \"barbican-keystone-listener-577955c56b-wlqdm\" (UID: \"8d8907fd-9553-4758-8e14-a7f607ed4f34\") " pod="openstack/barbican-keystone-listener-577955c56b-wlqdm" Oct 09 13:50:18 crc kubenswrapper[4762]: I1009 13:50:18.160486 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d8907fd-9553-4758-8e14-a7f607ed4f34-config-data\") pod \"barbican-keystone-listener-577955c56b-wlqdm\" (UID: \"8d8907fd-9553-4758-8e14-a7f607ed4f34\") " pod="openstack/barbican-keystone-listener-577955c56b-wlqdm" Oct 09 13:50:18 crc kubenswrapper[4762]: E1009 13:50:18.157731 4762 secret.go:188] Couldn't get secret openstack/barbican-config-data: secret "barbican-config-data" not found Oct 09 13:50:18 crc kubenswrapper[4762]: E1009 13:50:18.161880 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b797ab80-d2f5-427c-b305-0aaed4e06e83-config-data podName:b797ab80-d2f5-427c-b305-0aaed4e06e83 nodeName:}" failed. No retries permitted until 2025-10-09 13:50:19.161862843 +0000 UTC m=+1494.935653882 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/b797ab80-d2f5-427c-b305-0aaed4e06e83-config-data") pod "barbican-api-6cd5b6946b-r2c8l" (UID: "b797ab80-d2f5-427c-b305-0aaed4e06e83") : secret "barbican-config-data" not found Oct 09 13:50:18 crc kubenswrapper[4762]: E1009 13:50:18.161207 4762 secret.go:188] Couldn't get secret openstack/barbican-config-data: secret "barbican-config-data" not found Oct 09 13:50:18 crc kubenswrapper[4762]: E1009 13:50:18.161906 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8d8907fd-9553-4758-8e14-a7f607ed4f34-config-data podName:8d8907fd-9553-4758-8e14-a7f607ed4f34 nodeName:}" failed. No retries permitted until 2025-10-09 13:50:19.161900544 +0000 UTC m=+1494.935691583 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/8d8907fd-9553-4758-8e14-a7f607ed4f34-config-data") pod "barbican-keystone-listener-577955c56b-wlqdm" (UID: "8d8907fd-9553-4758-8e14-a7f607ed4f34") : secret "barbican-config-data" not found Oct 09 13:50:18 crc kubenswrapper[4762]: I1009 13:50:18.163009 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovsdbserver-sb-0" podUID="de9e8701-0980-4e1d-beb6-bc897f6a3e5f" containerName="openstack-network-exporter" containerID="cri-o://bfd114b69745f91fa863504ab2154e63a593800c553e012f664f20178ef3e182" gracePeriod=300 Oct 09 13:50:18 crc kubenswrapper[4762]: E1009 13:50:18.172714 4762 projected.go:194] Error preparing data for projected volume kube-api-access-zf8xq for pod openstack/barbican-keystone-listener-577955c56b-wlqdm: failed to fetch token: serviceaccounts "barbican-barbican" not found Oct 09 13:50:18 crc kubenswrapper[4762]: E1009 13:50:18.172787 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/8d8907fd-9553-4758-8e14-a7f607ed4f34-kube-api-access-zf8xq podName:8d8907fd-9553-4758-8e14-a7f607ed4f34 nodeName:}" failed. No retries permitted until 2025-10-09 13:50:19.172771808 +0000 UTC m=+1494.946562847 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-zf8xq" (UniqueName: "kubernetes.io/projected/8d8907fd-9553-4758-8e14-a7f607ed4f34-kube-api-access-zf8xq") pod "barbican-keystone-listener-577955c56b-wlqdm" (UID: "8d8907fd-9553-4758-8e14-a7f607ed4f34") : failed to fetch token: serviceaccounts "barbican-barbican" not found Oct 09 13:50:18 crc kubenswrapper[4762]: I1009 13:50:18.174234 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-df85s_39780bc9-4ec7-4578-b64e-40c2a1bba06c/openstack-network-exporter/0.log" Oct 09 13:50:18 crc kubenswrapper[4762]: I1009 13:50:18.174271 4762 generic.go:334] "Generic (PLEG): container finished" podID="39780bc9-4ec7-4578-b64e-40c2a1bba06c" containerID="832e14666bf13cb6c348c0af2b44c1f63087d249a79e7a68cc967f3268d56c21" exitCode=2 Oct 09 13:50:18 crc kubenswrapper[4762]: I1009 13:50:18.174326 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-df85s" event={"ID":"39780bc9-4ec7-4578-b64e-40c2a1bba06c","Type":"ContainerDied","Data":"832e14666bf13cb6c348c0af2b44c1f63087d249a79e7a68cc967f3268d56c21"} Oct 09 13:50:18 crc kubenswrapper[4762]: I1009 13:50:18.192896 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placementd6ff-account-delete-xl4vt"] Oct 09 13:50:18 crc kubenswrapper[4762]: I1009 13:50:18.194252 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placementd6ff-account-delete-xl4vt" Oct 09 13:50:18 crc kubenswrapper[4762]: I1009 13:50:18.197417 4762 generic.go:334] "Generic (PLEG): container finished" podID="39888a9b-c1cd-496e-b44e-a27212faac74" containerID="5b0ad70d278446d3548567be96b4c62877cd198d62cd13385055a63295deb65a" exitCode=2 Oct 09 13:50:18 crc kubenswrapper[4762]: I1009 13:50:18.197457 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"39888a9b-c1cd-496e-b44e-a27212faac74","Type":"ContainerDied","Data":"5b0ad70d278446d3548567be96b4c62877cd198d62cd13385055a63295deb65a"} Oct 09 13:50:18 crc kubenswrapper[4762]: I1009 13:50:18.220168 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 09 13:50:18 crc kubenswrapper[4762]: E1009 13:50:18.221197 4762 handlers.go:78] "Exec lifecycle hook for Container in Pod failed" err="command '/usr/share/ovn/scripts/ovn-ctl stop_controller' exited with 137: " execCommand=["/usr/share/ovn/scripts/ovn-ctl","stop_controller"] containerName="ovn-controller" pod="openstack/ovn-controller-nknqr" message="Exiting ovn-controller (1) " Oct 09 13:50:18 crc kubenswrapper[4762]: E1009 13:50:18.221222 4762 kuberuntime_container.go:691] "PreStop hook failed" err="command '/usr/share/ovn/scripts/ovn-ctl stop_controller' exited with 137: " pod="openstack/ovn-controller-nknqr" podUID="3d5e35ba-6450-49d9-907a-8a4f879a1b0f" containerName="ovn-controller" containerID="cri-o://3512052ad89cd6f81eb49dade5d8f066e313de741dc524a06c830f4ecbb4c187" Oct 09 13:50:18 crc kubenswrapper[4762]: I1009 13:50:18.221323 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-controller-nknqr" podUID="3d5e35ba-6450-49d9-907a-8a4f879a1b0f" containerName="ovn-controller" containerID="cri-o://3512052ad89cd6f81eb49dade5d8f066e313de741dc524a06c830f4ecbb4c187" gracePeriod=30 Oct 09 13:50:18 crc kubenswrapper[4762]: I1009 13:50:18.262214 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j9sqm\" (UniqueName: \"kubernetes.io/projected/b797ab80-d2f5-427c-b305-0aaed4e06e83-kube-api-access-j9sqm\") pod \"barbican-api-6cd5b6946b-r2c8l\" (UID: \"b797ab80-d2f5-427c-b305-0aaed4e06e83\") " pod="openstack/barbican-api-6cd5b6946b-r2c8l" Oct 09 13:50:18 crc kubenswrapper[4762]: E1009 13:50:18.265914 4762 projected.go:194] Error preparing data for projected volume kube-api-access-j9sqm for pod openstack/barbican-api-6cd5b6946b-r2c8l: failed to fetch token: serviceaccounts "barbican-barbican" not found Oct 09 13:50:18 crc kubenswrapper[4762]: E1009 13:50:18.265986 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/b797ab80-d2f5-427c-b305-0aaed4e06e83-kube-api-access-j9sqm podName:b797ab80-d2f5-427c-b305-0aaed4e06e83 nodeName:}" failed. No retries permitted until 2025-10-09 13:50:19.2659675 +0000 UTC m=+1495.039758539 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-j9sqm" (UniqueName: "kubernetes.io/projected/b797ab80-d2f5-427c-b305-0aaed4e06e83-kube-api-access-j9sqm") pod "barbican-api-6cd5b6946b-r2c8l" (UID: "b797ab80-d2f5-427c-b305-0aaed4e06e83") : failed to fetch token: serviceaccounts "barbican-barbican" not found Oct 09 13:50:18 crc kubenswrapper[4762]: I1009 13:50:18.273540 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placementd6ff-account-delete-xl4vt"] Oct 09 13:50:18 crc kubenswrapper[4762]: I1009 13:50:18.295318 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovsdbserver-nb-0"] Oct 09 13:50:18 crc kubenswrapper[4762]: I1009 13:50:18.295921 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovsdbserver-nb-0" podUID="73168d57-30d8-4389-aa93-cacc6b07f705" containerName="openstack-network-exporter" containerID="cri-o://a8a5046654c8bc5cfa5fef4fa4b6f0c7fc095d9255b7f318a934a8f57d41e38e" gracePeriod=300 Oct 09 13:50:18 crc kubenswrapper[4762]: I1009 13:50:18.333721 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder1fff-account-delete-zn6rx"] Oct 09 13:50:18 crc kubenswrapper[4762]: I1009 13:50:18.341506 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder1fff-account-delete-zn6rx" Oct 09 13:50:18 crc kubenswrapper[4762]: I1009 13:50:18.352106 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder1fff-account-delete-zn6rx"] Oct 09 13:50:18 crc kubenswrapper[4762]: I1009 13:50:18.363805 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4zl4p\" (UniqueName: \"kubernetes.io/projected/ecf33027-d452-4ccd-a23f-52697374958c-kube-api-access-4zl4p\") pod \"placementd6ff-account-delete-xl4vt\" (UID: \"ecf33027-d452-4ccd-a23f-52697374958c\") " pod="openstack/placementd6ff-account-delete-xl4vt" Oct 09 13:50:18 crc kubenswrapper[4762]: E1009 13:50:18.366378 4762 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Oct 09 13:50:18 crc kubenswrapper[4762]: E1009 13:50:18.381793 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/c3841f71-5204-469f-b755-e030281725d1-config-data podName:c3841f71-5204-469f-b755-e030281725d1 nodeName:}" failed. No retries permitted until 2025-10-09 13:50:18.881758484 +0000 UTC m=+1494.655549523 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/c3841f71-5204-469f-b755-e030281725d1-config-data") pod "rabbitmq-cell1-server-0" (UID: "c3841f71-5204-469f-b755-e030281725d1") : configmap "rabbitmq-cell1-config-data" not found Oct 09 13:50:18 crc kubenswrapper[4762]: I1009 13:50:18.391020 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-glh4c"] Oct 09 13:50:18 crc kubenswrapper[4762]: I1009 13:50:18.415060 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-glh4c"] Oct 09 13:50:18 crc kubenswrapper[4762]: I1009 13:50:18.423699 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovsdbserver-sb-0" podUID="de9e8701-0980-4e1d-beb6-bc897f6a3e5f" containerName="ovsdbserver-sb" containerID="cri-o://0298fedbb6423e6e1f4c3a0b9c472563bf0d4a5e060e57f78e885a92bc20aaf1" gracePeriod=300 Oct 09 13:50:18 crc kubenswrapper[4762]: E1009 13:50:18.458741 4762 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="4162553b0af34403f13cf39cadfd4b05aeaf6a4b793292efa6af66dcae48120e" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Oct 09 13:50:18 crc kubenswrapper[4762]: I1009 13:50:18.465481 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4zl4p\" (UniqueName: \"kubernetes.io/projected/ecf33027-d452-4ccd-a23f-52697374958c-kube-api-access-4zl4p\") pod \"placementd6ff-account-delete-xl4vt\" (UID: \"ecf33027-d452-4ccd-a23f-52697374958c\") " pod="openstack/placementd6ff-account-delete-xl4vt" Oct 09 13:50:18 crc kubenswrapper[4762]: I1009 13:50:18.465535 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w4wff\" (UniqueName: \"kubernetes.io/projected/ffc2314d-5e5f-45e6-9134-9879b35e0f2d-kube-api-access-w4wff\") pod \"cinder1fff-account-delete-zn6rx\" (UID: \"ffc2314d-5e5f-45e6-9134-9879b35e0f2d\") " pod="openstack/cinder1fff-account-delete-zn6rx" Oct 09 13:50:18 crc kubenswrapper[4762]: E1009 13:50:18.479791 4762 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 3512052ad89cd6f81eb49dade5d8f066e313de741dc524a06c830f4ecbb4c187 is running failed: container process not found" containerID="3512052ad89cd6f81eb49dade5d8f066e313de741dc524a06c830f4ecbb4c187" cmd=["/usr/local/bin/container-scripts/ovn_controller_readiness.sh"] Oct 09 13:50:18 crc kubenswrapper[4762]: E1009 13:50:18.479948 4762 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="4162553b0af34403f13cf39cadfd4b05aeaf6a4b793292efa6af66dcae48120e" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Oct 09 13:50:18 crc kubenswrapper[4762]: I1009 13:50:18.485689 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-4tqkk"] Oct 09 13:50:18 crc kubenswrapper[4762]: E1009 13:50:18.509174 4762 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 3512052ad89cd6f81eb49dade5d8f066e313de741dc524a06c830f4ecbb4c187 is running failed: container process not found" 
containerID="3512052ad89cd6f81eb49dade5d8f066e313de741dc524a06c830f4ecbb4c187" cmd=["/usr/local/bin/container-scripts/ovn_controller_readiness.sh"] Oct 09 13:50:18 crc kubenswrapper[4762]: I1009 13:50:18.512168 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4zl4p\" (UniqueName: \"kubernetes.io/projected/ecf33027-d452-4ccd-a23f-52697374958c-kube-api-access-4zl4p\") pod \"placementd6ff-account-delete-xl4vt\" (UID: \"ecf33027-d452-4ccd-a23f-52697374958c\") " pod="openstack/placementd6ff-account-delete-xl4vt" Oct 09 13:50:18 crc kubenswrapper[4762]: I1009 13:50:18.516108 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 09 13:50:18 crc kubenswrapper[4762]: I1009 13:50:18.516398 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181" containerName="glance-log" containerID="cri-o://00be9b377751e0bad21e7467727a5bb1ef9635151ae430c7dd73ffcc83e71cc3" gracePeriod=30 Oct 09 13:50:18 crc kubenswrapper[4762]: I1009 13:50:18.516889 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181" containerName="glance-httpd" containerID="cri-o://13924ee637a2d8e6d84bc0168317a287fc78cd42d90586f3f1c1c46d4723c487" gracePeriod=30 Oct 09 13:50:18 crc kubenswrapper[4762]: I1009 13:50:18.524033 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican272a-account-delete-z2pw8"] Oct 09 13:50:18 crc kubenswrapper[4762]: I1009 13:50:18.527760 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican272a-account-delete-z2pw8" Oct 09 13:50:18 crc kubenswrapper[4762]: E1009 13:50:18.536546 4762 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="4162553b0af34403f13cf39cadfd4b05aeaf6a4b793292efa6af66dcae48120e" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Oct 09 13:50:18 crc kubenswrapper[4762]: E1009 13:50:18.536904 4762 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-northd-0" podUID="39888a9b-c1cd-496e-b44e-a27212faac74" containerName="ovn-northd" Oct 09 13:50:18 crc kubenswrapper[4762]: E1009 13:50:18.537166 4762 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 3512052ad89cd6f81eb49dade5d8f066e313de741dc524a06c830f4ecbb4c187 is running failed: container process not found" containerID="3512052ad89cd6f81eb49dade5d8f066e313de741dc524a06c830f4ecbb4c187" cmd=["/usr/local/bin/container-scripts/ovn_controller_readiness.sh"] Oct 09 13:50:18 crc kubenswrapper[4762]: E1009 13:50:18.537192 4762 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 3512052ad89cd6f81eb49dade5d8f066e313de741dc524a06c830f4ecbb4c187 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-nknqr" podUID="3d5e35ba-6450-49d9-907a-8a4f879a1b0f" containerName="ovn-controller" Oct 09 13:50:18 crc kubenswrapper[4762]: I1009 13:50:18.537261 4762 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovsdbserver-nb-0" podUID="73168d57-30d8-4389-aa93-cacc6b07f705" containerName="ovsdbserver-nb" containerID="cri-o://cb88f4bb766dd77797353f3b4da21fe59806a861fbfa0151f974260158e77591" gracePeriod=300
Oct 09 13:50:18 crc kubenswrapper[4762]: I1009 13:50:18.577148 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w4wff\" (UniqueName: \"kubernetes.io/projected/ffc2314d-5e5f-45e6-9134-9879b35e0f2d-kube-api-access-w4wff\") pod \"cinder1fff-account-delete-zn6rx\" (UID: \"ffc2314d-5e5f-45e6-9134-9879b35e0f2d\") " pod="openstack/cinder1fff-account-delete-zn6rx"
Oct 09 13:50:18 crc kubenswrapper[4762]: I1009 13:50:18.604547 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-c7fd4cf7c-phtkg"]
Oct 09 13:50:18 crc kubenswrapper[4762]: I1009 13:50:18.604800 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-c7fd4cf7c-phtkg" podUID="dd7f6272-8875-4adb-b8d3-1cfe2651f738" containerName="neutron-api" containerID="cri-o://3987d6a050a1cadc43694af370f70bb4fba67a3d6a36a32d19d7167c8ae1ca1d" gracePeriod=30
Oct 09 13:50:18 crc kubenswrapper[4762]: I1009 13:50:18.604935 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w4wff\" (UniqueName: \"kubernetes.io/projected/ffc2314d-5e5f-45e6-9134-9879b35e0f2d-kube-api-access-w4wff\") pod \"cinder1fff-account-delete-zn6rx\" (UID: \"ffc2314d-5e5f-45e6-9134-9879b35e0f2d\") " pod="openstack/cinder1fff-account-delete-zn6rx"
Oct 09 13:50:18 crc kubenswrapper[4762]: I1009 13:50:18.605160 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-c7fd4cf7c-phtkg" podUID="dd7f6272-8875-4adb-b8d3-1cfe2651f738" containerName="neutron-httpd" containerID="cri-o://468ae0587ff3d81328ddb6eb43d3aa96c3ea9e2660cdd52df58d80465e0d4138" gracePeriod=30
Oct 09 13:50:18 crc kubenswrapper[4762]: I1009 13:50:18.637794 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-4tqkk"]
Oct 09 13:50:18 crc kubenswrapper[4762]: I1009 13:50:18.706046 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5t2mb\" (UniqueName: \"kubernetes.io/projected/d71b766d-302c-456b-ae8f-9befcb684b6a-kube-api-access-5t2mb\") pod \"barbican272a-account-delete-z2pw8\" (UID: \"d71b766d-302c-456b-ae8f-9befcb684b6a\") " pod="openstack/barbican272a-account-delete-z2pw8"
Oct 09 13:50:18 crc kubenswrapper[4762]: I1009 13:50:18.741408 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican272a-account-delete-z2pw8"]
Oct 09 13:50:18 crc kubenswrapper[4762]: W1009 13:50:18.743831 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod655c116e_99bf_4f34_a050_bbc1aa3e9b62.slice/crio-8f2c1f32394749bf3a3b28fbc170553d9b3864c525eedb405a0476f08a9b9265 WatchSource:0}: Error finding container 8f2c1f32394749bf3a3b28fbc170553d9b3864c525eedb405a0476f08a9b9265: Status 404 returned error can't find the container with id 8f2c1f32394749bf3a3b28fbc170553d9b3864c525eedb405a0476f08a9b9265
Oct 09 13:50:18 crc kubenswrapper[4762]: I1009 13:50:18.745336 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placementd6ff-account-delete-xl4vt"
Oct 09 13:50:18 crc kubenswrapper[4762]: I1009 13:50:18.782750 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder1fff-account-delete-zn6rx"
Oct 09 13:50:18 crc kubenswrapper[4762]: I1009 13:50:18.816412 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5t2mb\" (UniqueName: \"kubernetes.io/projected/d71b766d-302c-456b-ae8f-9befcb684b6a-kube-api-access-5t2mb\") pod \"barbican272a-account-delete-z2pw8\" (UID: \"d71b766d-302c-456b-ae8f-9befcb684b6a\") " pod="openstack/barbican272a-account-delete-z2pw8"
Oct 09 13:50:18 crc kubenswrapper[4762]: I1009 13:50:18.831076 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-storage-0"]
Oct 09 13:50:18 crc kubenswrapper[4762]: I1009 13:50:18.831786 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="1477cefa-db6e-45d7-adb1-d7c1369a3610" containerName="account-server" containerID="cri-o://00b505833ef1de865656bd407236b581616bea0b032dd2063522f8209a228b80" gracePeriod=30
Oct 09 13:50:18 crc kubenswrapper[4762]: I1009 13:50:18.832687 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="1477cefa-db6e-45d7-adb1-d7c1369a3610" containerName="swift-recon-cron" containerID="cri-o://eacaa8b5d9b84164146ff7a584e9c725d4bb02cd996fc577efdabba2e1ba13b2" gracePeriod=30
Oct 09 13:50:18 crc kubenswrapper[4762]: I1009 13:50:18.832761 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="1477cefa-db6e-45d7-adb1-d7c1369a3610" containerName="rsync" containerID="cri-o://72d5721934c08b9523e1f7661fd0cc7022a05c9c3860815c11f3195afcfb75a0" gracePeriod=30
Oct 09 13:50:18 crc kubenswrapper[4762]: I1009 13:50:18.832793 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="1477cefa-db6e-45d7-adb1-d7c1369a3610" containerName="object-expirer" containerID="cri-o://66b743c3cccfcded94f471aad6c8c7c490c328d23eeb054b9a959b77e635f85a" gracePeriod=30
Oct 09 13:50:18 crc kubenswrapper[4762]: I1009 13:50:18.832840 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="1477cefa-db6e-45d7-adb1-d7c1369a3610" containerName="object-updater" containerID="cri-o://e850af1b142da8d66827cd67251e451fbb6e56f0984b429a296cc6470f0641a5" gracePeriod=30
Oct 09 13:50:18 crc kubenswrapper[4762]: I1009 13:50:18.832868 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="1477cefa-db6e-45d7-adb1-d7c1369a3610" containerName="object-auditor" containerID="cri-o://e106dddaface5bda97c181d6c83b5407d6b119fe5aa03418fc12646ee1fe1b99" gracePeriod=30
Oct 09 13:50:18 crc kubenswrapper[4762]: I1009 13:50:18.832937 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="1477cefa-db6e-45d7-adb1-d7c1369a3610" containerName="object-replicator" containerID="cri-o://8fdc84bf366cf69541cde7674950b5c46694290b85f776bf10157a1b99e79676" gracePeriod=30
Oct 09 13:50:18 crc kubenswrapper[4762]: I1009 13:50:18.832992 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="1477cefa-db6e-45d7-adb1-d7c1369a3610" containerName="object-server" containerID="cri-o://5c71985be4769cc8dca8656fd224116d6cfd9ba38fac16e449710cfac07fed62" gracePeriod=30
Oct 09 13:50:18 crc kubenswrapper[4762]: I1009 13:50:18.833031 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="1477cefa-db6e-45d7-adb1-d7c1369a3610" containerName="container-updater" containerID="cri-o://567e831eb11e3bbc96e4ae494a3a48f3ce30d0330873e35b1584ad729d384384" gracePeriod=30
Oct 09 13:50:18 crc kubenswrapper[4762]: I1009 13:50:18.833079 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="1477cefa-db6e-45d7-adb1-d7c1369a3610" containerName="container-auditor" containerID="cri-o://acd57c56e1503069670b1070c75ca8274489494e90451ef9ccda9ba23b32fa18" gracePeriod=30
Oct 09 13:50:18 crc kubenswrapper[4762]: I1009 13:50:18.833140 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="1477cefa-db6e-45d7-adb1-d7c1369a3610" containerName="container-replicator" containerID="cri-o://97e103b14e33cb5dbaaed4498f0256e5eaa30d65d0cbbbc37cdddb35a31cab45" gracePeriod=30
Oct 09 13:50:18 crc kubenswrapper[4762]: I1009 13:50:18.833174 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="1477cefa-db6e-45d7-adb1-d7c1369a3610" containerName="container-server" containerID="cri-o://4d36fbb2c5b4d9e5e823bb89be5de822963abc13a4e641b17eb2acd134030bad" gracePeriod=30
Oct 09 13:50:18 crc kubenswrapper[4762]: I1009 13:50:18.833222 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="1477cefa-db6e-45d7-adb1-d7c1369a3610" containerName="account-reaper" containerID="cri-o://eb84e1b30f872410009e03d937f21e5a5aac63306bed8cebe6ff44f437abd217" gracePeriod=30
Oct 09 13:50:18 crc kubenswrapper[4762]: I1009 13:50:18.833267 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="1477cefa-db6e-45d7-adb1-d7c1369a3610" containerName="account-auditor" containerID="cri-o://27f0202dba594a2b3dbd7fb92b50964f812c6b9752cc21fbcc955dd912b04bce" gracePeriod=30
Oct 09 13:50:18 crc kubenswrapper[4762]: I1009 13:50:18.836924 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="1477cefa-db6e-45d7-adb1-d7c1369a3610" containerName="account-replicator" containerID="cri-o://8e56d6db5bd3ec01caa26962d4f73795f23978e73a54ad53603e87c2d20b3d95" gracePeriod=30
Oct 09 13:50:18 crc kubenswrapper[4762]: I1009 13:50:18.871250 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5t2mb\" (UniqueName: \"kubernetes.io/projected/d71b766d-302c-456b-ae8f-9befcb684b6a-kube-api-access-5t2mb\") pod \"barbican272a-account-delete-z2pw8\" (UID: \"d71b766d-302c-456b-ae8f-9befcb684b6a\") " pod="openstack/barbican272a-account-delete-z2pw8"
Oct 09 13:50:18 crc kubenswrapper[4762]: I1009 13:50:18.882146 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/novaapi3399-account-delete-l75cx"]
Oct 09 13:50:18 crc kubenswrapper[4762]: I1009 13:50:18.883602 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/novaapi3399-account-delete-l75cx"
Oct 09 13:50:19 crc kubenswrapper[4762]: I1009 13:50:18.921939 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/novaapi3399-account-delete-l75cx"]
Oct 09 13:50:19 crc kubenswrapper[4762]: I1009 13:50:18.932265 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-ring-rebalance-tdgxl"]
Oct 09 13:50:19 crc kubenswrapper[4762]: E1009 13:50:18.934136 4762 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found
Oct 09 13:50:19 crc kubenswrapper[4762]: E1009 13:50:18.934195 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/c3841f71-5204-469f-b755-e030281725d1-config-data podName:c3841f71-5204-469f-b755-e030281725d1 nodeName:}" failed. No retries permitted until 2025-10-09 13:50:19.934179755 +0000 UTC m=+1495.707970794 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/c3841f71-5204-469f-b755-e030281725d1-config-data") pod "rabbitmq-cell1-server-0" (UID: "c3841f71-5204-469f-b755-e030281725d1") : configmap "rabbitmq-cell1-config-data" not found
Oct 09 13:50:19 crc kubenswrapper[4762]: I1009 13:50:18.956998 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/swift-ring-rebalance-tdgxl"]
Oct 09 13:50:19 crc kubenswrapper[4762]: I1009 13:50:19.037758 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fsmzk\" (UniqueName: \"kubernetes.io/projected/7428fe70-b1c9-4595-a9b2-fa90ac21c3b2-kube-api-access-fsmzk\") pod \"novaapi3399-account-delete-l75cx\" (UID: \"7428fe70-b1c9-4595-a9b2-fa90ac21c3b2\") " pod="openstack/novaapi3399-account-delete-l75cx"
Oct 09 13:50:19 crc kubenswrapper[4762]: E1009 13:50:19.038335 4762 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found
Oct 09 13:50:19 crc kubenswrapper[4762]: E1009 13:50:19.038429 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/0ca3d4c1-b9e5-4443-8102-7739602cbd2f-config-data podName:0ca3d4c1-b9e5-4443-8102-7739602cbd2f nodeName:}" failed. No retries permitted until 2025-10-09 13:50:21.038376215 +0000 UTC m=+1496.812167254 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/0ca3d4c1-b9e5-4443-8102-7739602cbd2f-config-data") pod "rabbitmq-server-0" (UID: "0ca3d4c1-b9e5-4443-8102-7739602cbd2f") : configmap "rabbitmq-config-data" not found
Oct 09 13:50:19 crc kubenswrapper[4762]: I1009 13:50:19.057035 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican272a-account-delete-z2pw8"
Need to start a new one" pod="openstack/barbican272a-account-delete-z2pw8" Oct 09 13:50:19 crc kubenswrapper[4762]: I1009 13:50:19.153409 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fsmzk\" (UniqueName: \"kubernetes.io/projected/7428fe70-b1c9-4595-a9b2-fa90ac21c3b2-kube-api-access-fsmzk\") pod \"novaapi3399-account-delete-l75cx\" (UID: \"7428fe70-b1c9-4595-a9b2-fa90ac21c3b2\") " pod="openstack/novaapi3399-account-delete-l75cx" Oct 09 13:50:19 crc kubenswrapper[4762]: E1009 13:50:19.188327 4762 handlers.go:78] "Exec lifecycle hook for Container in Pod failed" err=< Oct 09 13:50:19 crc kubenswrapper[4762]: command '/usr/local/bin/container-scripts/stop-ovsdb-server.sh' exited with 137: ++ dirname /usr/local/bin/container-scripts/stop-ovsdb-server.sh Oct 09 13:50:19 crc kubenswrapper[4762]: + source /usr/local/bin/container-scripts/functions Oct 09 13:50:19 crc kubenswrapper[4762]: ++ OVNBridge=br-int Oct 09 13:50:19 crc kubenswrapper[4762]: ++ OVNRemote=tcp:localhost:6642 Oct 09 13:50:19 crc kubenswrapper[4762]: ++ OVNEncapType=geneve Oct 09 13:50:19 crc kubenswrapper[4762]: ++ OVNAvailabilityZones= Oct 09 13:50:19 crc kubenswrapper[4762]: ++ EnableChassisAsGateway=true Oct 09 13:50:19 crc kubenswrapper[4762]: ++ PhysicalNetworks= Oct 09 13:50:19 crc kubenswrapper[4762]: ++ OVNHostName= Oct 09 13:50:19 crc kubenswrapper[4762]: ++ DB_FILE=/etc/openvswitch/conf.db Oct 09 13:50:19 crc kubenswrapper[4762]: ++ ovs_dir=/var/lib/openvswitch Oct 09 13:50:19 crc kubenswrapper[4762]: ++ FLOWS_RESTORE_SCRIPT=/var/lib/openvswitch/flows-script Oct 09 13:50:19 crc kubenswrapper[4762]: ++ FLOWS_RESTORE_DIR=/var/lib/openvswitch/saved-flows Oct 09 13:50:19 crc kubenswrapper[4762]: ++ SAFE_TO_STOP_OVSDB_SERVER_SEMAPHORE=/var/lib/openvswitch/is_safe_to_stop_ovsdb_server Oct 09 13:50:19 crc kubenswrapper[4762]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Oct 09 13:50:19 crc kubenswrapper[4762]: + sleep 0.5 Oct 09 13:50:19 crc kubenswrapper[4762]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Oct 09 13:50:19 crc kubenswrapper[4762]: + sleep 0.5 Oct 09 13:50:19 crc kubenswrapper[4762]: + '[' '!' 
-f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Oct 09 13:50:19 crc kubenswrapper[4762]: + cleanup_ovsdb_server_semaphore Oct 09 13:50:19 crc kubenswrapper[4762]: + rm -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server Oct 09 13:50:19 crc kubenswrapper[4762]: + /usr/share/openvswitch/scripts/ovs-ctl stop --no-ovs-vswitchd Oct 09 13:50:19 crc kubenswrapper[4762]: > execCommand=["/usr/local/bin/container-scripts/stop-ovsdb-server.sh"] containerName="ovsdb-server" pod="openstack/ovn-controller-ovs-4rsdz" message=< Oct 09 13:50:19 crc kubenswrapper[4762]: Exiting ovsdb-server (5) [ OK ] Oct 09 13:50:19 crc kubenswrapper[4762]: ++ dirname /usr/local/bin/container-scripts/stop-ovsdb-server.sh Oct 09 13:50:19 crc kubenswrapper[4762]: + source /usr/local/bin/container-scripts/functions Oct 09 13:50:19 crc kubenswrapper[4762]: ++ OVNBridge=br-int Oct 09 13:50:19 crc kubenswrapper[4762]: ++ OVNRemote=tcp:localhost:6642 Oct 09 13:50:19 crc kubenswrapper[4762]: ++ OVNEncapType=geneve Oct 09 13:50:19 crc kubenswrapper[4762]: ++ OVNAvailabilityZones= Oct 09 13:50:19 crc kubenswrapper[4762]: ++ EnableChassisAsGateway=true Oct 09 13:50:19 crc kubenswrapper[4762]: ++ PhysicalNetworks= Oct 09 13:50:19 crc kubenswrapper[4762]: ++ OVNHostName= Oct 09 13:50:19 crc kubenswrapper[4762]: ++ DB_FILE=/etc/openvswitch/conf.db Oct 09 13:50:19 crc kubenswrapper[4762]: ++ ovs_dir=/var/lib/openvswitch Oct 09 13:50:19 crc kubenswrapper[4762]: ++ FLOWS_RESTORE_SCRIPT=/var/lib/openvswitch/flows-script Oct 09 13:50:19 crc kubenswrapper[4762]: ++ FLOWS_RESTORE_DIR=/var/lib/openvswitch/saved-flows Oct 09 13:50:19 crc kubenswrapper[4762]: ++ SAFE_TO_STOP_OVSDB_SERVER_SEMAPHORE=/var/lib/openvswitch/is_safe_to_stop_ovsdb_server Oct 09 13:50:19 crc kubenswrapper[4762]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Oct 09 13:50:19 crc kubenswrapper[4762]: + sleep 0.5 Oct 09 13:50:19 crc kubenswrapper[4762]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Oct 09 13:50:19 crc kubenswrapper[4762]: + sleep 0.5 Oct 09 13:50:19 crc kubenswrapper[4762]: + '[' '!' 
-f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Oct 09 13:50:19 crc kubenswrapper[4762]: + cleanup_ovsdb_server_semaphore Oct 09 13:50:19 crc kubenswrapper[4762]: + rm -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server Oct 09 13:50:19 crc kubenswrapper[4762]: + /usr/share/openvswitch/scripts/ovs-ctl stop --no-ovs-vswitchd Oct 09 13:50:19 crc kubenswrapper[4762]: > Oct 09 13:50:19 crc kubenswrapper[4762]: E1009 13:50:19.188378 4762 kuberuntime_container.go:691] "PreStop hook failed" err=< Oct 09 13:50:19 crc kubenswrapper[4762]: command '/usr/local/bin/container-scripts/stop-ovsdb-server.sh' exited with 137: ++ dirname /usr/local/bin/container-scripts/stop-ovsdb-server.sh Oct 09 13:50:19 crc kubenswrapper[4762]: + source /usr/local/bin/container-scripts/functions Oct 09 13:50:19 crc kubenswrapper[4762]: ++ OVNBridge=br-int Oct 09 13:50:19 crc kubenswrapper[4762]: ++ OVNRemote=tcp:localhost:6642 Oct 09 13:50:19 crc kubenswrapper[4762]: ++ OVNEncapType=geneve Oct 09 13:50:19 crc kubenswrapper[4762]: ++ OVNAvailabilityZones= Oct 09 13:50:19 crc kubenswrapper[4762]: ++ EnableChassisAsGateway=true Oct 09 13:50:19 crc kubenswrapper[4762]: ++ PhysicalNetworks= Oct 09 13:50:19 crc kubenswrapper[4762]: ++ OVNHostName= Oct 09 13:50:19 crc kubenswrapper[4762]: ++ DB_FILE=/etc/openvswitch/conf.db Oct 09 13:50:19 crc kubenswrapper[4762]: ++ ovs_dir=/var/lib/openvswitch Oct 09 13:50:19 crc kubenswrapper[4762]: ++ FLOWS_RESTORE_SCRIPT=/var/lib/openvswitch/flows-script Oct 09 13:50:19 crc kubenswrapper[4762]: ++ FLOWS_RESTORE_DIR=/var/lib/openvswitch/saved-flows Oct 09 13:50:19 crc kubenswrapper[4762]: ++ SAFE_TO_STOP_OVSDB_SERVER_SEMAPHORE=/var/lib/openvswitch/is_safe_to_stop_ovsdb_server Oct 09 13:50:19 crc kubenswrapper[4762]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Oct 09 13:50:19 crc kubenswrapper[4762]: + sleep 0.5 Oct 09 13:50:19 crc kubenswrapper[4762]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Oct 09 13:50:19 crc kubenswrapper[4762]: + sleep 0.5 Oct 09 13:50:19 crc kubenswrapper[4762]: + '[' '!' 
-f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Oct 09 13:50:19 crc kubenswrapper[4762]: + cleanup_ovsdb_server_semaphore Oct 09 13:50:19 crc kubenswrapper[4762]: + rm -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server Oct 09 13:50:19 crc kubenswrapper[4762]: + /usr/share/openvswitch/scripts/ovs-ctl stop --no-ovs-vswitchd Oct 09 13:50:19 crc kubenswrapper[4762]: > pod="openstack/ovn-controller-ovs-4rsdz" podUID="851f6b4a-bac4-4c7e-8d7b-46c7513269d9" containerName="ovsdb-server" containerID="cri-o://8f00e71744c4d358c802b16ce5f508b3f36e83f09d97012b8034c4e61e6b235c" Oct 09 13:50:19 crc kubenswrapper[4762]: I1009 13:50:19.188431 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-controller-ovs-4rsdz" podUID="851f6b4a-bac4-4c7e-8d7b-46c7513269d9" containerName="ovsdb-server" containerID="cri-o://8f00e71744c4d358c802b16ce5f508b3f36e83f09d97012b8034c4e61e6b235c" gracePeriod=29 Oct 09 13:50:19 crc kubenswrapper[4762]: I1009 13:50:19.227495 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fsmzk\" (UniqueName: \"kubernetes.io/projected/7428fe70-b1c9-4595-a9b2-fa90ac21c3b2-kube-api-access-fsmzk\") pod \"novaapi3399-account-delete-l75cx\" (UID: \"7428fe70-b1c9-4595-a9b2-fa90ac21c3b2\") " pod="openstack/novaapi3399-account-delete-l75cx" Oct 09 13:50:19 crc kubenswrapper[4762]: I1009 13:50:19.260138 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b797ab80-d2f5-427c-b305-0aaed4e06e83-config-data\") pod \"barbican-api-6cd5b6946b-r2c8l\" (UID: \"b797ab80-d2f5-427c-b305-0aaed4e06e83\") " pod="openstack/barbican-api-6cd5b6946b-r2c8l" Oct 09 13:50:19 crc kubenswrapper[4762]: I1009 13:50:19.260182 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zf8xq\" (UniqueName: \"kubernetes.io/projected/8d8907fd-9553-4758-8e14-a7f607ed4f34-kube-api-access-zf8xq\") pod \"barbican-keystone-listener-577955c56b-wlqdm\" (UID: \"8d8907fd-9553-4758-8e14-a7f607ed4f34\") " pod="openstack/barbican-keystone-listener-577955c56b-wlqdm" Oct 09 13:50:19 crc kubenswrapper[4762]: I1009 13:50:19.260258 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d8907fd-9553-4758-8e14-a7f607ed4f34-config-data\") pod \"barbican-keystone-listener-577955c56b-wlqdm\" (UID: \"8d8907fd-9553-4758-8e14-a7f607ed4f34\") " pod="openstack/barbican-keystone-listener-577955c56b-wlqdm" Oct 09 13:50:19 crc kubenswrapper[4762]: E1009 13:50:19.260338 4762 secret.go:188] Couldn't get secret openstack/barbican-config-data: secret "barbican-config-data" not found Oct 09 13:50:19 crc kubenswrapper[4762]: E1009 13:50:19.260404 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b797ab80-d2f5-427c-b305-0aaed4e06e83-config-data podName:b797ab80-d2f5-427c-b305-0aaed4e06e83 nodeName:}" failed. No retries permitted until 2025-10-09 13:50:21.26038827 +0000 UTC m=+1497.034179309 (durationBeforeRetry 2s). 
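From the xtrace in the failed-hook output above, the preStop script's logic can be reconstructed: it polls for a semaphore file that marks it safe to stop the database server, removes the semaphore, then stops ovsdb-server via ovs-ctl. Exit status 137 is 128+9, i.e. the hook command was killed with SIGKILL rather than finishing on its own. A hedged sketch of the script (the functions file contents and the inlined cleanup helper are assumptions; the real script ships in the container image and may differ):

    #!/bin/bash
    # Sketch of stop-ovsdb-server.sh reconstructed from the trace above.
    set -x
    source "$(dirname "$0")/functions"  # presumably defines the OVN*/DB_FILE/FLOWS_* variables seen in the trace
    SAFE_TO_STOP_OVSDB_SERVER_SEMAPHORE=/var/lib/openvswitch/is_safe_to_stop_ovsdb_server
    cleanup_ovsdb_server_semaphore() {  # assumed to live in `functions`
        rm -f "$SAFE_TO_STOP_OVSDB_SERVER_SEMAPHORE"
    }
    # Poll until another container signals that stopping ovsdb-server is safe
    # (the trace shows repeated 0.5 s sleeps between file checks).
    while [ ! -f "$SAFE_TO_STOP_OVSDB_SERVER_SEMAPHORE" ]; do
        sleep 0.5
    done
    cleanup_ovsdb_server_semaphore
    /usr/share/openvswitch/scripts/ovs-ctl stop --no-ovs-vswitchd
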
Oct 09 13:50:19 crc kubenswrapper[4762]: I1009 13:50:19.188431 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-controller-ovs-4rsdz" podUID="851f6b4a-bac4-4c7e-8d7b-46c7513269d9" containerName="ovsdb-server" containerID="cri-o://8f00e71744c4d358c802b16ce5f508b3f36e83f09d97012b8034c4e61e6b235c" gracePeriod=29
Oct 09 13:50:19 crc kubenswrapper[4762]: I1009 13:50:19.227495 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fsmzk\" (UniqueName: \"kubernetes.io/projected/7428fe70-b1c9-4595-a9b2-fa90ac21c3b2-kube-api-access-fsmzk\") pod \"novaapi3399-account-delete-l75cx\" (UID: \"7428fe70-b1c9-4595-a9b2-fa90ac21c3b2\") " pod="openstack/novaapi3399-account-delete-l75cx"
Oct 09 13:50:19 crc kubenswrapper[4762]: I1009 13:50:19.260138 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b797ab80-d2f5-427c-b305-0aaed4e06e83-config-data\") pod \"barbican-api-6cd5b6946b-r2c8l\" (UID: \"b797ab80-d2f5-427c-b305-0aaed4e06e83\") " pod="openstack/barbican-api-6cd5b6946b-r2c8l"
Oct 09 13:50:19 crc kubenswrapper[4762]: I1009 13:50:19.260182 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zf8xq\" (UniqueName: \"kubernetes.io/projected/8d8907fd-9553-4758-8e14-a7f607ed4f34-kube-api-access-zf8xq\") pod \"barbican-keystone-listener-577955c56b-wlqdm\" (UID: \"8d8907fd-9553-4758-8e14-a7f607ed4f34\") " pod="openstack/barbican-keystone-listener-577955c56b-wlqdm"
Oct 09 13:50:19 crc kubenswrapper[4762]: I1009 13:50:19.260258 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d8907fd-9553-4758-8e14-a7f607ed4f34-config-data\") pod \"barbican-keystone-listener-577955c56b-wlqdm\" (UID: \"8d8907fd-9553-4758-8e14-a7f607ed4f34\") " pod="openstack/barbican-keystone-listener-577955c56b-wlqdm"
Oct 09 13:50:19 crc kubenswrapper[4762]: E1009 13:50:19.260338 4762 secret.go:188] Couldn't get secret openstack/barbican-config-data: secret "barbican-config-data" not found
Oct 09 13:50:19 crc kubenswrapper[4762]: E1009 13:50:19.260404 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b797ab80-d2f5-427c-b305-0aaed4e06e83-config-data podName:b797ab80-d2f5-427c-b305-0aaed4e06e83 nodeName:}" failed. No retries permitted until 2025-10-09 13:50:21.26038827 +0000 UTC m=+1497.034179309 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/b797ab80-d2f5-427c-b305-0aaed4e06e83-config-data") pod "barbican-api-6cd5b6946b-r2c8l" (UID: "b797ab80-d2f5-427c-b305-0aaed4e06e83") : secret "barbican-config-data" not found
Oct 09 13:50:19 crc kubenswrapper[4762]: E1009 13:50:19.260822 4762 secret.go:188] Couldn't get secret openstack/barbican-config-data: secret "barbican-config-data" not found
Oct 09 13:50:19 crc kubenswrapper[4762]: E1009 13:50:19.260848 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8d8907fd-9553-4758-8e14-a7f607ed4f34-config-data podName:8d8907fd-9553-4758-8e14-a7f607ed4f34 nodeName:}" failed. No retries permitted until 2025-10-09 13:50:21.260839963 +0000 UTC m=+1497.034631002 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/8d8907fd-9553-4758-8e14-a7f607ed4f34-config-data") pod "barbican-keystone-listener-577955c56b-wlqdm" (UID: "8d8907fd-9553-4758-8e14-a7f607ed4f34") : secret "barbican-config-data" not found
Oct 09 13:50:19 crc kubenswrapper[4762]: E1009 13:50:19.266763 4762 projected.go:194] Error preparing data for projected volume kube-api-access-zf8xq for pod openstack/barbican-keystone-listener-577955c56b-wlqdm: failed to fetch token: serviceaccounts "barbican-barbican" not found
Oct 09 13:50:19 crc kubenswrapper[4762]: E1009 13:50:19.266826 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/8d8907fd-9553-4758-8e14-a7f607ed4f34-kube-api-access-zf8xq podName:8d8907fd-9553-4758-8e14-a7f607ed4f34 nodeName:}" failed. No retries permitted until 2025-10-09 13:50:21.266807659 +0000 UTC m=+1497.040598698 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-zf8xq" (UniqueName: "kubernetes.io/projected/8d8907fd-9553-4758-8e14-a7f607ed4f34-kube-api-access-zf8xq") pod "barbican-keystone-listener-577955c56b-wlqdm" (UID: "8d8907fd-9553-4758-8e14-a7f607ed4f34") : failed to fetch token: serviceaccounts "barbican-barbican" not found
Oct 09 13:50:19 crc kubenswrapper[4762]: I1009 13:50:19.308958 4762 generic.go:334] "Generic (PLEG): container finished" podID="851f6b4a-bac4-4c7e-8d7b-46c7513269d9" containerID="8f00e71744c4d358c802b16ce5f508b3f36e83f09d97012b8034c4e61e6b235c" exitCode=0
Oct 09 13:50:19 crc kubenswrapper[4762]: I1009 13:50:19.333048 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-controller-ovs-4rsdz" podUID="851f6b4a-bac4-4c7e-8d7b-46c7513269d9" containerName="ovs-vswitchd" containerID="cri-o://6adc8ac732e2e99b70d366ead1fb8b6aac18a135339e217a7db01f59192c2ee6" gracePeriod=29
Oct 09 13:50:19 crc kubenswrapper[4762]: I1009 13:50:19.345415 4762 generic.go:334] "Generic (PLEG): container finished" podID="dd7f6272-8875-4adb-b8d3-1cfe2651f738" containerID="468ae0587ff3d81328ddb6eb43d3aa96c3ea9e2660cdd52df58d80465e0d4138" exitCode=0
Oct 09 13:50:19 crc kubenswrapper[4762]: I1009 13:50:19.351074 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_73168d57-30d8-4389-aa93-cacc6b07f705/ovsdbserver-nb/0.log"
Oct 09 13:50:19 crc kubenswrapper[4762]: I1009 13:50:19.351110 4762 generic.go:334] "Generic (PLEG): container finished" podID="73168d57-30d8-4389-aa93-cacc6b07f705" containerID="a8a5046654c8bc5cfa5fef4fa4b6f0c7fc095d9255b7f318a934a8f57d41e38e" exitCode=2
Oct 09 13:50:19 crc kubenswrapper[4762]: I1009 13:50:19.351125 4762 generic.go:334] "Generic (PLEG): container finished" podID="73168d57-30d8-4389-aa93-cacc6b07f705" containerID="cb88f4bb766dd77797353f3b4da21fe59806a861fbfa0151f974260158e77591" exitCode=143
Oct 09 13:50:19 crc kubenswrapper[4762]: I1009 13:50:19.352995 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_de9e8701-0980-4e1d-beb6-bc897f6a3e5f/ovsdbserver-sb/0.log"
Oct 09 13:50:19 crc kubenswrapper[4762]: I1009 13:50:19.353023 4762 generic.go:334] "Generic (PLEG): container finished" podID="de9e8701-0980-4e1d-beb6-bc897f6a3e5f" containerID="bfd114b69745f91fa863504ab2154e63a593800c553e012f664f20178ef3e182" exitCode=2
Oct 09 13:50:19 crc kubenswrapper[4762]: I1009 13:50:19.353037 4762 generic.go:334] "Generic (PLEG): container finished" podID="de9e8701-0980-4e1d-beb6-bc897f6a3e5f" containerID="0298fedbb6423e6e1f4c3a0b9c472563bf0d4a5e060e57f78e885a92bc20aaf1" exitCode=143
Oct 09 13:50:19 crc kubenswrapper[4762]: I1009 13:50:19.362877 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j9sqm\" (UniqueName: \"kubernetes.io/projected/b797ab80-d2f5-427c-b305-0aaed4e06e83-kube-api-access-j9sqm\") pod \"barbican-api-6cd5b6946b-r2c8l\" (UID: \"b797ab80-d2f5-427c-b305-0aaed4e06e83\") " pod="openstack/barbican-api-6cd5b6946b-r2c8l"
Oct 09 13:50:19 crc kubenswrapper[4762]: I1009 13:50:19.371680 4762 generic.go:334] "Generic (PLEG): container finished" podID="46b3e9f8-479c-45b8-afc9-4c8344da0797" containerID="be1b69b00d7fde9a938acf59f47549f46af90d5b80b3f6b68e63608f6eeeefc0" exitCode=0
Oct 09 13:50:19 crc kubenswrapper[4762]: E1009 13:50:19.373485 4762 projected.go:194] Error preparing data for projected volume kube-api-access-j9sqm for pod openstack/barbican-api-6cd5b6946b-r2c8l: failed to fetch token: serviceaccounts "barbican-barbican" not found
Oct 09 13:50:19 crc kubenswrapper[4762]: E1009 13:50:19.373547 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/b797ab80-d2f5-427c-b305-0aaed4e06e83-kube-api-access-j9sqm podName:b797ab80-d2f5-427c-b305-0aaed4e06e83 nodeName:}" failed. No retries permitted until 2025-10-09 13:50:21.373528594 +0000 UTC m=+1497.147319633 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-j9sqm" (UniqueName: "kubernetes.io/projected/b797ab80-d2f5-427c-b305-0aaed4e06e83-kube-api-access-j9sqm") pod "barbican-api-6cd5b6946b-r2c8l" (UID: "b797ab80-d2f5-427c-b305-0aaed4e06e83") : failed to fetch token: serviceaccounts "barbican-barbican" not found
Oct 09 13:50:19 crc kubenswrapper[4762]: I1009 13:50:19.387244 4762 generic.go:334] "Generic (PLEG): container finished" podID="3d5e35ba-6450-49d9-907a-8a4f879a1b0f" containerID="3512052ad89cd6f81eb49dade5d8f066e313de741dc524a06c830f4ecbb4c187" exitCode=0
Oct 09 13:50:19 crc kubenswrapper[4762]: I1009 13:50:19.418344 4762 generic.go:334] "Generic (PLEG): container finished" podID="b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181" containerID="00be9b377751e0bad21e7467727a5bb1ef9635151ae430c7dd73ffcc83e71cc3" exitCode=143
Oct 09 13:50:19 crc kubenswrapper[4762]: I1009 13:50:19.443713 4762 generic.go:334] "Generic (PLEG): container finished" podID="1477cefa-db6e-45d7-adb1-d7c1369a3610" containerID="66b743c3cccfcded94f471aad6c8c7c490c328d23eeb054b9a959b77e635f85a" exitCode=0
Oct 09 13:50:19 crc kubenswrapper[4762]: I1009 13:50:19.444025 4762 generic.go:334] "Generic (PLEG): container finished" podID="1477cefa-db6e-45d7-adb1-d7c1369a3610" containerID="e850af1b142da8d66827cd67251e451fbb6e56f0984b429a296cc6470f0641a5" exitCode=0
Oct 09 13:50:19 crc kubenswrapper[4762]: I1009 13:50:19.444034 4762 generic.go:334] "Generic (PLEG): container finished" podID="1477cefa-db6e-45d7-adb1-d7c1369a3610" containerID="e106dddaface5bda97c181d6c83b5407d6b119fe5aa03418fc12646ee1fe1b99" exitCode=0
Oct 09 13:50:19 crc kubenswrapper[4762]: I1009 13:50:19.444042 4762 generic.go:334] "Generic (PLEG): container finished" podID="1477cefa-db6e-45d7-adb1-d7c1369a3610" containerID="8fdc84bf366cf69541cde7674950b5c46694290b85f776bf10157a1b99e79676" exitCode=0
Oct 09 13:50:19 crc kubenswrapper[4762]: I1009 13:50:19.444050 4762 generic.go:334] "Generic (PLEG): container finished" podID="1477cefa-db6e-45d7-adb1-d7c1369a3610" containerID="567e831eb11e3bbc96e4ae494a3a48f3ce30d0330873e35b1584ad729d384384" exitCode=0
Oct 09 13:50:19 crc kubenswrapper[4762]: I1009 13:50:19.444056 4762 generic.go:334] "Generic (PLEG): container finished" podID="1477cefa-db6e-45d7-adb1-d7c1369a3610" containerID="acd57c56e1503069670b1070c75ca8274489494e90451ef9ccda9ba23b32fa18" exitCode=0
Oct 09 13:50:19 crc kubenswrapper[4762]: I1009 13:50:19.444063 4762 generic.go:334] "Generic (PLEG): container finished" podID="1477cefa-db6e-45d7-adb1-d7c1369a3610" containerID="97e103b14e33cb5dbaaed4498f0256e5eaa30d65d0cbbbc37cdddb35a31cab45" exitCode=0
Oct 09 13:50:19 crc kubenswrapper[4762]: I1009 13:50:19.444071 4762 generic.go:334] "Generic (PLEG): container finished" podID="1477cefa-db6e-45d7-adb1-d7c1369a3610" containerID="eb84e1b30f872410009e03d937f21e5a5aac63306bed8cebe6ff44f437abd217" exitCode=0
Oct 09 13:50:19 crc kubenswrapper[4762]: I1009 13:50:19.444077 4762 generic.go:334] "Generic (PLEG): container finished" podID="1477cefa-db6e-45d7-adb1-d7c1369a3610" containerID="27f0202dba594a2b3dbd7fb92b50964f812c6b9752cc21fbcc955dd912b04bce" exitCode=0
Oct 09 13:50:19 crc kubenswrapper[4762]: I1009 13:50:19.444084 4762 generic.go:334] "Generic (PLEG): container finished" podID="1477cefa-db6e-45d7-adb1-d7c1369a3610" containerID="8e56d6db5bd3ec01caa26962d4f73795f23978e73a54ad53603e87c2d20b3d95" exitCode=0
Oct 09 13:50:19 crc kubenswrapper[4762]: I1009 13:50:19.523497 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44b61157-b706-44e2-9ca9-a5709e68f92c" path="/var/lib/kubelet/pods/44b61157-b706-44e2-9ca9-a5709e68f92c/volumes"
Oct 09 13:50:19 crc kubenswrapper[4762]: I1009 13:50:19.524291 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5bba279a-35cf-4a4e-8632-2098cad8fa08" path="/var/lib/kubelet/pods/5bba279a-35cf-4a4e-8632-2098cad8fa08/volumes"
Oct 09 13:50:19 crc kubenswrapper[4762]: I1009 13:50:19.525159 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5e1f82ca-fcd6-4a8f-ae63-a5d0f9c5917d" path="/var/lib/kubelet/pods/5e1f82ca-fcd6-4a8f-ae63-a5d0f9c5917d/volumes"
Oct 09 13:50:19 crc kubenswrapper[4762]: I1009 13:50:19.526630 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cf35063-94f0-4dd9-9282-060a7d5360cc" path="/var/lib/kubelet/pods/8cf35063-94f0-4dd9-9282-060a7d5360cc/volumes"
Oct 09 13:50:19 crc kubenswrapper[4762]: I1009 13:50:19.527897 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f2acc43c-4777-4ee7-8cd9-1314007c0be8" path="/var/lib/kubelet/pods/f2acc43c-4777-4ee7-8cd9-1314007c0be8/volumes"
Oct 09 13:50:19 crc kubenswrapper[4762]: I1009 13:50:19.528813 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f8ef7222-071d-403b-8356-7645662e1226" path="/var/lib/kubelet/pods/f8ef7222-071d-403b-8356-7645662e1226/volumes"
Oct 09 13:50:19 crc kubenswrapper[4762]: I1009 13:50:19.529756 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-4rsdz" event={"ID":"851f6b4a-bac4-4c7e-8d7b-46c7513269d9","Type":"ContainerDied","Data":"8f00e71744c4d358c802b16ce5f508b3f36e83f09d97012b8034c4e61e6b235c"}
Oct 09 13:50:19 crc kubenswrapper[4762]: I1009 13:50:19.529785 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-7ff674b5d8-5r7qh"]
Oct 09 13:50:19 crc kubenswrapper[4762]: I1009 13:50:19.529809 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-c7fd4cf7c-phtkg" event={"ID":"dd7f6272-8875-4adb-b8d3-1cfe2651f738","Type":"ContainerDied","Data":"468ae0587ff3d81328ddb6eb43d3aa96c3ea9e2660cdd52df58d80465e0d4138"}
Oct 09 13:50:19 crc kubenswrapper[4762]: I1009 13:50:19.529825 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c7b6c5df9-77jpt"]
Oct 09 13:50:19 crc kubenswrapper[4762]: I1009 13:50:19.529836 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"]
Oct 09 13:50:19 crc kubenswrapper[4762]: I1009 13:50:19.529847 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"73168d57-30d8-4389-aa93-cacc6b07f705","Type":"ContainerDied","Data":"a8a5046654c8bc5cfa5fef4fa4b6f0c7fc095d9255b7f318a934a8f57d41e38e"}
Oct 09 13:50:19 crc kubenswrapper[4762]: I1009 13:50:19.530085 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/placement-7ff674b5d8-5r7qh" podUID="ac020a5d-4e49-4f85-bc3c-13769c5f418a" containerName="placement-log" containerID="cri-o://23815f5a561063d232341e5c1bdd222c0042c43c875ca3fd331afec3554cb11f" gracePeriod=30
Oct 09 13:50:19 crc kubenswrapper[4762]: I1009 13:50:19.530406 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5c7b6c5df9-77jpt" podUID="2fc7e86f-4e77-45a4-a90c-6b06d4907ca7" containerName="dnsmasq-dns" containerID="cri-o://9402fbdb8687b707721113b87e240a55575312969cee38eb4fd38e01f06c6a7b" gracePeriod=10
Oct 09 13:50:19 crc kubenswrapper[4762]: I1009 13:50:19.530467 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/placement-7ff674b5d8-5r7qh" podUID="ac020a5d-4e49-4f85-bc3c-13769c5f418a" containerName="placement-api" containerID="cri-o://75fb11579ca65d227b26b52153b7cd447fed5fcfb5bfc704642e8c32edf4fd96" gracePeriod=30
Oct 09 13:50:19 crc kubenswrapper[4762]: I1009 13:50:19.532132 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="96d5f387-4c72-4cc6-9776-56fcb49b8851" containerName="glance-log" containerID="cri-o://f76d55bec71856953e9258a33e1c4393f28326a0fc15237e8dc359904b57e3fa" gracePeriod=30
Oct 09 13:50:19 crc kubenswrapper[4762]: I1009 13:50:19.532486 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="96d5f387-4c72-4cc6-9776-56fcb49b8851" containerName="glance-httpd" containerID="cri-o://e1cf42e1e55e57e01124a220f5977d775fad1808eb503cb4e273e0937ebaa12d" gracePeriod=30
Oct 09 13:50:19 crc kubenswrapper[4762]: I1009 13:50:19.529861 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"73168d57-30d8-4389-aa93-cacc6b07f705","Type":"ContainerDied","Data":"cb88f4bb766dd77797353f3b4da21fe59806a861fbfa0151f974260158e77591"}
Oct 09 13:50:19 crc kubenswrapper[4762]: I1009 13:50:19.548328 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"de9e8701-0980-4e1d-beb6-bc897f6a3e5f","Type":"ContainerDied","Data":"bfd114b69745f91fa863504ab2154e63a593800c553e012f664f20178ef3e182"}
Oct 09 13:50:19 crc kubenswrapper[4762]: I1009 13:50:19.548579 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"de9e8701-0980-4e1d-beb6-bc897f6a3e5f","Type":"ContainerDied","Data":"0298fedbb6423e6e1f4c3a0b9c472563bf0d4a5e060e57f78e885a92bc20aaf1"}
Oct 09 13:50:19 crc kubenswrapper[4762]: I1009 13:50:19.548600 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-7q5p9"]
Oct 09 13:50:19 crc kubenswrapper[4762]: I1009 13:50:19.548623 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-7q5p9"]
Oct 09 13:50:19 crc kubenswrapper[4762]: I1009 13:50:19.548940 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-5d57d9d56c-xrhgn" event={"ID":"655c116e-99bf-4f34-a050-bbc1aa3e9b62","Type":"ContainerStarted","Data":"8f2c1f32394749bf3a3b28fbc170553d9b3864c525eedb405a0476f08a9b9265"}
Oct 09 13:50:19 crc kubenswrapper[4762]: I1009 13:50:19.548960 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-fbmj6"]
Oct 09 13:50:19 crc kubenswrapper[4762]: I1009 13:50:19.548975 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-fbmj6"]
Oct 09 13:50:19 crc kubenswrapper[4762]: I1009 13:50:19.548988 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-lh6rx"]
Oct 09 13:50:19 crc kubenswrapper[4762]: I1009 13:50:19.549208 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-lh6rx"]
Oct 09 13:50:19 crc kubenswrapper[4762]: I1009 13:50:19.549238 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"46b3e9f8-479c-45b8-afc9-4c8344da0797","Type":"ContainerDied","Data":"be1b69b00d7fde9a938acf59f47549f46af90d5b80b3f6b68e63608f6eeeefc0"}
Oct 09 13:50:19 crc kubenswrapper[4762]: I1009 13:50:19.549257 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-nknqr" event={"ID":"3d5e35ba-6450-49d9-907a-8a4f879a1b0f","Type":"ContainerDied","Data":"3512052ad89cd6f81eb49dade5d8f066e313de741dc524a06c830f4ecbb4c187"}
Oct 09 13:50:19 crc kubenswrapper[4762]: I1009 13:50:19.549381 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181","Type":"ContainerDied","Data":"00be9b377751e0bad21e7467727a5bb1ef9635151ae430c7dd73ffcc83e71cc3"}
Oct 09 13:50:19 crc kubenswrapper[4762]: I1009 13:50:19.549399 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutronb30a-account-delete-hlx4d"]
Oct 09 13:50:19 crc kubenswrapper[4762]: I1009 13:50:19.549418 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"1477cefa-db6e-45d7-adb1-d7c1369a3610","Type":"ContainerDied","Data":"66b743c3cccfcded94f471aad6c8c7c490c328d23eeb054b9a959b77e635f85a"}
Oct 09 13:50:19 crc kubenswrapper[4762]: I1009 13:50:19.549620 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"1477cefa-db6e-45d7-adb1-d7c1369a3610","Type":"ContainerDied","Data":"e850af1b142da8d66827cd67251e451fbb6e56f0984b429a296cc6470f0641a5"}
Oct 09 13:50:19 crc kubenswrapper[4762]: I1009 13:50:19.549833 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"1477cefa-db6e-45d7-adb1-d7c1369a3610","Type":"ContainerDied","Data":"e106dddaface5bda97c181d6c83b5407d6b119fe5aa03418fc12646ee1fe1b99"}
Oct 09 13:50:19 crc kubenswrapper[4762]: I1009 13:50:19.549851 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"1477cefa-db6e-45d7-adb1-d7c1369a3610","Type":"ContainerDied","Data":"8fdc84bf366cf69541cde7674950b5c46694290b85f776bf10157a1b99e79676"}
Oct 09 13:50:19 crc kubenswrapper[4762]: I1009 13:50:19.549863 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-b30a-account-create-ctfdm"]
Oct 09 13:50:19 crc kubenswrapper[4762]: I1009 13:50:19.549879 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-b30a-account-create-ctfdm"]
Oct 09 13:50:19 crc kubenswrapper[4762]: I1009 13:50:19.550523 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"1477cefa-db6e-45d7-adb1-d7c1369a3610","Type":"ContainerDied","Data":"567e831eb11e3bbc96e4ae494a3a48f3ce30d0330873e35b1584ad729d384384"}
Oct 09 13:50:19 crc kubenswrapper[4762]: I1009 13:50:19.550545 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"1477cefa-db6e-45d7-adb1-d7c1369a3610","Type":"ContainerDied","Data":"acd57c56e1503069670b1070c75ca8274489494e90451ef9ccda9ba23b32fa18"}
Oct 09 13:50:19 crc kubenswrapper[4762]: I1009 13:50:19.550565 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-5d57d9d56c-xrhgn"]
Oct 09 13:50:19 crc kubenswrapper[4762]: I1009 13:50:19.550753 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"1477cefa-db6e-45d7-adb1-d7c1369a3610","Type":"ContainerDied","Data":"97e103b14e33cb5dbaaed4498f0256e5eaa30d65d0cbbbc37cdddb35a31cab45"}
Oct 09 13:50:19 crc kubenswrapper[4762]: I1009 13:50:19.550772 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"1477cefa-db6e-45d7-adb1-d7c1369a3610","Type":"ContainerDied","Data":"eb84e1b30f872410009e03d937f21e5a5aac63306bed8cebe6ff44f437abd217"}
Oct 09 13:50:19 crc kubenswrapper[4762]: I1009 13:50:19.550784 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"1477cefa-db6e-45d7-adb1-d7c1369a3610","Type":"ContainerDied","Data":"27f0202dba594a2b3dbd7fb92b50964f812c6b9752cc21fbcc955dd912b04bce"}
Oct 09 13:50:19 crc kubenswrapper[4762]: I1009 13:50:19.550795 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"1477cefa-db6e-45d7-adb1-d7c1369a3610","Type":"ContainerDied","Data":"8e56d6db5bd3ec01caa26962d4f73795f23978e73a54ad53603e87c2d20b3d95"}
Oct 09 13:50:19 crc kubenswrapper[4762]: I1009 13:50:19.605502 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"]
Oct 09 13:50:19 crc kubenswrapper[4762]: I1009 13:50:19.690327 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="0ca3d4c1-b9e5-4443-8102-7739602cbd2f" containerName="rabbitmq" containerID="cri-o://f1abbdb910acded618c7b329467c1d59b4d7f9365598dbf3d11919c8b70ec8ef" gracePeriod=604800
Oct 09 13:50:19 crc kubenswrapper[4762]: I1009 13:50:19.692674 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Oct 09 13:50:19 crc kubenswrapper[4762]: I1009 13:50:19.692963 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="f5b14562-fc3d-48fc-b83d-c12e5c9617c6" containerName="nova-api-log" containerID="cri-o://2d7aba0bb4082b2fef3387ae71a6137c2f97c32898d88c293c1602e2961e85cb" gracePeriod=30
Oct 09 13:50:19 crc kubenswrapper[4762]: I1009 13:50:19.693484 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="f5b14562-fc3d-48fc-b83d-c12e5c9617c6" containerName="nova-api-api" containerID="cri-o://42df5b43b4196e95eec2dbdedd25a25e5ecb0e0d6a17528ae0a9b5af3b08a452" gracePeriod=30
Oct 09 13:50:19 crc kubenswrapper[4762]: I1009 13:50:19.708760 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-z9z8b"]
Oct 09 13:50:19 crc kubenswrapper[4762]: I1009 13:50:19.796876 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-z9z8b"]
Oct 09 13:50:19 crc kubenswrapper[4762]: E1009 13:50:19.860436 4762 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb69763f1_c5df_4f8c_9fb8_3d7f5ac8f181.slice/crio-conmon-00be9b377751e0bad21e7467727a5bb1ef9635151ae430c7dd73ffcc83e71cc3.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1477cefa_db6e_45d7_adb1_d7c1369a3610.slice/crio-8e56d6db5bd3ec01caa26962d4f73795f23978e73a54ad53603e87c2d20b3d95.scope\": RecentStats: unable to find data in memory cache]"
Oct 09 13:50:19 crc kubenswrapper[4762]: I1009 13:50:19.919981 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance7662-account-delete-9tfgj"]
Oct 09 13:50:19 crc kubenswrapper[4762]: I1009 13:50:19.936486 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-7662-account-create-zsqv2"]
Oct 09 13:50:19 crc kubenswrapper[4762]: I1009 13:50:19.951732 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-7662-account-create-zsqv2"]
Oct 09 13:50:19 crc kubenswrapper[4762]: I1009 13:50:19.963543 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Oct 09 13:50:19 crc kubenswrapper[4762]: I1009 13:50:19.964145 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="d09898b1-4f64-490a-bf9f-eb03d7219d8d" containerName="nova-metadata-log" containerID="cri-o://c97286ccff2b823606d77c2401198e8d96d31e2569c5791d38c297fc823abe2b" gracePeriod=30
Oct 09 13:50:19 crc kubenswrapper[4762]: I1009 13:50:19.964281 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="d09898b1-4f64-490a-bf9f-eb03d7219d8d" containerName="nova-metadata-metadata" containerID="cri-o://e126396d1ff84e0d194527868fb00994e52bf6887b97b8770716230bf623f284" gracePeriod=30
Oct 09 13:50:19 crc kubenswrapper[4762]: I1009 13:50:19.971273 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstack-cell1-galera-0"]
Oct 09 13:50:19 crc kubenswrapper[4762]: I1009 13:50:19.995693 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-l6zst"]
Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.006108 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-worker-68c6fd668c-fdnbq"]
Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.006451 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-worker-68c6fd668c-fdnbq" podUID="cebc2aaf-d953-4acb-a8cd-31119e6cd7fd" containerName="barbican-worker-log" containerID="cri-o://034dad204ce8e9094ec9b1c1d0b1940c69824cb50a907017058620a22d3c30ff" gracePeriod=30
Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.007001 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-worker-68c6fd668c-fdnbq" podUID="cebc2aaf-d953-4acb-a8cd-31119e6cd7fd" containerName="barbican-worker" containerID="cri-o://fb36555264bf34968ae7dc1aef1a9384eb6b884f9ddd43e4920dbc2bbb110a7c" gracePeriod=30
Oct 09 13:50:20 crc kubenswrapper[4762]: E1009 13:50:20.030832 4762 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found
Oct 09 13:50:20 crc kubenswrapper[4762]: E1009 13:50:20.030902 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/c3841f71-5204-469f-b755-e030281725d1-config-data podName:c3841f71-5204-469f-b755-e030281725d1 nodeName:}" failed. No retries permitted until 2025-10-09 13:50:22.030882645 +0000 UTC m=+1497.804673764 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/c3841f71-5204-469f-b755-e030281725d1-config-data") pod "rabbitmq-cell1-server-0" (UID: "c3841f71-5204-469f-b755-e030281725d1") : configmap "rabbitmq-cell1-config-data" not found
Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.059840 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-l6zst"]
Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.077164 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-worker-5d57d9d56c-xrhgn"]
Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.158123 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-keystone-listener-86957c694-98m78"]
Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.158608 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-keystone-listener-86957c694-98m78" podUID="9f119cc4-119d-4535-8abd-fe380d546595" containerName="barbican-keystone-listener-log" containerID="cri-o://41672cb485f557f009724012f729848819dca7d8e0da9f593acb894ad3571409" gracePeriod=30
Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.159247 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-keystone-listener-86957c694-98m78" podUID="9f119cc4-119d-4535-8abd-fe380d546595" containerName="barbican-keystone-listener" containerID="cri-o://7e992ff8af3860151a7923e87bea63b52421d718aed5345d7541a06de728055e" gracePeriod=30
Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.253303 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/openstack-cell1-galera-0" podUID="4dc0b86c-7af2-4320-9b39-735213fb6609" containerName="galera" containerID="cri-o://1b9d384b3a344bc54503bc4fd31ef99fc9dc1c64d44d9dde65cb139bc35f38fd" gracePeriod=30
Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.277337 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-keystone-listener-577955c56b-wlqdm"]
Oct 09 13:50:20 crc kubenswrapper[4762]: E1009 13:50:20.279132 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[config-data kube-api-access-zf8xq], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/barbican-keystone-listener-577955c56b-wlqdm" podUID="8d8907fd-9553-4758-8e14-a7f607ed4f34"
Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.279803 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/novaapi3399-account-delete-l75cx"
Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.292697 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-nknqr"
Need to start a new one" pod="openstack/ovn-controller-nknqr" Oct 09 13:50:20 crc kubenswrapper[4762]: E1009 13:50:20.293243 4762 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of cb88f4bb766dd77797353f3b4da21fe59806a861fbfa0151f974260158e77591 is running failed: container process not found" containerID="cb88f4bb766dd77797353f3b4da21fe59806a861fbfa0151f974260158e77591" cmd=["/usr/bin/pidof","ovsdb-server"] Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.293356 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-d6ff-account-create-zwpvv"] Oct 09 13:50:20 crc kubenswrapper[4762]: E1009 13:50:20.298371 4762 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of cb88f4bb766dd77797353f3b4da21fe59806a861fbfa0151f974260158e77591 is running failed: container process not found" containerID="cb88f4bb766dd77797353f3b4da21fe59806a861fbfa0151f974260158e77591" cmd=["/usr/bin/pidof","ovsdb-server"] Oct 09 13:50:20 crc kubenswrapper[4762]: E1009 13:50:20.299539 4762 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of cb88f4bb766dd77797353f3b4da21fe59806a861fbfa0151f974260158e77591 is running failed: container process not found" containerID="cb88f4bb766dd77797353f3b4da21fe59806a861fbfa0151f974260158e77591" cmd=["/usr/bin/pidof","ovsdb-server"] Oct 09 13:50:20 crc kubenswrapper[4762]: E1009 13:50:20.299580 4762 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of cb88f4bb766dd77797353f3b4da21fe59806a861fbfa0151f974260158e77591 is running failed: container process not found" probeType="Readiness" pod="openstack/ovsdbserver-nb-0" podUID="73168d57-30d8-4389-aa93-cacc6b07f705" containerName="ovsdbserver-nb" Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.305820 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-d6ff-account-create-zwpvv"] Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.319730 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placementd6ff-account-delete-xl4vt"] Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.328412 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-df85s_39780bc9-4ec7-4578-b64e-40c2a1bba06c/openstack-network-exporter/0.log" Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.328523 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-df85s" Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.331229 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-q674c"] Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.342181 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_73168d57-30d8-4389-aa93-cacc6b07f705/ovsdbserver-nb/0.log" Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.342266 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-0" Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.353056 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-2601-account-create-8jl77"] Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.353661 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/3d5e35ba-6450-49d9-907a-8a4f879a1b0f-ovn-controller-tls-certs\") pod \"3d5e35ba-6450-49d9-907a-8a4f879a1b0f\" (UID: \"3d5e35ba-6450-49d9-907a-8a4f879a1b0f\") " Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.353908 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/3d5e35ba-6450-49d9-907a-8a4f879a1b0f-var-run\") pod \"3d5e35ba-6450-49d9-907a-8a4f879a1b0f\" (UID: \"3d5e35ba-6450-49d9-907a-8a4f879a1b0f\") " Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.353936 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3d5e35ba-6450-49d9-907a-8a4f879a1b0f-scripts\") pod \"3d5e35ba-6450-49d9-907a-8a4f879a1b0f\" (UID: \"3d5e35ba-6450-49d9-907a-8a4f879a1b0f\") " Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.353979 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/3d5e35ba-6450-49d9-907a-8a4f879a1b0f-var-run-ovn\") pod \"3d5e35ba-6450-49d9-907a-8a4f879a1b0f\" (UID: \"3d5e35ba-6450-49d9-907a-8a4f879a1b0f\") " Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.354000 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/3d5e35ba-6450-49d9-907a-8a4f879a1b0f-var-log-ovn\") pod \"3d5e35ba-6450-49d9-907a-8a4f879a1b0f\" (UID: \"3d5e35ba-6450-49d9-907a-8a4f879a1b0f\") " Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.354264 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t5wkw\" (UniqueName: \"kubernetes.io/projected/3d5e35ba-6450-49d9-907a-8a4f879a1b0f-kube-api-access-t5wkw\") pod \"3d5e35ba-6450-49d9-907a-8a4f879a1b0f\" (UID: \"3d5e35ba-6450-49d9-907a-8a4f879a1b0f\") " Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.354298 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d5e35ba-6450-49d9-907a-8a4f879a1b0f-combined-ca-bundle\") pod \"3d5e35ba-6450-49d9-907a-8a4f879a1b0f\" (UID: \"3d5e35ba-6450-49d9-907a-8a4f879a1b0f\") " Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.355970 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3d5e35ba-6450-49d9-907a-8a4f879a1b0f-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "3d5e35ba-6450-49d9-907a-8a4f879a1b0f" (UID: "3d5e35ba-6450-49d9-907a-8a4f879a1b0f"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.356041 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3d5e35ba-6450-49d9-907a-8a4f879a1b0f-var-run" (OuterVolumeSpecName: "var-run") pod "3d5e35ba-6450-49d9-907a-8a4f879a1b0f" (UID: "3d5e35ba-6450-49d9-907a-8a4f879a1b0f"). InnerVolumeSpecName "var-run". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.357089 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3d5e35ba-6450-49d9-907a-8a4f879a1b0f-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "3d5e35ba-6450-49d9-907a-8a4f879a1b0f" (UID: "3d5e35ba-6450-49d9-907a-8a4f879a1b0f"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.357970 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3d5e35ba-6450-49d9-907a-8a4f879a1b0f-scripts" (OuterVolumeSpecName: "scripts") pod "3d5e35ba-6450-49d9-907a-8a4f879a1b0f" (UID: "3d5e35ba-6450-49d9-907a-8a4f879a1b0f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.369029 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3d5e35ba-6450-49d9-907a-8a4f879a1b0f-kube-api-access-t5wkw" (OuterVolumeSpecName: "kube-api-access-t5wkw") pod "3d5e35ba-6450-49d9-907a-8a4f879a1b0f" (UID: "3d5e35ba-6450-49d9-907a-8a4f879a1b0f"). InnerVolumeSpecName "kube-api-access-t5wkw". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.372151 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c7b6c5df9-77jpt" Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.377011 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-q674c"] Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.399413 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-2601-account-create-8jl77"] Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.424950 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-6cd5b6946b-r2c8l"] Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.426933 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Oct 09 13:50:20 crc kubenswrapper[4762]: E1009 13:50:20.428281 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[config-data kube-api-access-j9sqm], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/barbican-api-6cd5b6946b-r2c8l" podUID="b797ab80-d2f5-427c-b305-0aaed4e06e83" Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.434221 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3d5e35ba-6450-49d9-907a-8a4f879a1b0f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3d5e35ba-6450-49d9-907a-8a4f879a1b0f" (UID: "3d5e35ba-6450-49d9-907a-8a4f879a1b0f"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.456274 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/73168d57-30d8-4389-aa93-cacc6b07f705-ovsdbserver-nb-tls-certs\") pod \"73168d57-30d8-4389-aa93-cacc6b07f705\" (UID: \"73168d57-30d8-4389-aa93-cacc6b07f705\") " Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.456335 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/39780bc9-4ec7-4578-b64e-40c2a1bba06c-ovn-rundir\") pod \"39780bc9-4ec7-4578-b64e-40c2a1bba06c\" (UID: \"39780bc9-4ec7-4578-b64e-40c2a1bba06c\") " Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.456354 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-r4x7g"] Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.456366 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2fc7e86f-4e77-45a4-a90c-6b06d4907ca7-ovsdbserver-sb\") pod \"2fc7e86f-4e77-45a4-a90c-6b06d4907ca7\" (UID: \"2fc7e86f-4e77-45a4-a90c-6b06d4907ca7\") " Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.456617 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/39780bc9-4ec7-4578-b64e-40c2a1bba06c-ovn-rundir" (OuterVolumeSpecName: "ovn-rundir") pod "39780bc9-4ec7-4578-b64e-40c2a1bba06c" (UID: "39780bc9-4ec7-4578-b64e-40c2a1bba06c"). InnerVolumeSpecName "ovn-rundir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.457838 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/73168d57-30d8-4389-aa93-cacc6b07f705-ovsdb-rundir\") pod \"73168d57-30d8-4389-aa93-cacc6b07f705\" (UID: \"73168d57-30d8-4389-aa93-cacc6b07f705\") " Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.457866 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f92mx\" (UniqueName: \"kubernetes.io/projected/73168d57-30d8-4389-aa93-cacc6b07f705-kube-api-access-f92mx\") pod \"73168d57-30d8-4389-aa93-cacc6b07f705\" (UID: \"73168d57-30d8-4389-aa93-cacc6b07f705\") " Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.457893 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8cnq5\" (UniqueName: \"kubernetes.io/projected/39780bc9-4ec7-4578-b64e-40c2a1bba06c-kube-api-access-8cnq5\") pod \"39780bc9-4ec7-4578-b64e-40c2a1bba06c\" (UID: \"39780bc9-4ec7-4578-b64e-40c2a1bba06c\") " Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.457919 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2fc7e86f-4e77-45a4-a90c-6b06d4907ca7-config\") pod \"2fc7e86f-4e77-45a4-a90c-6b06d4907ca7\" (UID: \"2fc7e86f-4e77-45a4-a90c-6b06d4907ca7\") " Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.457971 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/39780bc9-4ec7-4578-b64e-40c2a1bba06c-combined-ca-bundle\") pod \"39780bc9-4ec7-4578-b64e-40c2a1bba06c\" (UID: \"39780bc9-4ec7-4578-b64e-40c2a1bba06c\") " Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 
Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.458029 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/73168d57-30d8-4389-aa93-cacc6b07f705-config\") pod \"73168d57-30d8-4389-aa93-cacc6b07f705\" (UID: \"73168d57-30d8-4389-aa93-cacc6b07f705\") "
Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.458061 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndbcluster-nb-etc-ovn\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"73168d57-30d8-4389-aa93-cacc6b07f705\" (UID: \"73168d57-30d8-4389-aa93-cacc6b07f705\") "
Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.458102 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/39780bc9-4ec7-4578-b64e-40c2a1bba06c-config\") pod \"39780bc9-4ec7-4578-b64e-40c2a1bba06c\" (UID: \"39780bc9-4ec7-4578-b64e-40c2a1bba06c\") "
Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.458125 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73168d57-30d8-4389-aa93-cacc6b07f705-combined-ca-bundle\") pod \"73168d57-30d8-4389-aa93-cacc6b07f705\" (UID: \"73168d57-30d8-4389-aa93-cacc6b07f705\") "
Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.458146 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/39780bc9-4ec7-4578-b64e-40c2a1bba06c-metrics-certs-tls-certs\") pod \"39780bc9-4ec7-4578-b64e-40c2a1bba06c\" (UID: \"39780bc9-4ec7-4578-b64e-40c2a1bba06c\") "
Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.458179 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2fc7e86f-4e77-45a4-a90c-6b06d4907ca7-dns-swift-storage-0\") pod \"2fc7e86f-4e77-45a4-a90c-6b06d4907ca7\" (UID: \"2fc7e86f-4e77-45a4-a90c-6b06d4907ca7\") "
Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.458216 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jnsjf\" (UniqueName: \"kubernetes.io/projected/2fc7e86f-4e77-45a4-a90c-6b06d4907ca7-kube-api-access-jnsjf\") pod \"2fc7e86f-4e77-45a4-a90c-6b06d4907ca7\" (UID: \"2fc7e86f-4e77-45a4-a90c-6b06d4907ca7\") "
Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.458235 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/39780bc9-4ec7-4578-b64e-40c2a1bba06c-ovs-rundir\") pod \"39780bc9-4ec7-4578-b64e-40c2a1bba06c\" (UID: \"39780bc9-4ec7-4578-b64e-40c2a1bba06c\") "
Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.458266 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/73168d57-30d8-4389-aa93-cacc6b07f705-metrics-certs-tls-certs\") pod \"73168d57-30d8-4389-aa93-cacc6b07f705\" (UID: \"73168d57-30d8-4389-aa93-cacc6b07f705\") "
Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.458359 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/73168d57-30d8-4389-aa93-cacc6b07f705-scripts\") pod \"73168d57-30d8-4389-aa93-cacc6b07f705\" (UID: \"73168d57-30d8-4389-aa93-cacc6b07f705\") "
Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.458390 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2fc7e86f-4e77-45a4-a90c-6b06d4907ca7-ovsdbserver-nb\") pod \"2fc7e86f-4e77-45a4-a90c-6b06d4907ca7\" (UID: \"2fc7e86f-4e77-45a4-a90c-6b06d4907ca7\") "
Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.459710 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/39780bc9-4ec7-4578-b64e-40c2a1bba06c-config" (OuterVolumeSpecName: "config") pod "39780bc9-4ec7-4578-b64e-40c2a1bba06c" (UID: "39780bc9-4ec7-4578-b64e-40c2a1bba06c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.459762 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/39780bc9-4ec7-4578-b64e-40c2a1bba06c-ovs-rundir" (OuterVolumeSpecName: "ovs-rundir") pod "39780bc9-4ec7-4578-b64e-40c2a1bba06c" (UID: "39780bc9-4ec7-4578-b64e-40c2a1bba06c"). InnerVolumeSpecName "ovs-rundir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.459802 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d5e35ba-6450-49d9-907a-8a4f879a1b0f-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.459826 4762 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/3d5e35ba-6450-49d9-907a-8a4f879a1b0f-var-run\") on node \"crc\" DevicePath \"\""
Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.459838 4762 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3d5e35ba-6450-49d9-907a-8a4f879a1b0f-scripts\") on node \"crc\" DevicePath \"\""
Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.459849 4762 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/3d5e35ba-6450-49d9-907a-8a4f879a1b0f-var-run-ovn\") on node \"crc\" DevicePath \"\""
Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.459858 4762 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/3d5e35ba-6450-49d9-907a-8a4f879a1b0f-var-log-ovn\") on node \"crc\" DevicePath \"\""
Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.459866 4762 reconciler_common.go:293] "Volume detached for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/39780bc9-4ec7-4578-b64e-40c2a1bba06c-ovn-rundir\") on node \"crc\" DevicePath \"\""
Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.459876 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t5wkw\" (UniqueName: \"kubernetes.io/projected/3d5e35ba-6450-49d9-907a-8a4f879a1b0f-kube-api-access-t5wkw\") on node \"crc\" DevicePath \"\""
Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.466738 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2fc7e86f-4e77-45a4-a90c-6b06d4907ca7-kube-api-access-jnsjf" (OuterVolumeSpecName: "kube-api-access-jnsjf") pod "2fc7e86f-4e77-45a4-a90c-6b06d4907ca7" (UID: "2fc7e86f-4e77-45a4-a90c-6b06d4907ca7"). InnerVolumeSpecName "kube-api-access-jnsjf". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.473044 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/73168d57-30d8-4389-aa93-cacc6b07f705-config" (OuterVolumeSpecName: "config") pod "73168d57-30d8-4389-aa93-cacc6b07f705" (UID: "73168d57-30d8-4389-aa93-cacc6b07f705"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.477387 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/73168d57-30d8-4389-aa93-cacc6b07f705-ovsdb-rundir" (OuterVolumeSpecName: "ovsdb-rundir") pod "73168d57-30d8-4389-aa93-cacc6b07f705" (UID: "73168d57-30d8-4389-aa93-cacc6b07f705"). InnerVolumeSpecName "ovsdb-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.477843 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/73168d57-30d8-4389-aa93-cacc6b07f705-scripts" (OuterVolumeSpecName: "scripts") pod "73168d57-30d8-4389-aa93-cacc6b07f705" (UID: "73168d57-30d8-4389-aa93-cacc6b07f705"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.483877 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/73168d57-30d8-4389-aa93-cacc6b07f705-kube-api-access-f92mx" (OuterVolumeSpecName: "kube-api-access-f92mx") pod "73168d57-30d8-4389-aa93-cacc6b07f705" (UID: "73168d57-30d8-4389-aa93-cacc6b07f705"). InnerVolumeSpecName "kube-api-access-f92mx". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.500758 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-7c78979758-qtqdp"]
Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.501135 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-7c78979758-qtqdp" podUID="2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1" containerName="barbican-api-log" containerID="cri-o://940bb6da909be1f785661405cf69093008229cb06790274277183a2ddc92df99" gracePeriod=30
Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.501918 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-7c78979758-qtqdp" podUID="2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1" containerName="barbican-api" containerID="cri-o://fc6d0ce7f1b2e1335bd5df1ffae809fadabf2813cdfe1e329a8458d677bea605" gracePeriod=30
Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.506900 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage01-crc" (OuterVolumeSpecName: "ovndbcluster-nb-etc-ovn") pod "73168d57-30d8-4389-aa93-cacc6b07f705" (UID: "73168d57-30d8-4389-aa93-cacc6b07f705"). InnerVolumeSpecName "local-storage01-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.512092 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/39780bc9-4ec7-4578-b64e-40c2a1bba06c-kube-api-access-8cnq5" (OuterVolumeSpecName: "kube-api-access-8cnq5") pod "39780bc9-4ec7-4578-b64e-40c2a1bba06c" (UID: "39780bc9-4ec7-4578-b64e-40c2a1bba06c"). InnerVolumeSpecName "kube-api-access-8cnq5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.512175 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-r4x7g"] Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.527592 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-52a6-account-create-472b8"] Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.563700 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9j68j\" (UniqueName: \"kubernetes.io/projected/1862d6d3-5d91-47cf-8b78-c0298569ee90-kube-api-access-9j68j\") pod \"1862d6d3-5d91-47cf-8b78-c0298569ee90\" (UID: \"1862d6d3-5d91-47cf-8b78-c0298569ee90\") " Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.563803 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1862d6d3-5d91-47cf-8b78-c0298569ee90-combined-ca-bundle\") pod \"1862d6d3-5d91-47cf-8b78-c0298569ee90\" (UID: \"1862d6d3-5d91-47cf-8b78-c0298569ee90\") " Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.563837 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/1862d6d3-5d91-47cf-8b78-c0298569ee90-openstack-config-secret\") pod \"1862d6d3-5d91-47cf-8b78-c0298569ee90\" (UID: \"1862d6d3-5d91-47cf-8b78-c0298569ee90\") " Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.563913 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/1862d6d3-5d91-47cf-8b78-c0298569ee90-openstack-config\") pod \"1862d6d3-5d91-47cf-8b78-c0298569ee90\" (UID: \"1862d6d3-5d91-47cf-8b78-c0298569ee90\") " Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.567821 4762 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/39780bc9-4ec7-4578-b64e-40c2a1bba06c-config\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.568410 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jnsjf\" (UniqueName: \"kubernetes.io/projected/2fc7e86f-4e77-45a4-a90c-6b06d4907ca7-kube-api-access-jnsjf\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.569687 4762 reconciler_common.go:293] "Volume detached for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/39780bc9-4ec7-4578-b64e-40c2a1bba06c-ovs-rundir\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.569709 4762 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/73168d57-30d8-4389-aa93-cacc6b07f705-scripts\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.569722 4762 reconciler_common.go:293] "Volume detached for volume \"ovsdb-rundir\" (UniqueName: 
\"kubernetes.io/empty-dir/73168d57-30d8-4389-aa93-cacc6b07f705-ovsdb-rundir\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.569736 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f92mx\" (UniqueName: \"kubernetes.io/projected/73168d57-30d8-4389-aa93-cacc6b07f705-kube-api-access-f92mx\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.569748 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8cnq5\" (UniqueName: \"kubernetes.io/projected/39780bc9-4ec7-4578-b64e-40c2a1bba06c-kube-api-access-8cnq5\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.569760 4762 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/73168d57-30d8-4389-aa93-cacc6b07f705-config\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.569792 4762 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" " Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.594167 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-snp5x"] Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.597784 4762 generic.go:334] "Generic (PLEG): container finished" podID="cebc2aaf-d953-4acb-a8cd-31119e6cd7fd" containerID="034dad204ce8e9094ec9b1c1d0b1940c69824cb50a907017058620a22d3c30ff" exitCode=143 Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.597850 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-68c6fd668c-fdnbq" event={"ID":"cebc2aaf-d953-4acb-a8cd-31119e6cd7fd","Type":"ContainerDied","Data":"034dad204ce8e9094ec9b1c1d0b1940c69824cb50a907017058620a22d3c30ff"} Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.600763 4762 generic.go:334] "Generic (PLEG): container finished" podID="f5b14562-fc3d-48fc-b83d-c12e5c9617c6" containerID="2d7aba0bb4082b2fef3387ae71a6137c2f97c32898d88c293c1602e2961e85cb" exitCode=143 Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.600819 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"f5b14562-fc3d-48fc-b83d-c12e5c9617c6","Type":"ContainerDied","Data":"2d7aba0bb4082b2fef3387ae71a6137c2f97c32898d88c293c1602e2961e85cb"} Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.604280 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_73168d57-30d8-4389-aa93-cacc6b07f705/ovsdbserver-nb/0.log" Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.604348 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"73168d57-30d8-4389-aa93-cacc6b07f705","Type":"ContainerDied","Data":"5e79c6927b095750dce54ac6928fba74a4204a733b4c09a34ff738ab33af6f0a"} Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.604381 4762 scope.go:117] "RemoveContainer" containerID="a8a5046654c8bc5cfa5fef4fa4b6f0c7fc095d9255b7f318a934a8f57d41e38e" Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.604515 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-0" Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.608804 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1862d6d3-5d91-47cf-8b78-c0298569ee90-kube-api-access-9j68j" (OuterVolumeSpecName: "kube-api-access-9j68j") pod "1862d6d3-5d91-47cf-8b78-c0298569ee90" (UID: "1862d6d3-5d91-47cf-8b78-c0298569ee90"). InnerVolumeSpecName "kube-api-access-9j68j". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.611263 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/39780bc9-4ec7-4578-b64e-40c2a1bba06c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "39780bc9-4ec7-4578-b64e-40c2a1bba06c" (UID: "39780bc9-4ec7-4578-b64e-40c2a1bba06c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.621927 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-1fff-account-create-vhfsm"] Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.630661 4762 generic.go:334] "Generic (PLEG): container finished" podID="1477cefa-db6e-45d7-adb1-d7c1369a3610" containerID="72d5721934c08b9523e1f7661fd0cc7022a05c9c3860815c11f3195afcfb75a0" exitCode=0 Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.630700 4762 generic.go:334] "Generic (PLEG): container finished" podID="1477cefa-db6e-45d7-adb1-d7c1369a3610" containerID="5c71985be4769cc8dca8656fd224116d6cfd9ba38fac16e449710cfac07fed62" exitCode=0 Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.630710 4762 generic.go:334] "Generic (PLEG): container finished" podID="1477cefa-db6e-45d7-adb1-d7c1369a3610" containerID="4d36fbb2c5b4d9e5e823bb89be5de822963abc13a4e641b17eb2acd134030bad" exitCode=0 Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.630718 4762 generic.go:334] "Generic (PLEG): container finished" podID="1477cefa-db6e-45d7-adb1-d7c1369a3610" containerID="00b505833ef1de865656bd407236b581616bea0b032dd2063522f8209a228b80" exitCode=0 Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.630778 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"1477cefa-db6e-45d7-adb1-d7c1369a3610","Type":"ContainerDied","Data":"72d5721934c08b9523e1f7661fd0cc7022a05c9c3860815c11f3195afcfb75a0"} Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.630811 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"1477cefa-db6e-45d7-adb1-d7c1369a3610","Type":"ContainerDied","Data":"5c71985be4769cc8dca8656fd224116d6cfd9ba38fac16e449710cfac07fed62"} Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.630823 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"1477cefa-db6e-45d7-adb1-d7c1369a3610","Type":"ContainerDied","Data":"4d36fbb2c5b4d9e5e823bb89be5de822963abc13a4e641b17eb2acd134030bad"} Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.630836 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"1477cefa-db6e-45d7-adb1-d7c1369a3610","Type":"ContainerDied","Data":"00b505833ef1de865656bd407236b581616bea0b032dd2063522f8209a228b80"} Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.633353 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-snp5x"] Oct 09 13:50:20 crc 
kubenswrapper[4762]: I1009 13:50:20.638304 4762 generic.go:334] "Generic (PLEG): container finished" podID="96d5f387-4c72-4cc6-9776-56fcb49b8851" containerID="f76d55bec71856953e9258a33e1c4393f28326a0fc15237e8dc359904b57e3fa" exitCode=143 Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.638396 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"96d5f387-4c72-4cc6-9776-56fcb49b8851","Type":"ContainerDied","Data":"f76d55bec71856953e9258a33e1c4393f28326a0fc15237e8dc359904b57e3fa"} Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.640545 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-52a6-account-create-472b8"] Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.652347 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-1fff-account-create-vhfsm"] Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.644253 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.644188 4762 generic.go:334] "Generic (PLEG): container finished" podID="1862d6d3-5d91-47cf-8b78-c0298569ee90" containerID="4df5c5e992cba5a503875e5ca7fa91f0471d8c3722c04f695117bfa01ff8843e" exitCode=137 Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.658065 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-xdg42"] Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.658216 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance7662-account-delete-9tfgj" event={"ID":"80456c7c-9571-4520-989a-53654daad82c","Type":"ContainerStarted","Data":"72b282693fc7498ce520077993bf9d493c57c7d21a192fd29ccc1e3fd2789f3d"} Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.663292 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-5d57d9d56c-xrhgn" event={"ID":"655c116e-99bf-4f34-a050-bbc1aa3e9b62","Type":"ContainerStarted","Data":"9df87f9b1f0f83b44995075ef8017f82960207c724b59d5dcdfd9780e5f0641c"} Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.673948 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/39780bc9-4ec7-4578-b64e-40c2a1bba06c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.675571 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9j68j\" (UniqueName: \"kubernetes.io/projected/1862d6d3-5d91-47cf-8b78-c0298569ee90-kube-api-access-9j68j\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.674179 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-xdg42"] Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.682322 4762 generic.go:334] "Generic (PLEG): container finished" podID="ac020a5d-4e49-4f85-bc3c-13769c5f418a" containerID="23815f5a561063d232341e5c1bdd222c0042c43c875ca3fd331afec3554cb11f" exitCode=143 Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.683525 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-7ff674b5d8-5r7qh" event={"ID":"ac020a5d-4e49-4f85-bc3c-13769c5f418a","Type":"ContainerDied","Data":"23815f5a561063d232341e5c1bdd222c0042c43c875ca3fd331afec3554cb11f"} Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.693378 4762 generic.go:334] "Generic (PLEG): container finished" 
podID="9f119cc4-119d-4535-8abd-fe380d546595" containerID="41672cb485f557f009724012f729848819dca7d8e0da9f593acb894ad3571409" exitCode=143 Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.693457 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-86957c694-98m78" event={"ID":"9f119cc4-119d-4535-8abd-fe380d546595","Type":"ContainerDied","Data":"41672cb485f557f009724012f729848819dca7d8e0da9f593acb894ad3571409"} Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.699572 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-df85s_39780bc9-4ec7-4578-b64e-40c2a1bba06c/openstack-network-exporter/0.log" Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.699683 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-df85s" event={"ID":"39780bc9-4ec7-4578-b64e-40c2a1bba06c","Type":"ContainerDied","Data":"e78b96ed754c9e223cf93c384cfd32a3c3239d321ebceaf3c67c3080ef7e0eba"} Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.699772 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-df85s" Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.704029 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder1fff-account-delete-zn6rx"] Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.711065 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-272a-account-create-qwhwd"] Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.718698 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican272a-account-delete-z2pw8"] Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.723879 4762 generic.go:334] "Generic (PLEG): container finished" podID="d09898b1-4f64-490a-bf9f-eb03d7219d8d" containerID="c97286ccff2b823606d77c2401198e8d96d31e2569c5791d38c297fc823abe2b" exitCode=143 Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.723956 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d09898b1-4f64-490a-bf9f-eb03d7219d8d","Type":"ContainerDied","Data":"c97286ccff2b823606d77c2401198e8d96d31e2569c5791d38c297fc823abe2b"} Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.726777 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-272a-account-create-qwhwd"] Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.734392 4762 generic.go:334] "Generic (PLEG): container finished" podID="2fc7e86f-4e77-45a4-a90c-6b06d4907ca7" containerID="9402fbdb8687b707721113b87e240a55575312969cee38eb4fd38e01f06c6a7b" exitCode=0 Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.734485 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c7b6c5df9-77jpt" event={"ID":"2fc7e86f-4e77-45a4-a90c-6b06d4907ca7","Type":"ContainerDied","Data":"9402fbdb8687b707721113b87e240a55575312969cee38eb4fd38e01f06c6a7b"} Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.734512 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c7b6c5df9-77jpt" event={"ID":"2fc7e86f-4e77-45a4-a90c-6b06d4907ca7","Type":"ContainerDied","Data":"8d20e820d1db03907c6e36d4df28f97f67b91d5a9d6fc71befa90aefcfc61daa"} Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.735066 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5c7b6c5df9-77jpt" Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.736176 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2fc7e86f-4e77-45a4-a90c-6b06d4907ca7-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "2fc7e86f-4e77-45a4-a90c-6b06d4907ca7" (UID: "2fc7e86f-4e77-45a4-a90c-6b06d4907ca7"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.739522 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-tz9c8"] Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.748805 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3d5e35ba-6450-49d9-907a-8a4f879a1b0f-ovn-controller-tls-certs" (OuterVolumeSpecName: "ovn-controller-tls-certs") pod "3d5e35ba-6450-49d9-907a-8a4f879a1b0f" (UID: "3d5e35ba-6450-49d9-907a-8a4f879a1b0f"). InnerVolumeSpecName "ovn-controller-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.751136 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-6cd5b6946b-r2c8l" Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.752093 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-nknqr" Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.752716 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-577955c56b-wlqdm" Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.752740 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-nknqr" event={"ID":"3d5e35ba-6450-49d9-907a-8a4f879a1b0f","Type":"ContainerDied","Data":"40ab3abaf5c944fa37f1a8226f9533563b7062110e452f079c2892753e6cee7a"} Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.758309 4762 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage01-crc" (UniqueName: "kubernetes.io/local-volume/local-storage01-crc") on node "crc" Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.764429 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-tz9c8"] Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.773920 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-3399-account-create-5j8mn"] Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.787731 4762 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2fc7e86f-4e77-45a4-a90c-6b06d4907ca7-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.787772 4762 reconciler_common.go:293] "Volume detached for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/3d5e35ba-6450-49d9-907a-8a4f879a1b0f-ovn-controller-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.787784 4762 reconciler_common.go:293] "Volume detached for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.799893 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openstack/nova-api-3399-account-create-5j8mn"] Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.838430 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1862d6d3-5d91-47cf-8b78-c0298569ee90-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "1862d6d3-5d91-47cf-8b78-c0298569ee90" (UID: "1862d6d3-5d91-47cf-8b78-c0298569ee90"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.845248 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/novaapi3399-account-delete-l75cx"] Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.849436 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2fc7e86f-4e77-45a4-a90c-6b06d4907ca7-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "2fc7e86f-4e77-45a4-a90c-6b06d4907ca7" (UID: "2fc7e86f-4e77-45a4-a90c-6b06d4907ca7"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.889310 4762 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2fc7e86f-4e77-45a4-a90c-6b06d4907ca7-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.889339 4762 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/1862d6d3-5d91-47cf-8b78-c0298569ee90-openstack-config\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.891696 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.895824 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="aa137ef5-e1eb-4e54-a8e3-4a312e167837" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://280e1070b26b6e461abd511c142f62c2fe13a17c995043bb5a94a0b15d693e00" gracePeriod=30 Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.904699 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.905497 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="e56a40fb-847a-4e59-981b-369559466cb1" containerName="nova-scheduler-scheduler" containerID="cri-o://20697adc678f9236f713b97e2f91788f65671a0be29a90e8b44e60c50b457ffb" gracePeriod=30 Oct 09 13:50:20 crc kubenswrapper[4762]: I1009 13:50:20.916371 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.014186 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2fc7e86f-4e77-45a4-a90c-6b06d4907ca7-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "2fc7e86f-4e77-45a4-a90c-6b06d4907ca7" (UID: "2fc7e86f-4e77-45a4-a90c-6b06d4907ca7"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.021518 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/73168d57-30d8-4389-aa93-cacc6b07f705-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "73168d57-30d8-4389-aa93-cacc6b07f705" (UID: "73168d57-30d8-4389-aa93-cacc6b07f705"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.023442 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="06c5c8d1-2d20-4747-91ff-703b4fa7d1ac" path="/var/lib/kubelet/pods/06c5c8d1-2d20-4747-91ff-703b4fa7d1ac/volumes" Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.024406 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="11408afa-4f7f-46b2-b499-91ab1e966c35" path="/var/lib/kubelet/pods/11408afa-4f7f-46b2-b499-91ab1e966c35/volumes" Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.027413 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="15ee54a3-2cd6-4203-a472-9274c1ea9018" path="/var/lib/kubelet/pods/15ee54a3-2cd6-4203-a472-9274c1ea9018/volumes" Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.045444 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="21e8bb8c-2024-4cc3-8887-17e4b8794601" path="/var/lib/kubelet/pods/21e8bb8c-2024-4cc3-8887-17e4b8794601/volumes" Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.046143 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="38d0e5fa-4372-413b-8a13-b3fb22f22463" path="/var/lib/kubelet/pods/38d0e5fa-4372-413b-8a13-b3fb22f22463/volumes" Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.046741 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3c047522-42b9-40b8-89df-020222968ff6" path="/var/lib/kubelet/pods/3c047522-42b9-40b8-89df-020222968ff6/volumes" Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.047367 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3e6d85af-3e7b-4f48-a462-e365a2dfa511" path="/var/lib/kubelet/pods/3e6d85af-3e7b-4f48-a462-e365a2dfa511/volumes" Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.055453 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="c3841f71-5204-469f-b755-e030281725d1" containerName="rabbitmq" containerID="cri-o://7e81d9e72b263dd6748137f8d9a685b369c4a166ceded69631770333e18be646" gracePeriod=604800 Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.059662 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5734e640-439a-45ff-8126-48eba63b8d4d" path="/var/lib/kubelet/pods/5734e640-439a-45ff-8126-48eba63b8d4d/volumes" Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.060326 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="761002a8-1d69-4d8e-b2ff-78e1b853b54d" path="/var/lib/kubelet/pods/761002a8-1d69-4d8e-b2ff-78e1b853b54d/volumes" Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.060943 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="82614827-1884-4faa-9f97-2fd96ec29941" path="/var/lib/kubelet/pods/82614827-1884-4faa-9f97-2fd96ec29941/volumes" Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.066566 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/configmap/2fc7e86f-4e77-45a4-a90c-6b06d4907ca7-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "2fc7e86f-4e77-45a4-a90c-6b06d4907ca7" (UID: "2fc7e86f-4e77-45a4-a90c-6b06d4907ca7"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.070581 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="947cbe0a-2c1a-490f-b5c3-760760edeb29" path="/var/lib/kubelet/pods/947cbe0a-2c1a-490f-b5c3-760760edeb29/volumes" Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.071955 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0d76f37-0b45-48f1-bbc1-c70ab359ee4f" path="/var/lib/kubelet/pods/a0d76f37-0b45-48f1-bbc1-c70ab359ee4f/volumes" Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.072819 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b026cfa5-4949-45d8-9d57-ec509cc84936" path="/var/lib/kubelet/pods/b026cfa5-4949-45d8-9d57-ec509cc84936/volumes" Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.073998 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="de566a3c-21e4-4b4d-b555-2b040ea2b719" path="/var/lib/kubelet/pods/de566a3c-21e4-4b4d-b555-2b040ea2b719/volumes" Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.076376 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="deef8d3d-5ef0-44fd-94db-aa27c317b167" path="/var/lib/kubelet/pods/deef8d3d-5ef0-44fd-94db-aa27c317b167/volumes" Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.078249 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ea5d9438-f638-4ddb-9c18-56bad695b73f" path="/var/lib/kubelet/pods/ea5d9438-f638-4ddb-9c18-56bad695b73f/volumes" Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.080466 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f1656cb3-3fd7-49fd-8434-101406b98d60" path="/var/lib/kubelet/pods/f1656cb3-3fd7-49fd-8434-101406b98d60/volumes" Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.083627 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f63ec52b-58fb-4fc2-97ac-2752df71322a" path="/var/lib/kubelet/pods/f63ec52b-58fb-4fc2-97ac-2752df71322a/volumes" Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.097044 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-6cd5b6946b-r2c8l" Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.100100 4762 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2fc7e86f-4e77-45a4-a90c-6b06d4907ca7-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.100153 4762 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2fc7e86f-4e77-45a4-a90c-6b06d4907ca7-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.100174 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73168d57-30d8-4389-aa93-cacc6b07f705-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:21 crc kubenswrapper[4762]: E1009 13:50:21.100258 4762 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Oct 09 13:50:21 crc kubenswrapper[4762]: E1009 13:50:21.100340 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/0ca3d4c1-b9e5-4443-8102-7739602cbd2f-config-data podName:0ca3d4c1-b9e5-4443-8102-7739602cbd2f nodeName:}" failed. No retries permitted until 2025-10-09 13:50:25.1003194 +0000 UTC m=+1500.874110439 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/0ca3d4c1-b9e5-4443-8102-7739602cbd2f-config-data") pod "rabbitmq-server-0" (UID: "0ca3d4c1-b9e5-4443-8102-7739602cbd2f") : configmap "rabbitmq-config-data" not found Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.134303 4762 scope.go:117] "RemoveContainer" containerID="cb88f4bb766dd77797353f3b4da21fe59806a861fbfa0151f974260158e77591" Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.138344 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-cx7gg"] Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.138378 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"] Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.138402 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-cx7gg"] Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.138428 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-vtlrb"] Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.138440 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-vtlrb"] Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.138451 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.138684 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell0-conductor-0" podUID="b6e56749-167c-4542-b79f-a374a2f7ef20" containerName="nova-cell0-conductor-conductor" containerID="cri-o://82d9cb1e06b181f7531ea03f76bb4438570b9f73b7cd80ea245f725456672933" gracePeriod=30 Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.141310 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-conductor-0" podUID="2b85dbb0-642b-4f4e-a616-7904624b2e5a" containerName="nova-cell1-conductor-conductor" 
containerID="cri-o://69b2f138f6eac1a1b6ef2395b7ac7ec1ed57835677595c9ca3b0ccd77b5d4343" gracePeriod=30 Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.144009 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/73168d57-30d8-4389-aa93-cacc6b07f705-ovsdbserver-nb-tls-certs" (OuterVolumeSpecName: "ovsdbserver-nb-tls-certs") pod "73168d57-30d8-4389-aa93-cacc6b07f705" (UID: "73168d57-30d8-4389-aa93-cacc6b07f705"). InnerVolumeSpecName "ovsdbserver-nb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.157564 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-577955c56b-wlqdm" Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.172015 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance7662-account-delete-9tfgj"] Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.182017 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_de9e8701-0980-4e1d-beb6-bc897f6a3e5f/ovsdbserver-sb/0.log" Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.182080 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.194550 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutronb30a-account-delete-hlx4d"] Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.201923 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placementd6ff-account-delete-xl4vt"] Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.204670 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/de9e8701-0980-4e1d-beb6-bc897f6a3e5f-ovsdbserver-sb-tls-certs\") pod \"de9e8701-0980-4e1d-beb6-bc897f6a3e5f\" (UID: \"de9e8701-0980-4e1d-beb6-bc897f6a3e5f\") " Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.204758 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b797ab80-d2f5-427c-b305-0aaed4e06e83-combined-ca-bundle\") pod \"b797ab80-d2f5-427c-b305-0aaed4e06e83\" (UID: \"b797ab80-d2f5-427c-b305-0aaed4e06e83\") " Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.204817 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b797ab80-d2f5-427c-b305-0aaed4e06e83-config-data-custom\") pod \"b797ab80-d2f5-427c-b305-0aaed4e06e83\" (UID: \"b797ab80-d2f5-427c-b305-0aaed4e06e83\") " Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.204862 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b797ab80-d2f5-427c-b305-0aaed4e06e83-logs\") pod \"b797ab80-d2f5-427c-b305-0aaed4e06e83\" (UID: \"b797ab80-d2f5-427c-b305-0aaed4e06e83\") " Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.204918 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/de9e8701-0980-4e1d-beb6-bc897f6a3e5f-metrics-certs-tls-certs\") pod \"de9e8701-0980-4e1d-beb6-bc897f6a3e5f\" (UID: \"de9e8701-0980-4e1d-beb6-bc897f6a3e5f\") " Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.205054 4762 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/de9e8701-0980-4e1d-beb6-bc897f6a3e5f-scripts\") pod \"de9e8701-0980-4e1d-beb6-bc897f6a3e5f\" (UID: \"de9e8701-0980-4e1d-beb6-bc897f6a3e5f\") " Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.205087 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de9e8701-0980-4e1d-beb6-bc897f6a3e5f-combined-ca-bundle\") pod \"de9e8701-0980-4e1d-beb6-bc897f6a3e5f\" (UID: \"de9e8701-0980-4e1d-beb6-bc897f6a3e5f\") " Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.205188 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/de9e8701-0980-4e1d-beb6-bc897f6a3e5f-ovsdb-rundir\") pod \"de9e8701-0980-4e1d-beb6-bc897f6a3e5f\" (UID: \"de9e8701-0980-4e1d-beb6-bc897f6a3e5f\") " Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.205244 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndbcluster-sb-etc-ovn\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"de9e8701-0980-4e1d-beb6-bc897f6a3e5f\" (UID: \"de9e8701-0980-4e1d-beb6-bc897f6a3e5f\") " Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.205287 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8d8907fd-9553-4758-8e14-a7f607ed4f34-logs\") pod \"8d8907fd-9553-4758-8e14-a7f607ed4f34\" (UID: \"8d8907fd-9553-4758-8e14-a7f607ed4f34\") " Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.205372 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b797ab80-d2f5-427c-b305-0aaed4e06e83-internal-tls-certs\") pod \"b797ab80-d2f5-427c-b305-0aaed4e06e83\" (UID: \"b797ab80-d2f5-427c-b305-0aaed4e06e83\") " Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.205397 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/de9e8701-0980-4e1d-beb6-bc897f6a3e5f-config\") pod \"de9e8701-0980-4e1d-beb6-bc897f6a3e5f\" (UID: \"de9e8701-0980-4e1d-beb6-bc897f6a3e5f\") " Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.205478 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d8907fd-9553-4758-8e14-a7f607ed4f34-combined-ca-bundle\") pod \"8d8907fd-9553-4758-8e14-a7f607ed4f34\" (UID: \"8d8907fd-9553-4758-8e14-a7f607ed4f34\") " Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.205535 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b797ab80-d2f5-427c-b305-0aaed4e06e83-public-tls-certs\") pod \"b797ab80-d2f5-427c-b305-0aaed4e06e83\" (UID: \"b797ab80-d2f5-427c-b305-0aaed4e06e83\") " Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.205602 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lgkz4\" (UniqueName: \"kubernetes.io/projected/de9e8701-0980-4e1d-beb6-bc897f6a3e5f-kube-api-access-lgkz4\") pod \"de9e8701-0980-4e1d-beb6-bc897f6a3e5f\" (UID: \"de9e8701-0980-4e1d-beb6-bc897f6a3e5f\") " Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.205656 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8d8907fd-9553-4758-8e14-a7f607ed4f34-config-data-custom\") pod \"8d8907fd-9553-4758-8e14-a7f607ed4f34\" (UID: \"8d8907fd-9553-4758-8e14-a7f607ed4f34\") " Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.206435 4762 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/73168d57-30d8-4389-aa93-cacc6b07f705-ovsdbserver-nb-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.211369 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/73168d57-30d8-4389-aa93-cacc6b07f705-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "73168d57-30d8-4389-aa93-cacc6b07f705" (UID: "73168d57-30d8-4389-aa93-cacc6b07f705"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.211380 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder1fff-account-delete-zn6rx"] Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.211769 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b797ab80-d2f5-427c-b305-0aaed4e06e83-logs" (OuterVolumeSpecName: "logs") pod "b797ab80-d2f5-427c-b305-0aaed4e06e83" (UID: "b797ab80-d2f5-427c-b305-0aaed4e06e83"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.213918 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8d8907fd-9553-4758-8e14-a7f607ed4f34-logs" (OuterVolumeSpecName: "logs") pod "8d8907fd-9553-4758-8e14-a7f607ed4f34" (UID: "8d8907fd-9553-4758-8e14-a7f607ed4f34"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.215104 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/de9e8701-0980-4e1d-beb6-bc897f6a3e5f-scripts" (OuterVolumeSpecName: "scripts") pod "de9e8701-0980-4e1d-beb6-bc897f6a3e5f" (UID: "de9e8701-0980-4e1d-beb6-bc897f6a3e5f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.216594 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/de9e8701-0980-4e1d-beb6-bc897f6a3e5f-ovsdb-rundir" (OuterVolumeSpecName: "ovsdb-rundir") pod "de9e8701-0980-4e1d-beb6-bc897f6a3e5f" (UID: "de9e8701-0980-4e1d-beb6-bc897f6a3e5f"). InnerVolumeSpecName "ovsdb-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.222260 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/de9e8701-0980-4e1d-beb6-bc897f6a3e5f-config" (OuterVolumeSpecName: "config") pod "de9e8701-0980-4e1d-beb6-bc897f6a3e5f" (UID: "de9e8701-0980-4e1d-beb6-bc897f6a3e5f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.230652 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage04-crc" (OuterVolumeSpecName: "ovndbcluster-sb-etc-ovn") pod "de9e8701-0980-4e1d-beb6-bc897f6a3e5f" (UID: "de9e8701-0980-4e1d-beb6-bc897f6a3e5f"). 
InnerVolumeSpecName "local-storage04-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.231244 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b797ab80-d2f5-427c-b305-0aaed4e06e83-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "b797ab80-d2f5-427c-b305-0aaed4e06e83" (UID: "b797ab80-d2f5-427c-b305-0aaed4e06e83"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.233338 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b797ab80-d2f5-427c-b305-0aaed4e06e83-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "b797ab80-d2f5-427c-b305-0aaed4e06e83" (UID: "b797ab80-d2f5-427c-b305-0aaed4e06e83"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.233723 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8d8907fd-9553-4758-8e14-a7f607ed4f34-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "8d8907fd-9553-4758-8e14-a7f607ed4f34" (UID: "8d8907fd-9553-4758-8e14-a7f607ed4f34"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.238233 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8d8907fd-9553-4758-8e14-a7f607ed4f34-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8d8907fd-9553-4758-8e14-a7f607ed4f34" (UID: "8d8907fd-9553-4758-8e14-a7f607ed4f34"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.240541 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-proxy-6d457f6487-fjznb"] Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.240991 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-proxy-6d457f6487-fjznb" podUID="5b55cc23-05bb-4df9-9876-d725071b9838" containerName="proxy-httpd" containerID="cri-o://b490b4f39e6ea2a1af800e6ebc54b51119c9ebfcf3135bde77ab2dd308d6f0a1" gracePeriod=30 Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.241956 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-proxy-6d457f6487-fjznb" podUID="5b55cc23-05bb-4df9-9876-d725071b9838" containerName="proxy-server" containerID="cri-o://8f98e1f5816b4f5c7f33e6e177e326aadaed2d9b0b12f6488966a24db673528b" gracePeriod=30 Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.243043 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b797ab80-d2f5-427c-b305-0aaed4e06e83-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "b797ab80-d2f5-427c-b305-0aaed4e06e83" (UID: "b797ab80-d2f5-427c-b305-0aaed4e06e83"). InnerVolumeSpecName "config-data-custom". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.249814 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican272a-account-delete-z2pw8"] Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.250840 4762 scope.go:117] "RemoveContainer" containerID="4df5c5e992cba5a503875e5ca7fa91f0471d8c3722c04f695117bfa01ff8843e" Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.259120 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b797ab80-d2f5-427c-b305-0aaed4e06e83-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b797ab80-d2f5-427c-b305-0aaed4e06e83" (UID: "b797ab80-d2f5-427c-b305-0aaed4e06e83"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.261692 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-nknqr"] Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.266140 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/de9e8701-0980-4e1d-beb6-bc897f6a3e5f-kube-api-access-lgkz4" (OuterVolumeSpecName: "kube-api-access-lgkz4") pod "de9e8701-0980-4e1d-beb6-bc897f6a3e5f" (UID: "de9e8701-0980-4e1d-beb6-bc897f6a3e5f"). InnerVolumeSpecName "kube-api-access-lgkz4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.266364 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1862d6d3-5d91-47cf-8b78-c0298569ee90-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1862d6d3-5d91-47cf-8b78-c0298569ee90" (UID: "1862d6d3-5d91-47cf-8b78-c0298569ee90"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.278268 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-nknqr"] Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.308187 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b797ab80-d2f5-427c-b305-0aaed4e06e83-config-data\") pod \"barbican-api-6cd5b6946b-r2c8l\" (UID: \"b797ab80-d2f5-427c-b305-0aaed4e06e83\") " pod="openstack/barbican-api-6cd5b6946b-r2c8l" Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.308257 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zf8xq\" (UniqueName: \"kubernetes.io/projected/8d8907fd-9553-4758-8e14-a7f607ed4f34-kube-api-access-zf8xq\") pod \"barbican-keystone-listener-577955c56b-wlqdm\" (UID: \"8d8907fd-9553-4758-8e14-a7f607ed4f34\") " pod="openstack/barbican-keystone-listener-577955c56b-wlqdm" Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.308365 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d8907fd-9553-4758-8e14-a7f607ed4f34-config-data\") pod \"barbican-keystone-listener-577955c56b-wlqdm\" (UID: \"8d8907fd-9553-4758-8e14-a7f607ed4f34\") " pod="openstack/barbican-keystone-listener-577955c56b-wlqdm" Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.308453 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b797ab80-d2f5-427c-b305-0aaed4e06e83-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:21 crc kubenswrapper[4762]: E1009 13:50:21.308459 4762 secret.go:188] Couldn't get secret openstack/barbican-config-data: secret "barbican-config-data" not found Oct 09 13:50:21 crc kubenswrapper[4762]: E1009 13:50:21.308557 4762 secret.go:188] Couldn't get secret openstack/barbican-config-data: secret "barbican-config-data" not found Oct 09 13:50:21 crc kubenswrapper[4762]: E1009 13:50:21.308568 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b797ab80-d2f5-427c-b305-0aaed4e06e83-config-data podName:b797ab80-d2f5-427c-b305-0aaed4e06e83 nodeName:}" failed. No retries permitted until 2025-10-09 13:50:25.308543595 +0000 UTC m=+1501.082334664 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/b797ab80-d2f5-427c-b305-0aaed4e06e83-config-data") pod "barbican-api-6cd5b6946b-r2c8l" (UID: "b797ab80-d2f5-427c-b305-0aaed4e06e83") : secret "barbican-config-data" not found Oct 09 13:50:21 crc kubenswrapper[4762]: E1009 13:50:21.308612 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8d8907fd-9553-4758-8e14-a7f607ed4f34-config-data podName:8d8907fd-9553-4758-8e14-a7f607ed4f34 nodeName:}" failed. No retries permitted until 2025-10-09 13:50:25.308594916 +0000 UTC m=+1501.082386025 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/8d8907fd-9553-4758-8e14-a7f607ed4f34-config-data") pod "barbican-keystone-listener-577955c56b-wlqdm" (UID: "8d8907fd-9553-4758-8e14-a7f607ed4f34") : secret "barbican-config-data" not found Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.308759 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/novaapi3399-account-delete-l75cx"] Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.308468 4762 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b797ab80-d2f5-427c-b305-0aaed4e06e83-config-data-custom\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.308837 4762 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b797ab80-d2f5-427c-b305-0aaed4e06e83-logs\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.308849 4762 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/de9e8701-0980-4e1d-beb6-bc897f6a3e5f-scripts\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.308860 4762 reconciler_common.go:293] "Volume detached for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/de9e8701-0980-4e1d-beb6-bc897f6a3e5f-ovsdb-rundir\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.308884 4762 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" " Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.308896 4762 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8d8907fd-9553-4758-8e14-a7f607ed4f34-logs\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.308905 4762 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/73168d57-30d8-4389-aa93-cacc6b07f705-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.308915 4762 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/de9e8701-0980-4e1d-beb6-bc897f6a3e5f-config\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.308924 4762 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b797ab80-d2f5-427c-b305-0aaed4e06e83-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.308932 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1862d6d3-5d91-47cf-8b78-c0298569ee90-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.308941 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d8907fd-9553-4758-8e14-a7f607ed4f34-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.308950 4762 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/b797ab80-d2f5-427c-b305-0aaed4e06e83-public-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.308960 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lgkz4\" (UniqueName: \"kubernetes.io/projected/de9e8701-0980-4e1d-beb6-bc897f6a3e5f-kube-api-access-lgkz4\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.308970 4762 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8d8907fd-9553-4758-8e14-a7f607ed4f34-config-data-custom\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.309602 4762 scope.go:117] "RemoveContainer" containerID="4df5c5e992cba5a503875e5ca7fa91f0471d8c3722c04f695117bfa01ff8843e" Oct 09 13:50:21 crc kubenswrapper[4762]: E1009 13:50:21.313025 4762 projected.go:194] Error preparing data for projected volume kube-api-access-zf8xq for pod openstack/barbican-keystone-listener-577955c56b-wlqdm: failed to fetch token: serviceaccounts "barbican-barbican" not found Oct 09 13:50:21 crc kubenswrapper[4762]: E1009 13:50:21.313099 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/8d8907fd-9553-4758-8e14-a7f607ed4f34-kube-api-access-zf8xq podName:8d8907fd-9553-4758-8e14-a7f607ed4f34 nodeName:}" failed. No retries permitted until 2025-10-09 13:50:25.313076923 +0000 UTC m=+1501.086868022 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-zf8xq" (UniqueName: "kubernetes.io/projected/8d8907fd-9553-4758-8e14-a7f607ed4f34-kube-api-access-zf8xq") pod "barbican-keystone-listener-577955c56b-wlqdm" (UID: "8d8907fd-9553-4758-8e14-a7f607ed4f34") : failed to fetch token: serviceaccounts "barbican-barbican" not found Oct 09 13:50:21 crc kubenswrapper[4762]: E1009 13:50:21.313205 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4df5c5e992cba5a503875e5ca7fa91f0471d8c3722c04f695117bfa01ff8843e\": container with ID starting with 4df5c5e992cba5a503875e5ca7fa91f0471d8c3722c04f695117bfa01ff8843e not found: ID does not exist" containerID="4df5c5e992cba5a503875e5ca7fa91f0471d8c3722c04f695117bfa01ff8843e" Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.313239 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4df5c5e992cba5a503875e5ca7fa91f0471d8c3722c04f695117bfa01ff8843e"} err="failed to get container status \"4df5c5e992cba5a503875e5ca7fa91f0471d8c3722c04f695117bfa01ff8843e\": rpc error: code = NotFound desc = could not find container \"4df5c5e992cba5a503875e5ca7fa91f0471d8c3722c04f695117bfa01ff8843e\": container with ID starting with 4df5c5e992cba5a503875e5ca7fa91f0471d8c3722c04f695117bfa01ff8843e not found: ID does not exist" Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.313266 4762 scope.go:117] "RemoveContainer" containerID="832e14666bf13cb6c348c0af2b44c1f63087d249a79e7a68cc967f3268d56c21" Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.324807 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2fc7e86f-4e77-45a4-a90c-6b06d4907ca7-config" (OuterVolumeSpecName: "config") pod "2fc7e86f-4e77-45a4-a90c-6b06d4907ca7" (UID: "2fc7e86f-4e77-45a4-a90c-6b06d4907ca7"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.392709 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c7b6c5df9-77jpt"] Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.411816 4762 scope.go:117] "RemoveContainer" containerID="9402fbdb8687b707721113b87e240a55575312969cee38eb4fd38e01f06c6a7b" Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.413586 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j9sqm\" (UniqueName: \"kubernetes.io/projected/b797ab80-d2f5-427c-b305-0aaed4e06e83-kube-api-access-j9sqm\") pod \"barbican-api-6cd5b6946b-r2c8l\" (UID: \"b797ab80-d2f5-427c-b305-0aaed4e06e83\") " pod="openstack/barbican-api-6cd5b6946b-r2c8l" Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.413863 4762 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2fc7e86f-4e77-45a4-a90c-6b06d4907ca7-config\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:21 crc kubenswrapper[4762]: E1009 13:50:21.417692 4762 projected.go:194] Error preparing data for projected volume kube-api-access-j9sqm for pod openstack/barbican-api-6cd5b6946b-r2c8l: failed to fetch token: serviceaccounts "barbican-barbican" not found Oct 09 13:50:21 crc kubenswrapper[4762]: E1009 13:50:21.417772 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/b797ab80-d2f5-427c-b305-0aaed4e06e83-kube-api-access-j9sqm podName:b797ab80-d2f5-427c-b305-0aaed4e06e83 nodeName:}" failed. No retries permitted until 2025-10-09 13:50:25.417751236 +0000 UTC m=+1501.191542335 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-j9sqm" (UniqueName: "kubernetes.io/projected/b797ab80-d2f5-427c-b305-0aaed4e06e83-kube-api-access-j9sqm") pod "barbican-api-6cd5b6946b-r2c8l" (UID: "b797ab80-d2f5-427c-b305-0aaed4e06e83") : failed to fetch token: serviceaccounts "barbican-barbican" not found Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.421188 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5c7b6c5df9-77jpt"] Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.441800 4762 scope.go:117] "RemoveContainer" containerID="17dc8fd70f9ef6100e3f92a81351eebf8591a1104c77b2be68fae39f183e2e0c" Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.586624 4762 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage04-crc" (UniqueName: "kubernetes.io/local-volume/local-storage04-crc") on node "crc" Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.589693 4762 scope.go:117] "RemoveContainer" containerID="9402fbdb8687b707721113b87e240a55575312969cee38eb4fd38e01f06c6a7b" Oct 09 13:50:21 crc kubenswrapper[4762]: E1009 13:50:21.593993 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9402fbdb8687b707721113b87e240a55575312969cee38eb4fd38e01f06c6a7b\": container with ID starting with 9402fbdb8687b707721113b87e240a55575312969cee38eb4fd38e01f06c6a7b not found: ID does not exist" containerID="9402fbdb8687b707721113b87e240a55575312969cee38eb4fd38e01f06c6a7b" Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.594147 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9402fbdb8687b707721113b87e240a55575312969cee38eb4fd38e01f06c6a7b"} err="failed to get container status 
\"9402fbdb8687b707721113b87e240a55575312969cee38eb4fd38e01f06c6a7b\": rpc error: code = NotFound desc = could not find container \"9402fbdb8687b707721113b87e240a55575312969cee38eb4fd38e01f06c6a7b\": container with ID starting with 9402fbdb8687b707721113b87e240a55575312969cee38eb4fd38e01f06c6a7b not found: ID does not exist" Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.594306 4762 scope.go:117] "RemoveContainer" containerID="17dc8fd70f9ef6100e3f92a81351eebf8591a1104c77b2be68fae39f183e2e0c" Oct 09 13:50:21 crc kubenswrapper[4762]: E1009 13:50:21.595459 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"17dc8fd70f9ef6100e3f92a81351eebf8591a1104c77b2be68fae39f183e2e0c\": container with ID starting with 17dc8fd70f9ef6100e3f92a81351eebf8591a1104c77b2be68fae39f183e2e0c not found: ID does not exist" containerID="17dc8fd70f9ef6100e3f92a81351eebf8591a1104c77b2be68fae39f183e2e0c" Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.595512 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"17dc8fd70f9ef6100e3f92a81351eebf8591a1104c77b2be68fae39f183e2e0c"} err="failed to get container status \"17dc8fd70f9ef6100e3f92a81351eebf8591a1104c77b2be68fae39f183e2e0c\": rpc error: code = NotFound desc = could not find container \"17dc8fd70f9ef6100e3f92a81351eebf8591a1104c77b2be68fae39f183e2e0c\": container with ID starting with 17dc8fd70f9ef6100e3f92a81351eebf8591a1104c77b2be68fae39f183e2e0c not found: ID does not exist" Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.595548 4762 scope.go:117] "RemoveContainer" containerID="3512052ad89cd6f81eb49dade5d8f066e313de741dc524a06c830f4ecbb4c187" Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.617997 4762 reconciler_common.go:293] "Volume detached for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.632318 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/39780bc9-4ec7-4578-b64e-40c2a1bba06c-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "39780bc9-4ec7-4578-b64e-40c2a1bba06c" (UID: "39780bc9-4ec7-4578-b64e-40c2a1bba06c"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.644112 4762 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-cell1-novncproxy-0" podUID="aa137ef5-e1eb-4e54-a8e3-4a312e167837" containerName="nova-cell1-novncproxy-novncproxy" probeResult="failure" output="Get \"https://10.217.0.195:6080/vnc_lite.html\": dial tcp 10.217.0.195:6080: connect: connection refused" Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.721601 4762 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/39780bc9-4ec7-4578-b64e-40c2a1bba06c-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.765675 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/de9e8701-0980-4e1d-beb6-bc897f6a3e5f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "de9e8701-0980-4e1d-beb6-bc897f6a3e5f" (UID: "de9e8701-0980-4e1d-beb6-bc897f6a3e5f"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.779160 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1862d6d3-5d91-47cf-8b78-c0298569ee90-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "1862d6d3-5d91-47cf-8b78-c0298569ee90" (UID: "1862d6d3-5d91-47cf-8b78-c0298569ee90"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.825225 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de9e8701-0980-4e1d-beb6-bc897f6a3e5f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.825265 4762 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/1862d6d3-5d91-47cf-8b78-c0298569ee90-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.848407 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder1fff-account-delete-zn6rx" event={"ID":"ffc2314d-5e5f-45e6-9134-9879b35e0f2d","Type":"ContainerStarted","Data":"9d136192d704050264dd91b7462355a997b9f1a3379b4b930bd040e2bddfa3be"} Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.863969 4762 generic.go:334] "Generic (PLEG): container finished" podID="2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1" containerID="940bb6da909be1f785661405cf69093008229cb06790274277183a2ddc92df99" exitCode=143 Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.864102 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7c78979758-qtqdp" event={"ID":"2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1","Type":"ContainerDied","Data":"940bb6da909be1f785661405cf69093008229cb06790274277183a2ddc92df99"} Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.875231 4762 generic.go:334] "Generic (PLEG): container finished" podID="b8e2afde-2991-4771-ae61-dc363b0d5f04" containerID="629e7b66e2c426e986e44b3ca1a61491ba782b0c77b974188f502e357ca696f7" exitCode=0 Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.875317 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutronb30a-account-delete-hlx4d" event={"ID":"b8e2afde-2991-4771-ae61-dc363b0d5f04","Type":"ContainerDied","Data":"629e7b66e2c426e986e44b3ca1a61491ba782b0c77b974188f502e357ca696f7"} Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.875350 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutronb30a-account-delete-hlx4d" event={"ID":"b8e2afde-2991-4771-ae61-dc363b0d5f04","Type":"ContainerStarted","Data":"a4fdc9d786e14f92bf747bda03030ae7ffa5bb30e8acecebb9cc178d28d165dd"} Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.892005 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/de9e8701-0980-4e1d-beb6-bc897f6a3e5f-ovsdbserver-sb-tls-certs" (OuterVolumeSpecName: "ovsdbserver-sb-tls-certs") pod "de9e8701-0980-4e1d-beb6-bc897f6a3e5f" (UID: "de9e8701-0980-4e1d-beb6-bc897f6a3e5f"). InnerVolumeSpecName "ovsdbserver-sb-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.897565 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placementd6ff-account-delete-xl4vt" event={"ID":"ecf33027-d452-4ccd-a23f-52697374958c","Type":"ContainerStarted","Data":"c62eadd0a9d60975e6fc3e7ccbc2485bf73c659dc8bf2698fc5f06179af91bf1"} Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.899424 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican272a-account-delete-z2pw8" event={"ID":"d71b766d-302c-456b-ae8f-9befcb684b6a","Type":"ContainerStarted","Data":"7f068db3757ded425e864366a700a38dd636defb4a2c3ccfa66902df029f8ded"} Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.913663 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/de9e8701-0980-4e1d-beb6-bc897f6a3e5f-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "de9e8701-0980-4e1d-beb6-bc897f6a3e5f" (UID: "de9e8701-0980-4e1d-beb6-bc897f6a3e5f"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.925770 4762 generic.go:334] "Generic (PLEG): container finished" podID="aa137ef5-e1eb-4e54-a8e3-4a312e167837" containerID="280e1070b26b6e461abd511c142f62c2fe13a17c995043bb5a94a0b15d693e00" exitCode=0 Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.925841 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"aa137ef5-e1eb-4e54-a8e3-4a312e167837","Type":"ContainerDied","Data":"280e1070b26b6e461abd511c142f62c2fe13a17c995043bb5a94a0b15d693e00"} Oct 09 13:50:21 crc kubenswrapper[4762]: E1009 13:50:21.930543 4762 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="1b9d384b3a344bc54503bc4fd31ef99fc9dc1c64d44d9dde65cb139bc35f38fd" cmd=["/bin/bash","/var/lib/operator-scripts/mysql_probe.sh","readiness"] Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.933156 4762 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/de9e8701-0980-4e1d-beb6-bc897f6a3e5f-ovsdbserver-sb-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.933185 4762 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/de9e8701-0980-4e1d-beb6-bc897f6a3e5f-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.947389 4762 generic.go:334] "Generic (PLEG): container finished" podID="5b55cc23-05bb-4df9-9876-d725071b9838" containerID="8f98e1f5816b4f5c7f33e6e177e326aadaed2d9b0b12f6488966a24db673528b" exitCode=0 Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.947430 4762 generic.go:334] "Generic (PLEG): container finished" podID="5b55cc23-05bb-4df9-9876-d725071b9838" containerID="b490b4f39e6ea2a1af800e6ebc54b51119c9ebfcf3135bde77ab2dd308d6f0a1" exitCode=0 Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.947501 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-6d457f6487-fjznb" event={"ID":"5b55cc23-05bb-4df9-9876-d725071b9838","Type":"ContainerDied","Data":"8f98e1f5816b4f5c7f33e6e177e326aadaed2d9b0b12f6488966a24db673528b"} Oct 09 13:50:21 crc 
kubenswrapper[4762]: I1009 13:50:21.947535 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-6d457f6487-fjznb" event={"ID":"5b55cc23-05bb-4df9-9876-d725071b9838","Type":"ContainerDied","Data":"b490b4f39e6ea2a1af800e6ebc54b51119c9ebfcf3135bde77ab2dd308d6f0a1"} Oct 09 13:50:21 crc kubenswrapper[4762]: E1009 13:50:21.947567 4762 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="1b9d384b3a344bc54503bc4fd31ef99fc9dc1c64d44d9dde65cb139bc35f38fd" cmd=["/bin/bash","/var/lib/operator-scripts/mysql_probe.sh","readiness"] Oct 09 13:50:21 crc kubenswrapper[4762]: E1009 13:50:21.960398 4762 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="1b9d384b3a344bc54503bc4fd31ef99fc9dc1c64d44d9dde65cb139bc35f38fd" cmd=["/bin/bash","/var/lib/operator-scripts/mysql_probe.sh","readiness"] Oct 09 13:50:21 crc kubenswrapper[4762]: E1009 13:50:21.960494 4762 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/openstack-cell1-galera-0" podUID="4dc0b86c-7af2-4320-9b39-735213fb6609" containerName="galera" Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.965758 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-5d57d9d56c-xrhgn" event={"ID":"655c116e-99bf-4f34-a050-bbc1aa3e9b62","Type":"ContainerStarted","Data":"36cb5740bfd3757c1db9d2f58e72c98daf1730cccfbc6e4f523e38cdf12df964"} Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.965916 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-worker-5d57d9d56c-xrhgn" podUID="655c116e-99bf-4f34-a050-bbc1aa3e9b62" containerName="barbican-worker-log" containerID="cri-o://9df87f9b1f0f83b44995075ef8017f82960207c724b59d5dcdfd9780e5f0641c" gracePeriod=30 Oct 09 13:50:21 crc kubenswrapper[4762]: I1009 13:50:21.977064 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-worker-5d57d9d56c-xrhgn" podUID="655c116e-99bf-4f34-a050-bbc1aa3e9b62" containerName="barbican-worker" containerID="cri-o://36cb5740bfd3757c1db9d2f58e72c98daf1730cccfbc6e4f523e38cdf12df964" gracePeriod=30 Oct 09 13:50:22 crc kubenswrapper[4762]: I1009 13:50:22.010877 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-5d57d9d56c-xrhgn" podStartSLOduration=6.010859174 podStartE2EDuration="6.010859174s" podCreationTimestamp="2025-10-09 13:50:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 13:50:21.998665614 +0000 UTC m=+1497.772456653" watchObservedRunningTime="2025-10-09 13:50:22.010859174 +0000 UTC m=+1497.784650213" Oct 09 13:50:22 crc kubenswrapper[4762]: I1009 13:50:22.042226 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance7662-account-delete-9tfgj" event={"ID":"80456c7c-9571-4520-989a-53654daad82c","Type":"ContainerStarted","Data":"80e01643f8cc645bb4e16ae578ab16970af08ccbd3f364e20a9e06ba888c7cb9"} Oct 09 13:50:22 crc kubenswrapper[4762]: E1009 13:50:22.043140 4762 configmap.go:193] Couldn't get configMap 
openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Oct 09 13:50:22 crc kubenswrapper[4762]: E1009 13:50:22.043195 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/c3841f71-5204-469f-b755-e030281725d1-config-data podName:c3841f71-5204-469f-b755-e030281725d1 nodeName:}" failed. No retries permitted until 2025-10-09 13:50:26.04318265 +0000 UTC m=+1501.816973689 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/c3841f71-5204-469f-b755-e030281725d1-config-data") pod "rabbitmq-cell1-server-0" (UID: "c3841f71-5204-469f-b755-e030281725d1") : configmap "rabbitmq-cell1-config-data" not found Oct 09 13:50:22 crc kubenswrapper[4762]: I1009 13:50:22.066962 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novaapi3399-account-delete-l75cx" event={"ID":"7428fe70-b1c9-4595-a9b2-fa90ac21c3b2","Type":"ContainerStarted","Data":"f3bc8912e293d82c078972cfed0700047949ccde8ae327569544fba2a873164f"} Oct 09 13:50:22 crc kubenswrapper[4762]: I1009 13:50:22.087139 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_de9e8701-0980-4e1d-beb6-bc897f6a3e5f/ovsdbserver-sb/0.log" Oct 09 13:50:22 crc kubenswrapper[4762]: I1009 13:50:22.087254 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-6cd5b6946b-r2c8l" Oct 09 13:50:22 crc kubenswrapper[4762]: I1009 13:50:22.088308 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Oct 09 13:50:22 crc kubenswrapper[4762]: I1009 13:50:22.090381 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"de9e8701-0980-4e1d-beb6-bc897f6a3e5f","Type":"ContainerDied","Data":"afadb27054548ec5c1a1ea1a13baf54bdf344c50057a7d9b08e7e7f59856187c"} Oct 09 13:50:22 crc kubenswrapper[4762]: I1009 13:50:22.098356 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-keystone-listener-577955c56b-wlqdm" Oct 09 13:50:22 crc kubenswrapper[4762]: I1009 13:50:22.122294 4762 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cinder-api-0" podUID="a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4" containerName="cinder-api" probeResult="failure" output="Get \"https://10.217.0.163:8776/healthcheck\": dial tcp 10.217.0.163:8776: connect: connection refused" Oct 09 13:50:22 crc kubenswrapper[4762]: E1009 13:50:22.381318 4762 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="69b2f138f6eac1a1b6ef2395b7ac7ec1ed57835677595c9ca3b0ccd77b5d4343" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Oct 09 13:50:22 crc kubenswrapper[4762]: E1009 13:50:22.386023 4762 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="69b2f138f6eac1a1b6ef2395b7ac7ec1ed57835677595c9ca3b0ccd77b5d4343" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Oct 09 13:50:22 crc kubenswrapper[4762]: E1009 13:50:22.390126 4762 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="69b2f138f6eac1a1b6ef2395b7ac7ec1ed57835677595c9ca3b0ccd77b5d4343" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Oct 09 13:50:22 crc kubenswrapper[4762]: E1009 13:50:22.390222 4762 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-cell1-conductor-0" podUID="2b85dbb0-642b-4f4e-a616-7904624b2e5a" containerName="nova-cell1-conductor-conductor" Oct 09 13:50:22 crc kubenswrapper[4762]: I1009 13:50:22.449883 4762 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/glance-default-internal-api-0" podUID="b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181" containerName="glance-httpd" probeResult="failure" output="Get \"https://10.217.0.172:9292/healthcheck\": dial tcp 10.217.0.172:9292: connect: connection refused" Oct 09 13:50:22 crc kubenswrapper[4762]: I1009 13:50:22.449966 4762 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/glance-default-internal-api-0" podUID="b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181" containerName="glance-log" probeResult="failure" output="Get \"https://10.217.0.172:9292/healthcheck\": dial tcp 10.217.0.172:9292: connect: connection refused" Oct 09 13:50:22 crc kubenswrapper[4762]: I1009 13:50:22.488450 4762 scope.go:117] "RemoveContainer" containerID="bfd114b69745f91fa863504ab2154e63a593800c553e012f664f20178ef3e182" Oct 09 13:50:22 crc kubenswrapper[4762]: I1009 13:50:22.608931 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 09 13:50:22 crc kubenswrapper[4762]: I1009 13:50:22.609215 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3" containerName="ceilometer-central-agent" containerID="cri-o://5a2c557d48c0ddb55e75b1d77d66c330f8602fb5590b5f44bfac8794c89b9776" gracePeriod=30 Oct 09 13:50:22 crc kubenswrapper[4762]: I1009 13:50:22.609357 4762 kuberuntime_container.go:808] "Killing 
container with a grace period" pod="openstack/ceilometer-0" podUID="96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3" containerName="proxy-httpd" containerID="cri-o://eb7446c781a3fc26e94e1a5d7268c99a91fefd167fad020d62497a265f46becd" gracePeriod=30 Oct 09 13:50:22 crc kubenswrapper[4762]: I1009 13:50:22.609396 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3" containerName="sg-core" containerID="cri-o://f4d38c90d6503eb2b13feadda565a6997a41189134eb578e55d77d77cecf5418" gracePeriod=30 Oct 09 13:50:22 crc kubenswrapper[4762]: I1009 13:50:22.609428 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3" containerName="ceilometer-notification-agent" containerID="cri-o://feaef4de6cce1bbb2d4303079a7194f8a547f09def8c966e05793d81fbd5d515" gracePeriod=30 Oct 09 13:50:22 crc kubenswrapper[4762]: I1009 13:50:22.634312 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 09 13:50:22 crc kubenswrapper[4762]: I1009 13:50:22.634539 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="7c325595-23e7-465e-a533-181eef8ba528" containerName="kube-state-metrics" containerID="cri-o://85c1dff79b65efbb1d0077d4017bfcb9b84c3b899d4399ade4316429900aab80" gracePeriod=30 Oct 09 13:50:22 crc kubenswrapper[4762]: I1009 13:50:22.688893 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 09 13:50:22 crc kubenswrapper[4762]: I1009 13:50:22.769418 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xn27\" (UniqueName: \"kubernetes.io/projected/aa137ef5-e1eb-4e54-a8e3-4a312e167837-kube-api-access-9xn27\") pod \"aa137ef5-e1eb-4e54-a8e3-4a312e167837\" (UID: \"aa137ef5-e1eb-4e54-a8e3-4a312e167837\") " Oct 09 13:50:22 crc kubenswrapper[4762]: I1009 13:50:22.769517 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa137ef5-e1eb-4e54-a8e3-4a312e167837-config-data\") pod \"aa137ef5-e1eb-4e54-a8e3-4a312e167837\" (UID: \"aa137ef5-e1eb-4e54-a8e3-4a312e167837\") " Oct 09 13:50:22 crc kubenswrapper[4762]: I1009 13:50:22.769550 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/aa137ef5-e1eb-4e54-a8e3-4a312e167837-vencrypt-tls-certs\") pod \"aa137ef5-e1eb-4e54-a8e3-4a312e167837\" (UID: \"aa137ef5-e1eb-4e54-a8e3-4a312e167837\") " Oct 09 13:50:22 crc kubenswrapper[4762]: I1009 13:50:22.769688 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa137ef5-e1eb-4e54-a8e3-4a312e167837-combined-ca-bundle\") pod \"aa137ef5-e1eb-4e54-a8e3-4a312e167837\" (UID: \"aa137ef5-e1eb-4e54-a8e3-4a312e167837\") " Oct 09 13:50:22 crc kubenswrapper[4762]: I1009 13:50:22.769784 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/aa137ef5-e1eb-4e54-a8e3-4a312e167837-nova-novncproxy-tls-certs\") pod \"aa137ef5-e1eb-4e54-a8e3-4a312e167837\" (UID: \"aa137ef5-e1eb-4e54-a8e3-4a312e167837\") " Oct 09 13:50:22 crc kubenswrapper[4762]: I1009 13:50:22.779886 4762 scope.go:117] "RemoveContainer" 
containerID="0298fedbb6423e6e1f4c3a0b9c472563bf0d4a5e060e57f78e885a92bc20aaf1" Oct 09 13:50:23 crc kubenswrapper[4762]: I1009 13:50:22.818920 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aa137ef5-e1eb-4e54-a8e3-4a312e167837-kube-api-access-9xn27" (OuterVolumeSpecName: "kube-api-access-9xn27") pod "aa137ef5-e1eb-4e54-a8e3-4a312e167837" (UID: "aa137ef5-e1eb-4e54-a8e3-4a312e167837"). InnerVolumeSpecName "kube-api-access-9xn27". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:50:23 crc kubenswrapper[4762]: I1009 13:50:22.842096 4762 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/glance-default-external-api-0" podUID="96d5f387-4c72-4cc6-9776-56fcb49b8851" containerName="glance-log" probeResult="failure" output="Get \"https://10.217.0.171:9292/healthcheck\": read tcp 10.217.0.2:34438->10.217.0.171:9292: read: connection reset by peer" Oct 09 13:50:23 crc kubenswrapper[4762]: I1009 13:50:22.842486 4762 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/glance-default-external-api-0" podUID="96d5f387-4c72-4cc6-9776-56fcb49b8851" containerName="glance-httpd" probeResult="failure" output="Get \"https://10.217.0.171:9292/healthcheck\": read tcp 10.217.0.2:34436->10.217.0.171:9292: read: connection reset by peer" Oct 09 13:50:23 crc kubenswrapper[4762]: I1009 13:50:22.872221 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xn27\" (UniqueName: \"kubernetes.io/projected/aa137ef5-e1eb-4e54-a8e3-4a312e167837-kube-api-access-9xn27\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:23 crc kubenswrapper[4762]: I1009 13:50:22.962393 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/memcached-0"] Oct 09 13:50:23 crc kubenswrapper[4762]: I1009 13:50:22.962587 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/memcached-0" podUID="920bd70a-a807-4848-baf7-5bfc033838ee" containerName="memcached" containerID="cri-o://c8d8d117d7a7cc7c3b031a470b0ee1bca2341ab9daabe576a4e456084be8e7f2" gracePeriod=30 Oct 09 13:50:23 crc kubenswrapper[4762]: E1009 13:50:23.059901 4762 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="20697adc678f9236f713b97e2f91788f65671a0be29a90e8b44e60c50b457ffb" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Oct 09 13:50:23 crc kubenswrapper[4762]: E1009 13:50:23.069797 4762 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="20697adc678f9236f713b97e2f91788f65671a0be29a90e8b44e60c50b457ffb" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Oct 09 13:50:23 crc kubenswrapper[4762]: E1009 13:50:23.113707 4762 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="20697adc678f9236f713b97e2f91788f65671a0be29a90e8b44e60c50b457ffb" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Oct 09 13:50:23 crc kubenswrapper[4762]: E1009 13:50:23.113764 4762 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" 
pod="openstack/nova-scheduler-0" podUID="e56a40fb-847a-4e59-981b-369559466cb1" containerName="nova-scheduler-scheduler" Oct 09 13:50:23 crc kubenswrapper[4762]: I1009 13:50:23.131829 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa137ef5-e1eb-4e54-a8e3-4a312e167837-config-data" (OuterVolumeSpecName: "config-data") pod "aa137ef5-e1eb-4e54-a8e3-4a312e167837" (UID: "aa137ef5-e1eb-4e54-a8e3-4a312e167837"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:50:23 crc kubenswrapper[4762]: I1009 13:50:23.149218 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa137ef5-e1eb-4e54-a8e3-4a312e167837-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "aa137ef5-e1eb-4e54-a8e3-4a312e167837" (UID: "aa137ef5-e1eb-4e54-a8e3-4a312e167837"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:50:23 crc kubenswrapper[4762]: I1009 13:50:23.168406 4762 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="d09898b1-4f64-490a-bf9f-eb03d7219d8d" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.200:8775/\": read tcp 10.217.0.2:40680->10.217.0.200:8775: read: connection reset by peer" Oct 09 13:50:23 crc kubenswrapper[4762]: I1009 13:50:23.169727 4762 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="d09898b1-4f64-490a-bf9f-eb03d7219d8d" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.200:8775/\": read tcp 10.217.0.2:40670->10.217.0.200:8775: read: connection reset by peer" Oct 09 13:50:23 crc kubenswrapper[4762]: I1009 13:50:23.175583 4762 generic.go:334] "Generic (PLEG): container finished" podID="ac020a5d-4e49-4f85-bc3c-13769c5f418a" containerID="75fb11579ca65d227b26b52153b7cd447fed5fcfb5bfc704642e8c32edf4fd96" exitCode=0 Oct 09 13:50:23 crc kubenswrapper[4762]: I1009 13:50:23.193024 4762 generic.go:334] "Generic (PLEG): container finished" podID="7c325595-23e7-465e-a533-181eef8ba528" containerID="85c1dff79b65efbb1d0077d4017bfcb9b84c3b899d4399ade4316429900aab80" exitCode=2 Oct 09 13:50:23 crc kubenswrapper[4762]: I1009 13:50:23.196811 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa137ef5-e1eb-4e54-a8e3-4a312e167837-vencrypt-tls-certs" (OuterVolumeSpecName: "vencrypt-tls-certs") pod "aa137ef5-e1eb-4e54-a8e3-4a312e167837" (UID: "aa137ef5-e1eb-4e54-a8e3-4a312e167837"). InnerVolumeSpecName "vencrypt-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:50:23 crc kubenswrapper[4762]: I1009 13:50:23.204460 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa137ef5-e1eb-4e54-a8e3-4a312e167837-nova-novncproxy-tls-certs" (OuterVolumeSpecName: "nova-novncproxy-tls-certs") pod "aa137ef5-e1eb-4e54-a8e3-4a312e167837" (UID: "aa137ef5-e1eb-4e54-a8e3-4a312e167837"). InnerVolumeSpecName "nova-novncproxy-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:50:23 crc kubenswrapper[4762]: I1009 13:50:23.223274 4762 reconciler_common.go:293] "Volume detached for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/aa137ef5-e1eb-4e54-a8e3-4a312e167837-nova-novncproxy-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:23 crc kubenswrapper[4762]: I1009 13:50:23.223311 4762 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa137ef5-e1eb-4e54-a8e3-4a312e167837-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:23 crc kubenswrapper[4762]: I1009 13:50:23.223320 4762 reconciler_common.go:293] "Volume detached for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/aa137ef5-e1eb-4e54-a8e3-4a312e167837-vencrypt-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:23 crc kubenswrapper[4762]: I1009 13:50:23.223330 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa137ef5-e1eb-4e54-a8e3-4a312e167837-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:23 crc kubenswrapper[4762]: I1009 13:50:23.277096 4762 generic.go:334] "Generic (PLEG): container finished" podID="96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3" containerID="f4d38c90d6503eb2b13feadda565a6997a41189134eb578e55d77d77cecf5418" exitCode=2 Oct 09 13:50:23 crc kubenswrapper[4762]: I1009 13:50:23.294836 4762 generic.go:334] "Generic (PLEG): container finished" podID="4dc0b86c-7af2-4320-9b39-735213fb6609" containerID="1b9d384b3a344bc54503bc4fd31ef99fc9dc1c64d44d9dde65cb139bc35f38fd" exitCode=0 Oct 09 13:50:23 crc kubenswrapper[4762]: I1009 13:50:23.311094 4762 generic.go:334] "Generic (PLEG): container finished" podID="655c116e-99bf-4f34-a050-bbc1aa3e9b62" containerID="9df87f9b1f0f83b44995075ef8017f82960207c724b59d5dcdfd9780e5f0641c" exitCode=143 Oct 09 13:50:23 crc kubenswrapper[4762]: I1009 13:50:23.313801 4762 generic.go:334] "Generic (PLEG): container finished" podID="a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4" containerID="c1be68916a3f81c7b6abc9ef288db0ad9f260fbe6b28ab389555e8e399229389" exitCode=0 Oct 09 13:50:23 crc kubenswrapper[4762]: I1009 13:50:23.321254 4762 generic.go:334] "Generic (PLEG): container finished" podID="b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181" containerID="13924ee637a2d8e6d84bc0168317a287fc78cd42d90586f3f1c1c46d4723c487" exitCode=0 Oct 09 13:50:23 crc kubenswrapper[4762]: I1009 13:50:23.341892 4762 generic.go:334] "Generic (PLEG): container finished" podID="ffc2314d-5e5f-45e6-9134-9879b35e0f2d" containerID="200af99e4282b55cc024e1a91875d1cc9fe045ed4509ce54d68f1108ab53eb24" exitCode=0 Oct 09 13:50:23 crc kubenswrapper[4762]: E1009 13:50:23.342337 4762 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="4162553b0af34403f13cf39cadfd4b05aeaf6a4b793292efa6af66dcae48120e" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Oct 09 13:50:23 crc kubenswrapper[4762]: E1009 13:50:23.345205 4762 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="4162553b0af34403f13cf39cadfd4b05aeaf6a4b793292efa6af66dcae48120e" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Oct 09 13:50:23 crc kubenswrapper[4762]: E1009 13:50:23.347513 4762 
log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="4162553b0af34403f13cf39cadfd4b05aeaf6a4b793292efa6af66dcae48120e" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Oct 09 13:50:23 crc kubenswrapper[4762]: E1009 13:50:23.347563 4762 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-northd-0" podUID="39888a9b-c1cd-496e-b44e-a27212faac74" containerName="ovn-northd" Oct 09 13:50:23 crc kubenswrapper[4762]: I1009 13:50:23.351090 4762 generic.go:334] "Generic (PLEG): container finished" podID="80456c7c-9571-4520-989a-53654daad82c" containerID="80e01643f8cc645bb4e16ae578ab16970af08ccbd3f364e20a9e06ba888c7cb9" exitCode=0 Oct 09 13:50:23 crc kubenswrapper[4762]: I1009 13:50:23.388159 4762 generic.go:334] "Generic (PLEG): container finished" podID="96d5f387-4c72-4cc6-9776-56fcb49b8851" containerID="e1cf42e1e55e57e01124a220f5977d775fad1808eb503cb4e273e0937ebaa12d" exitCode=0 Oct 09 13:50:23 crc kubenswrapper[4762]: I1009 13:50:23.416831 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1862d6d3-5d91-47cf-8b78-c0298569ee90" path="/var/lib/kubelet/pods/1862d6d3-5d91-47cf-8b78-c0298569ee90/volumes" Oct 09 13:50:23 crc kubenswrapper[4762]: I1009 13:50:23.417489 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2fc7e86f-4e77-45a4-a90c-6b06d4907ca7" path="/var/lib/kubelet/pods/2fc7e86f-4e77-45a4-a90c-6b06d4907ca7/volumes" Oct 09 13:50:23 crc kubenswrapper[4762]: I1009 13:50:23.418142 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3d5e35ba-6450-49d9-907a-8a4f879a1b0f" path="/var/lib/kubelet/pods/3d5e35ba-6450-49d9-907a-8a4f879a1b0f/volumes" Oct 09 13:50:23 crc kubenswrapper[4762]: I1009 13:50:23.419503 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a34ea94b-16a9-4de9-9179-a729ef91f7af" path="/var/lib/kubelet/pods/a34ea94b-16a9-4de9-9179-a729ef91f7af/volumes" Oct 09 13:50:23 crc kubenswrapper[4762]: I1009 13:50:23.420107 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d8a82ea9-76b8-467c-970d-38cd752458bf" path="/var/lib/kubelet/pods/d8a82ea9-76b8-467c-970d-38cd752458bf/volumes" Oct 09 13:50:23 crc kubenswrapper[4762]: I1009 13:50:23.421555 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-4cm5b"] Oct 09 13:50:23 crc kubenswrapper[4762]: I1009 13:50:23.422052 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-cgmfg"] Oct 09 13:50:23 crc kubenswrapper[4762]: I1009 13:50:23.422129 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-7ff674b5d8-5r7qh" event={"ID":"ac020a5d-4e49-4f85-bc3c-13769c5f418a","Type":"ContainerDied","Data":"75fb11579ca65d227b26b52153b7cd447fed5fcfb5bfc704642e8c32edf4fd96"} Oct 09 13:50:23 crc kubenswrapper[4762]: I1009 13:50:23.422276 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"7c325595-23e7-465e-a533-181eef8ba528","Type":"ContainerDied","Data":"85c1dff79b65efbb1d0077d4017bfcb9b84c3b899d4399ade4316429900aab80"} Oct 09 13:50:23 crc kubenswrapper[4762]: I1009 13:50:23.422340 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3","Type":"ContainerDied","Data":"f4d38c90d6503eb2b13feadda565a6997a41189134eb578e55d77d77cecf5418"} Oct 09 13:50:23 crc kubenswrapper[4762]: I1009 13:50:23.422414 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"4dc0b86c-7af2-4320-9b39-735213fb6609","Type":"ContainerDied","Data":"1b9d384b3a344bc54503bc4fd31ef99fc9dc1c64d44d9dde65cb139bc35f38fd"} Oct 09 13:50:23 crc kubenswrapper[4762]: I1009 13:50:23.422470 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-6d457f6487-fjznb" event={"ID":"5b55cc23-05bb-4df9-9876-d725071b9838","Type":"ContainerDied","Data":"ebaf6d9d15a742a3028e27d7a8fada2a6b24cdf909945eca4a67d427756bb271"} Oct 09 13:50:23 crc kubenswrapper[4762]: I1009 13:50:23.422526 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ebaf6d9d15a742a3028e27d7a8fada2a6b24cdf909945eca4a67d427756bb271" Oct 09 13:50:23 crc kubenswrapper[4762]: I1009 13:50:23.422583 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-4cm5b"] Oct 09 13:50:23 crc kubenswrapper[4762]: I1009 13:50:23.422665 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-cgmfg"] Oct 09 13:50:23 crc kubenswrapper[4762]: I1009 13:50:23.422735 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-98dbd4bdf-stq5g"] Oct 09 13:50:23 crc kubenswrapper[4762]: I1009 13:50:23.422839 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-5d57d9d56c-xrhgn" event={"ID":"655c116e-99bf-4f34-a050-bbc1aa3e9b62","Type":"ContainerDied","Data":"9df87f9b1f0f83b44995075ef8017f82960207c724b59d5dcdfd9780e5f0641c"} Oct 09 13:50:23 crc kubenswrapper[4762]: I1009 13:50:23.422910 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4","Type":"ContainerDied","Data":"c1be68916a3f81c7b6abc9ef288db0ad9f260fbe6b28ab389555e8e399229389"} Oct 09 13:50:23 crc kubenswrapper[4762]: I1009 13:50:23.422980 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4","Type":"ContainerDied","Data":"b4c8cce903c4ed09773d7bbd94bdc23af95ac37c50f004640ea2036a8318c001"} Oct 09 13:50:23 crc kubenswrapper[4762]: I1009 13:50:23.423046 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b4c8cce903c4ed09773d7bbd94bdc23af95ac37c50f004640ea2036a8318c001" Oct 09 13:50:23 crc kubenswrapper[4762]: I1009 13:50:23.423118 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181","Type":"ContainerDied","Data":"13924ee637a2d8e6d84bc0168317a287fc78cd42d90586f3f1c1c46d4723c487"} Oct 09 13:50:23 crc kubenswrapper[4762]: I1009 13:50:23.423209 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstack-galera-0"] Oct 09 13:50:23 crc kubenswrapper[4762]: I1009 13:50:23.423289 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder1fff-account-delete-zn6rx" event={"ID":"ffc2314d-5e5f-45e6-9134-9879b35e0f2d","Type":"ContainerDied","Data":"200af99e4282b55cc024e1a91875d1cc9fe045ed4509ce54d68f1108ab53eb24"} Oct 09 13:50:23 crc kubenswrapper[4762]: I1009 13:50:23.423353 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance7662-account-delete-9tfgj" 
event={"ID":"80456c7c-9571-4520-989a-53654daad82c","Type":"ContainerDied","Data":"80e01643f8cc645bb4e16ae578ab16970af08ccbd3f364e20a9e06ba888c7cb9"} Oct 09 13:50:23 crc kubenswrapper[4762]: I1009 13:50:23.423413 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-cwprf"] Oct 09 13:50:23 crc kubenswrapper[4762]: I1009 13:50:23.423472 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance7662-account-delete-9tfgj" event={"ID":"80456c7c-9571-4520-989a-53654daad82c","Type":"ContainerDied","Data":"72b282693fc7498ce520077993bf9d493c57c7d21a192fd29ccc1e3fd2789f3d"} Oct 09 13:50:23 crc kubenswrapper[4762]: I1009 13:50:23.423528 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="72b282693fc7498ce520077993bf9d493c57c7d21a192fd29ccc1e3fd2789f3d" Oct 09 13:50:23 crc kubenswrapper[4762]: I1009 13:50:23.423583 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-cwprf"] Oct 09 13:50:23 crc kubenswrapper[4762]: I1009 13:50:23.423730 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutronb30a-account-delete-hlx4d" event={"ID":"b8e2afde-2991-4771-ae61-dc363b0d5f04","Type":"ContainerDied","Data":"a4fdc9d786e14f92bf747bda03030ae7ffa5bb30e8acecebb9cc178d28d165dd"} Oct 09 13:50:23 crc kubenswrapper[4762]: I1009 13:50:23.423790 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a4fdc9d786e14f92bf747bda03030ae7ffa5bb30e8acecebb9cc178d28d165dd" Oct 09 13:50:23 crc kubenswrapper[4762]: I1009 13:50:23.423850 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-7a22-account-create-8jpv8"] Oct 09 13:50:23 crc kubenswrapper[4762]: I1009 13:50:23.423908 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"96d5f387-4c72-4cc6-9776-56fcb49b8851","Type":"ContainerDied","Data":"e1cf42e1e55e57e01124a220f5977d775fad1808eb503cb4e273e0937ebaa12d"} Oct 09 13:50:23 crc kubenswrapper[4762]: I1009 13:50:23.423970 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-7a22-account-create-8jpv8"] Oct 09 13:50:23 crc kubenswrapper[4762]: I1009 13:50:23.430765 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/keystone-98dbd4bdf-stq5g" podUID="06ff304d-2fbe-412d-8eeb-098ff74fc7a6" containerName="keystone-api" containerID="cri-o://041eae0fb2897099972d7b4151296f3413a47cd494bf2d693195499557b5f03d" gracePeriod=30 Oct 09 13:50:23 crc kubenswrapper[4762]: I1009 13:50:23.431550 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"aa137ef5-e1eb-4e54-a8e3-4a312e167837","Type":"ContainerDied","Data":"dc2d9907e7697a0cfb8df77856e5e1d951b85a3b4800983699c815ff36fc1ac7"} Oct 09 13:50:23 crc kubenswrapper[4762]: I1009 13:50:23.431623 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 09 13:50:23 crc kubenswrapper[4762]: E1009 13:50:23.451894 4762 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 8f00e71744c4d358c802b16ce5f508b3f36e83f09d97012b8034c4e61e6b235c is running failed: container process not found" containerID="8f00e71744c4d358c802b16ce5f508b3f36e83f09d97012b8034c4e61e6b235c" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Oct 09 13:50:23 crc kubenswrapper[4762]: E1009 13:50:23.453178 4762 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 8f00e71744c4d358c802b16ce5f508b3f36e83f09d97012b8034c4e61e6b235c is running failed: container process not found" containerID="8f00e71744c4d358c802b16ce5f508b3f36e83f09d97012b8034c4e61e6b235c" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Oct 09 13:50:23 crc kubenswrapper[4762]: E1009 13:50:23.453562 4762 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 8f00e71744c4d358c802b16ce5f508b3f36e83f09d97012b8034c4e61e6b235c is running failed: container process not found" containerID="8f00e71744c4d358c802b16ce5f508b3f36e83f09d97012b8034c4e61e6b235c" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Oct 09 13:50:23 crc kubenswrapper[4762]: E1009 13:50:23.453620 4762 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 8f00e71744c4d358c802b16ce5f508b3f36e83f09d97012b8034c4e61e6b235c is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-4rsdz" podUID="851f6b4a-bac4-4c7e-8d7b-46c7513269d9" containerName="ovsdb-server" Oct 09 13:50:23 crc kubenswrapper[4762]: E1009 13:50:23.456514 4762 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="6adc8ac732e2e99b70d366ead1fb8b6aac18a135339e217a7db01f59192c2ee6" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Oct 09 13:50:23 crc kubenswrapper[4762]: E1009 13:50:23.473864 4762 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="6adc8ac732e2e99b70d366ead1fb8b6aac18a135339e217a7db01f59192c2ee6" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Oct 09 13:50:23 crc kubenswrapper[4762]: E1009 13:50:23.507057 4762 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="6adc8ac732e2e99b70d366ead1fb8b6aac18a135339e217a7db01f59192c2ee6" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Oct 09 13:50:23 crc kubenswrapper[4762]: E1009 13:50:23.507133 4762 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-4rsdz" podUID="851f6b4a-bac4-4c7e-8d7b-46c7513269d9" containerName="ovs-vswitchd" Oct 09 13:50:23 crc 
kubenswrapper[4762]: I1009 13:50:23.697813 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/openstack-galera-0" podUID="e38c807b-a632-4a96-b228-c879b07fd461" containerName="galera" containerID="cri-o://f8ca509efc7c7613f58a2ef8f0eeb38491a89997a7b401df47a25b5237e27acd" gracePeriod=30 Oct 09 13:50:23 crc kubenswrapper[4762]: I1009 13:50:23.951846 4762 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-7c78979758-qtqdp" podUID="2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1" containerName="barbican-api-log" probeResult="failure" output="Get \"https://10.217.0.159:9311/healthcheck\": read tcp 10.217.0.2:47190->10.217.0.159:9311: read: connection reset by peer" Oct 09 13:50:23 crc kubenswrapper[4762]: I1009 13:50:23.952243 4762 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-7c78979758-qtqdp" podUID="2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1" containerName="barbican-api" probeResult="failure" output="Get \"https://10.217.0.159:9311/healthcheck\": read tcp 10.217.0.2:47200->10.217.0.159:9311: read: connection reset by peer" Oct 09 13:50:24 crc kubenswrapper[4762]: E1009 13:50:24.053800 4762 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 82d9cb1e06b181f7531ea03f76bb4438570b9f73b7cd80ea245f725456672933 is running failed: container process not found" containerID="82d9cb1e06b181f7531ea03f76bb4438570b9f73b7cd80ea245f725456672933" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Oct 09 13:50:24 crc kubenswrapper[4762]: E1009 13:50:24.054050 4762 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 82d9cb1e06b181f7531ea03f76bb4438570b9f73b7cd80ea245f725456672933 is running failed: container process not found" containerID="82d9cb1e06b181f7531ea03f76bb4438570b9f73b7cd80ea245f725456672933" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Oct 09 13:50:24 crc kubenswrapper[4762]: E1009 13:50:24.057526 4762 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 82d9cb1e06b181f7531ea03f76bb4438570b9f73b7cd80ea245f725456672933 is running failed: container process not found" containerID="82d9cb1e06b181f7531ea03f76bb4438570b9f73b7cd80ea245f725456672933" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Oct 09 13:50:24 crc kubenswrapper[4762]: E1009 13:50:24.057569 4762 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 82d9cb1e06b181f7531ea03f76bb4438570b9f73b7cd80ea245f725456672933 is running failed: container process not found" probeType="Readiness" pod="openstack/nova-cell0-conductor-0" podUID="b6e56749-167c-4542-b79f-a374a2f7ef20" containerName="nova-cell0-conductor-conductor" Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.453884 4762 generic.go:334] "Generic (PLEG): container finished" podID="920bd70a-a807-4848-baf7-5bfc033838ee" containerID="c8d8d117d7a7cc7c3b031a470b0ee1bca2341ab9daabe576a4e456084be8e7f2" exitCode=0 Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.454013 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"920bd70a-a807-4848-baf7-5bfc033838ee","Type":"ContainerDied","Data":"c8d8d117d7a7cc7c3b031a470b0ee1bca2341ab9daabe576a4e456084be8e7f2"} Oct 09 13:50:24 crc 
kubenswrapper[4762]: I1009 13:50:24.472830 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"7c325595-23e7-465e-a533-181eef8ba528","Type":"ContainerDied","Data":"dad60dc8c73eb80969a9f41b0f35da6a3b68c7bd367028567a588081aebcba82"} Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.472881 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dad60dc8c73eb80969a9f41b0f35da6a3b68c7bd367028567a588081aebcba82" Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.473766 4762 scope.go:117] "RemoveContainer" containerID="280e1070b26b6e461abd511c142f62c2fe13a17c995043bb5a94a0b15d693e00" Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.511886 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.516402 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"96d5f387-4c72-4cc6-9776-56fcb49b8851","Type":"ContainerDied","Data":"24687b1f80f20771e10a0ca92079f324b5ab3c58e49b31d7149cb999c71606bf"} Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.517320 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="24687b1f80f20771e10a0ca92079f324b5ab3c58e49b31d7149cb999c71606bf" Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.528156 4762 generic.go:334] "Generic (PLEG): container finished" podID="e56a40fb-847a-4e59-981b-369559466cb1" containerID="20697adc678f9236f713b97e2f91788f65671a0be29a90e8b44e60c50b457ffb" exitCode=0 Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.528248 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-6cd5b6946b-r2c8l"] Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.528301 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"e56a40fb-847a-4e59-981b-369559466cb1","Type":"ContainerDied","Data":"20697adc678f9236f713b97e2f91788f65671a0be29a90e8b44e60c50b457ffb"} Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.528329 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"e56a40fb-847a-4e59-981b-369559466cb1","Type":"ContainerDied","Data":"6696d15f88c8972c2b6bf5003a4c023a5e46e3892f11e4168f0b3b03660ceba5"} Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.528342 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6696d15f88c8972c2b6bf5003a4c023a5e46e3892f11e4168f0b3b03660ceba5" Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.544284 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-6cd5b6946b-r2c8l"] Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.548217 4762 generic.go:334] "Generic (PLEG): container finished" podID="2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1" containerID="fc6d0ce7f1b2e1335bd5df1ffae809fadabf2813cdfe1e329a8458d677bea605" exitCode=0 Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.548282 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7c78979758-qtqdp" event={"ID":"2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1","Type":"ContainerDied","Data":"fc6d0ce7f1b2e1335bd5df1ffae809fadabf2813cdfe1e329a8458d677bea605"} Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.549305 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-proxy-6d457f6487-fjznb" Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.549532 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutronb30a-account-delete-hlx4d" Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.561160 4762 generic.go:334] "Generic (PLEG): container finished" podID="96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3" containerID="eb7446c781a3fc26e94e1a5d7268c99a91fefd167fad020d62497a265f46becd" exitCode=0 Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.561191 4762 generic.go:334] "Generic (PLEG): container finished" podID="96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3" containerID="feaef4de6cce1bbb2d4303079a7194f8a547f09def8c966e05793d81fbd5d515" exitCode=0 Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.561201 4762 generic.go:334] "Generic (PLEG): container finished" podID="96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3" containerID="5a2c557d48c0ddb55e75b1d77d66c330f8602fb5590b5f44bfac8794c89b9776" exitCode=0 Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.561260 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3","Type":"ContainerDied","Data":"eb7446c781a3fc26e94e1a5d7268c99a91fefd167fad020d62497a265f46becd"} Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.561291 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3","Type":"ContainerDied","Data":"feaef4de6cce1bbb2d4303079a7194f8a547f09def8c966e05793d81fbd5d515"} Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.561306 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3","Type":"ContainerDied","Data":"5a2c557d48c0ddb55e75b1d77d66c330f8602fb5590b5f44bfac8794c89b9776"} Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.561317 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3","Type":"ContainerDied","Data":"1452c60a3d5804de36f19f619606454c96fc01ec101e07cf4d46e717d3181ff7"} Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.561328 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1452c60a3d5804de36f19f619606454c96fc01ec101e07cf4d46e717d3181ff7" Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.562492 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-keystone-listener-577955c56b-wlqdm"] Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.565235 4762 generic.go:334] "Generic (PLEG): container finished" podID="f5b14562-fc3d-48fc-b83d-c12e5c9617c6" containerID="42df5b43b4196e95eec2dbdedd25a25e5ecb0e0d6a17528ae0a9b5af3b08a452" exitCode=0 Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.565331 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"f5b14562-fc3d-48fc-b83d-c12e5c9617c6","Type":"ContainerDied","Data":"42df5b43b4196e95eec2dbdedd25a25e5ecb0e0d6a17528ae0a9b5af3b08a452"} Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.565360 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"f5b14562-fc3d-48fc-b83d-c12e5c9617c6","Type":"ContainerDied","Data":"eaa40289f1512941f40bf67704dcf5137c2ba018f41348ba8a91a74da288f78d"} Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.565374 4762 pod_container_deletor.go:80] 
"Container not found in pod's containers" containerID="eaa40289f1512941f40bf67704dcf5137c2ba018f41348ba8a91a74da288f78d" Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.572280 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance7662-account-delete-9tfgj" Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.575499 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4-public-tls-certs\") pod \"a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4\" (UID: \"a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4\") " Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.575627 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pl47c\" (UniqueName: \"kubernetes.io/projected/a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4-kube-api-access-pl47c\") pod \"a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4\" (UID: \"a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4\") " Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.575689 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4-internal-tls-certs\") pod \"a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4\" (UID: \"a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4\") " Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.575849 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4-etc-machine-id\") pod \"a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4\" (UID: \"a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4\") " Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.575884 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4-combined-ca-bundle\") pod \"a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4\" (UID: \"a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4\") " Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.575919 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4-config-data\") pod \"a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4\" (UID: \"a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4\") " Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.575951 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4-config-data-custom\") pod \"a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4\" (UID: \"a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4\") " Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.575987 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4-scripts\") pod \"a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4\" (UID: \"a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4\") " Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.576023 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4-logs\") pod \"a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4\" (UID: \"a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4\") " Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.577436 4762 
operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4-logs" (OuterVolumeSpecName: "logs") pod "a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4" (UID: "a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.577492 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-keystone-listener-577955c56b-wlqdm"] Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.582591 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4" (UID: "a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.591314 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder1fff-account-delete-zn6rx" event={"ID":"ffc2314d-5e5f-45e6-9134-9879b35e0f2d","Type":"ContainerDied","Data":"9d136192d704050264dd91b7462355a997b9f1a3379b4b930bd040e2bddfa3be"} Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.591352 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9d136192d704050264dd91b7462355a997b9f1a3379b4b930bd040e2bddfa3be" Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.608730 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4" (UID: "a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.618758 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4-kube-api-access-pl47c" (OuterVolumeSpecName: "kube-api-access-pl47c") pod "a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4" (UID: "a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4"). InnerVolumeSpecName "kube-api-access-pl47c". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.621335 4762 generic.go:334] "Generic (PLEG): container finished" podID="b6e56749-167c-4542-b79f-a374a2f7ef20" containerID="82d9cb1e06b181f7531ea03f76bb4438570b9f73b7cd80ea245f725456672933" exitCode=0 Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.621413 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"b6e56749-167c-4542-b79f-a374a2f7ef20","Type":"ContainerDied","Data":"82d9cb1e06b181f7531ea03f76bb4438570b9f73b7cd80ea245f725456672933"} Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.631575 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"b6e56749-167c-4542-b79f-a374a2f7ef20","Type":"ContainerDied","Data":"06523f730bc831b54be0e92c7cb67cb3a3928c817bb3ad98e48e86c42f3addbe"} Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.631653 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="06523f730bc831b54be0e92c7cb67cb3a3928c817bb3ad98e48e86c42f3addbe" Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.634563 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.641105 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.649264 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-metrics-df85s"] Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.651940 4762 generic.go:334] "Generic (PLEG): container finished" podID="d09898b1-4f64-490a-bf9f-eb03d7219d8d" containerID="e126396d1ff84e0d194527868fb00994e52bf6887b97b8770716230bf623f284" exitCode=0 Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.651998 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d09898b1-4f64-490a-bf9f-eb03d7219d8d","Type":"ContainerDied","Data":"e126396d1ff84e0d194527868fb00994e52bf6887b97b8770716230bf623f284"} Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.652018 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d09898b1-4f64-490a-bf9f-eb03d7219d8d","Type":"ContainerDied","Data":"100378ea26cc2c72c178357623c0cf17bf727d084b5de0f6189592f9045d3744"} Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.652029 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="100378ea26cc2c72c178357623c0cf17bf727d084b5de0f6189592f9045d3744" Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.657028 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-metrics-df85s"] Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.661248 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-7ff674b5d8-5r7qh" event={"ID":"ac020a5d-4e49-4f85-bc3c-13769c5f418a","Type":"ContainerDied","Data":"8f6976c493dcee8ad9dcacdd143d86848fe00e3693ad04efe0539c9fbe3ac74b"} Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.661285 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8f6976c493dcee8ad9dcacdd143d86848fe00e3693ad04efe0539c9fbe3ac74b" Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.661758 4762 util.go:48] "No ready sandbox for pod 
can be found. Need to start a new one" pod="openstack/placement-7ff674b5d8-5r7qh" Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.662113 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.663292 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovsdbserver-nb-0"] Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.666082 4762 generic.go:334] "Generic (PLEG): container finished" podID="46b3e9f8-479c-45b8-afc9-4c8344da0797" containerID="62e66f771f074a66fee0a95196e2984ab7d958da1caa8b50ceaee3f6fd946317" exitCode=0 Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.666152 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"46b3e9f8-479c-45b8-afc9-4c8344da0797","Type":"ContainerDied","Data":"62e66f771f074a66fee0a95196e2984ab7d958da1caa8b50ceaee3f6fd946317"} Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.666180 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"46b3e9f8-479c-45b8-afc9-4c8344da0797","Type":"ContainerDied","Data":"f367df9c259cccde59cafbfd916f63c303f3d56e11c0b92da7112d53f666f700"} Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.666196 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f367df9c259cccde59cafbfd916f63c303f3d56e11c0b92da7112d53f666f700" Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.670590 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovsdbserver-nb-0"] Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.676733 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovsdbserver-sb-0"] Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.677489 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gqsk6\" (UniqueName: \"kubernetes.io/projected/b8e2afde-2991-4771-ae61-dc363b0d5f04-kube-api-access-gqsk6\") pod \"b8e2afde-2991-4771-ae61-dc363b0d5f04\" (UID: \"b8e2afde-2991-4771-ae61-dc363b0d5f04\") " Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.677582 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ltzlw\" (UniqueName: \"kubernetes.io/projected/5b55cc23-05bb-4df9-9876-d725071b9838-kube-api-access-ltzlw\") pod \"5b55cc23-05bb-4df9-9876-d725071b9838\" (UID: \"5b55cc23-05bb-4df9-9876-d725071b9838\") " Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.677652 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5b55cc23-05bb-4df9-9876-d725071b9838-public-tls-certs\") pod \"5b55cc23-05bb-4df9-9876-d725071b9838\" (UID: \"5b55cc23-05bb-4df9-9876-d725071b9838\") " Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.677725 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5b55cc23-05bb-4df9-9876-d725071b9838-config-data\") pod \"5b55cc23-05bb-4df9-9876-d725071b9838\" (UID: \"5b55cc23-05bb-4df9-9876-d725071b9838\") " Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.677792 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/5b55cc23-05bb-4df9-9876-d725071b9838-etc-swift\") pod \"5b55cc23-05bb-4df9-9876-d725071b9838\" (UID: 
\"5b55cc23-05bb-4df9-9876-d725071b9838\") " Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.677839 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5b55cc23-05bb-4df9-9876-d725071b9838-combined-ca-bundle\") pod \"5b55cc23-05bb-4df9-9876-d725071b9838\" (UID: \"5b55cc23-05bb-4df9-9876-d725071b9838\") " Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.677910 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5b55cc23-05bb-4df9-9876-d725071b9838-log-httpd\") pod \"5b55cc23-05bb-4df9-9876-d725071b9838\" (UID: \"5b55cc23-05bb-4df9-9876-d725071b9838\") " Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.679333 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5b55cc23-05bb-4df9-9876-d725071b9838-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "5b55cc23-05bb-4df9-9876-d725071b9838" (UID: "5b55cc23-05bb-4df9-9876-d725071b9838"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.679584 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5b55cc23-05bb-4df9-9876-d725071b9838-run-httpd\") pod \"5b55cc23-05bb-4df9-9876-d725071b9838\" (UID: \"5b55cc23-05bb-4df9-9876-d725071b9838\") " Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.679773 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5b55cc23-05bb-4df9-9876-d725071b9838-internal-tls-certs\") pod \"5b55cc23-05bb-4df9-9876-d725071b9838\" (UID: \"5b55cc23-05bb-4df9-9876-d725071b9838\") " Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.680109 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tmw7j\" (UniqueName: \"kubernetes.io/projected/80456c7c-9571-4520-989a-53654daad82c-kube-api-access-tmw7j\") pod \"80456c7c-9571-4520-989a-53654daad82c\" (UID: \"80456c7c-9571-4520-989a-53654daad82c\") " Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.682786 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5b55cc23-05bb-4df9-9876-d725071b9838-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "5b55cc23-05bb-4df9-9876-d725071b9838" (UID: "5b55cc23-05bb-4df9-9876-d725071b9838"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.684825 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4" (UID: "a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.684874 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovsdbserver-sb-0"] Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.689367 4762 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4-etc-machine-id\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.689473 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.689544 4762 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4-config-data-custom\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.689613 4762 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5b55cc23-05bb-4df9-9876-d725071b9838-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.689704 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j9sqm\" (UniqueName: \"kubernetes.io/projected/b797ab80-d2f5-427c-b305-0aaed4e06e83-kube-api-access-j9sqm\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.689778 4762 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4-logs\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.689847 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zf8xq\" (UniqueName: \"kubernetes.io/projected/8d8907fd-9553-4758-8e14-a7f607ed4f34-kube-api-access-zf8xq\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.689923 4762 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d8907fd-9553-4758-8e14-a7f607ed4f34-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.689987 4762 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b797ab80-d2f5-427c-b305-0aaed4e06e83-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.690052 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pl47c\" (UniqueName: \"kubernetes.io/projected/a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4-kube-api-access-pl47c\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.690120 4762 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5b55cc23-05bb-4df9-9876-d725071b9838-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.690678 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4-scripts" (OuterVolumeSpecName: "scripts") pod "a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4" (UID: "a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.692732 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b55cc23-05bb-4df9-9876-d725071b9838-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "5b55cc23-05bb-4df9-9876-d725071b9838" (UID: "5b55cc23-05bb-4df9-9876-d725071b9838"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.692924 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b55cc23-05bb-4df9-9876-d725071b9838-kube-api-access-ltzlw" (OuterVolumeSpecName: "kube-api-access-ltzlw") pod "5b55cc23-05bb-4df9-9876-d725071b9838" (UID: "5b55cc23-05bb-4df9-9876-d725071b9838"). InnerVolumeSpecName "kube-api-access-ltzlw". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.693341 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b8e2afde-2991-4771-ae61-dc363b0d5f04-kube-api-access-gqsk6" (OuterVolumeSpecName: "kube-api-access-gqsk6") pod "b8e2afde-2991-4771-ae61-dc363b0d5f04" (UID: "b8e2afde-2991-4771-ae61-dc363b0d5f04"). InnerVolumeSpecName "kube-api-access-gqsk6". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.699219 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181","Type":"ContainerDied","Data":"7b0dfec2a83c65060fee92926a69b329defaf1a934d53b00c8a4b63777d1197f"} Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.699260 4762 scope.go:117] "RemoveContainer" containerID="13924ee637a2d8e6d84bc0168317a287fc78cd42d90586f3f1c1c46d4723c487" Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.699871 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.703019 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"4dc0b86c-7af2-4320-9b39-735213fb6609","Type":"ContainerDied","Data":"c7a9da66efad426817ea74dbc6681a4f39048959f992f44b2f18582d8ac3a70e"} Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.703117 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.718816 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.722655 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/80456c7c-9571-4520-989a-53654daad82c-kube-api-access-tmw7j" (OuterVolumeSpecName: "kube-api-access-tmw7j") pod "80456c7c-9571-4520-989a-53654daad82c" (UID: "80456c7c-9571-4520-989a-53654daad82c"). InnerVolumeSpecName "kube-api-access-tmw7j". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.725030 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.728691 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4" (UID: "a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.764681 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4" (UID: "a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.775421 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b55cc23-05bb-4df9-9876-d725071b9838-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "5b55cc23-05bb-4df9-9876-d725071b9838" (UID: "5b55cc23-05bb-4df9-9876-d725071b9838"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.795872 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/4dc0b86c-7af2-4320-9b39-735213fb6609-galera-tls-certs\") pod \"4dc0b86c-7af2-4320-9b39-735213fb6609\" (UID: \"4dc0b86c-7af2-4320-9b39-735213fb6609\") " Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.795917 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/96d5f387-4c72-4cc6-9776-56fcb49b8851-scripts\") pod \"96d5f387-4c72-4cc6-9776-56fcb49b8851\" (UID: \"96d5f387-4c72-4cc6-9776-56fcb49b8851\") " Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.795938 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t8k9m\" (UniqueName: \"kubernetes.io/projected/4dc0b86c-7af2-4320-9b39-735213fb6609-kube-api-access-t8k9m\") pod \"4dc0b86c-7af2-4320-9b39-735213fb6609\" (UID: \"4dc0b86c-7af2-4320-9b39-735213fb6609\") " Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.796127 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/96d5f387-4c72-4cc6-9776-56fcb49b8851-config-data\") pod \"96d5f387-4c72-4cc6-9776-56fcb49b8851\" (UID: \"96d5f387-4c72-4cc6-9776-56fcb49b8851\") " Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.796170 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ac020a5d-4e49-4f85-bc3c-13769c5f418a-config-data\") pod \"ac020a5d-4e49-4f85-bc3c-13769c5f418a\" (UID: \"ac020a5d-4e49-4f85-bc3c-13769c5f418a\") " Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.796191 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod 
\"b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181\" (UID: \"b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181\") " Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.796237 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/4dc0b86c-7af2-4320-9b39-735213fb6609-config-data-generated\") pod \"4dc0b86c-7af2-4320-9b39-735213fb6609\" (UID: \"4dc0b86c-7af2-4320-9b39-735213fb6609\") " Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.796258 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ac020a5d-4e49-4f85-bc3c-13769c5f418a-internal-tls-certs\") pod \"ac020a5d-4e49-4f85-bc3c-13769c5f418a\" (UID: \"ac020a5d-4e49-4f85-bc3c-13769c5f418a\") " Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.796280 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/4dc0b86c-7af2-4320-9b39-735213fb6609-kolla-config\") pod \"4dc0b86c-7af2-4320-9b39-735213fb6609\" (UID: \"4dc0b86c-7af2-4320-9b39-735213fb6609\") " Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.796299 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/96d5f387-4c72-4cc6-9776-56fcb49b8851-logs\") pod \"96d5f387-4c72-4cc6-9776-56fcb49b8851\" (UID: \"96d5f387-4c72-4cc6-9776-56fcb49b8851\") " Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.796313 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"96d5f387-4c72-4cc6-9776-56fcb49b8851\" (UID: \"96d5f387-4c72-4cc6-9776-56fcb49b8851\") " Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.796342 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"4dc0b86c-7af2-4320-9b39-735213fb6609\" (UID: \"4dc0b86c-7af2-4320-9b39-735213fb6609\") " Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.796370 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181-logs\") pod \"b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181\" (UID: \"b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181\") " Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.796396 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96d5f387-4c72-4cc6-9776-56fcb49b8851-combined-ca-bundle\") pod \"96d5f387-4c72-4cc6-9776-56fcb49b8851\" (UID: \"96d5f387-4c72-4cc6-9776-56fcb49b8851\") " Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.796421 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xfgbx\" (UniqueName: \"kubernetes.io/projected/ac020a5d-4e49-4f85-bc3c-13769c5f418a-kube-api-access-xfgbx\") pod \"ac020a5d-4e49-4f85-bc3c-13769c5f418a\" (UID: \"ac020a5d-4e49-4f85-bc3c-13769c5f418a\") " Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.796443 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181-internal-tls-certs\") pod \"b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181\" (UID: 
\"b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181\") " Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.796462 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/4dc0b86c-7af2-4320-9b39-735213fb6609-secrets\") pod \"4dc0b86c-7af2-4320-9b39-735213fb6609\" (UID: \"4dc0b86c-7af2-4320-9b39-735213fb6609\") " Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.796482 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-26dvs\" (UniqueName: \"kubernetes.io/projected/b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181-kube-api-access-26dvs\") pod \"b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181\" (UID: \"b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181\") " Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.796509 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4dc0b86c-7af2-4320-9b39-735213fb6609-operator-scripts\") pod \"4dc0b86c-7af2-4320-9b39-735213fb6609\" (UID: \"4dc0b86c-7af2-4320-9b39-735213fb6609\") " Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.796524 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ac020a5d-4e49-4f85-bc3c-13769c5f418a-scripts\") pod \"ac020a5d-4e49-4f85-bc3c-13769c5f418a\" (UID: \"ac020a5d-4e49-4f85-bc3c-13769c5f418a\") " Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.796540 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181-combined-ca-bundle\") pod \"b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181\" (UID: \"b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181\") " Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.796781 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ac020a5d-4e49-4f85-bc3c-13769c5f418a-logs\") pod \"ac020a5d-4e49-4f85-bc3c-13769c5f418a\" (UID: \"ac020a5d-4e49-4f85-bc3c-13769c5f418a\") " Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.796804 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/96d5f387-4c72-4cc6-9776-56fcb49b8851-httpd-run\") pod \"96d5f387-4c72-4cc6-9776-56fcb49b8851\" (UID: \"96d5f387-4c72-4cc6-9776-56fcb49b8851\") " Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.796820 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181-scripts\") pod \"b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181\" (UID: \"b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181\") " Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.796880 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ac020a5d-4e49-4f85-bc3c-13769c5f418a-public-tls-certs\") pod \"ac020a5d-4e49-4f85-bc3c-13769c5f418a\" (UID: \"ac020a5d-4e49-4f85-bc3c-13769c5f418a\") " Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.796907 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/96d5f387-4c72-4cc6-9776-56fcb49b8851-public-tls-certs\") pod \"96d5f387-4c72-4cc6-9776-56fcb49b8851\" (UID: \"96d5f387-4c72-4cc6-9776-56fcb49b8851\") " Oct 09 13:50:24 crc 
kubenswrapper[4762]: I1009 13:50:24.796936 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/4dc0b86c-7af2-4320-9b39-735213fb6609-config-data-default\") pod \"4dc0b86c-7af2-4320-9b39-735213fb6609\" (UID: \"4dc0b86c-7af2-4320-9b39-735213fb6609\") " Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.796960 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kn2zx\" (UniqueName: \"kubernetes.io/projected/96d5f387-4c72-4cc6-9776-56fcb49b8851-kube-api-access-kn2zx\") pod \"96d5f387-4c72-4cc6-9776-56fcb49b8851\" (UID: \"96d5f387-4c72-4cc6-9776-56fcb49b8851\") " Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.796992 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181-httpd-run\") pod \"b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181\" (UID: \"b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181\") " Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.797013 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181-config-data\") pod \"b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181\" (UID: \"b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181\") " Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.797051 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ac020a5d-4e49-4f85-bc3c-13769c5f418a-combined-ca-bundle\") pod \"ac020a5d-4e49-4f85-bc3c-13769c5f418a\" (UID: \"ac020a5d-4e49-4f85-bc3c-13769c5f418a\") " Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.797093 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4dc0b86c-7af2-4320-9b39-735213fb6609-combined-ca-bundle\") pod \"4dc0b86c-7af2-4320-9b39-735213fb6609\" (UID: \"4dc0b86c-7af2-4320-9b39-735213fb6609\") " Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.797534 4762 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4-scripts\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.797554 4762 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5b55cc23-05bb-4df9-9876-d725071b9838-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.797564 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tmw7j\" (UniqueName: \"kubernetes.io/projected/80456c7c-9571-4520-989a-53654daad82c-kube-api-access-tmw7j\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.797573 4762 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4-public-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.797582 4762 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.797593 4762 
reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gqsk6\" (UniqueName: \"kubernetes.io/projected/b8e2afde-2991-4771-ae61-dc363b0d5f04-kube-api-access-gqsk6\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.797605 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ltzlw\" (UniqueName: \"kubernetes.io/projected/5b55cc23-05bb-4df9-9876-d725071b9838-kube-api-access-ltzlw\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.797616 4762 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/5b55cc23-05bb-4df9-9876-d725071b9838-etc-swift\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.798238 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4dc0b86c-7af2-4320-9b39-735213fb6609-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "4dc0b86c-7af2-4320-9b39-735213fb6609" (UID: "4dc0b86c-7af2-4320-9b39-735213fb6609"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.801928 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/96d5f387-4c72-4cc6-9776-56fcb49b8851-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "96d5f387-4c72-4cc6-9776-56fcb49b8851" (UID: "96d5f387-4c72-4cc6-9776-56fcb49b8851"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.803902 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage11-crc" (OuterVolumeSpecName: "glance") pod "b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181" (UID: "b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181"). InnerVolumeSpecName "local-storage11-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.804380 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181-logs" (OuterVolumeSpecName: "logs") pod "b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181" (UID: "b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.804869 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4dc0b86c-7af2-4320-9b39-735213fb6609-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "4dc0b86c-7af2-4320-9b39-735213fb6609" (UID: "4dc0b86c-7af2-4320-9b39-735213fb6609"). InnerVolumeSpecName "config-data-generated". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.804902 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/96d5f387-4c72-4cc6-9776-56fcb49b8851-logs" (OuterVolumeSpecName: "logs") pod "96d5f387-4c72-4cc6-9776-56fcb49b8851" (UID: "96d5f387-4c72-4cc6-9776-56fcb49b8851"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.805153 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ac020a5d-4e49-4f85-bc3c-13769c5f418a-kube-api-access-xfgbx" (OuterVolumeSpecName: "kube-api-access-xfgbx") pod "ac020a5d-4e49-4f85-bc3c-13769c5f418a" (UID: "ac020a5d-4e49-4f85-bc3c-13769c5f418a"). InnerVolumeSpecName "kube-api-access-xfgbx". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.805671 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4dc0b86c-7af2-4320-9b39-735213fb6609-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "4dc0b86c-7af2-4320-9b39-735213fb6609" (UID: "4dc0b86c-7af2-4320-9b39-735213fb6609"). InnerVolumeSpecName "config-data-default". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.807746 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4dc0b86c-7af2-4320-9b39-735213fb6609-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "4dc0b86c-7af2-4320-9b39-735213fb6609" (UID: "4dc0b86c-7af2-4320-9b39-735213fb6609"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.808474 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181" (UID: "b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.810942 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ac020a5d-4e49-4f85-bc3c-13769c5f418a-logs" (OuterVolumeSpecName: "logs") pod "ac020a5d-4e49-4f85-bc3c-13769c5f418a" (UID: "ac020a5d-4e49-4f85-bc3c-13769c5f418a"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.824043 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage07-crc" (OuterVolumeSpecName: "glance") pod "96d5f387-4c72-4cc6-9776-56fcb49b8851" (UID: "96d5f387-4c72-4cc6-9776-56fcb49b8851"). InnerVolumeSpecName "local-storage07-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.827566 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4dc0b86c-7af2-4320-9b39-735213fb6609-kube-api-access-t8k9m" (OuterVolumeSpecName: "kube-api-access-t8k9m") pod "4dc0b86c-7af2-4320-9b39-735213fb6609" (UID: "4dc0b86c-7af2-4320-9b39-735213fb6609"). InnerVolumeSpecName "kube-api-access-t8k9m". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.827594 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ac020a5d-4e49-4f85-bc3c-13769c5f418a-scripts" (OuterVolumeSpecName: "scripts") pod "ac020a5d-4e49-4f85-bc3c-13769c5f418a" (UID: "ac020a5d-4e49-4f85-bc3c-13769c5f418a"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.828226 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96d5f387-4c72-4cc6-9776-56fcb49b8851-kube-api-access-kn2zx" (OuterVolumeSpecName: "kube-api-access-kn2zx") pod "96d5f387-4c72-4cc6-9776-56fcb49b8851" (UID: "96d5f387-4c72-4cc6-9776-56fcb49b8851"). InnerVolumeSpecName "kube-api-access-kn2zx". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.828410 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4dc0b86c-7af2-4320-9b39-735213fb6609-secrets" (OuterVolumeSpecName: "secrets") pod "4dc0b86c-7af2-4320-9b39-735213fb6609" (UID: "4dc0b86c-7af2-4320-9b39-735213fb6609"). InnerVolumeSpecName "secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.829492 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181-scripts" (OuterVolumeSpecName: "scripts") pod "b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181" (UID: "b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.832397 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96d5f387-4c72-4cc6-9776-56fcb49b8851-scripts" (OuterVolumeSpecName: "scripts") pod "96d5f387-4c72-4cc6-9776-56fcb49b8851" (UID: "96d5f387-4c72-4cc6-9776-56fcb49b8851"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.836029 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181-kube-api-access-26dvs" (OuterVolumeSpecName: "kube-api-access-26dvs") pod "b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181" (UID: "b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181"). InnerVolumeSpecName "kube-api-access-26dvs". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.844011 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b55cc23-05bb-4df9-9876-d725071b9838-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "5b55cc23-05bb-4df9-9876-d725071b9838" (UID: "5b55cc23-05bb-4df9-9876-d725071b9838"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.844117 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage10-crc" (OuterVolumeSpecName: "mysql-db") pod "4dc0b86c-7af2-4320-9b39-735213fb6609" (UID: "4dc0b86c-7af2-4320-9b39-735213fb6609"). InnerVolumeSpecName "local-storage10-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.899969 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xfgbx\" (UniqueName: \"kubernetes.io/projected/ac020a5d-4e49-4f85-bc3c-13769c5f418a-kube-api-access-xfgbx\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.900001 4762 reconciler_common.go:293] "Volume detached for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/4dc0b86c-7af2-4320-9b39-735213fb6609-secrets\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.900013 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-26dvs\" (UniqueName: \"kubernetes.io/projected/b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181-kube-api-access-26dvs\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.900024 4762 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4dc0b86c-7af2-4320-9b39-735213fb6609-operator-scripts\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.900034 4762 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ac020a5d-4e49-4f85-bc3c-13769c5f418a-scripts\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.900043 4762 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ac020a5d-4e49-4f85-bc3c-13769c5f418a-logs\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.900053 4762 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/96d5f387-4c72-4cc6-9776-56fcb49b8851-httpd-run\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.900066 4762 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181-scripts\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.900076 4762 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/4dc0b86c-7af2-4320-9b39-735213fb6609-config-data-default\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.900088 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kn2zx\" (UniqueName: \"kubernetes.io/projected/96d5f387-4c72-4cc6-9776-56fcb49b8851-kube-api-access-kn2zx\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.900099 4762 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181-httpd-run\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.900110 4762 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/96d5f387-4c72-4cc6-9776-56fcb49b8851-scripts\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.900120 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t8k9m\" (UniqueName: \"kubernetes.io/projected/4dc0b86c-7af2-4320-9b39-735213fb6609-kube-api-access-t8k9m\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 
13:50:24.900147 4762 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" " Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.900159 4762 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/4dc0b86c-7af2-4320-9b39-735213fb6609-config-data-generated\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.900170 4762 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/4dc0b86c-7af2-4320-9b39-735213fb6609-kolla-config\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.900181 4762 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/96d5f387-4c72-4cc6-9776-56fcb49b8851-logs\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.900198 4762 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" " Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.900215 4762 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" " Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.900228 4762 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5b55cc23-05bb-4df9-9876-d725071b9838-public-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.900241 4762 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181-logs\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.901734 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4dc0b86c-7af2-4320-9b39-735213fb6609-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4dc0b86c-7af2-4320-9b39-735213fb6609" (UID: "4dc0b86c-7af2-4320-9b39-735213fb6609"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.939161 4762 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage10-crc" (UniqueName: "kubernetes.io/local-volume/local-storage10-crc") on node "crc" Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.944735 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4-config-data" (OuterVolumeSpecName: "config-data") pod "a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4" (UID: "a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.952291 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b55cc23-05bb-4df9-9876-d725071b9838-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5b55cc23-05bb-4df9-9876-d725071b9838" (UID: "5b55cc23-05bb-4df9-9876-d725071b9838"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.981783 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b55cc23-05bb-4df9-9876-d725071b9838-config-data" (OuterVolumeSpecName: "config-data") pod "5b55cc23-05bb-4df9-9876-d725071b9838" (UID: "5b55cc23-05bb-4df9-9876-d725071b9838"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.993606 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96d5f387-4c72-4cc6-9776-56fcb49b8851-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "96d5f387-4c72-4cc6-9776-56fcb49b8851" (UID: "96d5f387-4c72-4cc6-9776-56fcb49b8851"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:50:24 crc kubenswrapper[4762]: I1009 13:50:24.996942 4762 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage11-crc" (UniqueName: "kubernetes.io/local-volume/local-storage11-crc") on node "crc" Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.004815 4762 reconciler_common.go:293] "Volume detached for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.004838 4762 reconciler_common.go:293] "Volume detached for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.004850 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96d5f387-4c72-4cc6-9776-56fcb49b8851-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.004863 4762 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5b55cc23-05bb-4df9-9876-d725071b9838-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.004873 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5b55cc23-05bb-4df9-9876-d725071b9838-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.004885 4762 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.004896 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4dc0b86c-7af2-4320-9b39-735213fb6609-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.010805 4762 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage07-crc" (UniqueName: "kubernetes.io/local-volume/local-storage07-crc") on node "crc" Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.011136 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="39780bc9-4ec7-4578-b64e-40c2a1bba06c" path="/var/lib/kubelet/pods/39780bc9-4ec7-4578-b64e-40c2a1bba06c/volumes" Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.012354 4762 kubelet_volumes.go:163] 
"Cleaned up orphaned pod volumes dir" podUID="73168d57-30d8-4389-aa93-cacc6b07f705" path="/var/lib/kubelet/pods/73168d57-30d8-4389-aa93-cacc6b07f705/volumes" Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.013416 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8d8907fd-9553-4758-8e14-a7f607ed4f34" path="/var/lib/kubelet/pods/8d8907fd-9553-4758-8e14-a7f607ed4f34/volumes" Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.015153 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aa137ef5-e1eb-4e54-a8e3-4a312e167837" path="/var/lib/kubelet/pods/aa137ef5-e1eb-4e54-a8e3-4a312e167837/volumes" Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.017187 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b797ab80-d2f5-427c-b305-0aaed4e06e83" path="/var/lib/kubelet/pods/b797ab80-d2f5-427c-b305-0aaed4e06e83/volumes" Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.018067 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="de9e8701-0980-4e1d-beb6-bc897f6a3e5f" path="/var/lib/kubelet/pods/de9e8701-0980-4e1d-beb6-bc897f6a3e5f/volumes" Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.018912 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4dc0b86c-7af2-4320-9b39-735213fb6609-galera-tls-certs" (OuterVolumeSpecName: "galera-tls-certs") pod "4dc0b86c-7af2-4320-9b39-735213fb6609" (UID: "4dc0b86c-7af2-4320-9b39-735213fb6609"). InnerVolumeSpecName "galera-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.019082 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e616fe11-8347-4520-b8e3-8aec90aac784" path="/var/lib/kubelet/pods/e616fe11-8347-4520-b8e3-8aec90aac784/volumes" Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.020302 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e9fca521-9caa-4bfe-8cbc-f2b0467b2a50" path="/var/lib/kubelet/pods/e9fca521-9caa-4bfe-8cbc-f2b0467b2a50/volumes" Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.023904 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f82e3f43-b73d-41d9-8ce1-c466e29055a5" path="/var/lib/kubelet/pods/f82e3f43-b73d-41d9-8ce1-c466e29055a5/volumes" Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.026962 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda4b3e7-20b8-4d16-8ccd-d0e4ce4c2f92" path="/var/lib/kubelet/pods/fda4b3e7-20b8-4d16-8ccd-d0e4ce4c2f92/volumes" Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.051027 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181-config-data" (OuterVolumeSpecName: "config-data") pod "b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181" (UID: "b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.055586 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181" (UID: "b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.082069 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96d5f387-4c72-4cc6-9776-56fcb49b8851-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "96d5f387-4c72-4cc6-9776-56fcb49b8851" (UID: "96d5f387-4c72-4cc6-9776-56fcb49b8851"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.088762 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ac020a5d-4e49-4f85-bc3c-13769c5f418a-config-data" (OuterVolumeSpecName: "config-data") pod "ac020a5d-4e49-4f85-bc3c-13769c5f418a" (UID: "ac020a5d-4e49-4f85-bc3c-13769c5f418a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.101891 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181" (UID: "b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.104534 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96d5f387-4c72-4cc6-9776-56fcb49b8851-config-data" (OuterVolumeSpecName: "config-data") pod "96d5f387-4c72-4cc6-9776-56fcb49b8851" (UID: "96d5f387-4c72-4cc6-9776-56fcb49b8851"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.106967 4762 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/96d5f387-4c72-4cc6-9776-56fcb49b8851-public-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.107001 4762 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.107016 4762 reconciler_common.go:293] "Volume detached for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/4dc0b86c-7af2-4320-9b39-735213fb6609-galera-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.107029 4762 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/96d5f387-4c72-4cc6-9776-56fcb49b8851-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.107040 4762 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ac020a5d-4e49-4f85-bc3c-13769c5f418a-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:25 crc kubenswrapper[4762]: E1009 13:50:25.107086 4762 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Oct 09 13:50:25 crc kubenswrapper[4762]: E1009 13:50:25.107150 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/0ca3d4c1-b9e5-4443-8102-7739602cbd2f-config-data podName:0ca3d4c1-b9e5-4443-8102-7739602cbd2f nodeName:}" 
failed. No retries permitted until 2025-10-09 13:50:33.107129595 +0000 UTC m=+1508.880920694 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/0ca3d4c1-b9e5-4443-8102-7739602cbd2f-config-data") pod "rabbitmq-server-0" (UID: "0ca3d4c1-b9e5-4443-8102-7739602cbd2f") : configmap "rabbitmq-config-data" not found Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.108042 4762 reconciler_common.go:293] "Volume detached for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.108056 4762 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.108068 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.118958 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ac020a5d-4e49-4f85-bc3c-13769c5f418a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ac020a5d-4e49-4f85-bc3c-13769c5f418a" (UID: "ac020a5d-4e49-4f85-bc3c-13769c5f418a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.132057 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ac020a5d-4e49-4f85-bc3c-13769c5f418a-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "ac020a5d-4e49-4f85-bc3c-13769c5f418a" (UID: "ac020a5d-4e49-4f85-bc3c-13769c5f418a"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.167099 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ac020a5d-4e49-4f85-bc3c-13769c5f418a-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "ac020a5d-4e49-4f85-bc3c-13769c5f418a" (UID: "ac020a5d-4e49-4f85-bc3c-13769c5f418a"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.212112 4762 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ac020a5d-4e49-4f85-bc3c-13769c5f418a-public-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.212146 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ac020a5d-4e49-4f85-bc3c-13769c5f418a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.212156 4762 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ac020a5d-4e49-4f85-bc3c-13769c5f418a-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.375964 4762 scope.go:117] "RemoveContainer" containerID="00be9b377751e0bad21e7467727a5bb1ef9635151ae430c7dd73ffcc83e71cc3" Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.379095 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder1fff-account-delete-zn6rx" Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.422020 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.438333 4762 scope.go:117] "RemoveContainer" containerID="1b9d384b3a344bc54503bc4fd31ef99fc9dc1c64d44d9dde65cb139bc35f38fd" Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.451842 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.517398 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstack-cell1-galera-0"] Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.519462 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/7c325595-23e7-465e-a533-181eef8ba528-kube-state-metrics-tls-config\") pod \"7c325595-23e7-465e-a533-181eef8ba528\" (UID: \"7c325595-23e7-465e-a533-181eef8ba528\") " Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.519542 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4wff\" (UniqueName: \"kubernetes.io/projected/ffc2314d-5e5f-45e6-9134-9879b35e0f2d-kube-api-access-w4wff\") pod \"ffc2314d-5e5f-45e6-9134-9879b35e0f2d\" (UID: \"ffc2314d-5e5f-45e6-9134-9879b35e0f2d\") " Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.519703 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/7c325595-23e7-465e-a533-181eef8ba528-kube-state-metrics-tls-certs\") pod \"7c325595-23e7-465e-a533-181eef8ba528\" (UID: \"7c325595-23e7-465e-a533-181eef8ba528\") " Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.519789 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c325595-23e7-465e-a533-181eef8ba528-combined-ca-bundle\") pod \"7c325595-23e7-465e-a533-181eef8ba528\" (UID: \"7c325595-23e7-465e-a533-181eef8ba528\") " Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.519940 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"kube-api-access-tbngj\" (UniqueName: \"kubernetes.io/projected/7c325595-23e7-465e-a533-181eef8ba528-kube-api-access-tbngj\") pod \"7c325595-23e7-465e-a533-181eef8ba528\" (UID: \"7c325595-23e7-465e-a533-181eef8ba528\") " Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.577968 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7c325595-23e7-465e-a533-181eef8ba528-kube-api-access-tbngj" (OuterVolumeSpecName: "kube-api-access-tbngj") pod "7c325595-23e7-465e-a533-181eef8ba528" (UID: "7c325595-23e7-465e-a533-181eef8ba528"). InnerVolumeSpecName "kube-api-access-tbngj". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.592690 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstack-cell1-galera-0"] Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.622843 4762 scope.go:117] "RemoveContainer" containerID="6791b06acce090a85fe3858d54be64ff77dbb596394dad11205a3c2d3f6e66f2" Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.623154 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.623830 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ffc2314d-5e5f-45e6-9134-9879b35e0f2d-kube-api-access-w4wff" (OuterVolumeSpecName: "kube-api-access-w4wff") pod "ffc2314d-5e5f-45e6-9134-9879b35e0f2d" (UID: "ffc2314d-5e5f-45e6-9134-9879b35e0f2d"). InnerVolumeSpecName "kube-api-access-w4wff". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.624780 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3-scripts\") pod \"96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3\" (UID: \"96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3\") " Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.624831 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3-combined-ca-bundle\") pod \"96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3\" (UID: \"96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3\") " Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.624896 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-stqtb\" (UniqueName: \"kubernetes.io/projected/96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3-kube-api-access-stqtb\") pod \"96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3\" (UID: \"96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3\") " Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.624938 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3-config-data\") pod \"96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3\" (UID: \"96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3\") " Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.625012 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3-ceilometer-tls-certs\") pod \"96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3\" (UID: \"96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3\") " Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.625031 4762 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3-log-httpd\") pod \"96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3\" (UID: \"96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3\") " Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.625072 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3-run-httpd\") pod \"96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3\" (UID: \"96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3\") " Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.625095 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3-sg-core-conf-yaml\") pod \"96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3\" (UID: \"96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3\") " Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.625475 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tbngj\" (UniqueName: \"kubernetes.io/projected/7c325595-23e7-465e-a533-181eef8ba528-kube-api-access-tbngj\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.625486 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4wff\" (UniqueName: \"kubernetes.io/projected/ffc2314d-5e5f-45e6-9134-9879b35e0f2d-kube-api-access-w4wff\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.641514 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3" (UID: "96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.647007 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3" (UID: "96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.661928 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.664044 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3-scripts" (OuterVolumeSpecName: "scripts") pod "96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3" (UID: "96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.711699 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7c325595-23e7-465e-a533-181eef8ba528-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7c325595-23e7-465e-a533-181eef8ba528" (UID: "7c325595-23e7-465e-a533-181eef8ba528"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.716346 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.717434 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7c325595-23e7-465e-a533-181eef8ba528-kube-state-metrics-tls-config" (OuterVolumeSpecName: "kube-state-metrics-tls-config") pod "7c325595-23e7-465e-a533-181eef8ba528" (UID: "7c325595-23e7-465e-a533-181eef8ba528"). InnerVolumeSpecName "kube-state-metrics-tls-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.717571 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.726256 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-74m8v\" (UniqueName: \"kubernetes.io/projected/e56a40fb-847a-4e59-981b-369559466cb1-kube-api-access-74m8v\") pod \"e56a40fb-847a-4e59-981b-369559466cb1\" (UID: \"e56a40fb-847a-4e59-981b-369559466cb1\") " Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.726366 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2bpwg\" (UniqueName: \"kubernetes.io/projected/46b3e9f8-479c-45b8-afc9-4c8344da0797-kube-api-access-2bpwg\") pod \"46b3e9f8-479c-45b8-afc9-4c8344da0797\" (UID: \"46b3e9f8-479c-45b8-afc9-4c8344da0797\") " Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.726386 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/46b3e9f8-479c-45b8-afc9-4c8344da0797-combined-ca-bundle\") pod \"46b3e9f8-479c-45b8-afc9-4c8344da0797\" (UID: \"46b3e9f8-479c-45b8-afc9-4c8344da0797\") " Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.726440 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/46b3e9f8-479c-45b8-afc9-4c8344da0797-config-data\") pod \"46b3e9f8-479c-45b8-afc9-4c8344da0797\" (UID: \"46b3e9f8-479c-45b8-afc9-4c8344da0797\") " Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.726460 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e56a40fb-847a-4e59-981b-369559466cb1-config-data\") pod \"e56a40fb-847a-4e59-981b-369559466cb1\" (UID: \"e56a40fb-847a-4e59-981b-369559466cb1\") " Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.726489 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/46b3e9f8-479c-45b8-afc9-4c8344da0797-config-data-custom\") pod \"46b3e9f8-479c-45b8-afc9-4c8344da0797\" (UID: \"46b3e9f8-479c-45b8-afc9-4c8344da0797\") " Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.726557 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e56a40fb-847a-4e59-981b-369559466cb1-combined-ca-bundle\") pod \"e56a40fb-847a-4e59-981b-369559466cb1\" (UID: \"e56a40fb-847a-4e59-981b-369559466cb1\") " Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.726575 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/46b3e9f8-479c-45b8-afc9-4c8344da0797-scripts\") pod \"46b3e9f8-479c-45b8-afc9-4c8344da0797\" (UID: 
\"46b3e9f8-479c-45b8-afc9-4c8344da0797\") " Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.726607 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/46b3e9f8-479c-45b8-afc9-4c8344da0797-etc-machine-id\") pod \"46b3e9f8-479c-45b8-afc9-4c8344da0797\" (UID: \"46b3e9f8-479c-45b8-afc9-4c8344da0797\") " Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.726936 4762 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.726956 4762 reconciler_common.go:293] "Volume detached for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/7c325595-23e7-465e-a533-181eef8ba528-kube-state-metrics-tls-config\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.726979 4762 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3-scripts\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.726988 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c325595-23e7-465e-a533-181eef8ba528-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.726997 4762 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.727044 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/46b3e9f8-479c-45b8-afc9-4c8344da0797-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "46b3e9f8-479c-45b8-afc9-4c8344da0797" (UID: "46b3e9f8-479c-45b8-afc9-4c8344da0797"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.729139 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.730283 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3-kube-api-access-stqtb" (OuterVolumeSpecName: "kube-api-access-stqtb") pod "96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3" (UID: "96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3"). InnerVolumeSpecName "kube-api-access-stqtb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.739307 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"920bd70a-a807-4848-baf7-5bfc033838ee","Type":"ContainerDied","Data":"81d25646935e58f5833b53ef70abc85a4cc338babc0b018008136ef95aa4706a"} Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.739541 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="81d25646935e58f5833b53ef70abc85a4cc338babc0b018008136ef95aa4706a" Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.741864 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e56a40fb-847a-4e59-981b-369559466cb1-kube-api-access-74m8v" (OuterVolumeSpecName: "kube-api-access-74m8v") pod "e56a40fb-847a-4e59-981b-369559466cb1" (UID: "e56a40fb-847a-4e59-981b-369559466cb1"). InnerVolumeSpecName "kube-api-access-74m8v". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.742944 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.744570 4762 generic.go:334] "Generic (PLEG): container finished" podID="d71b766d-302c-456b-ae8f-9befcb684b6a" containerID="2f7e1f0c224216180f20da7a881c8e33c243db93686be9f1287e0424bbd9d342" exitCode=1 Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.744843 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican272a-account-delete-z2pw8" event={"ID":"d71b766d-302c-456b-ae8f-9befcb684b6a","Type":"ContainerDied","Data":"2f7e1f0c224216180f20da7a881c8e33c243db93686be9f1287e0424bbd9d342"} Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.766592 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/46b3e9f8-479c-45b8-afc9-4c8344da0797-scripts" (OuterVolumeSpecName: "scripts") pod "46b3e9f8-479c-45b8-afc9-4c8344da0797" (UID: "46b3e9f8-479c-45b8-afc9-4c8344da0797"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.766690 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/46b3e9f8-479c-45b8-afc9-4c8344da0797-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "46b3e9f8-479c-45b8-afc9-4c8344da0797" (UID: "46b3e9f8-479c-45b8-afc9-4c8344da0797"). InnerVolumeSpecName "config-data-custom". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.770805 4762 generic.go:334] "Generic (PLEG): container finished" podID="7428fe70-b1c9-4595-a9b2-fa90ac21c3b2" containerID="8dcac387f0104fe0b4c3b09d4b3b425e392e3b56f15885091efa7efa96f35c1a" exitCode=1 Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.771036 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novaapi3399-account-delete-l75cx" event={"ID":"7428fe70-b1c9-4595-a9b2-fa90ac21c3b2","Type":"ContainerDied","Data":"8dcac387f0104fe0b4c3b09d4b3b425e392e3b56f15885091efa7efa96f35c1a"} Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.782944 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/46b3e9f8-479c-45b8-afc9-4c8344da0797-kube-api-access-2bpwg" (OuterVolumeSpecName: "kube-api-access-2bpwg") pod "46b3e9f8-479c-45b8-afc9-4c8344da0797" (UID: "46b3e9f8-479c-45b8-afc9-4c8344da0797"). InnerVolumeSpecName "kube-api-access-2bpwg". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.819460 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7c78979758-qtqdp" event={"ID":"2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1","Type":"ContainerDied","Data":"503629b9433525adb8785606dd0badfc76c6ade4081c6a7b306be8a6a78e0554"} Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.819496 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="503629b9433525adb8785606dd0badfc76c6ade4081c6a7b306be8a6a78e0554" Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.823379 4762 generic.go:334] "Generic (PLEG): container finished" podID="ecf33027-d452-4ccd-a23f-52697374958c" containerID="334bbe3d62baa0c1b8660026714f6e35997b198ea2d286453849256999eda71f" exitCode=1 Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.823454 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placementd6ff-account-delete-xl4vt" event={"ID":"ecf33027-d452-4ccd-a23f-52697374958c","Type":"ContainerDied","Data":"334bbe3d62baa0c1b8660026714f6e35997b198ea2d286453849256999eda71f"} Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.828333 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d09898b1-4f64-490a-bf9f-eb03d7219d8d-logs\") pod \"d09898b1-4f64-490a-bf9f-eb03d7219d8d\" (UID: \"d09898b1-4f64-490a-bf9f-eb03d7219d8d\") " Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.828984 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sn5lb\" (UniqueName: \"kubernetes.io/projected/d09898b1-4f64-490a-bf9f-eb03d7219d8d-kube-api-access-sn5lb\") pod \"d09898b1-4f64-490a-bf9f-eb03d7219d8d\" (UID: \"d09898b1-4f64-490a-bf9f-eb03d7219d8d\") " Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.829026 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b6e56749-167c-4542-b79f-a374a2f7ef20-config-data\") pod \"b6e56749-167c-4542-b79f-a374a2f7ef20\" (UID: \"b6e56749-167c-4542-b79f-a374a2f7ef20\") " Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.829104 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h4k7c\" (UniqueName: \"kubernetes.io/projected/b6e56749-167c-4542-b79f-a374a2f7ef20-kube-api-access-h4k7c\") pod 
\"b6e56749-167c-4542-b79f-a374a2f7ef20\" (UID: \"b6e56749-167c-4542-b79f-a374a2f7ef20\") " Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.829257 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d09898b1-4f64-490a-bf9f-eb03d7219d8d-combined-ca-bundle\") pod \"d09898b1-4f64-490a-bf9f-eb03d7219d8d\" (UID: \"d09898b1-4f64-490a-bf9f-eb03d7219d8d\") " Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.829294 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b6e56749-167c-4542-b79f-a374a2f7ef20-combined-ca-bundle\") pod \"b6e56749-167c-4542-b79f-a374a2f7ef20\" (UID: \"b6e56749-167c-4542-b79f-a374a2f7ef20\") " Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.829353 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d09898b1-4f64-490a-bf9f-eb03d7219d8d-config-data\") pod \"d09898b1-4f64-490a-bf9f-eb03d7219d8d\" (UID: \"d09898b1-4f64-490a-bf9f-eb03d7219d8d\") " Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.829420 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/d09898b1-4f64-490a-bf9f-eb03d7219d8d-nova-metadata-tls-certs\") pod \"d09898b1-4f64-490a-bf9f-eb03d7219d8d\" (UID: \"d09898b1-4f64-490a-bf9f-eb03d7219d8d\") " Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.830020 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2bpwg\" (UniqueName: \"kubernetes.io/projected/46b3e9f8-479c-45b8-afc9-4c8344da0797-kube-api-access-2bpwg\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.830040 4762 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/46b3e9f8-479c-45b8-afc9-4c8344da0797-config-data-custom\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.830049 4762 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/46b3e9f8-479c-45b8-afc9-4c8344da0797-scripts\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.830059 4762 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/46b3e9f8-479c-45b8-afc9-4c8344da0797-etc-machine-id\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.830123 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-74m8v\" (UniqueName: \"kubernetes.io/projected/e56a40fb-847a-4e59-981b-369559466cb1-kube-api-access-74m8v\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.830134 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-stqtb\" (UniqueName: \"kubernetes.io/projected/96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3-kube-api-access-stqtb\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.832606 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d09898b1-4f64-490a-bf9f-eb03d7219d8d-logs" (OuterVolumeSpecName: "logs") pod "d09898b1-4f64-490a-bf9f-eb03d7219d8d" (UID: "d09898b1-4f64-490a-bf9f-eb03d7219d8d"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.834074 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e56a40fb-847a-4e59-981b-369559466cb1-config-data" (OuterVolumeSpecName: "config-data") pod "e56a40fb-847a-4e59-981b-369559466cb1" (UID: "e56a40fb-847a-4e59-981b-369559466cb1"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.835068 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-7ff674b5d8-5r7qh" Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.839000 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d09898b1-4f64-490a-bf9f-eb03d7219d8d-kube-api-access-sn5lb" (OuterVolumeSpecName: "kube-api-access-sn5lb") pod "d09898b1-4f64-490a-bf9f-eb03d7219d8d" (UID: "d09898b1-4f64-490a-bf9f-eb03d7219d8d"). InnerVolumeSpecName "kube-api-access-sn5lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.839776 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.846404 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.846855 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder1fff-account-delete-zn6rx" Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.846910 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.848453 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-6d457f6487-fjznb" Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.848505 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutronb30a-account-delete-hlx4d" Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.849806 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.849895 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance7662-account-delete-9tfgj" Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.850736 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.851362 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.882011 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6e56749-167c-4542-b79f-a374a2f7ef20-kube-api-access-h4k7c" (OuterVolumeSpecName: "kube-api-access-h4k7c") pod "b6e56749-167c-4542-b79f-a374a2f7ef20" (UID: "b6e56749-167c-4542-b79f-a374a2f7ef20"). InnerVolumeSpecName "kube-api-access-h4k7c". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.888696 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3" (UID: "96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.888774 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/46b3e9f8-479c-45b8-afc9-4c8344da0797-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "46b3e9f8-479c-45b8-afc9-4c8344da0797" (UID: "46b3e9f8-479c-45b8-afc9-4c8344da0797"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.938244 4762 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d09898b1-4f64-490a-bf9f-eb03d7219d8d-logs\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.938293 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sn5lb\" (UniqueName: \"kubernetes.io/projected/d09898b1-4f64-490a-bf9f-eb03d7219d8d-kube-api-access-sn5lb\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.938308 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h4k7c\" (UniqueName: \"kubernetes.io/projected/b6e56749-167c-4542-b79f-a374a2f7ef20-kube-api-access-h4k7c\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.938323 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/46b3e9f8-479c-45b8-afc9-4c8344da0797-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.938340 4762 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e56a40fb-847a-4e59-981b-369559466cb1-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.938356 4762 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.957994 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e56a40fb-847a-4e59-981b-369559466cb1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e56a40fb-847a-4e59-981b-369559466cb1" (UID: "e56a40fb-847a-4e59-981b-369559466cb1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.971193 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d09898b1-4f64-490a-bf9f-eb03d7219d8d-config-data" (OuterVolumeSpecName: "config-data") pod "d09898b1-4f64-490a-bf9f-eb03d7219d8d" (UID: "d09898b1-4f64-490a-bf9f-eb03d7219d8d"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:50:25 crc kubenswrapper[4762]: I1009 13:50:25.972060 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d09898b1-4f64-490a-bf9f-eb03d7219d8d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d09898b1-4f64-490a-bf9f-eb03d7219d8d" (UID: "d09898b1-4f64-490a-bf9f-eb03d7219d8d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.001687 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6e56749-167c-4542-b79f-a374a2f7ef20-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b6e56749-167c-4542-b79f-a374a2f7ef20" (UID: "b6e56749-167c-4542-b79f-a374a2f7ef20"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.008185 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6e56749-167c-4542-b79f-a374a2f7ef20-config-data" (OuterVolumeSpecName: "config-data") pod "b6e56749-167c-4542-b79f-a374a2f7ef20" (UID: "b6e56749-167c-4542-b79f-a374a2f7ef20"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.039981 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d09898b1-4f64-490a-bf9f-eb03d7219d8d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.040044 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b6e56749-167c-4542-b79f-a374a2f7ef20-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.040059 4762 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d09898b1-4f64-490a-bf9f-eb03d7219d8d-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.040071 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e56a40fb-847a-4e59-981b-369559466cb1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.040083 4762 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b6e56749-167c-4542-b79f-a374a2f7ef20-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.041550 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3" (UID: "96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.078606 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3" (UID: "96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3"). 
InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.085437 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7c325595-23e7-465e-a533-181eef8ba528-kube-state-metrics-tls-certs" (OuterVolumeSpecName: "kube-state-metrics-tls-certs") pod "7c325595-23e7-465e-a533-181eef8ba528" (UID: "7c325595-23e7-465e-a533-181eef8ba528"). InnerVolumeSpecName "kube-state-metrics-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.091564 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3-config-data" (OuterVolumeSpecName: "config-data") pod "96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3" (UID: "96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.094816 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d09898b1-4f64-490a-bf9f-eb03d7219d8d-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "d09898b1-4f64-490a-bf9f-eb03d7219d8d" (UID: "d09898b1-4f64-490a-bf9f-eb03d7219d8d"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.098095 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/46b3e9f8-479c-45b8-afc9-4c8344da0797-config-data" (OuterVolumeSpecName: "config-data") pod "46b3e9f8-479c-45b8-afc9-4c8344da0797" (UID: "46b3e9f8-479c-45b8-afc9-4c8344da0797"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:50:26 crc kubenswrapper[4762]: E1009 13:50:26.141473 4762 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.141504 4762 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:26 crc kubenswrapper[4762]: E1009 13:50:26.141542 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/c3841f71-5204-469f-b755-e030281725d1-config-data podName:c3841f71-5204-469f-b755-e030281725d1 nodeName:}" failed. No retries permitted until 2025-10-09 13:50:34.141522822 +0000 UTC m=+1509.915313861 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/c3841f71-5204-469f-b755-e030281725d1-config-data") pod "rabbitmq-cell1-server-0" (UID: "c3841f71-5204-469f-b755-e030281725d1") : configmap "rabbitmq-cell1-config-data" not found Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.141569 4762 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.141582 4762 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/46b3e9f8-479c-45b8-afc9-4c8344da0797-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.141592 4762 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/d09898b1-4f64-490a-bf9f-eb03d7219d8d-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.141603 4762 reconciler_common.go:293] "Volume detached for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/7c325595-23e7-465e-a533-181eef8ba528-kube-state-metrics-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.141612 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.201695 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.222008 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-7c78979758-qtqdp" Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.258855 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.277847 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.288580 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.288766 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican272a-account-delete-z2pw8" Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.298828 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder1fff-account-delete-zn6rx"] Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.304877 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder1fff-account-delete-zn6rx"] Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.313823 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.324187 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.340235 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.344316 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5b14562-fc3d-48fc-b83d-c12e5c9617c6-combined-ca-bundle\") pod \"f5b14562-fc3d-48fc-b83d-c12e5c9617c6\" (UID: \"f5b14562-fc3d-48fc-b83d-c12e5c9617c6\") " Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.344408 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1-internal-tls-certs\") pod \"2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1\" (UID: \"2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1\") " Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.344439 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/920bd70a-a807-4848-baf7-5bfc033838ee-config-data\") pod \"920bd70a-a807-4848-baf7-5bfc033838ee\" (UID: \"920bd70a-a807-4848-baf7-5bfc033838ee\") " Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.344473 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f5b14562-fc3d-48fc-b83d-c12e5c9617c6-logs\") pod \"f5b14562-fc3d-48fc-b83d-c12e5c9617c6\" (UID: \"f5b14562-fc3d-48fc-b83d-c12e5c9617c6\") " Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.344495 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/920bd70a-a807-4848-baf7-5bfc033838ee-combined-ca-bundle\") pod \"920bd70a-a807-4848-baf7-5bfc033838ee\" (UID: \"920bd70a-a807-4848-baf7-5bfc033838ee\") " Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.344542 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f5b14562-fc3d-48fc-b83d-c12e5c9617c6-config-data\") pod \"f5b14562-fc3d-48fc-b83d-c12e5c9617c6\" (UID: \"f5b14562-fc3d-48fc-b83d-c12e5c9617c6\") " Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.344563 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f5b14562-fc3d-48fc-b83d-c12e5c9617c6-public-tls-certs\") pod \"f5b14562-fc3d-48fc-b83d-c12e5c9617c6\" (UID: \"f5b14562-fc3d-48fc-b83d-c12e5c9617c6\") " Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.344593 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: 
\"kubernetes.io/configmap/920bd70a-a807-4848-baf7-5bfc033838ee-kolla-config\") pod \"920bd70a-a807-4848-baf7-5bfc033838ee\" (UID: \"920bd70a-a807-4848-baf7-5bfc033838ee\") " Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.344616 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1-config-data\") pod \"2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1\" (UID: \"2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1\") " Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.344665 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1-config-data-custom\") pod \"2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1\" (UID: \"2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1\") " Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.344722 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mcvsn\" (UniqueName: \"kubernetes.io/projected/f5b14562-fc3d-48fc-b83d-c12e5c9617c6-kube-api-access-mcvsn\") pod \"f5b14562-fc3d-48fc-b83d-c12e5c9617c6\" (UID: \"f5b14562-fc3d-48fc-b83d-c12e5c9617c6\") " Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.344752 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/920bd70a-a807-4848-baf7-5bfc033838ee-memcached-tls-certs\") pod \"920bd70a-a807-4848-baf7-5bfc033838ee\" (UID: \"920bd70a-a807-4848-baf7-5bfc033838ee\") " Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.344793 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1-logs\") pod \"2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1\" (UID: \"2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1\") " Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.344823 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5t2mb\" (UniqueName: \"kubernetes.io/projected/d71b766d-302c-456b-ae8f-9befcb684b6a-kube-api-access-5t2mb\") pod \"d71b766d-302c-456b-ae8f-9befcb684b6a\" (UID: \"d71b766d-302c-456b-ae8f-9befcb684b6a\") " Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.344851 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1-public-tls-certs\") pod \"2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1\" (UID: \"2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1\") " Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.344885 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1-combined-ca-bundle\") pod \"2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1\" (UID: \"2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1\") " Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.344936 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htmw5\" (UniqueName: \"kubernetes.io/projected/920bd70a-a807-4848-baf7-5bfc033838ee-kube-api-access-htmw5\") pod \"920bd70a-a807-4848-baf7-5bfc033838ee\" (UID: \"920bd70a-a807-4848-baf7-5bfc033838ee\") " Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.344961 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f5b14562-fc3d-48fc-b83d-c12e5c9617c6-internal-tls-certs\") pod \"f5b14562-fc3d-48fc-b83d-c12e5c9617c6\" (UID: \"f5b14562-fc3d-48fc-b83d-c12e5c9617c6\") " Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.344997 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lnxbd\" (UniqueName: \"kubernetes.io/projected/2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1-kube-api-access-lnxbd\") pod \"2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1\" (UID: \"2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1\") " Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.348233 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f5b14562-fc3d-48fc-b83d-c12e5c9617c6-logs" (OuterVolumeSpecName: "logs") pod "f5b14562-fc3d-48fc-b83d-c12e5c9617c6" (UID: "f5b14562-fc3d-48fc-b83d-c12e5c9617c6"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.349741 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1-kube-api-access-lnxbd" (OuterVolumeSpecName: "kube-api-access-lnxbd") pod "2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1" (UID: "2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1"). InnerVolumeSpecName "kube-api-access-lnxbd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.349828 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.350472 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/920bd70a-a807-4848-baf7-5bfc033838ee-config-data" (OuterVolumeSpecName: "config-data") pod "920bd70a-a807-4848-baf7-5bfc033838ee" (UID: "920bd70a-a807-4848-baf7-5bfc033838ee"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.352705 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1" (UID: "2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.356405 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-proxy-6d457f6487-fjznb"] Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.358236 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/920bd70a-a807-4848-baf7-5bfc033838ee-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "920bd70a-a807-4848-baf7-5bfc033838ee" (UID: "920bd70a-a807-4848-baf7-5bfc033838ee"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.358261 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1-logs" (OuterVolumeSpecName: "logs") pod "2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1" (UID: "2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.358804 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/920bd70a-a807-4848-baf7-5bfc033838ee-kube-api-access-htmw5" (OuterVolumeSpecName: "kube-api-access-htmw5") pod "920bd70a-a807-4848-baf7-5bfc033838ee" (UID: "920bd70a-a807-4848-baf7-5bfc033838ee"). InnerVolumeSpecName "kube-api-access-htmw5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.359294 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d71b766d-302c-456b-ae8f-9befcb684b6a-kube-api-access-5t2mb" (OuterVolumeSpecName: "kube-api-access-5t2mb") pod "d71b766d-302c-456b-ae8f-9befcb684b6a" (UID: "d71b766d-302c-456b-ae8f-9befcb684b6a"). InnerVolumeSpecName "kube-api-access-5t2mb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.362623 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placementd6ff-account-delete-xl4vt" Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.368107 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/swift-proxy-6d457f6487-fjznb"] Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.380160 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f5b14562-fc3d-48fc-b83d-c12e5c9617c6-kube-api-access-mcvsn" (OuterVolumeSpecName: "kube-api-access-mcvsn") pod "f5b14562-fc3d-48fc-b83d-c12e5c9617c6" (UID: "f5b14562-fc3d-48fc-b83d-c12e5c9617c6"). InnerVolumeSpecName "kube-api-access-mcvsn". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.398615 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance7662-account-delete-9tfgj"] Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.409850 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance7662-account-delete-9tfgj"] Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.410170 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1" (UID: "2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.423982 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/920bd70a-a807-4848-baf7-5bfc033838ee-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "920bd70a-a807-4848-baf7-5bfc033838ee" (UID: "920bd70a-a807-4848-baf7-5bfc033838ee"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.424077 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-7ff674b5d8-5r7qh"] Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.426405 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/novaapi3399-account-delete-l75cx" Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.429278 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-7ff674b5d8-5r7qh"] Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.434682 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5b14562-fc3d-48fc-b83d-c12e5c9617c6-config-data" (OuterVolumeSpecName: "config-data") pod "f5b14562-fc3d-48fc-b83d-c12e5c9617c6" (UID: "f5b14562-fc3d-48fc-b83d-c12e5c9617c6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.434598 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5b14562-fc3d-48fc-b83d-c12e5c9617c6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f5b14562-fc3d-48fc-b83d-c12e5c9617c6" (UID: "f5b14562-fc3d-48fc-b83d-c12e5c9617c6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.442213 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1-config-data" (OuterVolumeSpecName: "config-data") pod "2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1" (UID: "2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.446492 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4zl4p\" (UniqueName: \"kubernetes.io/projected/ecf33027-d452-4ccd-a23f-52697374958c-kube-api-access-4zl4p\") pod \"ecf33027-d452-4ccd-a23f-52697374958c\" (UID: \"ecf33027-d452-4ccd-a23f-52697374958c\") " Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.447037 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5b14562-fc3d-48fc-b83d-c12e5c9617c6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.447059 4762 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/920bd70a-a807-4848-baf7-5bfc033838ee-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.447068 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/920bd70a-a807-4848-baf7-5bfc033838ee-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.447078 4762 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f5b14562-fc3d-48fc-b83d-c12e5c9617c6-logs\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.447086 4762 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f5b14562-fc3d-48fc-b83d-c12e5c9617c6-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.447094 4762 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/920bd70a-a807-4848-baf7-5bfc033838ee-kolla-config\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.447104 4762 
reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.447111 4762 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1-config-data-custom\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.447119 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mcvsn\" (UniqueName: \"kubernetes.io/projected/f5b14562-fc3d-48fc-b83d-c12e5c9617c6-kube-api-access-mcvsn\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.447128 4762 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1-logs\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.447138 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5t2mb\" (UniqueName: \"kubernetes.io/projected/d71b766d-302c-456b-ae8f-9befcb684b6a-kube-api-access-5t2mb\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.447146 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.447153 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htmw5\" (UniqueName: \"kubernetes.io/projected/920bd70a-a807-4848-baf7-5bfc033838ee-kube-api-access-htmw5\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.447162 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lnxbd\" (UniqueName: \"kubernetes.io/projected/2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1-kube-api-access-lnxbd\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.448495 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.452147 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5b14562-fc3d-48fc-b83d-c12e5c9617c6-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "f5b14562-fc3d-48fc-b83d-c12e5c9617c6" (UID: "f5b14562-fc3d-48fc-b83d-c12e5c9617c6"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.456075 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5b14562-fc3d-48fc-b83d-c12e5c9617c6-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "f5b14562-fc3d-48fc-b83d-c12e5c9617c6" (UID: "f5b14562-fc3d-48fc-b83d-c12e5c9617c6"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.457189 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ecf33027-d452-4ccd-a23f-52697374958c-kube-api-access-4zl4p" (OuterVolumeSpecName: "kube-api-access-4zl4p") pod "ecf33027-d452-4ccd-a23f-52697374958c" (UID: "ecf33027-d452-4ccd-a23f-52697374958c"). 
InnerVolumeSpecName "kube-api-access-4zl4p". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.489810 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.503661 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.507610 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1" (UID: "2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.513973 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.529593 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1" (UID: "2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.535832 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutronb30a-account-delete-hlx4d"] Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.536335 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/920bd70a-a807-4848-baf7-5bfc033838ee-memcached-tls-certs" (OuterVolumeSpecName: "memcached-tls-certs") pod "920bd70a-a807-4848-baf7-5bfc033838ee" (UID: "920bd70a-a807-4848-baf7-5bfc033838ee"). InnerVolumeSpecName "memcached-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.543755 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutronb30a-account-delete-hlx4d"] Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.548130 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fsmzk\" (UniqueName: \"kubernetes.io/projected/7428fe70-b1c9-4595-a9b2-fa90ac21c3b2-kube-api-access-fsmzk\") pod \"7428fe70-b1c9-4595-a9b2-fa90ac21c3b2\" (UID: \"7428fe70-b1c9-4595-a9b2-fa90ac21c3b2\") " Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.548911 4762 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.548937 4762 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f5b14562-fc3d-48fc-b83d-c12e5c9617c6-public-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.548948 4762 reconciler_common.go:293] "Volume detached for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/920bd70a-a807-4848-baf7-5bfc033838ee-memcached-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.548959 4762 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1-public-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.548969 4762 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f5b14562-fc3d-48fc-b83d-c12e5c9617c6-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.548980 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4zl4p\" (UniqueName: \"kubernetes.io/projected/ecf33027-d452-4ccd-a23f-52697374958c-kube-api-access-4zl4p\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.552238 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7428fe70-b1c9-4595-a9b2-fa90ac21c3b2-kube-api-access-fsmzk" (OuterVolumeSpecName: "kube-api-access-fsmzk") pod "7428fe70-b1c9-4595-a9b2-fa90ac21c3b2" (UID: "7428fe70-b1c9-4595-a9b2-fa90ac21c3b2"). InnerVolumeSpecName "kube-api-access-fsmzk". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.557777 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.562087 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.650216 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fsmzk\" (UniqueName: \"kubernetes.io/projected/7428fe70-b1c9-4595-a9b2-fa90ac21c3b2-kube-api-access-fsmzk\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.888258 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.933470 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_39888a9b-c1cd-496e-b44e-a27212faac74/ovn-northd/0.log" Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.933592 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.935131 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican272a-account-delete-z2pw8" event={"ID":"d71b766d-302c-456b-ae8f-9befcb684b6a","Type":"ContainerDied","Data":"7f068db3757ded425e864366a700a38dd636defb4a2c3ccfa66902df029f8ded"} Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.935213 4762 scope.go:117] "RemoveContainer" containerID="2f7e1f0c224216180f20da7a881c8e33c243db93686be9f1287e0424bbd9d342" Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.935750 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican272a-account-delete-z2pw8" Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.943523 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_39888a9b-c1cd-496e-b44e-a27212faac74/ovn-northd/0.log" Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.943613 4762 generic.go:334] "Generic (PLEG): container finished" podID="39888a9b-c1cd-496e-b44e-a27212faac74" containerID="4162553b0af34403f13cf39cadfd4b05aeaf6a4b793292efa6af66dcae48120e" exitCode=139 Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.943717 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"39888a9b-c1cd-496e-b44e-a27212faac74","Type":"ContainerDied","Data":"4162553b0af34403f13cf39cadfd4b05aeaf6a4b793292efa6af66dcae48120e"} Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.943775 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"39888a9b-c1cd-496e-b44e-a27212faac74","Type":"ContainerDied","Data":"bebe6b7cbd2ad19cd9fc4c39b721ba76216c7c2cf923ba0ba6d7f8fff28445c8"} Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.943911 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-northd-0" Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.951149 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-98dbd4bdf-stq5g" event={"ID":"06ff304d-2fbe-412d-8eeb-098ff74fc7a6","Type":"ContainerDied","Data":"041eae0fb2897099972d7b4151296f3413a47cd494bf2d693195499557b5f03d"} Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.951203 4762 generic.go:334] "Generic (PLEG): container finished" podID="06ff304d-2fbe-412d-8eeb-098ff74fc7a6" containerID="041eae0fb2897099972d7b4151296f3413a47cd494bf2d693195499557b5f03d" exitCode=0 Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.994784 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/0ca3d4c1-b9e5-4443-8102-7739602cbd2f-rabbitmq-confd\") pod \"0ca3d4c1-b9e5-4443-8102-7739602cbd2f\" (UID: \"0ca3d4c1-b9e5-4443-8102-7739602cbd2f\") " Oct 09 13:50:26 crc kubenswrapper[4762]: I1009 13:50:26.996715 4762 scope.go:117] "RemoveContainer" containerID="5b0ad70d278446d3548567be96b4c62877cd198d62cd13385055a63295deb65a" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.001523 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/0ca3d4c1-b9e5-4443-8102-7739602cbd2f-rabbitmq-erlang-cookie\") pod \"0ca3d4c1-b9e5-4443-8102-7739602cbd2f\" (UID: \"0ca3d4c1-b9e5-4443-8102-7739602cbd2f\") " Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.001701 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0ca3d4c1-b9e5-4443-8102-7739602cbd2f-config-data\") pod \"0ca3d4c1-b9e5-4443-8102-7739602cbd2f\" (UID: \"0ca3d4c1-b9e5-4443-8102-7739602cbd2f\") " Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.001746 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/0ca3d4c1-b9e5-4443-8102-7739602cbd2f-plugins-conf\") pod \"0ca3d4c1-b9e5-4443-8102-7739602cbd2f\" (UID: \"0ca3d4c1-b9e5-4443-8102-7739602cbd2f\") " Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.001839 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/0ca3d4c1-b9e5-4443-8102-7739602cbd2f-server-conf\") pod \"0ca3d4c1-b9e5-4443-8102-7739602cbd2f\" (UID: \"0ca3d4c1-b9e5-4443-8102-7739602cbd2f\") " Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.001874 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"0ca3d4c1-b9e5-4443-8102-7739602cbd2f\" (UID: \"0ca3d4c1-b9e5-4443-8102-7739602cbd2f\") " Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.001974 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/0ca3d4c1-b9e5-4443-8102-7739602cbd2f-erlang-cookie-secret\") pod \"0ca3d4c1-b9e5-4443-8102-7739602cbd2f\" (UID: \"0ca3d4c1-b9e5-4443-8102-7739602cbd2f\") " Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.002020 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/0ca3d4c1-b9e5-4443-8102-7739602cbd2f-pod-info\") pod \"0ca3d4c1-b9e5-4443-8102-7739602cbd2f\" (UID: 
\"0ca3d4c1-b9e5-4443-8102-7739602cbd2f\") " Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.002126 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7t98p\" (UniqueName: \"kubernetes.io/projected/0ca3d4c1-b9e5-4443-8102-7739602cbd2f-kube-api-access-7t98p\") pod \"0ca3d4c1-b9e5-4443-8102-7739602cbd2f\" (UID: \"0ca3d4c1-b9e5-4443-8102-7739602cbd2f\") " Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.002435 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0ca3d4c1-b9e5-4443-8102-7739602cbd2f-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "0ca3d4c1-b9e5-4443-8102-7739602cbd2f" (UID: "0ca3d4c1-b9e5-4443-8102-7739602cbd2f"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.002528 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/0ca3d4c1-b9e5-4443-8102-7739602cbd2f-rabbitmq-tls\") pod \"0ca3d4c1-b9e5-4443-8102-7739602cbd2f\" (UID: \"0ca3d4c1-b9e5-4443-8102-7739602cbd2f\") " Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.002579 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/0ca3d4c1-b9e5-4443-8102-7739602cbd2f-rabbitmq-plugins\") pod \"0ca3d4c1-b9e5-4443-8102-7739602cbd2f\" (UID: \"0ca3d4c1-b9e5-4443-8102-7739602cbd2f\") " Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.003481 4762 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/0ca3d4c1-b9e5-4443-8102-7739602cbd2f-plugins-conf\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.003500 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0ca3d4c1-b9e5-4443-8102-7739602cbd2f-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "0ca3d4c1-b9e5-4443-8102-7739602cbd2f" (UID: "0ca3d4c1-b9e5-4443-8102-7739602cbd2f"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.004934 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/novaapi3399-account-delete-l75cx" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.008672 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage06-crc" (OuterVolumeSpecName: "persistence") pod "0ca3d4c1-b9e5-4443-8102-7739602cbd2f" (UID: "0ca3d4c1-b9e5-4443-8102-7739602cbd2f"). InnerVolumeSpecName "local-storage06-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.009372 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0ca3d4c1-b9e5-4443-8102-7739602cbd2f-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "0ca3d4c1-b9e5-4443-8102-7739602cbd2f" (UID: "0ca3d4c1-b9e5-4443-8102-7739602cbd2f"). InnerVolumeSpecName "erlang-cookie-secret". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.009957 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0ca3d4c1-b9e5-4443-8102-7739602cbd2f-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "0ca3d4c1-b9e5-4443-8102-7739602cbd2f" (UID: "0ca3d4c1-b9e5-4443-8102-7739602cbd2f"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.011820 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0ca3d4c1-b9e5-4443-8102-7739602cbd2f-kube-api-access-7t98p" (OuterVolumeSpecName: "kube-api-access-7t98p") pod "0ca3d4c1-b9e5-4443-8102-7739602cbd2f" (UID: "0ca3d4c1-b9e5-4443-8102-7739602cbd2f"). InnerVolumeSpecName "kube-api-access-7t98p". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.016991 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0ca3d4c1-b9e5-4443-8102-7739602cbd2f-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "0ca3d4c1-b9e5-4443-8102-7739602cbd2f" (UID: "0ca3d4c1-b9e5-4443-8102-7739602cbd2f"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.023869 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/0ca3d4c1-b9e5-4443-8102-7739602cbd2f-pod-info" (OuterVolumeSpecName: "pod-info") pod "0ca3d4c1-b9e5-4443-8102-7739602cbd2f" (UID: "0ca3d4c1-b9e5-4443-8102-7739602cbd2f"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.031066 4762 generic.go:334] "Generic (PLEG): container finished" podID="0ca3d4c1-b9e5-4443-8102-7739602cbd2f" containerID="f1abbdb910acded618c7b329467c1d59b4d7f9365598dbf3d11919c8b70ec8ef" exitCode=0 Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.031217 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.034300 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="46b3e9f8-479c-45b8-afc9-4c8344da0797" path="/var/lib/kubelet/pods/46b3e9f8-479c-45b8-afc9-4c8344da0797/volumes" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.035384 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4dc0b86c-7af2-4320-9b39-735213fb6609" path="/var/lib/kubelet/pods/4dc0b86c-7af2-4320-9b39-735213fb6609/volumes" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.036054 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b55cc23-05bb-4df9-9876-d725071b9838" path="/var/lib/kubelet/pods/5b55cc23-05bb-4df9-9876-d725071b9838/volumes" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.039611 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.039690 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-7c78979758-qtqdp" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.039718 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.039723 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placementd6ff-account-delete-xl4vt" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.039830 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.039620 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.041339 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7c325595-23e7-465e-a533-181eef8ba528" path="/var/lib/kubelet/pods/7c325595-23e7-465e-a533-181eef8ba528/volumes" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.041898 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="80456c7c-9571-4520-989a-53654daad82c" path="/var/lib/kubelet/pods/80456c7c-9571-4520-989a-53654daad82c/volumes" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.042497 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96d5f387-4c72-4cc6-9776-56fcb49b8851" path="/var/lib/kubelet/pods/96d5f387-4c72-4cc6-9776-56fcb49b8851/volumes" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.044783 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3" path="/var/lib/kubelet/pods/96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3/volumes" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.047963 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4" path="/var/lib/kubelet/pods/a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4/volumes" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.050623 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ac020a5d-4e49-4f85-bc3c-13769c5f418a" path="/var/lib/kubelet/pods/ac020a5d-4e49-4f85-bc3c-13769c5f418a/volumes" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.051466 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181" path="/var/lib/kubelet/pods/b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181/volumes" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.055965 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0ca3d4c1-b9e5-4443-8102-7739602cbd2f-config-data" (OuterVolumeSpecName: "config-data") pod "0ca3d4c1-b9e5-4443-8102-7739602cbd2f" (UID: "0ca3d4c1-b9e5-4443-8102-7739602cbd2f"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.068280 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b8e2afde-2991-4771-ae61-dc363b0d5f04" path="/var/lib/kubelet/pods/b8e2afde-2991-4771-ae61-dc363b0d5f04/volumes" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.069012 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e56a40fb-847a-4e59-981b-369559466cb1" path="/var/lib/kubelet/pods/e56a40fb-847a-4e59-981b-369559466cb1/volumes" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.069452 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ffc2314d-5e5f-45e6-9134-9879b35e0f2d" path="/var/lib/kubelet/pods/ffc2314d-5e5f-45e6-9134-9879b35e0f2d/volumes" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.092369 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0ca3d4c1-b9e5-4443-8102-7739602cbd2f-server-conf" (OuterVolumeSpecName: "server-conf") pod "0ca3d4c1-b9e5-4443-8102-7739602cbd2f" (UID: "0ca3d4c1-b9e5-4443-8102-7739602cbd2f"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.112968 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/39888a9b-c1cd-496e-b44e-a27212faac74-ovn-northd-tls-certs\") pod \"39888a9b-c1cd-496e-b44e-a27212faac74\" (UID: \"39888a9b-c1cd-496e-b44e-a27212faac74\") " Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.113022 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/39888a9b-c1cd-496e-b44e-a27212faac74-config\") pod \"39888a9b-c1cd-496e-b44e-a27212faac74\" (UID: \"39888a9b-c1cd-496e-b44e-a27212faac74\") " Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.113054 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/39888a9b-c1cd-496e-b44e-a27212faac74-metrics-certs-tls-certs\") pod \"39888a9b-c1cd-496e-b44e-a27212faac74\" (UID: \"39888a9b-c1cd-496e-b44e-a27212faac74\") " Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.113100 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/39888a9b-c1cd-496e-b44e-a27212faac74-ovn-rundir\") pod \"39888a9b-c1cd-496e-b44e-a27212faac74\" (UID: \"39888a9b-c1cd-496e-b44e-a27212faac74\") " Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.113129 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/39888a9b-c1cd-496e-b44e-a27212faac74-combined-ca-bundle\") pod \"39888a9b-c1cd-496e-b44e-a27212faac74\" (UID: \"39888a9b-c1cd-496e-b44e-a27212faac74\") " Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.113612 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tw9n2\" (UniqueName: \"kubernetes.io/projected/39888a9b-c1cd-496e-b44e-a27212faac74-kube-api-access-tw9n2\") pod \"39888a9b-c1cd-496e-b44e-a27212faac74\" (UID: \"39888a9b-c1cd-496e-b44e-a27212faac74\") " Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.113727 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/configmap/39888a9b-c1cd-496e-b44e-a27212faac74-scripts\") pod \"39888a9b-c1cd-496e-b44e-a27212faac74\" (UID: \"39888a9b-c1cd-496e-b44e-a27212faac74\") " Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.114178 4762 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/0ca3d4c1-b9e5-4443-8102-7739602cbd2f-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.114198 4762 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0ca3d4c1-b9e5-4443-8102-7739602cbd2f-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.114210 4762 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/0ca3d4c1-b9e5-4443-8102-7739602cbd2f-server-conf\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.114234 4762 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" " Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.114246 4762 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/0ca3d4c1-b9e5-4443-8102-7739602cbd2f-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.114258 4762 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/0ca3d4c1-b9e5-4443-8102-7739602cbd2f-pod-info\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.114270 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7t98p\" (UniqueName: \"kubernetes.io/projected/0ca3d4c1-b9e5-4443-8102-7739602cbd2f-kube-api-access-7t98p\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.114282 4762 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/0ca3d4c1-b9e5-4443-8102-7739602cbd2f-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.114294 4762 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/0ca3d4c1-b9e5-4443-8102-7739602cbd2f-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.115218 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/39888a9b-c1cd-496e-b44e-a27212faac74-scripts" (OuterVolumeSpecName: "scripts") pod "39888a9b-c1cd-496e-b44e-a27212faac74" (UID: "39888a9b-c1cd-496e-b44e-a27212faac74"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.117412 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/39888a9b-c1cd-496e-b44e-a27212faac74-ovn-rundir" (OuterVolumeSpecName: "ovn-rundir") pod "39888a9b-c1cd-496e-b44e-a27212faac74" (UID: "39888a9b-c1cd-496e-b44e-a27212faac74"). InnerVolumeSpecName "ovn-rundir". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.123441 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/39888a9b-c1cd-496e-b44e-a27212faac74-config" (OuterVolumeSpecName: "config") pod "39888a9b-c1cd-496e-b44e-a27212faac74" (UID: "39888a9b-c1cd-496e-b44e-a27212faac74"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.148552 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novaapi3399-account-delete-l75cx" event={"ID":"7428fe70-b1c9-4595-a9b2-fa90ac21c3b2","Type":"ContainerDied","Data":"f3bc8912e293d82c078972cfed0700047949ccde8ae327569544fba2a873164f"} Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.148655 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"0ca3d4c1-b9e5-4443-8102-7739602cbd2f","Type":"ContainerDied","Data":"f1abbdb910acded618c7b329467c1d59b4d7f9365598dbf3d11919c8b70ec8ef"} Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.148656 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/39888a9b-c1cd-496e-b44e-a27212faac74-kube-api-access-tw9n2" (OuterVolumeSpecName: "kube-api-access-tw9n2") pod "39888a9b-c1cd-496e-b44e-a27212faac74" (UID: "39888a9b-c1cd-496e-b44e-a27212faac74"). InnerVolumeSpecName "kube-api-access-tw9n2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.148676 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"0ca3d4c1-b9e5-4443-8102-7739602cbd2f","Type":"ContainerDied","Data":"775e620beb1c7d647ce166687e7f59bd5039abd62eae5c699def4466175fda6d"} Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.148754 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placementd6ff-account-delete-xl4vt" event={"ID":"ecf33027-d452-4ccd-a23f-52697374958c","Type":"ContainerDied","Data":"c62eadd0a9d60975e6fc3e7ccbc2485bf73c659dc8bf2698fc5f06179af91bf1"} Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.150926 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-98dbd4bdf-stq5g" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.154899 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0ca3d4c1-b9e5-4443-8102-7739602cbd2f-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "0ca3d4c1-b9e5-4443-8102-7739602cbd2f" (UID: "0ca3d4c1-b9e5-4443-8102-7739602cbd2f"). InnerVolumeSpecName "rabbitmq-confd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.170298 4762 scope.go:117] "RemoveContainer" containerID="4162553b0af34403f13cf39cadfd4b05aeaf6a4b793292efa6af66dcae48120e" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.173616 4762 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage06-crc" (UniqueName: "kubernetes.io/local-volume/local-storage06-crc") on node "crc" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.199564 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/39888a9b-c1cd-496e-b44e-a27212faac74-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "39888a9b-c1cd-496e-b44e-a27212faac74" (UID: "39888a9b-c1cd-496e-b44e-a27212faac74"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.215525 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tw9n2\" (UniqueName: \"kubernetes.io/projected/39888a9b-c1cd-496e-b44e-a27212faac74-kube-api-access-tw9n2\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.215557 4762 reconciler_common.go:293] "Volume detached for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.215570 4762 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/39888a9b-c1cd-496e-b44e-a27212faac74-scripts\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.215578 4762 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/39888a9b-c1cd-496e-b44e-a27212faac74-config\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.215586 4762 reconciler_common.go:293] "Volume detached for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/39888a9b-c1cd-496e-b44e-a27212faac74-ovn-rundir\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.215596 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/39888a9b-c1cd-496e-b44e-a27212faac74-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.215607 4762 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/0ca3d4c1-b9e5-4443-8102-7739602cbd2f-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.238161 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/39888a9b-c1cd-496e-b44e-a27212faac74-ovn-northd-tls-certs" (OuterVolumeSpecName: "ovn-northd-tls-certs") pod "39888a9b-c1cd-496e-b44e-a27212faac74" (UID: "39888a9b-c1cd-496e-b44e-a27212faac74"). InnerVolumeSpecName "ovn-northd-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.244916 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/39888a9b-c1cd-496e-b44e-a27212faac74-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "39888a9b-c1cd-496e-b44e-a27212faac74" (UID: "39888a9b-c1cd-496e-b44e-a27212faac74"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.303730 4762 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/swift-proxy-6d457f6487-fjznb" podUID="5b55cc23-05bb-4df9-9876-d725071b9838" containerName="proxy-server" probeResult="failure" output="Get \"https://10.217.0.165:8080/healthcheck\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.308295 4762 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/swift-proxy-6d457f6487-fjznb" podUID="5b55cc23-05bb-4df9-9876-d725071b9838" containerName="proxy-httpd" probeResult="failure" output="Get \"https://10.217.0.165:8080/healthcheck\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.316834 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/06ff304d-2fbe-412d-8eeb-098ff74fc7a6-scripts\") pod \"06ff304d-2fbe-412d-8eeb-098ff74fc7a6\" (UID: \"06ff304d-2fbe-412d-8eeb-098ff74fc7a6\") " Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.317044 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/06ff304d-2fbe-412d-8eeb-098ff74fc7a6-combined-ca-bundle\") pod \"06ff304d-2fbe-412d-8eeb-098ff74fc7a6\" (UID: \"06ff304d-2fbe-412d-8eeb-098ff74fc7a6\") " Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.317176 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/06ff304d-2fbe-412d-8eeb-098ff74fc7a6-config-data\") pod \"06ff304d-2fbe-412d-8eeb-098ff74fc7a6\" (UID: \"06ff304d-2fbe-412d-8eeb-098ff74fc7a6\") " Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.317365 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gvc2p\" (UniqueName: \"kubernetes.io/projected/06ff304d-2fbe-412d-8eeb-098ff74fc7a6-kube-api-access-gvc2p\") pod \"06ff304d-2fbe-412d-8eeb-098ff74fc7a6\" (UID: \"06ff304d-2fbe-412d-8eeb-098ff74fc7a6\") " Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.317490 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/06ff304d-2fbe-412d-8eeb-098ff74fc7a6-public-tls-certs\") pod \"06ff304d-2fbe-412d-8eeb-098ff74fc7a6\" (UID: \"06ff304d-2fbe-412d-8eeb-098ff74fc7a6\") " Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.317593 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/06ff304d-2fbe-412d-8eeb-098ff74fc7a6-credential-keys\") pod \"06ff304d-2fbe-412d-8eeb-098ff74fc7a6\" (UID: \"06ff304d-2fbe-412d-8eeb-098ff74fc7a6\") " Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.317859 4762 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/06ff304d-2fbe-412d-8eeb-098ff74fc7a6-internal-tls-certs\") pod \"06ff304d-2fbe-412d-8eeb-098ff74fc7a6\" (UID: \"06ff304d-2fbe-412d-8eeb-098ff74fc7a6\") " Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.318008 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/06ff304d-2fbe-412d-8eeb-098ff74fc7a6-fernet-keys\") pod \"06ff304d-2fbe-412d-8eeb-098ff74fc7a6\" (UID: \"06ff304d-2fbe-412d-8eeb-098ff74fc7a6\") " Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.318847 4762 reconciler_common.go:293] "Volume detached for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/39888a9b-c1cd-496e-b44e-a27212faac74-ovn-northd-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.318983 4762 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/39888a9b-c1cd-496e-b44e-a27212faac74-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.324929 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/06ff304d-2fbe-412d-8eeb-098ff74fc7a6-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "06ff304d-2fbe-412d-8eeb-098ff74fc7a6" (UID: "06ff304d-2fbe-412d-8eeb-098ff74fc7a6"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.325753 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/06ff304d-2fbe-412d-8eeb-098ff74fc7a6-kube-api-access-gvc2p" (OuterVolumeSpecName: "kube-api-access-gvc2p") pod "06ff304d-2fbe-412d-8eeb-098ff74fc7a6" (UID: "06ff304d-2fbe-412d-8eeb-098ff74fc7a6"). InnerVolumeSpecName "kube-api-access-gvc2p". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.326628 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/06ff304d-2fbe-412d-8eeb-098ff74fc7a6-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "06ff304d-2fbe-412d-8eeb-098ff74fc7a6" (UID: "06ff304d-2fbe-412d-8eeb-098ff74fc7a6"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.328321 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/06ff304d-2fbe-412d-8eeb-098ff74fc7a6-scripts" (OuterVolumeSpecName: "scripts") pod "06ff304d-2fbe-412d-8eeb-098ff74fc7a6" (UID: "06ff304d-2fbe-412d-8eeb-098ff74fc7a6"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.345828 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/06ff304d-2fbe-412d-8eeb-098ff74fc7a6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "06ff304d-2fbe-412d-8eeb-098ff74fc7a6" (UID: "06ff304d-2fbe-412d-8eeb-098ff74fc7a6"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.350440 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/06ff304d-2fbe-412d-8eeb-098ff74fc7a6-config-data" (OuterVolumeSpecName: "config-data") pod "06ff304d-2fbe-412d-8eeb-098ff74fc7a6" (UID: "06ff304d-2fbe-412d-8eeb-098ff74fc7a6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.371181 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.372822 4762 scope.go:117] "RemoveContainer" containerID="5b0ad70d278446d3548567be96b4c62877cd198d62cd13385055a63295deb65a" Oct 09 13:50:27 crc kubenswrapper[4762]: E1009 13:50:27.376112 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5b0ad70d278446d3548567be96b4c62877cd198d62cd13385055a63295deb65a\": container with ID starting with 5b0ad70d278446d3548567be96b4c62877cd198d62cd13385055a63295deb65a not found: ID does not exist" containerID="5b0ad70d278446d3548567be96b4c62877cd198d62cd13385055a63295deb65a" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.376145 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5b0ad70d278446d3548567be96b4c62877cd198d62cd13385055a63295deb65a"} err="failed to get container status \"5b0ad70d278446d3548567be96b4c62877cd198d62cd13385055a63295deb65a\": rpc error: code = NotFound desc = could not find container \"5b0ad70d278446d3548567be96b4c62877cd198d62cd13385055a63295deb65a\": container with ID starting with 5b0ad70d278446d3548567be96b4c62877cd198d62cd13385055a63295deb65a not found: ID does not exist" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.376171 4762 scope.go:117] "RemoveContainer" containerID="4162553b0af34403f13cf39cadfd4b05aeaf6a4b793292efa6af66dcae48120e" Oct 09 13:50:27 crc kubenswrapper[4762]: E1009 13:50:27.376811 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4162553b0af34403f13cf39cadfd4b05aeaf6a4b793292efa6af66dcae48120e\": container with ID starting with 4162553b0af34403f13cf39cadfd4b05aeaf6a4b793292efa6af66dcae48120e not found: ID does not exist" containerID="4162553b0af34403f13cf39cadfd4b05aeaf6a4b793292efa6af66dcae48120e" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.376836 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4162553b0af34403f13cf39cadfd4b05aeaf6a4b793292efa6af66dcae48120e"} err="failed to get container status \"4162553b0af34403f13cf39cadfd4b05aeaf6a4b793292efa6af66dcae48120e\": rpc error: code = NotFound desc = could not find container \"4162553b0af34403f13cf39cadfd4b05aeaf6a4b793292efa6af66dcae48120e\": container with ID starting with 4162553b0af34403f13cf39cadfd4b05aeaf6a4b793292efa6af66dcae48120e not found: ID does not exist" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.376857 4762 scope.go:117] "RemoveContainer" containerID="8dcac387f0104fe0b4c3b09d4b3b425e392e3b56f15885091efa7efa96f35c1a" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.388183 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-0"] Oct 09 13:50:27 crc kubenswrapper[4762]: E1009 13:50:27.388323 4762 log.go:32] "ExecSync cmd from runtime 
service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="69b2f138f6eac1a1b6ef2395b7ac7ec1ed57835677595c9ca3b0ccd77b5d4343" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Oct 09 13:50:27 crc kubenswrapper[4762]: E1009 13:50:27.390391 4762 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="69b2f138f6eac1a1b6ef2395b7ac7ec1ed57835677595c9ca3b0ccd77b5d4343" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.400462 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/06ff304d-2fbe-412d-8eeb-098ff74fc7a6-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "06ff304d-2fbe-412d-8eeb-098ff74fc7a6" (UID: "06ff304d-2fbe-412d-8eeb-098ff74fc7a6"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:50:27 crc kubenswrapper[4762]: E1009 13:50:27.400784 4762 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="69b2f138f6eac1a1b6ef2395b7ac7ec1ed57835677595c9ca3b0ccd77b5d4343" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Oct 09 13:50:27 crc kubenswrapper[4762]: E1009 13:50:27.400864 4762 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-cell1-conductor-0" podUID="2b85dbb0-642b-4f4e-a616-7904624b2e5a" containerName="nova-cell1-conductor-conductor" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.409729 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-7c78979758-qtqdp"] Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.420326 4762 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/06ff304d-2fbe-412d-8eeb-098ff74fc7a6-credential-keys\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.420364 4762 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/06ff304d-2fbe-412d-8eeb-098ff74fc7a6-fernet-keys\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.420376 4762 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/06ff304d-2fbe-412d-8eeb-098ff74fc7a6-scripts\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.420388 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/06ff304d-2fbe-412d-8eeb-098ff74fc7a6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.420399 4762 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/06ff304d-2fbe-412d-8eeb-098ff74fc7a6-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.420410 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gvc2p\" (UniqueName: 
\"kubernetes.io/projected/06ff304d-2fbe-412d-8eeb-098ff74fc7a6-kube-api-access-gvc2p\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.420422 4762 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/06ff304d-2fbe-412d-8eeb-098ff74fc7a6-public-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.428116 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-7c78979758-qtqdp"] Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.442076 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/06ff304d-2fbe-412d-8eeb-098ff74fc7a6-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "06ff304d-2fbe-412d-8eeb-098ff74fc7a6" (UID: "06ff304d-2fbe-412d-8eeb-098ff74fc7a6"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.443467 4762 scope.go:117] "RemoveContainer" containerID="f1abbdb910acded618c7b329467c1d59b4d7f9365598dbf3d11919c8b70ec8ef" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.443514 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.468024 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.471166 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/novaapi3399-account-delete-l75cx"] Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.483152 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/novaapi3399-account-delete-l75cx"] Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.488641 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/memcached-0"] Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.496180 4762 scope.go:117] "RemoveContainer" containerID="6d253aec4527a1cce29420f394ed34441df96bec1ff09c961c18414c6cae23c3" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.496667 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/memcached-0"] Oct 09 13:50:27 crc kubenswrapper[4762]: E1009 13:50:27.500389 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8dcac387f0104fe0b4c3b09d4b3b425e392e3b56f15885091efa7efa96f35c1a\": container with ID starting with 8dcac387f0104fe0b4c3b09d4b3b425e392e3b56f15885091efa7efa96f35c1a not found: ID does not exist" containerID="8dcac387f0104fe0b4c3b09d4b3b425e392e3b56f15885091efa7efa96f35c1a" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.500456 4762 kuberuntime_gc.go:361] "Error getting ContainerStatus for containerID" containerID="8dcac387f0104fe0b4c3b09d4b3b425e392e3b56f15885091efa7efa96f35c1a" err="rpc error: code = NotFound desc = could not find container \"8dcac387f0104fe0b4c3b09d4b3b425e392e3b56f15885091efa7efa96f35c1a\": container with ID starting with 8dcac387f0104fe0b4c3b09d4b3b425e392e3b56f15885091efa7efa96f35c1a not found: ID does not exist" Oct 09 13:50:27 crc kubenswrapper[4762]: E1009 13:50:27.500499 4762 kuberuntime_gc.go:389] "Failed to remove container log dead symlink" err="remove /var/log/containers/novaapi3399-account-delete-l75cx_openstack_mariadb-account-delete-8dcac387f0104fe0b4c3b09d4b3b425e392e3b56f15885091efa7efa96f35c1a.log: no such file or 
directory" path="/var/log/containers/novaapi3399-account-delete-l75cx_openstack_mariadb-account-delete-8dcac387f0104fe0b4c3b09d4b3b425e392e3b56f15885091efa7efa96f35c1a.log" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.521986 4762 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/06ff304d-2fbe-412d-8eeb-098ff74fc7a6-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.530048 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 09 13:50:27 crc kubenswrapper[4762]: E1009 13:50:27.532487 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f1abbdb910acded618c7b329467c1d59b4d7f9365598dbf3d11919c8b70ec8ef\": container with ID starting with f1abbdb910acded618c7b329467c1d59b4d7f9365598dbf3d11919c8b70ec8ef not found: ID does not exist" containerID="f1abbdb910acded618c7b329467c1d59b4d7f9365598dbf3d11919c8b70ec8ef" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.532538 4762 kuberuntime_gc.go:361] "Error getting ContainerStatus for containerID" containerID="f1abbdb910acded618c7b329467c1d59b4d7f9365598dbf3d11919c8b70ec8ef" err="rpc error: code = NotFound desc = could not find container \"f1abbdb910acded618c7b329467c1d59b4d7f9365598dbf3d11919c8b70ec8ef\": container with ID starting with f1abbdb910acded618c7b329467c1d59b4d7f9365598dbf3d11919c8b70ec8ef not found: ID does not exist" Oct 09 13:50:27 crc kubenswrapper[4762]: E1009 13:50:27.532573 4762 kuberuntime_gc.go:389] "Failed to remove container log dead symlink" err="remove /var/log/containers/rabbitmq-server-0_openstack_rabbitmq-f1abbdb910acded618c7b329467c1d59b4d7f9365598dbf3d11919c8b70ec8ef.log: no such file or directory" path="/var/log/containers/rabbitmq-server-0_openstack_rabbitmq-f1abbdb910acded618c7b329467c1d59b4d7f9365598dbf3d11919c8b70ec8ef.log" Oct 09 13:50:27 crc kubenswrapper[4762]: E1009 13:50:27.533119 4762 kuberuntime_gc.go:389] "Failed to remove container log dead symlink" err="remove /var/log/containers/rabbitmq-server-0_openstack_setup-container-6d253aec4527a1cce29420f394ed34441df96bec1ff09c961c18414c6cae23c3.log: no such file or directory" path="/var/log/containers/rabbitmq-server-0_openstack_setup-container-6d253aec4527a1cce29420f394ed34441df96bec1ff09c961c18414c6cae23c3.log" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.551502 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.559610 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican272a-account-delete-z2pw8"] Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.567701 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican272a-account-delete-z2pw8"] Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.582495 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-5x8cx"] Oct 09 13:50:27 crc kubenswrapper[4762]: E1009 13:50:27.582908 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="39780bc9-4ec7-4578-b64e-40c2a1bba06c" containerName="openstack-network-exporter" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.582929 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="39780bc9-4ec7-4578-b64e-40c2a1bba06c" containerName="openstack-network-exporter" Oct 09 13:50:27 crc kubenswrapper[4762]: E1009 
13:50:27.582955 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac020a5d-4e49-4f85-bc3c-13769c5f418a" containerName="placement-api" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.582965 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac020a5d-4e49-4f85-bc3c-13769c5f418a" containerName="placement-api" Oct 09 13:50:27 crc kubenswrapper[4762]: E1009 13:50:27.582981 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3" containerName="sg-core" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.582989 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3" containerName="sg-core" Oct 09 13:50:27 crc kubenswrapper[4762]: E1009 13:50:27.583001 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d71b766d-302c-456b-ae8f-9befcb684b6a" containerName="mariadb-account-delete" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.583008 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="d71b766d-302c-456b-ae8f-9befcb684b6a" containerName="mariadb-account-delete" Oct 09 13:50:27 crc kubenswrapper[4762]: E1009 13:50:27.583021 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="920bd70a-a807-4848-baf7-5bfc033838ee" containerName="memcached" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.583028 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="920bd70a-a807-4848-baf7-5bfc033838ee" containerName="memcached" Oct 09 13:50:27 crc kubenswrapper[4762]: E1009 13:50:27.583044 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c325595-23e7-465e-a533-181eef8ba528" containerName="kube-state-metrics" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.583052 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c325595-23e7-465e-a533-181eef8ba528" containerName="kube-state-metrics" Oct 09 13:50:27 crc kubenswrapper[4762]: E1009 13:50:27.583064 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4dc0b86c-7af2-4320-9b39-735213fb6609" containerName="galera" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.583072 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="4dc0b86c-7af2-4320-9b39-735213fb6609" containerName="galera" Oct 09 13:50:27 crc kubenswrapper[4762]: E1009 13:50:27.583085 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="80456c7c-9571-4520-989a-53654daad82c" containerName="mariadb-account-delete" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.583092 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="80456c7c-9571-4520-989a-53654daad82c" containerName="mariadb-account-delete" Oct 09 13:50:27 crc kubenswrapper[4762]: E1009 13:50:27.583101 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="39888a9b-c1cd-496e-b44e-a27212faac74" containerName="openstack-network-exporter" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.583109 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="39888a9b-c1cd-496e-b44e-a27212faac74" containerName="openstack-network-exporter" Oct 09 13:50:27 crc kubenswrapper[4762]: E1009 13:50:27.583121 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06ff304d-2fbe-412d-8eeb-098ff74fc7a6" containerName="keystone-api" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.583130 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="06ff304d-2fbe-412d-8eeb-098ff74fc7a6" containerName="keystone-api" Oct 09 13:50:27 crc 
kubenswrapper[4762]: E1009 13:50:27.583144 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4" containerName="cinder-api-log" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.583152 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4" containerName="cinder-api-log" Oct 09 13:50:27 crc kubenswrapper[4762]: E1009 13:50:27.583159 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5b14562-fc3d-48fc-b83d-c12e5c9617c6" containerName="nova-api-log" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.583166 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5b14562-fc3d-48fc-b83d-c12e5c9617c6" containerName="nova-api-log" Oct 09 13:50:27 crc kubenswrapper[4762]: E1009 13:50:27.583177 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8e2afde-2991-4771-ae61-dc363b0d5f04" containerName="mariadb-account-delete" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.583185 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8e2afde-2991-4771-ae61-dc363b0d5f04" containerName="mariadb-account-delete" Oct 09 13:50:27 crc kubenswrapper[4762]: E1009 13:50:27.583194 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa137ef5-e1eb-4e54-a8e3-4a312e167837" containerName="nova-cell1-novncproxy-novncproxy" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.583201 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa137ef5-e1eb-4e54-a8e3-4a312e167837" containerName="nova-cell1-novncproxy-novncproxy" Oct 09 13:50:27 crc kubenswrapper[4762]: E1009 13:50:27.583219 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="73168d57-30d8-4389-aa93-cacc6b07f705" containerName="openstack-network-exporter" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.583226 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="73168d57-30d8-4389-aa93-cacc6b07f705" containerName="openstack-network-exporter" Oct 09 13:50:27 crc kubenswrapper[4762]: E1009 13:50:27.583236 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac020a5d-4e49-4f85-bc3c-13769c5f418a" containerName="placement-log" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.583244 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac020a5d-4e49-4f85-bc3c-13769c5f418a" containerName="placement-log" Oct 09 13:50:27 crc kubenswrapper[4762]: E1009 13:50:27.583259 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3" containerName="ceilometer-central-agent" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.583266 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3" containerName="ceilometer-central-agent" Oct 09 13:50:27 crc kubenswrapper[4762]: E1009 13:50:27.583303 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="73168d57-30d8-4389-aa93-cacc6b07f705" containerName="ovsdbserver-nb" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.583311 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="73168d57-30d8-4389-aa93-cacc6b07f705" containerName="ovsdbserver-nb" Oct 09 13:50:27 crc kubenswrapper[4762]: E1009 13:50:27.583320 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="39888a9b-c1cd-496e-b44e-a27212faac74" containerName="ovn-northd" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.583327 4762 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="39888a9b-c1cd-496e-b44e-a27212faac74" containerName="ovn-northd" Oct 09 13:50:27 crc kubenswrapper[4762]: E1009 13:50:27.583341 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5b55cc23-05bb-4df9-9876-d725071b9838" containerName="proxy-server" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.583348 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="5b55cc23-05bb-4df9-9876-d725071b9838" containerName="proxy-server" Oct 09 13:50:27 crc kubenswrapper[4762]: E1009 13:50:27.583363 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b6e56749-167c-4542-b79f-a374a2f7ef20" containerName="nova-cell0-conductor-conductor" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.583371 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="b6e56749-167c-4542-b79f-a374a2f7ef20" containerName="nova-cell0-conductor-conductor" Oct 09 13:50:27 crc kubenswrapper[4762]: E1009 13:50:27.583381 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1" containerName="barbican-api-log" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.583389 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1" containerName="barbican-api-log" Oct 09 13:50:27 crc kubenswrapper[4762]: E1009 13:50:27.583402 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="96d5f387-4c72-4cc6-9776-56fcb49b8851" containerName="glance-log" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.583410 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="96d5f387-4c72-4cc6-9776-56fcb49b8851" containerName="glance-log" Oct 09 13:50:27 crc kubenswrapper[4762]: E1009 13:50:27.583419 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2fc7e86f-4e77-45a4-a90c-6b06d4907ca7" containerName="init" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.583426 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="2fc7e86f-4e77-45a4-a90c-6b06d4907ca7" containerName="init" Oct 09 13:50:27 crc kubenswrapper[4762]: E1009 13:50:27.583436 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de9e8701-0980-4e1d-beb6-bc897f6a3e5f" containerName="openstack-network-exporter" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.583443 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="de9e8701-0980-4e1d-beb6-bc897f6a3e5f" containerName="openstack-network-exporter" Oct 09 13:50:27 crc kubenswrapper[4762]: E1009 13:50:27.583453 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181" containerName="glance-httpd" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.583462 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181" containerName="glance-httpd" Oct 09 13:50:27 crc kubenswrapper[4762]: E1009 13:50:27.583477 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3" containerName="ceilometer-notification-agent" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.583485 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3" containerName="ceilometer-notification-agent" Oct 09 13:50:27 crc kubenswrapper[4762]: E1009 13:50:27.583500 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de9e8701-0980-4e1d-beb6-bc897f6a3e5f" containerName="ovsdbserver-sb" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.583507 
4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="de9e8701-0980-4e1d-beb6-bc897f6a3e5f" containerName="ovsdbserver-sb" Oct 09 13:50:27 crc kubenswrapper[4762]: E1009 13:50:27.583519 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0ca3d4c1-b9e5-4443-8102-7739602cbd2f" containerName="setup-container" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.583526 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="0ca3d4c1-b9e5-4443-8102-7739602cbd2f" containerName="setup-container" Oct 09 13:50:27 crc kubenswrapper[4762]: E1009 13:50:27.583538 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d09898b1-4f64-490a-bf9f-eb03d7219d8d" containerName="nova-metadata-metadata" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.588683 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="d09898b1-4f64-490a-bf9f-eb03d7219d8d" containerName="nova-metadata-metadata" Oct 09 13:50:27 crc kubenswrapper[4762]: E1009 13:50:27.588697 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d09898b1-4f64-490a-bf9f-eb03d7219d8d" containerName="nova-metadata-log" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.588704 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="d09898b1-4f64-490a-bf9f-eb03d7219d8d" containerName="nova-metadata-log" Oct 09 13:50:27 crc kubenswrapper[4762]: E1009 13:50:27.588720 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7428fe70-b1c9-4595-a9b2-fa90ac21c3b2" containerName="mariadb-account-delete" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.588726 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="7428fe70-b1c9-4595-a9b2-fa90ac21c3b2" containerName="mariadb-account-delete" Oct 09 13:50:27 crc kubenswrapper[4762]: E1009 13:50:27.588734 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1" containerName="barbican-api" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.588741 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1" containerName="barbican-api" Oct 09 13:50:27 crc kubenswrapper[4762]: E1009 13:50:27.588749 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="96d5f387-4c72-4cc6-9776-56fcb49b8851" containerName="glance-httpd" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.588755 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="96d5f387-4c72-4cc6-9776-56fcb49b8851" containerName="glance-httpd" Oct 09 13:50:27 crc kubenswrapper[4762]: E1009 13:50:27.588765 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d5e35ba-6450-49d9-907a-8a4f879a1b0f" containerName="ovn-controller" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.588771 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d5e35ba-6450-49d9-907a-8a4f879a1b0f" containerName="ovn-controller" Oct 09 13:50:27 crc kubenswrapper[4762]: E1009 13:50:27.588783 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="46b3e9f8-479c-45b8-afc9-4c8344da0797" containerName="probe" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.588789 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="46b3e9f8-479c-45b8-afc9-4c8344da0797" containerName="probe" Oct 09 13:50:27 crc kubenswrapper[4762]: E1009 13:50:27.588796 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3" containerName="proxy-httpd" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 
13:50:27.588802 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3" containerName="proxy-httpd" Oct 09 13:50:27 crc kubenswrapper[4762]: E1009 13:50:27.588808 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ffc2314d-5e5f-45e6-9134-9879b35e0f2d" containerName="mariadb-account-delete" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.588813 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="ffc2314d-5e5f-45e6-9134-9879b35e0f2d" containerName="mariadb-account-delete" Oct 09 13:50:27 crc kubenswrapper[4762]: E1009 13:50:27.588822 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ecf33027-d452-4ccd-a23f-52697374958c" containerName="mariadb-account-delete" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.588829 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="ecf33027-d452-4ccd-a23f-52697374958c" containerName="mariadb-account-delete" Oct 09 13:50:27 crc kubenswrapper[4762]: E1009 13:50:27.588840 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="46b3e9f8-479c-45b8-afc9-4c8344da0797" containerName="cinder-scheduler" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.588845 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="46b3e9f8-479c-45b8-afc9-4c8344da0797" containerName="cinder-scheduler" Oct 09 13:50:27 crc kubenswrapper[4762]: E1009 13:50:27.588852 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0ca3d4c1-b9e5-4443-8102-7739602cbd2f" containerName="rabbitmq" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.588857 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="0ca3d4c1-b9e5-4443-8102-7739602cbd2f" containerName="rabbitmq" Oct 09 13:50:27 crc kubenswrapper[4762]: E1009 13:50:27.588865 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2fc7e86f-4e77-45a4-a90c-6b06d4907ca7" containerName="dnsmasq-dns" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.588871 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="2fc7e86f-4e77-45a4-a90c-6b06d4907ca7" containerName="dnsmasq-dns" Oct 09 13:50:27 crc kubenswrapper[4762]: E1009 13:50:27.588884 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181" containerName="glance-log" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.588889 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181" containerName="glance-log" Oct 09 13:50:27 crc kubenswrapper[4762]: E1009 13:50:27.588900 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5b55cc23-05bb-4df9-9876-d725071b9838" containerName="proxy-httpd" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.588907 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="5b55cc23-05bb-4df9-9876-d725071b9838" containerName="proxy-httpd" Oct 09 13:50:27 crc kubenswrapper[4762]: E1009 13:50:27.588915 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5b14562-fc3d-48fc-b83d-c12e5c9617c6" containerName="nova-api-api" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.588922 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5b14562-fc3d-48fc-b83d-c12e5c9617c6" containerName="nova-api-api" Oct 09 13:50:27 crc kubenswrapper[4762]: E1009 13:50:27.588930 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e56a40fb-847a-4e59-981b-369559466cb1" containerName="nova-scheduler-scheduler" Oct 09 13:50:27 crc 
kubenswrapper[4762]: I1009 13:50:27.588935 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="e56a40fb-847a-4e59-981b-369559466cb1" containerName="nova-scheduler-scheduler" Oct 09 13:50:27 crc kubenswrapper[4762]: E1009 13:50:27.588947 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4dc0b86c-7af2-4320-9b39-735213fb6609" containerName="mysql-bootstrap" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.588953 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="4dc0b86c-7af2-4320-9b39-735213fb6609" containerName="mysql-bootstrap" Oct 09 13:50:27 crc kubenswrapper[4762]: E1009 13:50:27.588964 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4" containerName="cinder-api" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.588970 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4" containerName="cinder-api" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.589185 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3" containerName="sg-core" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.589199 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="46b3e9f8-479c-45b8-afc9-4c8344da0797" containerName="probe" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.589210 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="de9e8701-0980-4e1d-beb6-bc897f6a3e5f" containerName="ovsdbserver-sb" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.589220 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="ecf33027-d452-4ccd-a23f-52697374958c" containerName="mariadb-account-delete" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.589232 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="d09898b1-4f64-490a-bf9f-eb03d7219d8d" containerName="nova-metadata-metadata" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.589240 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="aa137ef5-e1eb-4e54-a8e3-4a312e167837" containerName="nova-cell1-novncproxy-novncproxy" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.589247 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="b8e2afde-2991-4771-ae61-dc363b0d5f04" containerName="mariadb-account-delete" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.589256 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="46b3e9f8-479c-45b8-afc9-4c8344da0797" containerName="cinder-scheduler" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.589266 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="d09898b1-4f64-490a-bf9f-eb03d7219d8d" containerName="nova-metadata-log" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.589273 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="39888a9b-c1cd-496e-b44e-a27212faac74" containerName="openstack-network-exporter" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.589280 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="f5b14562-fc3d-48fc-b83d-c12e5c9617c6" containerName="nova-api-api" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.589287 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="5b55cc23-05bb-4df9-9876-d725071b9838" containerName="proxy-server" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.589297 4762 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="b6e56749-167c-4542-b79f-a374a2f7ef20" containerName="nova-cell0-conductor-conductor" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.589305 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="de9e8701-0980-4e1d-beb6-bc897f6a3e5f" containerName="openstack-network-exporter" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.589314 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="4dc0b86c-7af2-4320-9b39-735213fb6609" containerName="galera" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.589325 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="06ff304d-2fbe-412d-8eeb-098ff74fc7a6" containerName="keystone-api" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.589334 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="3d5e35ba-6450-49d9-907a-8a4f879a1b0f" containerName="ovn-controller" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.589341 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="7c325595-23e7-465e-a533-181eef8ba528" containerName="kube-state-metrics" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.589350 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="96d5f387-4c72-4cc6-9776-56fcb49b8851" containerName="glance-log" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.589361 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="920bd70a-a807-4848-baf7-5bfc033838ee" containerName="memcached" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.589371 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="39780bc9-4ec7-4578-b64e-40c2a1bba06c" containerName="openstack-network-exporter" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.589378 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3" containerName="ceilometer-notification-agent" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.589386 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4" containerName="cinder-api" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.589396 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="2fc7e86f-4e77-45a4-a90c-6b06d4907ca7" containerName="dnsmasq-dns" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.589406 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="73168d57-30d8-4389-aa93-cacc6b07f705" containerName="openstack-network-exporter" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.589413 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181" containerName="glance-httpd" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.589423 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="39888a9b-c1cd-496e-b44e-a27212faac74" containerName="ovn-northd" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.589432 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3" containerName="ceilometer-central-agent" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.589442 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="ffc2314d-5e5f-45e6-9134-9879b35e0f2d" containerName="mariadb-account-delete" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.589449 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1" 
containerName="barbican-api-log" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.589456 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="ac020a5d-4e49-4f85-bc3c-13769c5f418a" containerName="placement-api" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.589463 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="f5b14562-fc3d-48fc-b83d-c12e5c9617c6" containerName="nova-api-log" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.589472 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="80456c7c-9571-4520-989a-53654daad82c" containerName="mariadb-account-delete" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.589482 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="b69763f1-c5df-4f8c-9fb8-3d7f5ac8f181" containerName="glance-log" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.589489 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="0ca3d4c1-b9e5-4443-8102-7739602cbd2f" containerName="rabbitmq" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.589496 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="a0a72b4a-cdc0-43e1-83a4-6d6629ec66c4" containerName="cinder-api-log" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.589504 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="7428fe70-b1c9-4595-a9b2-fa90ac21c3b2" containerName="mariadb-account-delete" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.589513 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="96d5f387-4c72-4cc6-9776-56fcb49b8851" containerName="glance-httpd" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.589524 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="ac020a5d-4e49-4f85-bc3c-13769c5f418a" containerName="placement-log" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.589531 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1" containerName="barbican-api" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.589539 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="5b55cc23-05bb-4df9-9876-d725071b9838" containerName="proxy-httpd" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.589548 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="d71b766d-302c-456b-ae8f-9befcb684b6a" containerName="mariadb-account-delete" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.589555 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="73168d57-30d8-4389-aa93-cacc6b07f705" containerName="ovsdbserver-nb" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.589564 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="e56a40fb-847a-4e59-981b-369559466cb1" containerName="nova-scheduler-scheduler" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.589577 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="96ee7a8b-0aa4-42d4-9dd3-5e0350f40ce3" containerName="proxy-httpd" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.590690 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-5x8cx" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.591516 4762 scope.go:117] "RemoveContainer" containerID="f1abbdb910acded618c7b329467c1d59b4d7f9365598dbf3d11919c8b70ec8ef" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.594904 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f1abbdb910acded618c7b329467c1d59b4d7f9365598dbf3d11919c8b70ec8ef"} err="failed to get container status \"f1abbdb910acded618c7b329467c1d59b4d7f9365598dbf3d11919c8b70ec8ef\": rpc error: code = NotFound desc = could not find container \"f1abbdb910acded618c7b329467c1d59b4d7f9365598dbf3d11919c8b70ec8ef\": container with ID starting with f1abbdb910acded618c7b329467c1d59b4d7f9365598dbf3d11919c8b70ec8ef not found: ID does not exist" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.594934 4762 scope.go:117] "RemoveContainer" containerID="6d253aec4527a1cce29420f394ed34441df96bec1ff09c961c18414c6cae23c3" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.595355 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placementd6ff-account-delete-xl4vt"] Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.611333 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placementd6ff-account-delete-xl4vt"] Oct 09 13:50:27 crc kubenswrapper[4762]: E1009 13:50:27.617290 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6d253aec4527a1cce29420f394ed34441df96bec1ff09c961c18414c6cae23c3\": container with ID starting with 6d253aec4527a1cce29420f394ed34441df96bec1ff09c961c18414c6cae23c3 not found: ID does not exist" containerID="6d253aec4527a1cce29420f394ed34441df96bec1ff09c961c18414c6cae23c3" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.617336 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6d253aec4527a1cce29420f394ed34441df96bec1ff09c961c18414c6cae23c3"} err="failed to get container status \"6d253aec4527a1cce29420f394ed34441df96bec1ff09c961c18414c6cae23c3\": rpc error: code = NotFound desc = could not find container \"6d253aec4527a1cce29420f394ed34441df96bec1ff09c961c18414c6cae23c3\": container with ID starting with 6d253aec4527a1cce29420f394ed34441df96bec1ff09c961c18414c6cae23c3 not found: ID does not exist" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.617368 4762 scope.go:117] "RemoveContainer" containerID="334bbe3d62baa0c1b8660026714f6e35997b198ea2d286453849256999eda71f" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.619538 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-5x8cx"] Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.632598 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c01df130-cb51-49a8-bfc4-4df9b51ecae1-catalog-content\") pod \"certified-operators-5x8cx\" (UID: \"c01df130-cb51-49a8-bfc4-4df9b51ecae1\") " pod="openshift-marketplace/certified-operators-5x8cx" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.632828 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c01df130-cb51-49a8-bfc4-4df9b51ecae1-utilities\") pod \"certified-operators-5x8cx\" (UID: \"c01df130-cb51-49a8-bfc4-4df9b51ecae1\") " 
pod="openshift-marketplace/certified-operators-5x8cx" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.632991 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-76zmw\" (UniqueName: \"kubernetes.io/projected/c01df130-cb51-49a8-bfc4-4df9b51ecae1-kube-api-access-76zmw\") pod \"certified-operators-5x8cx\" (UID: \"c01df130-cb51-49a8-bfc4-4df9b51ecae1\") " pod="openshift-marketplace/certified-operators-5x8cx" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.668607 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.694765 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.717871 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-northd-0"] Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.720920 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-northd-0"] Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.733751 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-76zmw\" (UniqueName: \"kubernetes.io/projected/c01df130-cb51-49a8-bfc4-4df9b51ecae1-kube-api-access-76zmw\") pod \"certified-operators-5x8cx\" (UID: \"c01df130-cb51-49a8-bfc4-4df9b51ecae1\") " pod="openshift-marketplace/certified-operators-5x8cx" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.733827 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c01df130-cb51-49a8-bfc4-4df9b51ecae1-catalog-content\") pod \"certified-operators-5x8cx\" (UID: \"c01df130-cb51-49a8-bfc4-4df9b51ecae1\") " pod="openshift-marketplace/certified-operators-5x8cx" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.733882 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c01df130-cb51-49a8-bfc4-4df9b51ecae1-utilities\") pod \"certified-operators-5x8cx\" (UID: \"c01df130-cb51-49a8-bfc4-4df9b51ecae1\") " pod="openshift-marketplace/certified-operators-5x8cx" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.734951 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c01df130-cb51-49a8-bfc4-4df9b51ecae1-catalog-content\") pod \"certified-operators-5x8cx\" (UID: \"c01df130-cb51-49a8-bfc4-4df9b51ecae1\") " pod="openshift-marketplace/certified-operators-5x8cx" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.743801 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c01df130-cb51-49a8-bfc4-4df9b51ecae1-utilities\") pod \"certified-operators-5x8cx\" (UID: \"c01df130-cb51-49a8-bfc4-4df9b51ecae1\") " pod="openshift-marketplace/certified-operators-5x8cx" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.755698 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-76zmw\" (UniqueName: \"kubernetes.io/projected/c01df130-cb51-49a8-bfc4-4df9b51ecae1-kube-api-access-76zmw\") pod \"certified-operators-5x8cx\" (UID: \"c01df130-cb51-49a8-bfc4-4df9b51ecae1\") " pod="openshift-marketplace/certified-operators-5x8cx" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.837535 4762 util.go:48] "No ready sandbox for pod can be 
found. Need to start a new one" pod="openstack/barbican-worker-68c6fd668c-fdnbq" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.841575 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-86957c694-98m78" Oct 09 13:50:27 crc kubenswrapper[4762]: I1009 13:50:27.926486 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5x8cx" Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.043439 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cebc2aaf-d953-4acb-a8cd-31119e6cd7fd-logs\") pod \"cebc2aaf-d953-4acb-a8cd-31119e6cd7fd\" (UID: \"cebc2aaf-d953-4acb-a8cd-31119e6cd7fd\") " Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.043791 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9f119cc4-119d-4535-8abd-fe380d546595-config-data\") pod \"9f119cc4-119d-4535-8abd-fe380d546595\" (UID: \"9f119cc4-119d-4535-8abd-fe380d546595\") " Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.043871 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f119cc4-119d-4535-8abd-fe380d546595-combined-ca-bundle\") pod \"9f119cc4-119d-4535-8abd-fe380d546595\" (UID: \"9f119cc4-119d-4535-8abd-fe380d546595\") " Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.043895 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9f119cc4-119d-4535-8abd-fe380d546595-logs\") pod \"9f119cc4-119d-4535-8abd-fe380d546595\" (UID: \"9f119cc4-119d-4535-8abd-fe380d546595\") " Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.043923 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cebc2aaf-d953-4acb-a8cd-31119e6cd7fd-combined-ca-bundle\") pod \"cebc2aaf-d953-4acb-a8cd-31119e6cd7fd\" (UID: \"cebc2aaf-d953-4acb-a8cd-31119e6cd7fd\") " Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.043955 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9f119cc4-119d-4535-8abd-fe380d546595-config-data-custom\") pod \"9f119cc4-119d-4535-8abd-fe380d546595\" (UID: \"9f119cc4-119d-4535-8abd-fe380d546595\") " Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.043982 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/cebc2aaf-d953-4acb-a8cd-31119e6cd7fd-config-data-custom\") pod \"cebc2aaf-d953-4acb-a8cd-31119e6cd7fd\" (UID: \"cebc2aaf-d953-4acb-a8cd-31119e6cd7fd\") " Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.044032 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6zjkw\" (UniqueName: \"kubernetes.io/projected/cebc2aaf-d953-4acb-a8cd-31119e6cd7fd-kube-api-access-6zjkw\") pod \"cebc2aaf-d953-4acb-a8cd-31119e6cd7fd\" (UID: \"cebc2aaf-d953-4acb-a8cd-31119e6cd7fd\") " Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.044091 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cebc2aaf-d953-4acb-a8cd-31119e6cd7fd-config-data\") pod 
\"cebc2aaf-d953-4acb-a8cd-31119e6cd7fd\" (UID: \"cebc2aaf-d953-4acb-a8cd-31119e6cd7fd\") " Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.044119 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gzdxw\" (UniqueName: \"kubernetes.io/projected/9f119cc4-119d-4535-8abd-fe380d546595-kube-api-access-gzdxw\") pod \"9f119cc4-119d-4535-8abd-fe380d546595\" (UID: \"9f119cc4-119d-4535-8abd-fe380d546595\") " Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.044338 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cebc2aaf-d953-4acb-a8cd-31119e6cd7fd-logs" (OuterVolumeSpecName: "logs") pod "cebc2aaf-d953-4acb-a8cd-31119e6cd7fd" (UID: "cebc2aaf-d953-4acb-a8cd-31119e6cd7fd"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.044556 4762 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cebc2aaf-d953-4acb-a8cd-31119e6cd7fd-logs\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.044948 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9f119cc4-119d-4535-8abd-fe380d546595-logs" (OuterVolumeSpecName: "logs") pod "9f119cc4-119d-4535-8abd-fe380d546595" (UID: "9f119cc4-119d-4535-8abd-fe380d546595"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.050393 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cebc2aaf-d953-4acb-a8cd-31119e6cd7fd-kube-api-access-6zjkw" (OuterVolumeSpecName: "kube-api-access-6zjkw") pod "cebc2aaf-d953-4acb-a8cd-31119e6cd7fd" (UID: "cebc2aaf-d953-4acb-a8cd-31119e6cd7fd"). InnerVolumeSpecName "kube-api-access-6zjkw". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.050522 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9f119cc4-119d-4535-8abd-fe380d546595-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "9f119cc4-119d-4535-8abd-fe380d546595" (UID: "9f119cc4-119d-4535-8abd-fe380d546595"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.057800 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cebc2aaf-d953-4acb-a8cd-31119e6cd7fd-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "cebc2aaf-d953-4acb-a8cd-31119e6cd7fd" (UID: "cebc2aaf-d953-4acb-a8cd-31119e6cd7fd"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.060642 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9f119cc4-119d-4535-8abd-fe380d546595-kube-api-access-gzdxw" (OuterVolumeSpecName: "kube-api-access-gzdxw") pod "9f119cc4-119d-4535-8abd-fe380d546595" (UID: "9f119cc4-119d-4535-8abd-fe380d546595"). InnerVolumeSpecName "kube-api-access-gzdxw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.061175 4762 generic.go:334] "Generic (PLEG): container finished" podID="c3841f71-5204-469f-b755-e030281725d1" containerID="7e81d9e72b263dd6748137f8d9a685b369c4a166ceded69631770333e18be646" exitCode=0 Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.061257 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"c3841f71-5204-469f-b755-e030281725d1","Type":"ContainerDied","Data":"7e81d9e72b263dd6748137f8d9a685b369c4a166ceded69631770333e18be646"} Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.065908 4762 generic.go:334] "Generic (PLEG): container finished" podID="9f119cc4-119d-4535-8abd-fe380d546595" containerID="7e992ff8af3860151a7923e87bea63b52421d718aed5345d7541a06de728055e" exitCode=0 Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.065963 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-86957c694-98m78" event={"ID":"9f119cc4-119d-4535-8abd-fe380d546595","Type":"ContainerDied","Data":"7e992ff8af3860151a7923e87bea63b52421d718aed5345d7541a06de728055e"} Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.065982 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-86957c694-98m78" event={"ID":"9f119cc4-119d-4535-8abd-fe380d546595","Type":"ContainerDied","Data":"eb8fd27334157329a670ffea0786695b26391f6fedd7ed26fb45cec4f8badad1"} Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.065999 4762 scope.go:117] "RemoveContainer" containerID="7e992ff8af3860151a7923e87bea63b52421d718aed5345d7541a06de728055e" Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.066172 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-86957c694-98m78" Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.095759 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9f119cc4-119d-4535-8abd-fe380d546595-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9f119cc4-119d-4535-8abd-fe380d546595" (UID: "9f119cc4-119d-4535-8abd-fe380d546595"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.095888 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-98dbd4bdf-stq5g" event={"ID":"06ff304d-2fbe-412d-8eeb-098ff74fc7a6","Type":"ContainerDied","Data":"7c4fbbcedf637a27a556df8edb57f00e0ac787ed08f5e5132d65b318e3e435d0"} Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.095998 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-98dbd4bdf-stq5g" Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.099226 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9f119cc4-119d-4535-8abd-fe380d546595-config-data" (OuterVolumeSpecName: "config-data") pod "9f119cc4-119d-4535-8abd-fe380d546595" (UID: "9f119cc4-119d-4535-8abd-fe380d546595"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.099791 4762 util.go:48] "No ready sandbox for pod can be found. 
Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.099030 4762 generic.go:334] "Generic (PLEG): container finished" podID="cebc2aaf-d953-4acb-a8cd-31119e6cd7fd" containerID="fb36555264bf34968ae7dc1aef1a9384eb6b884f9ddd43e4920dbc2bbb110a7c" exitCode=0
Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.100367 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-68c6fd668c-fdnbq" event={"ID":"cebc2aaf-d953-4acb-a8cd-31119e6cd7fd","Type":"ContainerDied","Data":"fb36555264bf34968ae7dc1aef1a9384eb6b884f9ddd43e4920dbc2bbb110a7c"}
Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.100722 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-68c6fd668c-fdnbq" event={"ID":"cebc2aaf-d953-4acb-a8cd-31119e6cd7fd","Type":"ContainerDied","Data":"3edca0b86a632b7f1eb6f8a0bd13452b2a3e5518d956598438033a3b51d59461"}
Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.142157 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cebc2aaf-d953-4acb-a8cd-31119e6cd7fd-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cebc2aaf-d953-4acb-a8cd-31119e6cd7fd" (UID: "cebc2aaf-d953-4acb-a8cd-31119e6cd7fd"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.148308 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gzdxw\" (UniqueName: \"kubernetes.io/projected/9f119cc4-119d-4535-8abd-fe380d546595-kube-api-access-gzdxw\") on node \"crc\" DevicePath \"\""
Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.148347 4762 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9f119cc4-119d-4535-8abd-fe380d546595-config-data\") on node \"crc\" DevicePath \"\""
Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.148361 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f119cc4-119d-4535-8abd-fe380d546595-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.148371 4762 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9f119cc4-119d-4535-8abd-fe380d546595-logs\") on node \"crc\" DevicePath \"\""
Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.148383 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cebc2aaf-d953-4acb-a8cd-31119e6cd7fd-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.148395 4762 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9f119cc4-119d-4535-8abd-fe380d546595-config-data-custom\") on node \"crc\" DevicePath \"\""
Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.148406 4762 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/cebc2aaf-d953-4acb-a8cd-31119e6cd7fd-config-data-custom\") on node \"crc\" DevicePath \"\""
Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.148418 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6zjkw\" (UniqueName: \"kubernetes.io/projected/cebc2aaf-d953-4acb-a8cd-31119e6cd7fd-kube-api-access-6zjkw\") on node \"crc\" DevicePath \"\""
Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.151865 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cebc2aaf-d953-4acb-a8cd-31119e6cd7fd-config-data" (OuterVolumeSpecName: "config-data") pod "cebc2aaf-d953-4acb-a8cd-31119e6cd7fd" (UID: "cebc2aaf-d953-4acb-a8cd-31119e6cd7fd"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.167686 4762 scope.go:117] "RemoveContainer" containerID="41672cb485f557f009724012f729848819dca7d8e0da9f593acb894ad3571409"
Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.170110 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.204141 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-98dbd4bdf-stq5g"]
Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.219520 4762 scope.go:117] "RemoveContainer" containerID="7e992ff8af3860151a7923e87bea63b52421d718aed5345d7541a06de728055e"
Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.228104 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-98dbd4bdf-stq5g"]
Oct 09 13:50:28 crc kubenswrapper[4762]: E1009 13:50:28.232091 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7e992ff8af3860151a7923e87bea63b52421d718aed5345d7541a06de728055e\": container with ID starting with 7e992ff8af3860151a7923e87bea63b52421d718aed5345d7541a06de728055e not found: ID does not exist" containerID="7e992ff8af3860151a7923e87bea63b52421d718aed5345d7541a06de728055e"
Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.232145 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7e992ff8af3860151a7923e87bea63b52421d718aed5345d7541a06de728055e"} err="failed to get container status \"7e992ff8af3860151a7923e87bea63b52421d718aed5345d7541a06de728055e\": rpc error: code = NotFound desc = could not find container \"7e992ff8af3860151a7923e87bea63b52421d718aed5345d7541a06de728055e\": container with ID starting with 7e992ff8af3860151a7923e87bea63b52421d718aed5345d7541a06de728055e not found: ID does not exist"
Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.232180 4762 scope.go:117] "RemoveContainer" containerID="41672cb485f557f009724012f729848819dca7d8e0da9f593acb894ad3571409"
Oct 09 13:50:28 crc kubenswrapper[4762]: E1009 13:50:28.235857 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"41672cb485f557f009724012f729848819dca7d8e0da9f593acb894ad3571409\": container with ID starting with 41672cb485f557f009724012f729848819dca7d8e0da9f593acb894ad3571409 not found: ID does not exist" containerID="41672cb485f557f009724012f729848819dca7d8e0da9f593acb894ad3571409"
Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.235907 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"41672cb485f557f009724012f729848819dca7d8e0da9f593acb894ad3571409"} err="failed to get container status \"41672cb485f557f009724012f729848819dca7d8e0da9f593acb894ad3571409\": rpc error: code = NotFound desc = could not find container \"41672cb485f557f009724012f729848819dca7d8e0da9f593acb894ad3571409\": container with ID starting with 41672cb485f557f009724012f729848819dca7d8e0da9f593acb894ad3571409 not found: ID does not exist"
Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.235934 4762 scope.go:117] "RemoveContainer" containerID="041eae0fb2897099972d7b4151296f3413a47cd494bf2d693195499557b5f03d"
Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.249878 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c3841f71-5204-469f-b755-e030281725d1-erlang-cookie-secret\") pod \"c3841f71-5204-469f-b755-e030281725d1\" (UID: \"c3841f71-5204-469f-b755-e030281725d1\") "
Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.249949 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c3841f71-5204-469f-b755-e030281725d1-config-data\") pod \"c3841f71-5204-469f-b755-e030281725d1\" (UID: \"c3841f71-5204-469f-b755-e030281725d1\") "
Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.249972 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c3841f71-5204-469f-b755-e030281725d1-rabbitmq-erlang-cookie\") pod \"c3841f71-5204-469f-b755-e030281725d1\" (UID: \"c3841f71-5204-469f-b755-e030281725d1\") "
Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.250010 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c3841f71-5204-469f-b755-e030281725d1-rabbitmq-confd\") pod \"c3841f71-5204-469f-b755-e030281725d1\" (UID: \"c3841f71-5204-469f-b755-e030281725d1\") "
Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.250044 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c3841f71-5204-469f-b755-e030281725d1-rabbitmq-plugins\") pod \"c3841f71-5204-469f-b755-e030281725d1\" (UID: \"c3841f71-5204-469f-b755-e030281725d1\") "
Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.250087 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c3841f71-5204-469f-b755-e030281725d1-pod-info\") pod \"c3841f71-5204-469f-b755-e030281725d1\" (UID: \"c3841f71-5204-469f-b755-e030281725d1\") "
Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.250114 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"c3841f71-5204-469f-b755-e030281725d1\" (UID: \"c3841f71-5204-469f-b755-e030281725d1\") "
Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.250147 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c3841f71-5204-469f-b755-e030281725d1-server-conf\") pod \"c3841f71-5204-469f-b755-e030281725d1\" (UID: \"c3841f71-5204-469f-b755-e030281725d1\") "
Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.250176 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c3841f71-5204-469f-b755-e030281725d1-plugins-conf\") pod \"c3841f71-5204-469f-b755-e030281725d1\" (UID: \"c3841f71-5204-469f-b755-e030281725d1\") "
Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.250196 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/c3841f71-5204-469f-b755-e030281725d1-rabbitmq-tls\") pod \"c3841f71-5204-469f-b755-e030281725d1\" (UID: \"c3841f71-5204-469f-b755-e030281725d1\") "
Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.250219 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c6txz\" (UniqueName: \"kubernetes.io/projected/c3841f71-5204-469f-b755-e030281725d1-kube-api-access-c6txz\") pod \"c3841f71-5204-469f-b755-e030281725d1\" (UID: \"c3841f71-5204-469f-b755-e030281725d1\") "
Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.250482 4762 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cebc2aaf-d953-4acb-a8cd-31119e6cd7fd-config-data\") on node \"crc\" DevicePath \"\""
Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.253175 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c3841f71-5204-469f-b755-e030281725d1-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "c3841f71-5204-469f-b755-e030281725d1" (UID: "c3841f71-5204-469f-b755-e030281725d1"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.255301 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c3841f71-5204-469f-b755-e030281725d1-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "c3841f71-5204-469f-b755-e030281725d1" (UID: "c3841f71-5204-469f-b755-e030281725d1"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.256009 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c3841f71-5204-469f-b755-e030281725d1-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "c3841f71-5204-469f-b755-e030281725d1" (UID: "c3841f71-5204-469f-b755-e030281725d1"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.265377 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c3841f71-5204-469f-b755-e030281725d1-kube-api-access-c6txz" (OuterVolumeSpecName: "kube-api-access-c6txz") pod "c3841f71-5204-469f-b755-e030281725d1" (UID: "c3841f71-5204-469f-b755-e030281725d1"). InnerVolumeSpecName "kube-api-access-c6txz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.271447 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c3841f71-5204-469f-b755-e030281725d1-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "c3841f71-5204-469f-b755-e030281725d1" (UID: "c3841f71-5204-469f-b755-e030281725d1"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.277913 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage08-crc" (OuterVolumeSpecName: "persistence") pod "c3841f71-5204-469f-b755-e030281725d1" (UID: "c3841f71-5204-469f-b755-e030281725d1"). InnerVolumeSpecName "local-storage08-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.278536 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c3841f71-5204-469f-b755-e030281725d1-config-data" (OuterVolumeSpecName: "config-data") pod "c3841f71-5204-469f-b755-e030281725d1" (UID: "c3841f71-5204-469f-b755-e030281725d1"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.278683 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/c3841f71-5204-469f-b755-e030281725d1-pod-info" (OuterVolumeSpecName: "pod-info") pod "c3841f71-5204-469f-b755-e030281725d1" (UID: "c3841f71-5204-469f-b755-e030281725d1"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.287579 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c3841f71-5204-469f-b755-e030281725d1-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "c3841f71-5204-469f-b755-e030281725d1" (UID: "c3841f71-5204-469f-b755-e030281725d1"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.335380 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c3841f71-5204-469f-b755-e030281725d1-server-conf" (OuterVolumeSpecName: "server-conf") pod "c3841f71-5204-469f-b755-e030281725d1" (UID: "c3841f71-5204-469f-b755-e030281725d1"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.344860 4762 scope.go:117] "RemoveContainer" containerID="fb36555264bf34968ae7dc1aef1a9384eb6b884f9ddd43e4920dbc2bbb110a7c" Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.355788 4762 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c3841f71-5204-469f-b755-e030281725d1-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.355838 4762 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c3841f71-5204-469f-b755-e030281725d1-pod-info\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.355870 4762 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" " Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.355885 4762 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c3841f71-5204-469f-b755-e030281725d1-server-conf\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.355898 4762 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c3841f71-5204-469f-b755-e030281725d1-plugins-conf\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.355911 4762 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/c3841f71-5204-469f-b755-e030281725d1-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Oct 09 
13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.355924 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c6txz\" (UniqueName: \"kubernetes.io/projected/c3841f71-5204-469f-b755-e030281725d1-kube-api-access-c6txz\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.355938 4762 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c3841f71-5204-469f-b755-e030281725d1-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.355950 4762 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c3841f71-5204-469f-b755-e030281725d1-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.355962 4762 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c3841f71-5204-469f-b755-e030281725d1-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.414946 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c3841f71-5204-469f-b755-e030281725d1-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "c3841f71-5204-469f-b755-e030281725d1" (UID: "c3841f71-5204-469f-b755-e030281725d1"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.434513 4762 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage08-crc" (UniqueName: "kubernetes.io/local-volume/local-storage08-crc") on node "crc" Oct 09 13:50:28 crc kubenswrapper[4762]: E1009 13:50:28.453280 4762 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 8f00e71744c4d358c802b16ce5f508b3f36e83f09d97012b8034c4e61e6b235c is running failed: container process not found" containerID="8f00e71744c4d358c802b16ce5f508b3f36e83f09d97012b8034c4e61e6b235c" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.457071 4762 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c3841f71-5204-469f-b755-e030281725d1-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.458193 4762 reconciler_common.go:293] "Volume detached for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:28 crc kubenswrapper[4762]: E1009 13:50:28.458371 4762 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="6adc8ac732e2e99b70d366ead1fb8b6aac18a135339e217a7db01f59192c2ee6" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Oct 09 13:50:28 crc kubenswrapper[4762]: E1009 13:50:28.460006 4762 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 8f00e71744c4d358c802b16ce5f508b3f36e83f09d97012b8034c4e61e6b235c is running failed: container process not found" 
containerID="8f00e71744c4d358c802b16ce5f508b3f36e83f09d97012b8034c4e61e6b235c" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Oct 09 13:50:28 crc kubenswrapper[4762]: E1009 13:50:28.462603 4762 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="6adc8ac732e2e99b70d366ead1fb8b6aac18a135339e217a7db01f59192c2ee6" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Oct 09 13:50:28 crc kubenswrapper[4762]: E1009 13:50:28.462712 4762 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 8f00e71744c4d358c802b16ce5f508b3f36e83f09d97012b8034c4e61e6b235c is running failed: container process not found" containerID="8f00e71744c4d358c802b16ce5f508b3f36e83f09d97012b8034c4e61e6b235c" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Oct 09 13:50:28 crc kubenswrapper[4762]: E1009 13:50:28.462741 4762 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 8f00e71744c4d358c802b16ce5f508b3f36e83f09d97012b8034c4e61e6b235c is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-4rsdz" podUID="851f6b4a-bac4-4c7e-8d7b-46c7513269d9" containerName="ovsdb-server" Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.466996 4762 scope.go:117] "RemoveContainer" containerID="034dad204ce8e9094ec9b1c1d0b1940c69824cb50a907017058620a22d3c30ff" Oct 09 13:50:28 crc kubenswrapper[4762]: E1009 13:50:28.478909 4762 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="6adc8ac732e2e99b70d366ead1fb8b6aac18a135339e217a7db01f59192c2ee6" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Oct 09 13:50:28 crc kubenswrapper[4762]: E1009 13:50:28.478982 4762 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-4rsdz" podUID="851f6b4a-bac4-4c7e-8d7b-46c7513269d9" containerName="ovs-vswitchd" Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.483101 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-worker-68c6fd668c-fdnbq"] Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.511582 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-worker-68c6fd668c-fdnbq"] Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.520083 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-keystone-listener-86957c694-98m78"] Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.543919 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-keystone-listener-86957c694-98m78"] Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.555898 4762 scope.go:117] "RemoveContainer" containerID="fb36555264bf34968ae7dc1aef1a9384eb6b884f9ddd43e4920dbc2bbb110a7c" Oct 09 13:50:28 crc kubenswrapper[4762]: E1009 13:50:28.559790 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"fb36555264bf34968ae7dc1aef1a9384eb6b884f9ddd43e4920dbc2bbb110a7c\": container with ID starting with fb36555264bf34968ae7dc1aef1a9384eb6b884f9ddd43e4920dbc2bbb110a7c not found: ID does not exist" containerID="fb36555264bf34968ae7dc1aef1a9384eb6b884f9ddd43e4920dbc2bbb110a7c" Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.559837 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fb36555264bf34968ae7dc1aef1a9384eb6b884f9ddd43e4920dbc2bbb110a7c"} err="failed to get container status \"fb36555264bf34968ae7dc1aef1a9384eb6b884f9ddd43e4920dbc2bbb110a7c\": rpc error: code = NotFound desc = could not find container \"fb36555264bf34968ae7dc1aef1a9384eb6b884f9ddd43e4920dbc2bbb110a7c\": container with ID starting with fb36555264bf34968ae7dc1aef1a9384eb6b884f9ddd43e4920dbc2bbb110a7c not found: ID does not exist" Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.559863 4762 scope.go:117] "RemoveContainer" containerID="034dad204ce8e9094ec9b1c1d0b1940c69824cb50a907017058620a22d3c30ff" Oct 09 13:50:28 crc kubenswrapper[4762]: E1009 13:50:28.560244 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"034dad204ce8e9094ec9b1c1d0b1940c69824cb50a907017058620a22d3c30ff\": container with ID starting with 034dad204ce8e9094ec9b1c1d0b1940c69824cb50a907017058620a22d3c30ff not found: ID does not exist" containerID="034dad204ce8e9094ec9b1c1d0b1940c69824cb50a907017058620a22d3c30ff" Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.560274 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"034dad204ce8e9094ec9b1c1d0b1940c69824cb50a907017058620a22d3c30ff"} err="failed to get container status \"034dad204ce8e9094ec9b1c1d0b1940c69824cb50a907017058620a22d3c30ff\": rpc error: code = NotFound desc = could not find container \"034dad204ce8e9094ec9b1c1d0b1940c69824cb50a907017058620a22d3c30ff\": container with ID starting with 034dad204ce8e9094ec9b1c1d0b1940c69824cb50a907017058620a22d3c30ff not found: ID does not exist" Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.568552 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-5x8cx"] Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.578626 4762 util.go:48] "No ready sandbox for pod can be found. 
Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.661315 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/e38c807b-a632-4a96-b228-c879b07fd461-config-data-default\") pod \"e38c807b-a632-4a96-b228-c879b07fd461\" (UID: \"e38c807b-a632-4a96-b228-c879b07fd461\") "
Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.661371 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"e38c807b-a632-4a96-b228-c879b07fd461\" (UID: \"e38c807b-a632-4a96-b228-c879b07fd461\") "
Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.661403 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r65vn\" (UniqueName: \"kubernetes.io/projected/e38c807b-a632-4a96-b228-c879b07fd461-kube-api-access-r65vn\") pod \"e38c807b-a632-4a96-b228-c879b07fd461\" (UID: \"e38c807b-a632-4a96-b228-c879b07fd461\") "
Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.661424 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/e38c807b-a632-4a96-b228-c879b07fd461-secrets\") pod \"e38c807b-a632-4a96-b228-c879b07fd461\" (UID: \"e38c807b-a632-4a96-b228-c879b07fd461\") "
Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.661540 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/e38c807b-a632-4a96-b228-c879b07fd461-galera-tls-certs\") pod \"e38c807b-a632-4a96-b228-c879b07fd461\" (UID: \"e38c807b-a632-4a96-b228-c879b07fd461\") "
Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.661602 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e38c807b-a632-4a96-b228-c879b07fd461-operator-scripts\") pod \"e38c807b-a632-4a96-b228-c879b07fd461\" (UID: \"e38c807b-a632-4a96-b228-c879b07fd461\") "
Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.661644 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/e38c807b-a632-4a96-b228-c879b07fd461-kolla-config\") pod \"e38c807b-a632-4a96-b228-c879b07fd461\" (UID: \"e38c807b-a632-4a96-b228-c879b07fd461\") "
Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.661695 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e38c807b-a632-4a96-b228-c879b07fd461-combined-ca-bundle\") pod \"e38c807b-a632-4a96-b228-c879b07fd461\" (UID: \"e38c807b-a632-4a96-b228-c879b07fd461\") "
Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.661722 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/e38c807b-a632-4a96-b228-c879b07fd461-config-data-generated\") pod \"e38c807b-a632-4a96-b228-c879b07fd461\" (UID: \"e38c807b-a632-4a96-b228-c879b07fd461\") "
Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.662195 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e38c807b-a632-4a96-b228-c879b07fd461-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "e38c807b-a632-4a96-b228-c879b07fd461" (UID: "e38c807b-a632-4a96-b228-c879b07fd461"). InnerVolumeSpecName "config-data-default". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.662440 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e38c807b-a632-4a96-b228-c879b07fd461-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "e38c807b-a632-4a96-b228-c879b07fd461" (UID: "e38c807b-a632-4a96-b228-c879b07fd461"). InnerVolumeSpecName "config-data-generated". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.662893 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e38c807b-a632-4a96-b228-c879b07fd461-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "e38c807b-a632-4a96-b228-c879b07fd461" (UID: "e38c807b-a632-4a96-b228-c879b07fd461"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.663036 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e38c807b-a632-4a96-b228-c879b07fd461-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "e38c807b-a632-4a96-b228-c879b07fd461" (UID: "e38c807b-a632-4a96-b228-c879b07fd461"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.668191 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e38c807b-a632-4a96-b228-c879b07fd461-kube-api-access-r65vn" (OuterVolumeSpecName: "kube-api-access-r65vn") pod "e38c807b-a632-4a96-b228-c879b07fd461" (UID: "e38c807b-a632-4a96-b228-c879b07fd461"). InnerVolumeSpecName "kube-api-access-r65vn". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.674140 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e38c807b-a632-4a96-b228-c879b07fd461-secrets" (OuterVolumeSpecName: "secrets") pod "e38c807b-a632-4a96-b228-c879b07fd461" (UID: "e38c807b-a632-4a96-b228-c879b07fd461"). InnerVolumeSpecName "secrets". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.674501 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage09-crc" (OuterVolumeSpecName: "mysql-db") pod "e38c807b-a632-4a96-b228-c879b07fd461" (UID: "e38c807b-a632-4a96-b228-c879b07fd461"). InnerVolumeSpecName "local-storage09-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.685491 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e38c807b-a632-4a96-b228-c879b07fd461-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e38c807b-a632-4a96-b228-c879b07fd461" (UID: "e38c807b-a632-4a96-b228-c879b07fd461"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.711020 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e38c807b-a632-4a96-b228-c879b07fd461-galera-tls-certs" (OuterVolumeSpecName: "galera-tls-certs") pod "e38c807b-a632-4a96-b228-c879b07fd461" (UID: "e38c807b-a632-4a96-b228-c879b07fd461"). InnerVolumeSpecName "galera-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.763943 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e38c807b-a632-4a96-b228-c879b07fd461-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.763984 4762 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/e38c807b-a632-4a96-b228-c879b07fd461-config-data-generated\") on node \"crc\" DevicePath \"\""
Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.763998 4762 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/e38c807b-a632-4a96-b228-c879b07fd461-config-data-default\") on node \"crc\" DevicePath \"\""
Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.764042 4762 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" "
Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.764059 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r65vn\" (UniqueName: \"kubernetes.io/projected/e38c807b-a632-4a96-b228-c879b07fd461-kube-api-access-r65vn\") on node \"crc\" DevicePath \"\""
Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.764072 4762 reconciler_common.go:293] "Volume detached for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/e38c807b-a632-4a96-b228-c879b07fd461-secrets\") on node \"crc\" DevicePath \"\""
Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.764083 4762 reconciler_common.go:293] "Volume detached for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/e38c807b-a632-4a96-b228-c879b07fd461-galera-tls-certs\") on node \"crc\" DevicePath \"\""
Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.764095 4762 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e38c807b-a632-4a96-b228-c879b07fd461-operator-scripts\") on node \"crc\" DevicePath \"\""
Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.764106 4762 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/e38c807b-a632-4a96-b228-c879b07fd461-kolla-config\") on node \"crc\" DevicePath \"\""
Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.800206 4762 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage09-crc" (UniqueName: "kubernetes.io/local-volume/local-storage09-crc") on node "crc"
Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.865370 4762 reconciler_common.go:293] "Volume detached for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" DevicePath \"\""
Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.974773 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="06ff304d-2fbe-412d-8eeb-098ff74fc7a6" path="/var/lib/kubelet/pods/06ff304d-2fbe-412d-8eeb-098ff74fc7a6/volumes"
Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.975555 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0ca3d4c1-b9e5-4443-8102-7739602cbd2f" path="/var/lib/kubelet/pods/0ca3d4c1-b9e5-4443-8102-7739602cbd2f/volumes"
Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.976173 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1" path="/var/lib/kubelet/pods/2affcdfa-3670-4fe9-a86a-3ee82a4d4dd1/volumes"
Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.977344 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="39888a9b-c1cd-496e-b44e-a27212faac74" path="/var/lib/kubelet/pods/39888a9b-c1cd-496e-b44e-a27212faac74/volumes"
Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.977855 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7428fe70-b1c9-4595-a9b2-fa90ac21c3b2" path="/var/lib/kubelet/pods/7428fe70-b1c9-4595-a9b2-fa90ac21c3b2/volumes"
Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.978337 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="920bd70a-a807-4848-baf7-5bfc033838ee" path="/var/lib/kubelet/pods/920bd70a-a807-4848-baf7-5bfc033838ee/volumes"
Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.979279 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9f119cc4-119d-4535-8abd-fe380d546595" path="/var/lib/kubelet/pods/9f119cc4-119d-4535-8abd-fe380d546595/volumes"
Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.979860 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6e56749-167c-4542-b79f-a374a2f7ef20" path="/var/lib/kubelet/pods/b6e56749-167c-4542-b79f-a374a2f7ef20/volumes"
Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.980334 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cebc2aaf-d953-4acb-a8cd-31119e6cd7fd" path="/var/lib/kubelet/pods/cebc2aaf-d953-4acb-a8cd-31119e6cd7fd/volumes"
Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.981322 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d09898b1-4f64-490a-bf9f-eb03d7219d8d" path="/var/lib/kubelet/pods/d09898b1-4f64-490a-bf9f-eb03d7219d8d/volumes"
Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.981854 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d71b766d-302c-456b-ae8f-9befcb684b6a" path="/var/lib/kubelet/pods/d71b766d-302c-456b-ae8f-9befcb684b6a/volumes"
Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.982268 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ecf33027-d452-4ccd-a23f-52697374958c" path="/var/lib/kubelet/pods/ecf33027-d452-4ccd-a23f-52697374958c/volumes"
Oct 09 13:50:28 crc kubenswrapper[4762]: I1009 13:50:28.983211 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f5b14562-fc3d-48fc-b83d-c12e5c9617c6" path="/var/lib/kubelet/pods/f5b14562-fc3d-48fc-b83d-c12e5c9617c6/volumes"
Oct 09 13:50:29 crc kubenswrapper[4762]: I1009 13:50:29.086901 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0"
Oct 09 13:50:29 crc kubenswrapper[4762]: I1009 13:50:29.121426 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"c3841f71-5204-469f-b755-e030281725d1","Type":"ContainerDied","Data":"3d9a5ef4476cae012e05302d7f5d02db3213d38e25f45c4a0cebd9b3551c884d"}
Oct 09 13:50:29 crc kubenswrapper[4762]: I1009 13:50:29.121507 4762 scope.go:117] "RemoveContainer" containerID="7e81d9e72b263dd6748137f8d9a685b369c4a166ceded69631770333e18be646"
Oct 09 13:50:29 crc kubenswrapper[4762]: I1009 13:50:29.121705 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Oct 09 13:50:29 crc kubenswrapper[4762]: I1009 13:50:29.123958 4762 generic.go:334] "Generic (PLEG): container finished" podID="c01df130-cb51-49a8-bfc4-4df9b51ecae1" containerID="8a1d3aac0c991bfa0eaa4993c1cc3da313cffbafa121c113e9a62771fed0522d" exitCode=0
Oct 09 13:50:29 crc kubenswrapper[4762]: I1009 13:50:29.124778 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5x8cx" event={"ID":"c01df130-cb51-49a8-bfc4-4df9b51ecae1","Type":"ContainerDied","Data":"8a1d3aac0c991bfa0eaa4993c1cc3da313cffbafa121c113e9a62771fed0522d"}
Oct 09 13:50:29 crc kubenswrapper[4762]: I1009 13:50:29.124838 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5x8cx" event={"ID":"c01df130-cb51-49a8-bfc4-4df9b51ecae1","Type":"ContainerStarted","Data":"057ba8ada264ea0b3650e0248b34f23787095df7aefcf11e2a163ab5e1517aa8"}
Oct 09 13:50:29 crc kubenswrapper[4762]: I1009 13:50:29.138105 4762 generic.go:334] "Generic (PLEG): container finished" podID="2b85dbb0-642b-4f4e-a616-7904624b2e5a" containerID="69b2f138f6eac1a1b6ef2395b7ac7ec1ed57835677595c9ca3b0ccd77b5d4343" exitCode=0
Oct 09 13:50:29 crc kubenswrapper[4762]: I1009 13:50:29.138170 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0"
Oct 09 13:50:29 crc kubenswrapper[4762]: I1009 13:50:29.138195 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"2b85dbb0-642b-4f4e-a616-7904624b2e5a","Type":"ContainerDied","Data":"69b2f138f6eac1a1b6ef2395b7ac7ec1ed57835677595c9ca3b0ccd77b5d4343"}
Oct 09 13:50:29 crc kubenswrapper[4762]: I1009 13:50:29.138620 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"2b85dbb0-642b-4f4e-a616-7904624b2e5a","Type":"ContainerDied","Data":"5ca63dcde2011037f2eeb735027aadd6310220f41466c6f1d06c51edad84d00f"}
Oct 09 13:50:29 crc kubenswrapper[4762]: I1009 13:50:29.162130 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Oct 09 13:50:29 crc kubenswrapper[4762]: I1009 13:50:29.167276 4762 scope.go:117] "RemoveContainer" containerID="89d0be629bcb0cac06f9243b7e363d8a395f63ea55dd81e464a984f45d7e31cc"
Oct 09 13:50:29 crc kubenswrapper[4762]: I1009 13:50:29.167992 4762 generic.go:334] "Generic (PLEG): container finished" podID="e38c807b-a632-4a96-b228-c879b07fd461" containerID="f8ca509efc7c7613f58a2ef8f0eeb38491a89997a7b401df47a25b5237e27acd" exitCode=0
Oct 09 13:50:29 crc kubenswrapper[4762]: I1009 13:50:29.168032 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"e38c807b-a632-4a96-b228-c879b07fd461","Type":"ContainerDied","Data":"f8ca509efc7c7613f58a2ef8f0eeb38491a89997a7b401df47a25b5237e27acd"}
Oct 09 13:50:29 crc kubenswrapper[4762]: I1009 13:50:29.168059 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"e38c807b-a632-4a96-b228-c879b07fd461","Type":"ContainerDied","Data":"e9658bd7dbbbb81ae32e84f83cd68312caed346a1942cbb68070ed6bc06ac419"}
Oct 09 13:50:29 crc kubenswrapper[4762]: I1009 13:50:29.168150 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0"
Oct 09 13:50:29 crc kubenswrapper[4762]: I1009 13:50:29.169086 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-674km\" (UniqueName: \"kubernetes.io/projected/2b85dbb0-642b-4f4e-a616-7904624b2e5a-kube-api-access-674km\") pod \"2b85dbb0-642b-4f4e-a616-7904624b2e5a\" (UID: \"2b85dbb0-642b-4f4e-a616-7904624b2e5a\") "
Oct 09 13:50:29 crc kubenswrapper[4762]: I1009 13:50:29.169190 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2b85dbb0-642b-4f4e-a616-7904624b2e5a-combined-ca-bundle\") pod \"2b85dbb0-642b-4f4e-a616-7904624b2e5a\" (UID: \"2b85dbb0-642b-4f4e-a616-7904624b2e5a\") "
Oct 09 13:50:29 crc kubenswrapper[4762]: I1009 13:50:29.169261 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2b85dbb0-642b-4f4e-a616-7904624b2e5a-config-data\") pod \"2b85dbb0-642b-4f4e-a616-7904624b2e5a\" (UID: \"2b85dbb0-642b-4f4e-a616-7904624b2e5a\") "
Oct 09 13:50:29 crc kubenswrapper[4762]: I1009 13:50:29.181510 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Oct 09 13:50:29 crc kubenswrapper[4762]: I1009 13:50:29.182905 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2b85dbb0-642b-4f4e-a616-7904624b2e5a-kube-api-access-674km" (OuterVolumeSpecName: "kube-api-access-674km") pod "2b85dbb0-642b-4f4e-a616-7904624b2e5a" (UID: "2b85dbb0-642b-4f4e-a616-7904624b2e5a"). InnerVolumeSpecName "kube-api-access-674km". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 09 13:50:29 crc kubenswrapper[4762]: I1009 13:50:29.205037 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2b85dbb0-642b-4f4e-a616-7904624b2e5a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2b85dbb0-642b-4f4e-a616-7904624b2e5a" (UID: "2b85dbb0-642b-4f4e-a616-7904624b2e5a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 09 13:50:29 crc kubenswrapper[4762]: I1009 13:50:29.217873 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2b85dbb0-642b-4f4e-a616-7904624b2e5a-config-data" (OuterVolumeSpecName: "config-data") pod "2b85dbb0-642b-4f4e-a616-7904624b2e5a" (UID: "2b85dbb0-642b-4f4e-a616-7904624b2e5a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:50:29 crc kubenswrapper[4762]: I1009 13:50:29.221559 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstack-galera-0"] Oct 09 13:50:29 crc kubenswrapper[4762]: I1009 13:50:29.222548 4762 scope.go:117] "RemoveContainer" containerID="69b2f138f6eac1a1b6ef2395b7ac7ec1ed57835677595c9ca3b0ccd77b5d4343" Oct 09 13:50:29 crc kubenswrapper[4762]: I1009 13:50:29.226651 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstack-galera-0"] Oct 09 13:50:29 crc kubenswrapper[4762]: I1009 13:50:29.244181 4762 scope.go:117] "RemoveContainer" containerID="69b2f138f6eac1a1b6ef2395b7ac7ec1ed57835677595c9ca3b0ccd77b5d4343" Oct 09 13:50:29 crc kubenswrapper[4762]: E1009 13:50:29.245024 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"69b2f138f6eac1a1b6ef2395b7ac7ec1ed57835677595c9ca3b0ccd77b5d4343\": container with ID starting with 69b2f138f6eac1a1b6ef2395b7ac7ec1ed57835677595c9ca3b0ccd77b5d4343 not found: ID does not exist" containerID="69b2f138f6eac1a1b6ef2395b7ac7ec1ed57835677595c9ca3b0ccd77b5d4343" Oct 09 13:50:29 crc kubenswrapper[4762]: I1009 13:50:29.245061 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"69b2f138f6eac1a1b6ef2395b7ac7ec1ed57835677595c9ca3b0ccd77b5d4343"} err="failed to get container status \"69b2f138f6eac1a1b6ef2395b7ac7ec1ed57835677595c9ca3b0ccd77b5d4343\": rpc error: code = NotFound desc = could not find container \"69b2f138f6eac1a1b6ef2395b7ac7ec1ed57835677595c9ca3b0ccd77b5d4343\": container with ID starting with 69b2f138f6eac1a1b6ef2395b7ac7ec1ed57835677595c9ca3b0ccd77b5d4343 not found: ID does not exist" Oct 09 13:50:29 crc kubenswrapper[4762]: I1009 13:50:29.245083 4762 scope.go:117] "RemoveContainer" containerID="f8ca509efc7c7613f58a2ef8f0eeb38491a89997a7b401df47a25b5237e27acd" Oct 09 13:50:29 crc kubenswrapper[4762]: I1009 13:50:29.268133 4762 scope.go:117] "RemoveContainer" containerID="58840ed91cd949b1dbb746ac2bf81613b6b2a147a6bbeb09ef17561caa2428d1" Oct 09 13:50:29 crc kubenswrapper[4762]: I1009 13:50:29.276119 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-674km\" (UniqueName: \"kubernetes.io/projected/2b85dbb0-642b-4f4e-a616-7904624b2e5a-kube-api-access-674km\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:29 crc kubenswrapper[4762]: I1009 13:50:29.276154 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2b85dbb0-642b-4f4e-a616-7904624b2e5a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:29 crc kubenswrapper[4762]: I1009 13:50:29.276165 4762 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2b85dbb0-642b-4f4e-a616-7904624b2e5a-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:29 crc kubenswrapper[4762]: I1009 13:50:29.294360 4762 scope.go:117] "RemoveContainer" containerID="f8ca509efc7c7613f58a2ef8f0eeb38491a89997a7b401df47a25b5237e27acd" Oct 09 13:50:29 crc kubenswrapper[4762]: E1009 13:50:29.294881 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f8ca509efc7c7613f58a2ef8f0eeb38491a89997a7b401df47a25b5237e27acd\": container with ID starting with f8ca509efc7c7613f58a2ef8f0eeb38491a89997a7b401df47a25b5237e27acd not found: ID does not exist" 
containerID="f8ca509efc7c7613f58a2ef8f0eeb38491a89997a7b401df47a25b5237e27acd" Oct 09 13:50:29 crc kubenswrapper[4762]: I1009 13:50:29.294919 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f8ca509efc7c7613f58a2ef8f0eeb38491a89997a7b401df47a25b5237e27acd"} err="failed to get container status \"f8ca509efc7c7613f58a2ef8f0eeb38491a89997a7b401df47a25b5237e27acd\": rpc error: code = NotFound desc = could not find container \"f8ca509efc7c7613f58a2ef8f0eeb38491a89997a7b401df47a25b5237e27acd\": container with ID starting with f8ca509efc7c7613f58a2ef8f0eeb38491a89997a7b401df47a25b5237e27acd not found: ID does not exist" Oct 09 13:50:29 crc kubenswrapper[4762]: I1009 13:50:29.294947 4762 scope.go:117] "RemoveContainer" containerID="58840ed91cd949b1dbb746ac2bf81613b6b2a147a6bbeb09ef17561caa2428d1" Oct 09 13:50:29 crc kubenswrapper[4762]: E1009 13:50:29.295319 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"58840ed91cd949b1dbb746ac2bf81613b6b2a147a6bbeb09ef17561caa2428d1\": container with ID starting with 58840ed91cd949b1dbb746ac2bf81613b6b2a147a6bbeb09ef17561caa2428d1 not found: ID does not exist" containerID="58840ed91cd949b1dbb746ac2bf81613b6b2a147a6bbeb09ef17561caa2428d1" Oct 09 13:50:29 crc kubenswrapper[4762]: I1009 13:50:29.295341 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"58840ed91cd949b1dbb746ac2bf81613b6b2a147a6bbeb09ef17561caa2428d1"} err="failed to get container status \"58840ed91cd949b1dbb746ac2bf81613b6b2a147a6bbeb09ef17561caa2428d1\": rpc error: code = NotFound desc = could not find container \"58840ed91cd949b1dbb746ac2bf81613b6b2a147a6bbeb09ef17561caa2428d1\": container with ID starting with 58840ed91cd949b1dbb746ac2bf81613b6b2a147a6bbeb09ef17561caa2428d1 not found: ID does not exist" Oct 09 13:50:29 crc kubenswrapper[4762]: I1009 13:50:29.468404 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"] Oct 09 13:50:29 crc kubenswrapper[4762]: I1009 13:50:29.472935 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-0"] Oct 09 13:50:30 crc kubenswrapper[4762]: I1009 13:50:30.860079 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-c7fd4cf7c-phtkg" Oct 09 13:50:30 crc kubenswrapper[4762]: I1009 13:50:30.974375 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2b85dbb0-642b-4f4e-a616-7904624b2e5a" path="/var/lib/kubelet/pods/2b85dbb0-642b-4f4e-a616-7904624b2e5a/volumes" Oct 09 13:50:30 crc kubenswrapper[4762]: I1009 13:50:30.975786 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c3841f71-5204-469f-b755-e030281725d1" path="/var/lib/kubelet/pods/c3841f71-5204-469f-b755-e030281725d1/volumes" Oct 09 13:50:30 crc kubenswrapper[4762]: I1009 13:50:30.976940 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e38c807b-a632-4a96-b228-c879b07fd461" path="/var/lib/kubelet/pods/e38c807b-a632-4a96-b228-c879b07fd461/volumes" Oct 09 13:50:30 crc kubenswrapper[4762]: I1009 13:50:30.999295 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hrvr9\" (UniqueName: \"kubernetes.io/projected/dd7f6272-8875-4adb-b8d3-1cfe2651f738-kube-api-access-hrvr9\") pod \"dd7f6272-8875-4adb-b8d3-1cfe2651f738\" (UID: \"dd7f6272-8875-4adb-b8d3-1cfe2651f738\") " Oct 09 13:50:30 crc kubenswrapper[4762]: I1009 13:50:30.999443 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/dd7f6272-8875-4adb-b8d3-1cfe2651f738-internal-tls-certs\") pod \"dd7f6272-8875-4adb-b8d3-1cfe2651f738\" (UID: \"dd7f6272-8875-4adb-b8d3-1cfe2651f738\") " Oct 09 13:50:30 crc kubenswrapper[4762]: I1009 13:50:30.999517 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd7f6272-8875-4adb-b8d3-1cfe2651f738-combined-ca-bundle\") pod \"dd7f6272-8875-4adb-b8d3-1cfe2651f738\" (UID: \"dd7f6272-8875-4adb-b8d3-1cfe2651f738\") " Oct 09 13:50:30 crc kubenswrapper[4762]: I1009 13:50:30.999553 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/dd7f6272-8875-4adb-b8d3-1cfe2651f738-httpd-config\") pod \"dd7f6272-8875-4adb-b8d3-1cfe2651f738\" (UID: \"dd7f6272-8875-4adb-b8d3-1cfe2651f738\") " Oct 09 13:50:30 crc kubenswrapper[4762]: I1009 13:50:30.999596 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/dd7f6272-8875-4adb-b8d3-1cfe2651f738-config\") pod \"dd7f6272-8875-4adb-b8d3-1cfe2651f738\" (UID: \"dd7f6272-8875-4adb-b8d3-1cfe2651f738\") " Oct 09 13:50:30 crc kubenswrapper[4762]: I1009 13:50:30.999660 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/dd7f6272-8875-4adb-b8d3-1cfe2651f738-ovndb-tls-certs\") pod \"dd7f6272-8875-4adb-b8d3-1cfe2651f738\" (UID: \"dd7f6272-8875-4adb-b8d3-1cfe2651f738\") " Oct 09 13:50:30 crc kubenswrapper[4762]: I1009 13:50:30.999706 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/dd7f6272-8875-4adb-b8d3-1cfe2651f738-public-tls-certs\") pod \"dd7f6272-8875-4adb-b8d3-1cfe2651f738\" (UID: \"dd7f6272-8875-4adb-b8d3-1cfe2651f738\") " Oct 09 13:50:31 crc kubenswrapper[4762]: I1009 13:50:31.004651 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd7f6272-8875-4adb-b8d3-1cfe2651f738-httpd-config" (OuterVolumeSpecName: "httpd-config") pod 
"dd7f6272-8875-4adb-b8d3-1cfe2651f738" (UID: "dd7f6272-8875-4adb-b8d3-1cfe2651f738"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:50:31 crc kubenswrapper[4762]: I1009 13:50:31.012926 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dd7f6272-8875-4adb-b8d3-1cfe2651f738-kube-api-access-hrvr9" (OuterVolumeSpecName: "kube-api-access-hrvr9") pod "dd7f6272-8875-4adb-b8d3-1cfe2651f738" (UID: "dd7f6272-8875-4adb-b8d3-1cfe2651f738"). InnerVolumeSpecName "kube-api-access-hrvr9". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:50:31 crc kubenswrapper[4762]: I1009 13:50:31.039786 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd7f6272-8875-4adb-b8d3-1cfe2651f738-config" (OuterVolumeSpecName: "config") pod "dd7f6272-8875-4adb-b8d3-1cfe2651f738" (UID: "dd7f6272-8875-4adb-b8d3-1cfe2651f738"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:50:31 crc kubenswrapper[4762]: I1009 13:50:31.040699 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd7f6272-8875-4adb-b8d3-1cfe2651f738-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "dd7f6272-8875-4adb-b8d3-1cfe2651f738" (UID: "dd7f6272-8875-4adb-b8d3-1cfe2651f738"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:50:31 crc kubenswrapper[4762]: I1009 13:50:31.044454 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd7f6272-8875-4adb-b8d3-1cfe2651f738-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "dd7f6272-8875-4adb-b8d3-1cfe2651f738" (UID: "dd7f6272-8875-4adb-b8d3-1cfe2651f738"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:50:31 crc kubenswrapper[4762]: I1009 13:50:31.046220 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd7f6272-8875-4adb-b8d3-1cfe2651f738-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "dd7f6272-8875-4adb-b8d3-1cfe2651f738" (UID: "dd7f6272-8875-4adb-b8d3-1cfe2651f738"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:50:31 crc kubenswrapper[4762]: I1009 13:50:31.073037 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd7f6272-8875-4adb-b8d3-1cfe2651f738-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "dd7f6272-8875-4adb-b8d3-1cfe2651f738" (UID: "dd7f6272-8875-4adb-b8d3-1cfe2651f738"). InnerVolumeSpecName "ovndb-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:50:31 crc kubenswrapper[4762]: I1009 13:50:31.100862 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd7f6272-8875-4adb-b8d3-1cfe2651f738-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:31 crc kubenswrapper[4762]: I1009 13:50:31.100900 4762 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/dd7f6272-8875-4adb-b8d3-1cfe2651f738-httpd-config\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:31 crc kubenswrapper[4762]: I1009 13:50:31.100911 4762 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/dd7f6272-8875-4adb-b8d3-1cfe2651f738-config\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:31 crc kubenswrapper[4762]: I1009 13:50:31.100919 4762 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/dd7f6272-8875-4adb-b8d3-1cfe2651f738-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:31 crc kubenswrapper[4762]: I1009 13:50:31.100930 4762 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/dd7f6272-8875-4adb-b8d3-1cfe2651f738-public-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:31 crc kubenswrapper[4762]: I1009 13:50:31.100938 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hrvr9\" (UniqueName: \"kubernetes.io/projected/dd7f6272-8875-4adb-b8d3-1cfe2651f738-kube-api-access-hrvr9\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:31 crc kubenswrapper[4762]: I1009 13:50:31.100947 4762 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/dd7f6272-8875-4adb-b8d3-1cfe2651f738-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:31 crc kubenswrapper[4762]: I1009 13:50:31.192051 4762 generic.go:334] "Generic (PLEG): container finished" podID="c01df130-cb51-49a8-bfc4-4df9b51ecae1" containerID="6514c0148de523bd519a96f5438f33d885f0bf67ed079b9db073a0d14de05f31" exitCode=0 Oct 09 13:50:31 crc kubenswrapper[4762]: I1009 13:50:31.192131 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5x8cx" event={"ID":"c01df130-cb51-49a8-bfc4-4df9b51ecae1","Type":"ContainerDied","Data":"6514c0148de523bd519a96f5438f33d885f0bf67ed079b9db073a0d14de05f31"} Oct 09 13:50:31 crc kubenswrapper[4762]: I1009 13:50:31.197210 4762 generic.go:334] "Generic (PLEG): container finished" podID="dd7f6272-8875-4adb-b8d3-1cfe2651f738" containerID="3987d6a050a1cadc43694af370f70bb4fba67a3d6a36a32d19d7167c8ae1ca1d" exitCode=0 Oct 09 13:50:31 crc kubenswrapper[4762]: I1009 13:50:31.197272 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-c7fd4cf7c-phtkg" Oct 09 13:50:31 crc kubenswrapper[4762]: I1009 13:50:31.197266 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-c7fd4cf7c-phtkg" event={"ID":"dd7f6272-8875-4adb-b8d3-1cfe2651f738","Type":"ContainerDied","Data":"3987d6a050a1cadc43694af370f70bb4fba67a3d6a36a32d19d7167c8ae1ca1d"} Oct 09 13:50:31 crc kubenswrapper[4762]: I1009 13:50:31.197393 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-c7fd4cf7c-phtkg" event={"ID":"dd7f6272-8875-4adb-b8d3-1cfe2651f738","Type":"ContainerDied","Data":"2877ed6338b6ef4354554fa46bb599052c2ab4f1b4dcc1fc7a413876bb015467"} Oct 09 13:50:31 crc kubenswrapper[4762]: I1009 13:50:31.197417 4762 scope.go:117] "RemoveContainer" containerID="468ae0587ff3d81328ddb6eb43d3aa96c3ea9e2660cdd52df58d80465e0d4138" Oct 09 13:50:31 crc kubenswrapper[4762]: I1009 13:50:31.223104 4762 scope.go:117] "RemoveContainer" containerID="3987d6a050a1cadc43694af370f70bb4fba67a3d6a36a32d19d7167c8ae1ca1d" Oct 09 13:50:31 crc kubenswrapper[4762]: I1009 13:50:31.237384 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-c7fd4cf7c-phtkg"] Oct 09 13:50:31 crc kubenswrapper[4762]: I1009 13:50:31.243668 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-c7fd4cf7c-phtkg"] Oct 09 13:50:31 crc kubenswrapper[4762]: I1009 13:50:31.249076 4762 scope.go:117] "RemoveContainer" containerID="468ae0587ff3d81328ddb6eb43d3aa96c3ea9e2660cdd52df58d80465e0d4138" Oct 09 13:50:31 crc kubenswrapper[4762]: E1009 13:50:31.249453 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"468ae0587ff3d81328ddb6eb43d3aa96c3ea9e2660cdd52df58d80465e0d4138\": container with ID starting with 468ae0587ff3d81328ddb6eb43d3aa96c3ea9e2660cdd52df58d80465e0d4138 not found: ID does not exist" containerID="468ae0587ff3d81328ddb6eb43d3aa96c3ea9e2660cdd52df58d80465e0d4138" Oct 09 13:50:31 crc kubenswrapper[4762]: I1009 13:50:31.249481 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"468ae0587ff3d81328ddb6eb43d3aa96c3ea9e2660cdd52df58d80465e0d4138"} err="failed to get container status \"468ae0587ff3d81328ddb6eb43d3aa96c3ea9e2660cdd52df58d80465e0d4138\": rpc error: code = NotFound desc = could not find container \"468ae0587ff3d81328ddb6eb43d3aa96c3ea9e2660cdd52df58d80465e0d4138\": container with ID starting with 468ae0587ff3d81328ddb6eb43d3aa96c3ea9e2660cdd52df58d80465e0d4138 not found: ID does not exist" Oct 09 13:50:31 crc kubenswrapper[4762]: I1009 13:50:31.249501 4762 scope.go:117] "RemoveContainer" containerID="3987d6a050a1cadc43694af370f70bb4fba67a3d6a36a32d19d7167c8ae1ca1d" Oct 09 13:50:31 crc kubenswrapper[4762]: E1009 13:50:31.249865 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3987d6a050a1cadc43694af370f70bb4fba67a3d6a36a32d19d7167c8ae1ca1d\": container with ID starting with 3987d6a050a1cadc43694af370f70bb4fba67a3d6a36a32d19d7167c8ae1ca1d not found: ID does not exist" containerID="3987d6a050a1cadc43694af370f70bb4fba67a3d6a36a32d19d7167c8ae1ca1d" Oct 09 13:50:31 crc kubenswrapper[4762]: I1009 13:50:31.249923 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3987d6a050a1cadc43694af370f70bb4fba67a3d6a36a32d19d7167c8ae1ca1d"} err="failed to get container status 
\"3987d6a050a1cadc43694af370f70bb4fba67a3d6a36a32d19d7167c8ae1ca1d\": rpc error: code = NotFound desc = could not find container \"3987d6a050a1cadc43694af370f70bb4fba67a3d6a36a32d19d7167c8ae1ca1d\": container with ID starting with 3987d6a050a1cadc43694af370f70bb4fba67a3d6a36a32d19d7167c8ae1ca1d not found: ID does not exist" Oct 09 13:50:32 crc kubenswrapper[4762]: I1009 13:50:32.209041 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5x8cx" event={"ID":"c01df130-cb51-49a8-bfc4-4df9b51ecae1","Type":"ContainerStarted","Data":"a5ea8aad393b9eb8478da4514740f1b70188555e966ffad0b60cd21134a47479"} Oct 09 13:50:32 crc kubenswrapper[4762]: I1009 13:50:32.226869 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-5x8cx" podStartSLOduration=2.528001996 podStartE2EDuration="5.226852137s" podCreationTimestamp="2025-10-09 13:50:27 +0000 UTC" firstStartedPulling="2025-10-09 13:50:29.125609905 +0000 UTC m=+1504.899400944" lastFinishedPulling="2025-10-09 13:50:31.824460046 +0000 UTC m=+1507.598251085" observedRunningTime="2025-10-09 13:50:32.225313486 +0000 UTC m=+1507.999104525" watchObservedRunningTime="2025-10-09 13:50:32.226852137 +0000 UTC m=+1508.000643176" Oct 09 13:50:33 crc kubenswrapper[4762]: I1009 13:50:33.001278 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dd7f6272-8875-4adb-b8d3-1cfe2651f738" path="/var/lib/kubelet/pods/dd7f6272-8875-4adb-b8d3-1cfe2651f738/volumes" Oct 09 13:50:33 crc kubenswrapper[4762]: E1009 13:50:33.448726 4762 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 8f00e71744c4d358c802b16ce5f508b3f36e83f09d97012b8034c4e61e6b235c is running failed: container process not found" containerID="8f00e71744c4d358c802b16ce5f508b3f36e83f09d97012b8034c4e61e6b235c" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Oct 09 13:50:33 crc kubenswrapper[4762]: E1009 13:50:33.449047 4762 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 8f00e71744c4d358c802b16ce5f508b3f36e83f09d97012b8034c4e61e6b235c is running failed: container process not found" containerID="8f00e71744c4d358c802b16ce5f508b3f36e83f09d97012b8034c4e61e6b235c" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Oct 09 13:50:33 crc kubenswrapper[4762]: E1009 13:50:33.449438 4762 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 8f00e71744c4d358c802b16ce5f508b3f36e83f09d97012b8034c4e61e6b235c is running failed: container process not found" containerID="8f00e71744c4d358c802b16ce5f508b3f36e83f09d97012b8034c4e61e6b235c" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Oct 09 13:50:33 crc kubenswrapper[4762]: E1009 13:50:33.449471 4762 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 8f00e71744c4d358c802b16ce5f508b3f36e83f09d97012b8034c4e61e6b235c is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-4rsdz" podUID="851f6b4a-bac4-4c7e-8d7b-46c7513269d9" containerName="ovsdb-server" Oct 09 13:50:33 crc kubenswrapper[4762]: E1009 13:50:33.450135 4762 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = 
Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="6adc8ac732e2e99b70d366ead1fb8b6aac18a135339e217a7db01f59192c2ee6" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Oct 09 13:50:33 crc kubenswrapper[4762]: E1009 13:50:33.451312 4762 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="6adc8ac732e2e99b70d366ead1fb8b6aac18a135339e217a7db01f59192c2ee6" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Oct 09 13:50:33 crc kubenswrapper[4762]: E1009 13:50:33.453198 4762 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="6adc8ac732e2e99b70d366ead1fb8b6aac18a135339e217a7db01f59192c2ee6" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Oct 09 13:50:33 crc kubenswrapper[4762]: E1009 13:50:33.453245 4762 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-4rsdz" podUID="851f6b4a-bac4-4c7e-8d7b-46c7513269d9" containerName="ovs-vswitchd" Oct 09 13:50:37 crc kubenswrapper[4762]: I1009 13:50:37.927397 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-5x8cx" Oct 09 13:50:37 crc kubenswrapper[4762]: I1009 13:50:37.927820 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-5x8cx" Oct 09 13:50:37 crc kubenswrapper[4762]: I1009 13:50:37.972188 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-5x8cx" Oct 09 13:50:38 crc kubenswrapper[4762]: I1009 13:50:38.302956 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-5x8cx" Oct 09 13:50:38 crc kubenswrapper[4762]: I1009 13:50:38.349238 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-5x8cx"] Oct 09 13:50:38 crc kubenswrapper[4762]: E1009 13:50:38.449339 4762 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 8f00e71744c4d358c802b16ce5f508b3f36e83f09d97012b8034c4e61e6b235c is running failed: container process not found" containerID="8f00e71744c4d358c802b16ce5f508b3f36e83f09d97012b8034c4e61e6b235c" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Oct 09 13:50:38 crc kubenswrapper[4762]: E1009 13:50:38.449672 4762 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 8f00e71744c4d358c802b16ce5f508b3f36e83f09d97012b8034c4e61e6b235c is running failed: container process not found" containerID="8f00e71744c4d358c802b16ce5f508b3f36e83f09d97012b8034c4e61e6b235c" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Oct 09 13:50:38 crc kubenswrapper[4762]: E1009 13:50:38.449910 4762 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 
8f00e71744c4d358c802b16ce5f508b3f36e83f09d97012b8034c4e61e6b235c is running failed: container process not found" containerID="8f00e71744c4d358c802b16ce5f508b3f36e83f09d97012b8034c4e61e6b235c" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Oct 09 13:50:38 crc kubenswrapper[4762]: E1009 13:50:38.449949 4762 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 8f00e71744c4d358c802b16ce5f508b3f36e83f09d97012b8034c4e61e6b235c is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-4rsdz" podUID="851f6b4a-bac4-4c7e-8d7b-46c7513269d9" containerName="ovsdb-server" Oct 09 13:50:38 crc kubenswrapper[4762]: E1009 13:50:38.450683 4762 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="6adc8ac732e2e99b70d366ead1fb8b6aac18a135339e217a7db01f59192c2ee6" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Oct 09 13:50:38 crc kubenswrapper[4762]: E1009 13:50:38.451772 4762 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="6adc8ac732e2e99b70d366ead1fb8b6aac18a135339e217a7db01f59192c2ee6" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Oct 09 13:50:38 crc kubenswrapper[4762]: E1009 13:50:38.454937 4762 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="6adc8ac732e2e99b70d366ead1fb8b6aac18a135339e217a7db01f59192c2ee6" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Oct 09 13:50:38 crc kubenswrapper[4762]: E1009 13:50:38.455040 4762 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-4rsdz" podUID="851f6b4a-bac4-4c7e-8d7b-46c7513269d9" containerName="ovs-vswitchd" Oct 09 13:50:40 crc kubenswrapper[4762]: I1009 13:50:40.278179 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-5x8cx" podUID="c01df130-cb51-49a8-bfc4-4df9b51ecae1" containerName="registry-server" containerID="cri-o://a5ea8aad393b9eb8478da4514740f1b70188555e966ffad0b60cd21134a47479" gracePeriod=2 Oct 09 13:50:41 crc kubenswrapper[4762]: E1009 13:50:41.178504 4762 fsHandler.go:119] failed to collect filesystem stats - rootDiskErr: could not stat "/var/lib/containers/storage/overlay/33fec648cbdf298ca452e8dac2f48db20de89cbfbca79f22d3bcd9294ef5f727/diff" to get inode usage: stat /var/lib/containers/storage/overlay/33fec648cbdf298ca452e8dac2f48db20de89cbfbca79f22d3bcd9294ef5f727/diff: no such file or directory, extraDiskErr: could not stat "/var/log/pods/openstack_rabbitmq-server-0_0ca3d4c1-b9e5-4443-8102-7739602cbd2f/rabbitmq/0.log" to get inode usage: stat /var/log/pods/openstack_rabbitmq-server-0_0ca3d4c1-b9e5-4443-8102-7739602cbd2f/rabbitmq/0.log: no such file or directory Oct 09 13:50:41 crc kubenswrapper[4762]: I1009 13:50:41.292263 4762 generic.go:334] "Generic (PLEG): container finished" podID="c01df130-cb51-49a8-bfc4-4df9b51ecae1" 
containerID="a5ea8aad393b9eb8478da4514740f1b70188555e966ffad0b60cd21134a47479" exitCode=0 Oct 09 13:50:41 crc kubenswrapper[4762]: I1009 13:50:41.292297 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5x8cx" event={"ID":"c01df130-cb51-49a8-bfc4-4df9b51ecae1","Type":"ContainerDied","Data":"a5ea8aad393b9eb8478da4514740f1b70188555e966ffad0b60cd21134a47479"} Oct 09 13:50:41 crc kubenswrapper[4762]: I1009 13:50:41.920124 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5x8cx" Oct 09 13:50:42 crc kubenswrapper[4762]: I1009 13:50:42.058849 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c01df130-cb51-49a8-bfc4-4df9b51ecae1-utilities\") pod \"c01df130-cb51-49a8-bfc4-4df9b51ecae1\" (UID: \"c01df130-cb51-49a8-bfc4-4df9b51ecae1\") " Oct 09 13:50:42 crc kubenswrapper[4762]: I1009 13:50:42.058984 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c01df130-cb51-49a8-bfc4-4df9b51ecae1-catalog-content\") pod \"c01df130-cb51-49a8-bfc4-4df9b51ecae1\" (UID: \"c01df130-cb51-49a8-bfc4-4df9b51ecae1\") " Oct 09 13:50:42 crc kubenswrapper[4762]: I1009 13:50:42.059043 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-76zmw\" (UniqueName: \"kubernetes.io/projected/c01df130-cb51-49a8-bfc4-4df9b51ecae1-kube-api-access-76zmw\") pod \"c01df130-cb51-49a8-bfc4-4df9b51ecae1\" (UID: \"c01df130-cb51-49a8-bfc4-4df9b51ecae1\") " Oct 09 13:50:42 crc kubenswrapper[4762]: I1009 13:50:42.059604 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c01df130-cb51-49a8-bfc4-4df9b51ecae1-utilities" (OuterVolumeSpecName: "utilities") pod "c01df130-cb51-49a8-bfc4-4df9b51ecae1" (UID: "c01df130-cb51-49a8-bfc4-4df9b51ecae1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 13:50:42 crc kubenswrapper[4762]: I1009 13:50:42.065423 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c01df130-cb51-49a8-bfc4-4df9b51ecae1-kube-api-access-76zmw" (OuterVolumeSpecName: "kube-api-access-76zmw") pod "c01df130-cb51-49a8-bfc4-4df9b51ecae1" (UID: "c01df130-cb51-49a8-bfc4-4df9b51ecae1"). InnerVolumeSpecName "kube-api-access-76zmw". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:50:42 crc kubenswrapper[4762]: I1009 13:50:42.106172 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c01df130-cb51-49a8-bfc4-4df9b51ecae1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c01df130-cb51-49a8-bfc4-4df9b51ecae1" (UID: "c01df130-cb51-49a8-bfc4-4df9b51ecae1"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 13:50:42 crc kubenswrapper[4762]: I1009 13:50:42.160158 4762 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c01df130-cb51-49a8-bfc4-4df9b51ecae1-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:42 crc kubenswrapper[4762]: I1009 13:50:42.160193 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-76zmw\" (UniqueName: \"kubernetes.io/projected/c01df130-cb51-49a8-bfc4-4df9b51ecae1-kube-api-access-76zmw\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:42 crc kubenswrapper[4762]: I1009 13:50:42.160203 4762 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c01df130-cb51-49a8-bfc4-4df9b51ecae1-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:42 crc kubenswrapper[4762]: I1009 13:50:42.303807 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5x8cx" event={"ID":"c01df130-cb51-49a8-bfc4-4df9b51ecae1","Type":"ContainerDied","Data":"057ba8ada264ea0b3650e0248b34f23787095df7aefcf11e2a163ab5e1517aa8"} Oct 09 13:50:42 crc kubenswrapper[4762]: I1009 13:50:42.303868 4762 scope.go:117] "RemoveContainer" containerID="a5ea8aad393b9eb8478da4514740f1b70188555e966ffad0b60cd21134a47479" Oct 09 13:50:42 crc kubenswrapper[4762]: I1009 13:50:42.303869 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5x8cx" Oct 09 13:50:42 crc kubenswrapper[4762]: I1009 13:50:42.324953 4762 scope.go:117] "RemoveContainer" containerID="6514c0148de523bd519a96f5438f33d885f0bf67ed079b9db073a0d14de05f31" Oct 09 13:50:42 crc kubenswrapper[4762]: I1009 13:50:42.341705 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-5x8cx"] Oct 09 13:50:42 crc kubenswrapper[4762]: I1009 13:50:42.344502 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-5x8cx"] Oct 09 13:50:42 crc kubenswrapper[4762]: I1009 13:50:42.348719 4762 scope.go:117] "RemoveContainer" containerID="8a1d3aac0c991bfa0eaa4993c1cc3da313cffbafa121c113e9a62771fed0522d" Oct 09 13:50:42 crc kubenswrapper[4762]: I1009 13:50:42.974871 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c01df130-cb51-49a8-bfc4-4df9b51ecae1" path="/var/lib/kubelet/pods/c01df130-cb51-49a8-bfc4-4df9b51ecae1/volumes" Oct 09 13:50:43 crc kubenswrapper[4762]: E1009 13:50:43.213311 4762 fsHandler.go:119] failed to collect filesystem stats - rootDiskErr: could not stat "/var/lib/containers/storage/overlay/6f8d4ee80183cd722c07dac3c8a6d42f44086cd936bae7543983a5030a0405c7/diff" to get inode usage: stat /var/lib/containers/storage/overlay/6f8d4ee80183cd722c07dac3c8a6d42f44086cd936bae7543983a5030a0405c7/diff: no such file or directory, extraDiskErr: could not stat "/var/log/pods/openstack_rabbitmq-cell1-server-0_c3841f71-5204-469f-b755-e030281725d1/rabbitmq/0.log" to get inode usage: stat /var/log/pods/openstack_rabbitmq-cell1-server-0_c3841f71-5204-469f-b755-e030281725d1/rabbitmq/0.log: no such file or directory Oct 09 13:50:43 crc kubenswrapper[4762]: E1009 13:50:43.448416 4762 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 8f00e71744c4d358c802b16ce5f508b3f36e83f09d97012b8034c4e61e6b235c is running failed: container process not found" 
containerID="8f00e71744c4d358c802b16ce5f508b3f36e83f09d97012b8034c4e61e6b235c" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Oct 09 13:50:43 crc kubenswrapper[4762]: E1009 13:50:43.448888 4762 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 8f00e71744c4d358c802b16ce5f508b3f36e83f09d97012b8034c4e61e6b235c is running failed: container process not found" containerID="8f00e71744c4d358c802b16ce5f508b3f36e83f09d97012b8034c4e61e6b235c" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Oct 09 13:50:43 crc kubenswrapper[4762]: E1009 13:50:43.449176 4762 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 8f00e71744c4d358c802b16ce5f508b3f36e83f09d97012b8034c4e61e6b235c is running failed: container process not found" containerID="8f00e71744c4d358c802b16ce5f508b3f36e83f09d97012b8034c4e61e6b235c" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Oct 09 13:50:43 crc kubenswrapper[4762]: E1009 13:50:43.449251 4762 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 8f00e71744c4d358c802b16ce5f508b3f36e83f09d97012b8034c4e61e6b235c is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-4rsdz" podUID="851f6b4a-bac4-4c7e-8d7b-46c7513269d9" containerName="ovsdb-server" Oct 09 13:50:43 crc kubenswrapper[4762]: E1009 13:50:43.449778 4762 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="6adc8ac732e2e99b70d366ead1fb8b6aac18a135339e217a7db01f59192c2ee6" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Oct 09 13:50:43 crc kubenswrapper[4762]: E1009 13:50:43.451448 4762 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="6adc8ac732e2e99b70d366ead1fb8b6aac18a135339e217a7db01f59192c2ee6" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Oct 09 13:50:43 crc kubenswrapper[4762]: E1009 13:50:43.452843 4762 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="6adc8ac732e2e99b70d366ead1fb8b6aac18a135339e217a7db01f59192c2ee6" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Oct 09 13:50:43 crc kubenswrapper[4762]: E1009 13:50:43.452879 4762 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-4rsdz" podUID="851f6b4a-bac4-4c7e-8d7b-46c7513269d9" containerName="ovs-vswitchd" Oct 09 13:50:47 crc kubenswrapper[4762]: E1009 13:50:47.862976 4762 fsHandler.go:119] failed to collect filesystem stats - rootDiskErr: could not stat "/var/lib/containers/storage/overlay/5ad28ab6cf3187787affcd93609f26732811715f69c9b4200d3c1f77016e0638/diff" to get inode usage: stat /var/lib/containers/storage/overlay/5ad28ab6cf3187787affcd93609f26732811715f69c9b4200d3c1f77016e0638/diff: no such file or directory, 
extraDiskErr: could not stat "/var/log/pods/openstack_nova-cell1-conductor-0_2b85dbb0-642b-4f4e-a616-7904624b2e5a/nova-cell1-conductor-conductor/0.log" to get inode usage: stat /var/log/pods/openstack_nova-cell1-conductor-0_2b85dbb0-642b-4f4e-a616-7904624b2e5a/nova-cell1-conductor-conductor/0.log: no such file or directory Oct 09 13:50:48 crc kubenswrapper[4762]: E1009 13:50:48.448996 4762 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 8f00e71744c4d358c802b16ce5f508b3f36e83f09d97012b8034c4e61e6b235c is running failed: container process not found" containerID="8f00e71744c4d358c802b16ce5f508b3f36e83f09d97012b8034c4e61e6b235c" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Oct 09 13:50:48 crc kubenswrapper[4762]: E1009 13:50:48.449178 4762 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 6adc8ac732e2e99b70d366ead1fb8b6aac18a135339e217a7db01f59192c2ee6 is running failed: container process not found" containerID="6adc8ac732e2e99b70d366ead1fb8b6aac18a135339e217a7db01f59192c2ee6" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Oct 09 13:50:48 crc kubenswrapper[4762]: E1009 13:50:48.449817 4762 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 6adc8ac732e2e99b70d366ead1fb8b6aac18a135339e217a7db01f59192c2ee6 is running failed: container process not found" containerID="6adc8ac732e2e99b70d366ead1fb8b6aac18a135339e217a7db01f59192c2ee6" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Oct 09 13:50:48 crc kubenswrapper[4762]: E1009 13:50:48.449956 4762 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 8f00e71744c4d358c802b16ce5f508b3f36e83f09d97012b8034c4e61e6b235c is running failed: container process not found" containerID="8f00e71744c4d358c802b16ce5f508b3f36e83f09d97012b8034c4e61e6b235c" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Oct 09 13:50:48 crc kubenswrapper[4762]: E1009 13:50:48.450336 4762 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 6adc8ac732e2e99b70d366ead1fb8b6aac18a135339e217a7db01f59192c2ee6 is running failed: container process not found" containerID="6adc8ac732e2e99b70d366ead1fb8b6aac18a135339e217a7db01f59192c2ee6" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Oct 09 13:50:48 crc kubenswrapper[4762]: E1009 13:50:48.450385 4762 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 6adc8ac732e2e99b70d366ead1fb8b6aac18a135339e217a7db01f59192c2ee6 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-4rsdz" podUID="851f6b4a-bac4-4c7e-8d7b-46c7513269d9" containerName="ovs-vswitchd" Oct 09 13:50:48 crc kubenswrapper[4762]: E1009 13:50:48.450452 4762 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 8f00e71744c4d358c802b16ce5f508b3f36e83f09d97012b8034c4e61e6b235c is running failed: container process not found" containerID="8f00e71744c4d358c802b16ce5f508b3f36e83f09d97012b8034c4e61e6b235c" 
cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Oct 09 13:50:48 crc kubenswrapper[4762]: E1009 13:50:48.450490 4762 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 8f00e71744c4d358c802b16ce5f508b3f36e83f09d97012b8034c4e61e6b235c is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-4rsdz" podUID="851f6b4a-bac4-4c7e-8d7b-46c7513269d9" containerName="ovsdb-server" Oct 09 13:50:48 crc kubenswrapper[4762]: I1009 13:50:48.835794 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-4rsdz_851f6b4a-bac4-4c7e-8d7b-46c7513269d9/ovs-vswitchd/0.log" Oct 09 13:50:48 crc kubenswrapper[4762]: I1009 13:50:48.836618 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-4rsdz" Oct 09 13:50:48 crc kubenswrapper[4762]: I1009 13:50:48.968241 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qlw5s\" (UniqueName: \"kubernetes.io/projected/851f6b4a-bac4-4c7e-8d7b-46c7513269d9-kube-api-access-qlw5s\") pod \"851f6b4a-bac4-4c7e-8d7b-46c7513269d9\" (UID: \"851f6b4a-bac4-4c7e-8d7b-46c7513269d9\") " Oct 09 13:50:48 crc kubenswrapper[4762]: I1009 13:50:48.968307 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/851f6b4a-bac4-4c7e-8d7b-46c7513269d9-etc-ovs\") pod \"851f6b4a-bac4-4c7e-8d7b-46c7513269d9\" (UID: \"851f6b4a-bac4-4c7e-8d7b-46c7513269d9\") " Oct 09 13:50:48 crc kubenswrapper[4762]: I1009 13:50:48.968326 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/851f6b4a-bac4-4c7e-8d7b-46c7513269d9-var-run\") pod \"851f6b4a-bac4-4c7e-8d7b-46c7513269d9\" (UID: \"851f6b4a-bac4-4c7e-8d7b-46c7513269d9\") " Oct 09 13:50:48 crc kubenswrapper[4762]: I1009 13:50:48.968374 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/851f6b4a-bac4-4c7e-8d7b-46c7513269d9-var-lib\") pod \"851f6b4a-bac4-4c7e-8d7b-46c7513269d9\" (UID: \"851f6b4a-bac4-4c7e-8d7b-46c7513269d9\") " Oct 09 13:50:48 crc kubenswrapper[4762]: I1009 13:50:48.968403 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/851f6b4a-bac4-4c7e-8d7b-46c7513269d9-scripts\") pod \"851f6b4a-bac4-4c7e-8d7b-46c7513269d9\" (UID: \"851f6b4a-bac4-4c7e-8d7b-46c7513269d9\") " Oct 09 13:50:48 crc kubenswrapper[4762]: I1009 13:50:48.968457 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/851f6b4a-bac4-4c7e-8d7b-46c7513269d9-var-log\") pod \"851f6b4a-bac4-4c7e-8d7b-46c7513269d9\" (UID: \"851f6b4a-bac4-4c7e-8d7b-46c7513269d9\") " Oct 09 13:50:48 crc kubenswrapper[4762]: I1009 13:50:48.968766 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/851f6b4a-bac4-4c7e-8d7b-46c7513269d9-var-log" (OuterVolumeSpecName: "var-log") pod "851f6b4a-bac4-4c7e-8d7b-46c7513269d9" (UID: "851f6b4a-bac4-4c7e-8d7b-46c7513269d9"). InnerVolumeSpecName "var-log". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 13:50:48 crc kubenswrapper[4762]: I1009 13:50:48.969042 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/851f6b4a-bac4-4c7e-8d7b-46c7513269d9-var-run" (OuterVolumeSpecName: "var-run") pod "851f6b4a-bac4-4c7e-8d7b-46c7513269d9" (UID: "851f6b4a-bac4-4c7e-8d7b-46c7513269d9"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 13:50:48 crc kubenswrapper[4762]: I1009 13:50:48.969076 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/851f6b4a-bac4-4c7e-8d7b-46c7513269d9-etc-ovs" (OuterVolumeSpecName: "etc-ovs") pod "851f6b4a-bac4-4c7e-8d7b-46c7513269d9" (UID: "851f6b4a-bac4-4c7e-8d7b-46c7513269d9"). InnerVolumeSpecName "etc-ovs". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 13:50:48 crc kubenswrapper[4762]: I1009 13:50:48.969101 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/851f6b4a-bac4-4c7e-8d7b-46c7513269d9-var-lib" (OuterVolumeSpecName: "var-lib") pod "851f6b4a-bac4-4c7e-8d7b-46c7513269d9" (UID: "851f6b4a-bac4-4c7e-8d7b-46c7513269d9"). InnerVolumeSpecName "var-lib". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 13:50:48 crc kubenswrapper[4762]: I1009 13:50:48.970221 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/851f6b4a-bac4-4c7e-8d7b-46c7513269d9-scripts" (OuterVolumeSpecName: "scripts") pod "851f6b4a-bac4-4c7e-8d7b-46c7513269d9" (UID: "851f6b4a-bac4-4c7e-8d7b-46c7513269d9"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 13:50:48 crc kubenswrapper[4762]: I1009 13:50:48.981511 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/851f6b4a-bac4-4c7e-8d7b-46c7513269d9-kube-api-access-qlw5s" (OuterVolumeSpecName: "kube-api-access-qlw5s") pod "851f6b4a-bac4-4c7e-8d7b-46c7513269d9" (UID: "851f6b4a-bac4-4c7e-8d7b-46c7513269d9"). InnerVolumeSpecName "kube-api-access-qlw5s". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:50:49 crc kubenswrapper[4762]: I1009 13:50:49.070531 4762 reconciler_common.go:293] "Volume detached for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/851f6b4a-bac4-4c7e-8d7b-46c7513269d9-var-lib\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:49 crc kubenswrapper[4762]: I1009 13:50:49.070573 4762 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/851f6b4a-bac4-4c7e-8d7b-46c7513269d9-scripts\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:49 crc kubenswrapper[4762]: I1009 13:50:49.070585 4762 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/851f6b4a-bac4-4c7e-8d7b-46c7513269d9-var-log\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:49 crc kubenswrapper[4762]: I1009 13:50:49.070600 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qlw5s\" (UniqueName: \"kubernetes.io/projected/851f6b4a-bac4-4c7e-8d7b-46c7513269d9-kube-api-access-qlw5s\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:49 crc kubenswrapper[4762]: I1009 13:50:49.070614 4762 reconciler_common.go:293] "Volume detached for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/851f6b4a-bac4-4c7e-8d7b-46c7513269d9-etc-ovs\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:49 crc kubenswrapper[4762]: I1009 13:50:49.070625 4762 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/851f6b4a-bac4-4c7e-8d7b-46c7513269d9-var-run\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:49 crc kubenswrapper[4762]: I1009 13:50:49.278378 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Oct 09 13:50:49 crc kubenswrapper[4762]: I1009 13:50:49.368995 4762 generic.go:334] "Generic (PLEG): container finished" podID="1477cefa-db6e-45d7-adb1-d7c1369a3610" containerID="eacaa8b5d9b84164146ff7a584e9c725d4bb02cd996fc577efdabba2e1ba13b2" exitCode=137 Oct 09 13:50:49 crc kubenswrapper[4762]: I1009 13:50:49.369073 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"1477cefa-db6e-45d7-adb1-d7c1369a3610","Type":"ContainerDied","Data":"eacaa8b5d9b84164146ff7a584e9c725d4bb02cd996fc577efdabba2e1ba13b2"} Oct 09 13:50:49 crc kubenswrapper[4762]: I1009 13:50:49.369094 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-storage-0" Oct 09 13:50:49 crc kubenswrapper[4762]: I1009 13:50:49.369145 4762 scope.go:117] "RemoveContainer" containerID="eacaa8b5d9b84164146ff7a584e9c725d4bb02cd996fc577efdabba2e1ba13b2" Oct 09 13:50:49 crc kubenswrapper[4762]: I1009 13:50:49.369131 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"1477cefa-db6e-45d7-adb1-d7c1369a3610","Type":"ContainerDied","Data":"d6d7950663f653c7876dea9d334c7f0696daafd78e50903800dd678990bb04e2"} Oct 09 13:50:49 crc kubenswrapper[4762]: I1009 13:50:49.372136 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-4rsdz_851f6b4a-bac4-4c7e-8d7b-46c7513269d9/ovs-vswitchd/0.log" Oct 09 13:50:49 crc kubenswrapper[4762]: I1009 13:50:49.373247 4762 generic.go:334] "Generic (PLEG): container finished" podID="851f6b4a-bac4-4c7e-8d7b-46c7513269d9" containerID="6adc8ac732e2e99b70d366ead1fb8b6aac18a135339e217a7db01f59192c2ee6" exitCode=137 Oct 09 13:50:49 crc kubenswrapper[4762]: I1009 13:50:49.373295 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-4rsdz" event={"ID":"851f6b4a-bac4-4c7e-8d7b-46c7513269d9","Type":"ContainerDied","Data":"6adc8ac732e2e99b70d366ead1fb8b6aac18a135339e217a7db01f59192c2ee6"} Oct 09 13:50:49 crc kubenswrapper[4762]: I1009 13:50:49.373321 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-4rsdz" event={"ID":"851f6b4a-bac4-4c7e-8d7b-46c7513269d9","Type":"ContainerDied","Data":"b6c377c3a60adc4a6b1a248142e8579e2d3076e0550d7fe8a5f03267112dd2a7"} Oct 09 13:50:49 crc kubenswrapper[4762]: I1009 13:50:49.373585 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-4rsdz" Oct 09 13:50:49 crc kubenswrapper[4762]: I1009 13:50:49.375048 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-67d79\" (UniqueName: \"kubernetes.io/projected/1477cefa-db6e-45d7-adb1-d7c1369a3610-kube-api-access-67d79\") pod \"1477cefa-db6e-45d7-adb1-d7c1369a3610\" (UID: \"1477cefa-db6e-45d7-adb1-d7c1369a3610\") " Oct 09 13:50:49 crc kubenswrapper[4762]: I1009 13:50:49.375092 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/1477cefa-db6e-45d7-adb1-d7c1369a3610-cache\") pod \"1477cefa-db6e-45d7-adb1-d7c1369a3610\" (UID: \"1477cefa-db6e-45d7-adb1-d7c1369a3610\") " Oct 09 13:50:49 crc kubenswrapper[4762]: I1009 13:50:49.375119 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/1477cefa-db6e-45d7-adb1-d7c1369a3610-lock\") pod \"1477cefa-db6e-45d7-adb1-d7c1369a3610\" (UID: \"1477cefa-db6e-45d7-adb1-d7c1369a3610\") " Oct 09 13:50:49 crc kubenswrapper[4762]: I1009 13:50:49.375216 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swift\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"1477cefa-db6e-45d7-adb1-d7c1369a3610\" (UID: \"1477cefa-db6e-45d7-adb1-d7c1369a3610\") " Oct 09 13:50:49 crc kubenswrapper[4762]: I1009 13:50:49.375576 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/1477cefa-db6e-45d7-adb1-d7c1369a3610-etc-swift\") pod \"1477cefa-db6e-45d7-adb1-d7c1369a3610\" (UID: \"1477cefa-db6e-45d7-adb1-d7c1369a3610\") " Oct 09 13:50:49 crc kubenswrapper[4762]: 
I1009 13:50:49.375889 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1477cefa-db6e-45d7-adb1-d7c1369a3610-lock" (OuterVolumeSpecName: "lock") pod "1477cefa-db6e-45d7-adb1-d7c1369a3610" (UID: "1477cefa-db6e-45d7-adb1-d7c1369a3610"). InnerVolumeSpecName "lock". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 13:50:49 crc kubenswrapper[4762]: I1009 13:50:49.375977 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1477cefa-db6e-45d7-adb1-d7c1369a3610-cache" (OuterVolumeSpecName: "cache") pod "1477cefa-db6e-45d7-adb1-d7c1369a3610" (UID: "1477cefa-db6e-45d7-adb1-d7c1369a3610"). InnerVolumeSpecName "cache". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 13:50:49 crc kubenswrapper[4762]: I1009 13:50:49.376259 4762 reconciler_common.go:293] "Volume detached for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/1477cefa-db6e-45d7-adb1-d7c1369a3610-cache\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:49 crc kubenswrapper[4762]: I1009 13:50:49.376282 4762 reconciler_common.go:293] "Volume detached for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/1477cefa-db6e-45d7-adb1-d7c1369a3610-lock\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:49 crc kubenswrapper[4762]: I1009 13:50:49.379151 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1477cefa-db6e-45d7-adb1-d7c1369a3610-kube-api-access-67d79" (OuterVolumeSpecName: "kube-api-access-67d79") pod "1477cefa-db6e-45d7-adb1-d7c1369a3610" (UID: "1477cefa-db6e-45d7-adb1-d7c1369a3610"). InnerVolumeSpecName "kube-api-access-67d79". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:50:49 crc kubenswrapper[4762]: I1009 13:50:49.380083 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage12-crc" (OuterVolumeSpecName: "swift") pod "1477cefa-db6e-45d7-adb1-d7c1369a3610" (UID: "1477cefa-db6e-45d7-adb1-d7c1369a3610"). InnerVolumeSpecName "local-storage12-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 09 13:50:49 crc kubenswrapper[4762]: I1009 13:50:49.380890 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1477cefa-db6e-45d7-adb1-d7c1369a3610-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "1477cefa-db6e-45d7-adb1-d7c1369a3610" (UID: "1477cefa-db6e-45d7-adb1-d7c1369a3610"). InnerVolumeSpecName "etc-swift". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:50:49 crc kubenswrapper[4762]: I1009 13:50:49.400890 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-ovs-4rsdz"] Oct 09 13:50:49 crc kubenswrapper[4762]: I1009 13:50:49.405048 4762 scope.go:117] "RemoveContainer" containerID="72d5721934c08b9523e1f7661fd0cc7022a05c9c3860815c11f3195afcfb75a0" Oct 09 13:50:49 crc kubenswrapper[4762]: I1009 13:50:49.405801 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-ovs-4rsdz"] Oct 09 13:50:49 crc kubenswrapper[4762]: I1009 13:50:49.425063 4762 scope.go:117] "RemoveContainer" containerID="66b743c3cccfcded94f471aad6c8c7c490c328d23eeb054b9a959b77e635f85a" Oct 09 13:50:49 crc kubenswrapper[4762]: I1009 13:50:49.444844 4762 scope.go:117] "RemoveContainer" containerID="e850af1b142da8d66827cd67251e451fbb6e56f0984b429a296cc6470f0641a5" Oct 09 13:50:49 crc kubenswrapper[4762]: I1009 13:50:49.477590 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-67d79\" (UniqueName: \"kubernetes.io/projected/1477cefa-db6e-45d7-adb1-d7c1369a3610-kube-api-access-67d79\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:49 crc kubenswrapper[4762]: I1009 13:50:49.477853 4762 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" " Oct 09 13:50:49 crc kubenswrapper[4762]: I1009 13:50:49.477993 4762 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/1477cefa-db6e-45d7-adb1-d7c1369a3610-etc-swift\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:49 crc kubenswrapper[4762]: I1009 13:50:49.485680 4762 scope.go:117] "RemoveContainer" containerID="e106dddaface5bda97c181d6c83b5407d6b119fe5aa03418fc12646ee1fe1b99" Oct 09 13:50:49 crc kubenswrapper[4762]: I1009 13:50:49.494273 4762 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage12-crc" (UniqueName: "kubernetes.io/local-volume/local-storage12-crc") on node "crc" Oct 09 13:50:49 crc kubenswrapper[4762]: I1009 13:50:49.502084 4762 scope.go:117] "RemoveContainer" containerID="8fdc84bf366cf69541cde7674950b5c46694290b85f776bf10157a1b99e79676" Oct 09 13:50:49 crc kubenswrapper[4762]: I1009 13:50:49.517376 4762 scope.go:117] "RemoveContainer" containerID="5c71985be4769cc8dca8656fd224116d6cfd9ba38fac16e449710cfac07fed62" Oct 09 13:50:49 crc kubenswrapper[4762]: I1009 13:50:49.537298 4762 scope.go:117] "RemoveContainer" containerID="567e831eb11e3bbc96e4ae494a3a48f3ce30d0330873e35b1584ad729d384384" Oct 09 13:50:49 crc kubenswrapper[4762]: I1009 13:50:49.556375 4762 scope.go:117] "RemoveContainer" containerID="acd57c56e1503069670b1070c75ca8274489494e90451ef9ccda9ba23b32fa18" Oct 09 13:50:49 crc kubenswrapper[4762]: I1009 13:50:49.574529 4762 scope.go:117] "RemoveContainer" containerID="97e103b14e33cb5dbaaed4498f0256e5eaa30d65d0cbbbc37cdddb35a31cab45" Oct 09 13:50:49 crc kubenswrapper[4762]: I1009 13:50:49.578615 4762 reconciler_common.go:293] "Volume detached for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:49 crc kubenswrapper[4762]: I1009 13:50:49.593713 4762 scope.go:117] "RemoveContainer" containerID="4d36fbb2c5b4d9e5e823bb89be5de822963abc13a4e641b17eb2acd134030bad" Oct 09 13:50:49 crc kubenswrapper[4762]: I1009 13:50:49.611699 4762 scope.go:117] "RemoveContainer" 
containerID="eb84e1b30f872410009e03d937f21e5a5aac63306bed8cebe6ff44f437abd217" Oct 09 13:50:49 crc kubenswrapper[4762]: I1009 13:50:49.628937 4762 scope.go:117] "RemoveContainer" containerID="27f0202dba594a2b3dbd7fb92b50964f812c6b9752cc21fbcc955dd912b04bce" Oct 09 13:50:49 crc kubenswrapper[4762]: I1009 13:50:49.645875 4762 scope.go:117] "RemoveContainer" containerID="8e56d6db5bd3ec01caa26962d4f73795f23978e73a54ad53603e87c2d20b3d95" Oct 09 13:50:49 crc kubenswrapper[4762]: I1009 13:50:49.663734 4762 scope.go:117] "RemoveContainer" containerID="00b505833ef1de865656bd407236b581616bea0b032dd2063522f8209a228b80" Oct 09 13:50:49 crc kubenswrapper[4762]: I1009 13:50:49.690246 4762 scope.go:117] "RemoveContainer" containerID="eacaa8b5d9b84164146ff7a584e9c725d4bb02cd996fc577efdabba2e1ba13b2" Oct 09 13:50:49 crc kubenswrapper[4762]: E1009 13:50:49.691048 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eacaa8b5d9b84164146ff7a584e9c725d4bb02cd996fc577efdabba2e1ba13b2\": container with ID starting with eacaa8b5d9b84164146ff7a584e9c725d4bb02cd996fc577efdabba2e1ba13b2 not found: ID does not exist" containerID="eacaa8b5d9b84164146ff7a584e9c725d4bb02cd996fc577efdabba2e1ba13b2" Oct 09 13:50:49 crc kubenswrapper[4762]: I1009 13:50:49.691133 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eacaa8b5d9b84164146ff7a584e9c725d4bb02cd996fc577efdabba2e1ba13b2"} err="failed to get container status \"eacaa8b5d9b84164146ff7a584e9c725d4bb02cd996fc577efdabba2e1ba13b2\": rpc error: code = NotFound desc = could not find container \"eacaa8b5d9b84164146ff7a584e9c725d4bb02cd996fc577efdabba2e1ba13b2\": container with ID starting with eacaa8b5d9b84164146ff7a584e9c725d4bb02cd996fc577efdabba2e1ba13b2 not found: ID does not exist" Oct 09 13:50:49 crc kubenswrapper[4762]: I1009 13:50:49.691175 4762 scope.go:117] "RemoveContainer" containerID="72d5721934c08b9523e1f7661fd0cc7022a05c9c3860815c11f3195afcfb75a0" Oct 09 13:50:49 crc kubenswrapper[4762]: E1009 13:50:49.691532 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"72d5721934c08b9523e1f7661fd0cc7022a05c9c3860815c11f3195afcfb75a0\": container with ID starting with 72d5721934c08b9523e1f7661fd0cc7022a05c9c3860815c11f3195afcfb75a0 not found: ID does not exist" containerID="72d5721934c08b9523e1f7661fd0cc7022a05c9c3860815c11f3195afcfb75a0" Oct 09 13:50:49 crc kubenswrapper[4762]: I1009 13:50:49.691570 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"72d5721934c08b9523e1f7661fd0cc7022a05c9c3860815c11f3195afcfb75a0"} err="failed to get container status \"72d5721934c08b9523e1f7661fd0cc7022a05c9c3860815c11f3195afcfb75a0\": rpc error: code = NotFound desc = could not find container \"72d5721934c08b9523e1f7661fd0cc7022a05c9c3860815c11f3195afcfb75a0\": container with ID starting with 72d5721934c08b9523e1f7661fd0cc7022a05c9c3860815c11f3195afcfb75a0 not found: ID does not exist" Oct 09 13:50:49 crc kubenswrapper[4762]: I1009 13:50:49.691589 4762 scope.go:117] "RemoveContainer" containerID="66b743c3cccfcded94f471aad6c8c7c490c328d23eeb054b9a959b77e635f85a" Oct 09 13:50:49 crc kubenswrapper[4762]: E1009 13:50:49.692172 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"66b743c3cccfcded94f471aad6c8c7c490c328d23eeb054b9a959b77e635f85a\": container with ID starting 
with 66b743c3cccfcded94f471aad6c8c7c490c328d23eeb054b9a959b77e635f85a not found: ID does not exist" containerID="66b743c3cccfcded94f471aad6c8c7c490c328d23eeb054b9a959b77e635f85a" Oct 09 13:50:49 crc kubenswrapper[4762]: I1009 13:50:49.692232 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"66b743c3cccfcded94f471aad6c8c7c490c328d23eeb054b9a959b77e635f85a"} err="failed to get container status \"66b743c3cccfcded94f471aad6c8c7c490c328d23eeb054b9a959b77e635f85a\": rpc error: code = NotFound desc = could not find container \"66b743c3cccfcded94f471aad6c8c7c490c328d23eeb054b9a959b77e635f85a\": container with ID starting with 66b743c3cccfcded94f471aad6c8c7c490c328d23eeb054b9a959b77e635f85a not found: ID does not exist" Oct 09 13:50:49 crc kubenswrapper[4762]: I1009 13:50:49.692270 4762 scope.go:117] "RemoveContainer" containerID="e850af1b142da8d66827cd67251e451fbb6e56f0984b429a296cc6470f0641a5" Oct 09 13:50:49 crc kubenswrapper[4762]: E1009 13:50:49.692693 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e850af1b142da8d66827cd67251e451fbb6e56f0984b429a296cc6470f0641a5\": container with ID starting with e850af1b142da8d66827cd67251e451fbb6e56f0984b429a296cc6470f0641a5 not found: ID does not exist" containerID="e850af1b142da8d66827cd67251e451fbb6e56f0984b429a296cc6470f0641a5" Oct 09 13:50:49 crc kubenswrapper[4762]: I1009 13:50:49.692725 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e850af1b142da8d66827cd67251e451fbb6e56f0984b429a296cc6470f0641a5"} err="failed to get container status \"e850af1b142da8d66827cd67251e451fbb6e56f0984b429a296cc6470f0641a5\": rpc error: code = NotFound desc = could not find container \"e850af1b142da8d66827cd67251e451fbb6e56f0984b429a296cc6470f0641a5\": container with ID starting with e850af1b142da8d66827cd67251e451fbb6e56f0984b429a296cc6470f0641a5 not found: ID does not exist" Oct 09 13:50:49 crc kubenswrapper[4762]: I1009 13:50:49.692744 4762 scope.go:117] "RemoveContainer" containerID="e106dddaface5bda97c181d6c83b5407d6b119fe5aa03418fc12646ee1fe1b99" Oct 09 13:50:49 crc kubenswrapper[4762]: E1009 13:50:49.693097 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e106dddaface5bda97c181d6c83b5407d6b119fe5aa03418fc12646ee1fe1b99\": container with ID starting with e106dddaface5bda97c181d6c83b5407d6b119fe5aa03418fc12646ee1fe1b99 not found: ID does not exist" containerID="e106dddaface5bda97c181d6c83b5407d6b119fe5aa03418fc12646ee1fe1b99" Oct 09 13:50:49 crc kubenswrapper[4762]: I1009 13:50:49.693150 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e106dddaface5bda97c181d6c83b5407d6b119fe5aa03418fc12646ee1fe1b99"} err="failed to get container status \"e106dddaface5bda97c181d6c83b5407d6b119fe5aa03418fc12646ee1fe1b99\": rpc error: code = NotFound desc = could not find container \"e106dddaface5bda97c181d6c83b5407d6b119fe5aa03418fc12646ee1fe1b99\": container with ID starting with e106dddaface5bda97c181d6c83b5407d6b119fe5aa03418fc12646ee1fe1b99 not found: ID does not exist" Oct 09 13:50:49 crc kubenswrapper[4762]: I1009 13:50:49.693174 4762 scope.go:117] "RemoveContainer" containerID="8fdc84bf366cf69541cde7674950b5c46694290b85f776bf10157a1b99e79676" Oct 09 13:50:49 crc kubenswrapper[4762]: E1009 13:50:49.693776 4762 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"8fdc84bf366cf69541cde7674950b5c46694290b85f776bf10157a1b99e79676\": container with ID starting with 8fdc84bf366cf69541cde7674950b5c46694290b85f776bf10157a1b99e79676 not found: ID does not exist" containerID="8fdc84bf366cf69541cde7674950b5c46694290b85f776bf10157a1b99e79676" Oct 09 13:50:49 crc kubenswrapper[4762]: I1009 13:50:49.693809 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8fdc84bf366cf69541cde7674950b5c46694290b85f776bf10157a1b99e79676"} err="failed to get container status \"8fdc84bf366cf69541cde7674950b5c46694290b85f776bf10157a1b99e79676\": rpc error: code = NotFound desc = could not find container \"8fdc84bf366cf69541cde7674950b5c46694290b85f776bf10157a1b99e79676\": container with ID starting with 8fdc84bf366cf69541cde7674950b5c46694290b85f776bf10157a1b99e79676 not found: ID does not exist" Oct 09 13:50:49 crc kubenswrapper[4762]: I1009 13:50:49.693832 4762 scope.go:117] "RemoveContainer" containerID="5c71985be4769cc8dca8656fd224116d6cfd9ba38fac16e449710cfac07fed62" Oct 09 13:50:49 crc kubenswrapper[4762]: E1009 13:50:49.694155 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5c71985be4769cc8dca8656fd224116d6cfd9ba38fac16e449710cfac07fed62\": container with ID starting with 5c71985be4769cc8dca8656fd224116d6cfd9ba38fac16e449710cfac07fed62 not found: ID does not exist" containerID="5c71985be4769cc8dca8656fd224116d6cfd9ba38fac16e449710cfac07fed62" Oct 09 13:50:49 crc kubenswrapper[4762]: I1009 13:50:49.694191 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5c71985be4769cc8dca8656fd224116d6cfd9ba38fac16e449710cfac07fed62"} err="failed to get container status \"5c71985be4769cc8dca8656fd224116d6cfd9ba38fac16e449710cfac07fed62\": rpc error: code = NotFound desc = could not find container \"5c71985be4769cc8dca8656fd224116d6cfd9ba38fac16e449710cfac07fed62\": container with ID starting with 5c71985be4769cc8dca8656fd224116d6cfd9ba38fac16e449710cfac07fed62 not found: ID does not exist" Oct 09 13:50:49 crc kubenswrapper[4762]: I1009 13:50:49.694215 4762 scope.go:117] "RemoveContainer" containerID="567e831eb11e3bbc96e4ae494a3a48f3ce30d0330873e35b1584ad729d384384" Oct 09 13:50:49 crc kubenswrapper[4762]: E1009 13:50:49.694776 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"567e831eb11e3bbc96e4ae494a3a48f3ce30d0330873e35b1584ad729d384384\": container with ID starting with 567e831eb11e3bbc96e4ae494a3a48f3ce30d0330873e35b1584ad729d384384 not found: ID does not exist" containerID="567e831eb11e3bbc96e4ae494a3a48f3ce30d0330873e35b1584ad729d384384" Oct 09 13:50:49 crc kubenswrapper[4762]: I1009 13:50:49.694833 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"567e831eb11e3bbc96e4ae494a3a48f3ce30d0330873e35b1584ad729d384384"} err="failed to get container status \"567e831eb11e3bbc96e4ae494a3a48f3ce30d0330873e35b1584ad729d384384\": rpc error: code = NotFound desc = could not find container \"567e831eb11e3bbc96e4ae494a3a48f3ce30d0330873e35b1584ad729d384384\": container with ID starting with 567e831eb11e3bbc96e4ae494a3a48f3ce30d0330873e35b1584ad729d384384 not found: ID does not exist" Oct 09 13:50:49 crc kubenswrapper[4762]: I1009 13:50:49.694855 4762 scope.go:117] "RemoveContainer" 
containerID="acd57c56e1503069670b1070c75ca8274489494e90451ef9ccda9ba23b32fa18" Oct 09 13:50:49 crc kubenswrapper[4762]: E1009 13:50:49.695525 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"acd57c56e1503069670b1070c75ca8274489494e90451ef9ccda9ba23b32fa18\": container with ID starting with acd57c56e1503069670b1070c75ca8274489494e90451ef9ccda9ba23b32fa18 not found: ID does not exist" containerID="acd57c56e1503069670b1070c75ca8274489494e90451ef9ccda9ba23b32fa18" Oct 09 13:50:49 crc kubenswrapper[4762]: I1009 13:50:49.695567 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"acd57c56e1503069670b1070c75ca8274489494e90451ef9ccda9ba23b32fa18"} err="failed to get container status \"acd57c56e1503069670b1070c75ca8274489494e90451ef9ccda9ba23b32fa18\": rpc error: code = NotFound desc = could not find container \"acd57c56e1503069670b1070c75ca8274489494e90451ef9ccda9ba23b32fa18\": container with ID starting with acd57c56e1503069670b1070c75ca8274489494e90451ef9ccda9ba23b32fa18 not found: ID does not exist" Oct 09 13:50:49 crc kubenswrapper[4762]: I1009 13:50:49.695589 4762 scope.go:117] "RemoveContainer" containerID="97e103b14e33cb5dbaaed4498f0256e5eaa30d65d0cbbbc37cdddb35a31cab45" Oct 09 13:50:49 crc kubenswrapper[4762]: E1009 13:50:49.695984 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"97e103b14e33cb5dbaaed4498f0256e5eaa30d65d0cbbbc37cdddb35a31cab45\": container with ID starting with 97e103b14e33cb5dbaaed4498f0256e5eaa30d65d0cbbbc37cdddb35a31cab45 not found: ID does not exist" containerID="97e103b14e33cb5dbaaed4498f0256e5eaa30d65d0cbbbc37cdddb35a31cab45" Oct 09 13:50:49 crc kubenswrapper[4762]: I1009 13:50:49.696016 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"97e103b14e33cb5dbaaed4498f0256e5eaa30d65d0cbbbc37cdddb35a31cab45"} err="failed to get container status \"97e103b14e33cb5dbaaed4498f0256e5eaa30d65d0cbbbc37cdddb35a31cab45\": rpc error: code = NotFound desc = could not find container \"97e103b14e33cb5dbaaed4498f0256e5eaa30d65d0cbbbc37cdddb35a31cab45\": container with ID starting with 97e103b14e33cb5dbaaed4498f0256e5eaa30d65d0cbbbc37cdddb35a31cab45 not found: ID does not exist" Oct 09 13:50:49 crc kubenswrapper[4762]: I1009 13:50:49.696039 4762 scope.go:117] "RemoveContainer" containerID="4d36fbb2c5b4d9e5e823bb89be5de822963abc13a4e641b17eb2acd134030bad" Oct 09 13:50:49 crc kubenswrapper[4762]: E1009 13:50:49.696525 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4d36fbb2c5b4d9e5e823bb89be5de822963abc13a4e641b17eb2acd134030bad\": container with ID starting with 4d36fbb2c5b4d9e5e823bb89be5de822963abc13a4e641b17eb2acd134030bad not found: ID does not exist" containerID="4d36fbb2c5b4d9e5e823bb89be5de822963abc13a4e641b17eb2acd134030bad" Oct 09 13:50:49 crc kubenswrapper[4762]: I1009 13:50:49.696581 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4d36fbb2c5b4d9e5e823bb89be5de822963abc13a4e641b17eb2acd134030bad"} err="failed to get container status \"4d36fbb2c5b4d9e5e823bb89be5de822963abc13a4e641b17eb2acd134030bad\": rpc error: code = NotFound desc = could not find container \"4d36fbb2c5b4d9e5e823bb89be5de822963abc13a4e641b17eb2acd134030bad\": container with ID starting with 
4d36fbb2c5b4d9e5e823bb89be5de822963abc13a4e641b17eb2acd134030bad not found: ID does not exist" Oct 09 13:50:49 crc kubenswrapper[4762]: I1009 13:50:49.696603 4762 scope.go:117] "RemoveContainer" containerID="eb84e1b30f872410009e03d937f21e5a5aac63306bed8cebe6ff44f437abd217" Oct 09 13:50:49 crc kubenswrapper[4762]: E1009 13:50:49.697113 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eb84e1b30f872410009e03d937f21e5a5aac63306bed8cebe6ff44f437abd217\": container with ID starting with eb84e1b30f872410009e03d937f21e5a5aac63306bed8cebe6ff44f437abd217 not found: ID does not exist" containerID="eb84e1b30f872410009e03d937f21e5a5aac63306bed8cebe6ff44f437abd217" Oct 09 13:50:49 crc kubenswrapper[4762]: I1009 13:50:49.697154 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eb84e1b30f872410009e03d937f21e5a5aac63306bed8cebe6ff44f437abd217"} err="failed to get container status \"eb84e1b30f872410009e03d937f21e5a5aac63306bed8cebe6ff44f437abd217\": rpc error: code = NotFound desc = could not find container \"eb84e1b30f872410009e03d937f21e5a5aac63306bed8cebe6ff44f437abd217\": container with ID starting with eb84e1b30f872410009e03d937f21e5a5aac63306bed8cebe6ff44f437abd217 not found: ID does not exist" Oct 09 13:50:49 crc kubenswrapper[4762]: I1009 13:50:49.697193 4762 scope.go:117] "RemoveContainer" containerID="27f0202dba594a2b3dbd7fb92b50964f812c6b9752cc21fbcc955dd912b04bce" Oct 09 13:50:49 crc kubenswrapper[4762]: E1009 13:50:49.697978 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"27f0202dba594a2b3dbd7fb92b50964f812c6b9752cc21fbcc955dd912b04bce\": container with ID starting with 27f0202dba594a2b3dbd7fb92b50964f812c6b9752cc21fbcc955dd912b04bce not found: ID does not exist" containerID="27f0202dba594a2b3dbd7fb92b50964f812c6b9752cc21fbcc955dd912b04bce" Oct 09 13:50:49 crc kubenswrapper[4762]: I1009 13:50:49.698005 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"27f0202dba594a2b3dbd7fb92b50964f812c6b9752cc21fbcc955dd912b04bce"} err="failed to get container status \"27f0202dba594a2b3dbd7fb92b50964f812c6b9752cc21fbcc955dd912b04bce\": rpc error: code = NotFound desc = could not find container \"27f0202dba594a2b3dbd7fb92b50964f812c6b9752cc21fbcc955dd912b04bce\": container with ID starting with 27f0202dba594a2b3dbd7fb92b50964f812c6b9752cc21fbcc955dd912b04bce not found: ID does not exist" Oct 09 13:50:49 crc kubenswrapper[4762]: I1009 13:50:49.698029 4762 scope.go:117] "RemoveContainer" containerID="8e56d6db5bd3ec01caa26962d4f73795f23978e73a54ad53603e87c2d20b3d95" Oct 09 13:50:49 crc kubenswrapper[4762]: E1009 13:50:49.698480 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8e56d6db5bd3ec01caa26962d4f73795f23978e73a54ad53603e87c2d20b3d95\": container with ID starting with 8e56d6db5bd3ec01caa26962d4f73795f23978e73a54ad53603e87c2d20b3d95 not found: ID does not exist" containerID="8e56d6db5bd3ec01caa26962d4f73795f23978e73a54ad53603e87c2d20b3d95" Oct 09 13:50:49 crc kubenswrapper[4762]: I1009 13:50:49.698523 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8e56d6db5bd3ec01caa26962d4f73795f23978e73a54ad53603e87c2d20b3d95"} err="failed to get container status \"8e56d6db5bd3ec01caa26962d4f73795f23978e73a54ad53603e87c2d20b3d95\": rpc 
error: code = NotFound desc = could not find container \"8e56d6db5bd3ec01caa26962d4f73795f23978e73a54ad53603e87c2d20b3d95\": container with ID starting with 8e56d6db5bd3ec01caa26962d4f73795f23978e73a54ad53603e87c2d20b3d95 not found: ID does not exist" Oct 09 13:50:49 crc kubenswrapper[4762]: I1009 13:50:49.698543 4762 scope.go:117] "RemoveContainer" containerID="00b505833ef1de865656bd407236b581616bea0b032dd2063522f8209a228b80" Oct 09 13:50:49 crc kubenswrapper[4762]: E1009 13:50:49.698872 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"00b505833ef1de865656bd407236b581616bea0b032dd2063522f8209a228b80\": container with ID starting with 00b505833ef1de865656bd407236b581616bea0b032dd2063522f8209a228b80 not found: ID does not exist" containerID="00b505833ef1de865656bd407236b581616bea0b032dd2063522f8209a228b80" Oct 09 13:50:49 crc kubenswrapper[4762]: I1009 13:50:49.698903 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"00b505833ef1de865656bd407236b581616bea0b032dd2063522f8209a228b80"} err="failed to get container status \"00b505833ef1de865656bd407236b581616bea0b032dd2063522f8209a228b80\": rpc error: code = NotFound desc = could not find container \"00b505833ef1de865656bd407236b581616bea0b032dd2063522f8209a228b80\": container with ID starting with 00b505833ef1de865656bd407236b581616bea0b032dd2063522f8209a228b80 not found: ID does not exist" Oct 09 13:50:49 crc kubenswrapper[4762]: I1009 13:50:49.698940 4762 scope.go:117] "RemoveContainer" containerID="6adc8ac732e2e99b70d366ead1fb8b6aac18a135339e217a7db01f59192c2ee6" Oct 09 13:50:49 crc kubenswrapper[4762]: I1009 13:50:49.716014 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-storage-0"] Oct 09 13:50:49 crc kubenswrapper[4762]: I1009 13:50:49.722985 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/swift-storage-0"] Oct 09 13:50:49 crc kubenswrapper[4762]: I1009 13:50:49.740680 4762 scope.go:117] "RemoveContainer" containerID="8f00e71744c4d358c802b16ce5f508b3f36e83f09d97012b8034c4e61e6b235c" Oct 09 13:50:49 crc kubenswrapper[4762]: I1009 13:50:49.758376 4762 scope.go:117] "RemoveContainer" containerID="9440d2119ea995666bf2fdbfde3704960362a7821c57d356599df491a1018a5b" Oct 09 13:50:49 crc kubenswrapper[4762]: I1009 13:50:49.781666 4762 scope.go:117] "RemoveContainer" containerID="6adc8ac732e2e99b70d366ead1fb8b6aac18a135339e217a7db01f59192c2ee6" Oct 09 13:50:49 crc kubenswrapper[4762]: E1009 13:50:49.782212 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6adc8ac732e2e99b70d366ead1fb8b6aac18a135339e217a7db01f59192c2ee6\": container with ID starting with 6adc8ac732e2e99b70d366ead1fb8b6aac18a135339e217a7db01f59192c2ee6 not found: ID does not exist" containerID="6adc8ac732e2e99b70d366ead1fb8b6aac18a135339e217a7db01f59192c2ee6" Oct 09 13:50:49 crc kubenswrapper[4762]: I1009 13:50:49.782260 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6adc8ac732e2e99b70d366ead1fb8b6aac18a135339e217a7db01f59192c2ee6"} err="failed to get container status \"6adc8ac732e2e99b70d366ead1fb8b6aac18a135339e217a7db01f59192c2ee6\": rpc error: code = NotFound desc = could not find container \"6adc8ac732e2e99b70d366ead1fb8b6aac18a135339e217a7db01f59192c2ee6\": container with ID starting with 6adc8ac732e2e99b70d366ead1fb8b6aac18a135339e217a7db01f59192c2ee6 not found: ID does 
not exist" Oct 09 13:50:49 crc kubenswrapper[4762]: I1009 13:50:49.782290 4762 scope.go:117] "RemoveContainer" containerID="8f00e71744c4d358c802b16ce5f508b3f36e83f09d97012b8034c4e61e6b235c" Oct 09 13:50:49 crc kubenswrapper[4762]: E1009 13:50:49.782623 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8f00e71744c4d358c802b16ce5f508b3f36e83f09d97012b8034c4e61e6b235c\": container with ID starting with 8f00e71744c4d358c802b16ce5f508b3f36e83f09d97012b8034c4e61e6b235c not found: ID does not exist" containerID="8f00e71744c4d358c802b16ce5f508b3f36e83f09d97012b8034c4e61e6b235c" Oct 09 13:50:49 crc kubenswrapper[4762]: I1009 13:50:49.782754 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8f00e71744c4d358c802b16ce5f508b3f36e83f09d97012b8034c4e61e6b235c"} err="failed to get container status \"8f00e71744c4d358c802b16ce5f508b3f36e83f09d97012b8034c4e61e6b235c\": rpc error: code = NotFound desc = could not find container \"8f00e71744c4d358c802b16ce5f508b3f36e83f09d97012b8034c4e61e6b235c\": container with ID starting with 8f00e71744c4d358c802b16ce5f508b3f36e83f09d97012b8034c4e61e6b235c not found: ID does not exist" Oct 09 13:50:49 crc kubenswrapper[4762]: I1009 13:50:49.782779 4762 scope.go:117] "RemoveContainer" containerID="9440d2119ea995666bf2fdbfde3704960362a7821c57d356599df491a1018a5b" Oct 09 13:50:49 crc kubenswrapper[4762]: E1009 13:50:49.783095 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9440d2119ea995666bf2fdbfde3704960362a7821c57d356599df491a1018a5b\": container with ID starting with 9440d2119ea995666bf2fdbfde3704960362a7821c57d356599df491a1018a5b not found: ID does not exist" containerID="9440d2119ea995666bf2fdbfde3704960362a7821c57d356599df491a1018a5b" Oct 09 13:50:49 crc kubenswrapper[4762]: I1009 13:50:49.783119 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9440d2119ea995666bf2fdbfde3704960362a7821c57d356599df491a1018a5b"} err="failed to get container status \"9440d2119ea995666bf2fdbfde3704960362a7821c57d356599df491a1018a5b\": rpc error: code = NotFound desc = could not find container \"9440d2119ea995666bf2fdbfde3704960362a7821c57d356599df491a1018a5b\": container with ID starting with 9440d2119ea995666bf2fdbfde3704960362a7821c57d356599df491a1018a5b not found: ID does not exist" Oct 09 13:50:50 crc kubenswrapper[4762]: I1009 13:50:50.977096 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1477cefa-db6e-45d7-adb1-d7c1369a3610" path="/var/lib/kubelet/pods/1477cefa-db6e-45d7-adb1-d7c1369a3610/volumes" Oct 09 13:50:50 crc kubenswrapper[4762]: I1009 13:50:50.979694 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="851f6b4a-bac4-4c7e-8d7b-46c7513269d9" path="/var/lib/kubelet/pods/851f6b4a-bac4-4c7e-8d7b-46c7513269d9/volumes" Oct 09 13:50:52 crc kubenswrapper[4762]: W1009 13:50:52.076627 4762 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc01df130_cb51_49a8_bfc4_4df9b51ecae1.slice": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc01df130_cb51_49a8_bfc4_4df9b51ecae1.slice: no such file or directory Oct 09 13:50:52 crc kubenswrapper[4762]: I1009 13:50:52.407348 4762 generic.go:334] "Generic (PLEG): container finished" 
podID="655c116e-99bf-4f34-a050-bbc1aa3e9b62" containerID="36cb5740bfd3757c1db9d2f58e72c98daf1730cccfbc6e4f523e38cdf12df964" exitCode=137 Oct 09 13:50:52 crc kubenswrapper[4762]: I1009 13:50:52.407415 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-5d57d9d56c-xrhgn" event={"ID":"655c116e-99bf-4f34-a050-bbc1aa3e9b62","Type":"ContainerDied","Data":"36cb5740bfd3757c1db9d2f58e72c98daf1730cccfbc6e4f523e38cdf12df964"} Oct 09 13:50:52 crc kubenswrapper[4762]: I1009 13:50:52.407797 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-5d57d9d56c-xrhgn" event={"ID":"655c116e-99bf-4f34-a050-bbc1aa3e9b62","Type":"ContainerDied","Data":"8f2c1f32394749bf3a3b28fbc170553d9b3864c525eedb405a0476f08a9b9265"} Oct 09 13:50:52 crc kubenswrapper[4762]: I1009 13:50:52.407816 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8f2c1f32394749bf3a3b28fbc170553d9b3864c525eedb405a0476f08a9b9265" Oct 09 13:50:52 crc kubenswrapper[4762]: I1009 13:50:52.408187 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-5d57d9d56c-xrhgn" Oct 09 13:50:52 crc kubenswrapper[4762]: I1009 13:50:52.515877 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rcv2m\" (UniqueName: \"kubernetes.io/projected/655c116e-99bf-4f34-a050-bbc1aa3e9b62-kube-api-access-rcv2m\") pod \"655c116e-99bf-4f34-a050-bbc1aa3e9b62\" (UID: \"655c116e-99bf-4f34-a050-bbc1aa3e9b62\") " Oct 09 13:50:52 crc kubenswrapper[4762]: I1009 13:50:52.516116 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/655c116e-99bf-4f34-a050-bbc1aa3e9b62-logs\") pod \"655c116e-99bf-4f34-a050-bbc1aa3e9b62\" (UID: \"655c116e-99bf-4f34-a050-bbc1aa3e9b62\") " Oct 09 13:50:52 crc kubenswrapper[4762]: I1009 13:50:52.516179 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/655c116e-99bf-4f34-a050-bbc1aa3e9b62-config-data-custom\") pod \"655c116e-99bf-4f34-a050-bbc1aa3e9b62\" (UID: \"655c116e-99bf-4f34-a050-bbc1aa3e9b62\") " Oct 09 13:50:52 crc kubenswrapper[4762]: I1009 13:50:52.516248 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/655c116e-99bf-4f34-a050-bbc1aa3e9b62-config-data\") pod \"655c116e-99bf-4f34-a050-bbc1aa3e9b62\" (UID: \"655c116e-99bf-4f34-a050-bbc1aa3e9b62\") " Oct 09 13:50:52 crc kubenswrapper[4762]: I1009 13:50:52.516305 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/655c116e-99bf-4f34-a050-bbc1aa3e9b62-combined-ca-bundle\") pod \"655c116e-99bf-4f34-a050-bbc1aa3e9b62\" (UID: \"655c116e-99bf-4f34-a050-bbc1aa3e9b62\") " Oct 09 13:50:52 crc kubenswrapper[4762]: I1009 13:50:52.516768 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/655c116e-99bf-4f34-a050-bbc1aa3e9b62-logs" (OuterVolumeSpecName: "logs") pod "655c116e-99bf-4f34-a050-bbc1aa3e9b62" (UID: "655c116e-99bf-4f34-a050-bbc1aa3e9b62"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 13:50:52 crc kubenswrapper[4762]: I1009 13:50:52.531594 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/655c116e-99bf-4f34-a050-bbc1aa3e9b62-kube-api-access-rcv2m" (OuterVolumeSpecName: "kube-api-access-rcv2m") pod "655c116e-99bf-4f34-a050-bbc1aa3e9b62" (UID: "655c116e-99bf-4f34-a050-bbc1aa3e9b62"). InnerVolumeSpecName "kube-api-access-rcv2m". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:50:52 crc kubenswrapper[4762]: I1009 13:50:52.534375 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/655c116e-99bf-4f34-a050-bbc1aa3e9b62-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "655c116e-99bf-4f34-a050-bbc1aa3e9b62" (UID: "655c116e-99bf-4f34-a050-bbc1aa3e9b62"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:50:52 crc kubenswrapper[4762]: I1009 13:50:52.545525 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/655c116e-99bf-4f34-a050-bbc1aa3e9b62-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "655c116e-99bf-4f34-a050-bbc1aa3e9b62" (UID: "655c116e-99bf-4f34-a050-bbc1aa3e9b62"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:50:52 crc kubenswrapper[4762]: I1009 13:50:52.560460 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/655c116e-99bf-4f34-a050-bbc1aa3e9b62-config-data" (OuterVolumeSpecName: "config-data") pod "655c116e-99bf-4f34-a050-bbc1aa3e9b62" (UID: "655c116e-99bf-4f34-a050-bbc1aa3e9b62"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 13:50:52 crc kubenswrapper[4762]: I1009 13:50:52.617713 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rcv2m\" (UniqueName: \"kubernetes.io/projected/655c116e-99bf-4f34-a050-bbc1aa3e9b62-kube-api-access-rcv2m\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:52 crc kubenswrapper[4762]: I1009 13:50:52.617750 4762 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/655c116e-99bf-4f34-a050-bbc1aa3e9b62-logs\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:52 crc kubenswrapper[4762]: I1009 13:50:52.617761 4762 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/655c116e-99bf-4f34-a050-bbc1aa3e9b62-config-data-custom\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:52 crc kubenswrapper[4762]: I1009 13:50:52.617770 4762 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/655c116e-99bf-4f34-a050-bbc1aa3e9b62-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:52 crc kubenswrapper[4762]: I1009 13:50:52.617778 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/655c116e-99bf-4f34-a050-bbc1aa3e9b62-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 13:50:52 crc kubenswrapper[4762]: I1009 13:50:52.842617 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-wpvwt"] Oct 09 13:50:52 crc kubenswrapper[4762]: E1009 13:50:52.843660 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f119cc4-119d-4535-8abd-fe380d546595" containerName="barbican-keystone-listener-log" Oct 09 13:50:52 crc kubenswrapper[4762]: I1009 13:50:52.843681 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f119cc4-119d-4535-8abd-fe380d546595" containerName="barbican-keystone-listener-log" Oct 09 13:50:52 crc kubenswrapper[4762]: E1009 13:50:52.843698 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cebc2aaf-d953-4acb-a8cd-31119e6cd7fd" containerName="barbican-worker-log" Oct 09 13:50:52 crc kubenswrapper[4762]: I1009 13:50:52.843705 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="cebc2aaf-d953-4acb-a8cd-31119e6cd7fd" containerName="barbican-worker-log" Oct 09 13:50:52 crc kubenswrapper[4762]: E1009 13:50:52.843715 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd7f6272-8875-4adb-b8d3-1cfe2651f738" containerName="neutron-api" Oct 09 13:50:52 crc kubenswrapper[4762]: I1009 13:50:52.843721 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd7f6272-8875-4adb-b8d3-1cfe2651f738" containerName="neutron-api" Oct 09 13:50:52 crc kubenswrapper[4762]: E1009 13:50:52.843734 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1477cefa-db6e-45d7-adb1-d7c1369a3610" containerName="container-replicator" Oct 09 13:50:52 crc kubenswrapper[4762]: I1009 13:50:52.843740 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="1477cefa-db6e-45d7-adb1-d7c1369a3610" containerName="container-replicator" Oct 09 13:50:52 crc kubenswrapper[4762]: E1009 13:50:52.843751 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1477cefa-db6e-45d7-adb1-d7c1369a3610" containerName="container-updater" Oct 09 13:50:52 crc kubenswrapper[4762]: I1009 13:50:52.843759 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="1477cefa-db6e-45d7-adb1-d7c1369a3610" 
containerName="container-updater" Oct 09 13:50:52 crc kubenswrapper[4762]: E1009 13:50:52.843769 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cebc2aaf-d953-4acb-a8cd-31119e6cd7fd" containerName="barbican-worker" Oct 09 13:50:52 crc kubenswrapper[4762]: I1009 13:50:52.843776 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="cebc2aaf-d953-4acb-a8cd-31119e6cd7fd" containerName="barbican-worker" Oct 09 13:50:52 crc kubenswrapper[4762]: E1009 13:50:52.843791 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="851f6b4a-bac4-4c7e-8d7b-46c7513269d9" containerName="ovsdb-server" Oct 09 13:50:52 crc kubenswrapper[4762]: I1009 13:50:52.843796 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="851f6b4a-bac4-4c7e-8d7b-46c7513269d9" containerName="ovsdb-server" Oct 09 13:50:52 crc kubenswrapper[4762]: E1009 13:50:52.843802 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1477cefa-db6e-45d7-adb1-d7c1369a3610" containerName="rsync" Oct 09 13:50:52 crc kubenswrapper[4762]: I1009 13:50:52.843808 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="1477cefa-db6e-45d7-adb1-d7c1369a3610" containerName="rsync" Oct 09 13:50:52 crc kubenswrapper[4762]: E1009 13:50:52.843819 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1477cefa-db6e-45d7-adb1-d7c1369a3610" containerName="swift-recon-cron" Oct 09 13:50:52 crc kubenswrapper[4762]: I1009 13:50:52.843825 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="1477cefa-db6e-45d7-adb1-d7c1369a3610" containerName="swift-recon-cron" Oct 09 13:50:52 crc kubenswrapper[4762]: E1009 13:50:52.843833 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c01df130-cb51-49a8-bfc4-4df9b51ecae1" containerName="extract-content" Oct 09 13:50:52 crc kubenswrapper[4762]: I1009 13:50:52.843843 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="c01df130-cb51-49a8-bfc4-4df9b51ecae1" containerName="extract-content" Oct 09 13:50:52 crc kubenswrapper[4762]: E1009 13:50:52.843856 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1477cefa-db6e-45d7-adb1-d7c1369a3610" containerName="account-auditor" Oct 09 13:50:52 crc kubenswrapper[4762]: I1009 13:50:52.843862 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="1477cefa-db6e-45d7-adb1-d7c1369a3610" containerName="account-auditor" Oct 09 13:50:52 crc kubenswrapper[4762]: E1009 13:50:52.843876 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1477cefa-db6e-45d7-adb1-d7c1369a3610" containerName="object-server" Oct 09 13:50:52 crc kubenswrapper[4762]: I1009 13:50:52.843884 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="1477cefa-db6e-45d7-adb1-d7c1369a3610" containerName="object-server" Oct 09 13:50:52 crc kubenswrapper[4762]: E1009 13:50:52.843893 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1477cefa-db6e-45d7-adb1-d7c1369a3610" containerName="object-auditor" Oct 09 13:50:52 crc kubenswrapper[4762]: I1009 13:50:52.843899 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="1477cefa-db6e-45d7-adb1-d7c1369a3610" containerName="object-auditor" Oct 09 13:50:52 crc kubenswrapper[4762]: E1009 13:50:52.843911 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c01df130-cb51-49a8-bfc4-4df9b51ecae1" containerName="extract-utilities" Oct 09 13:50:52 crc kubenswrapper[4762]: I1009 13:50:52.843918 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="c01df130-cb51-49a8-bfc4-4df9b51ecae1" 
containerName="extract-utilities" Oct 09 13:50:52 crc kubenswrapper[4762]: E1009 13:50:52.843931 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1477cefa-db6e-45d7-adb1-d7c1369a3610" containerName="account-replicator" Oct 09 13:50:52 crc kubenswrapper[4762]: I1009 13:50:52.843938 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="1477cefa-db6e-45d7-adb1-d7c1369a3610" containerName="account-replicator" Oct 09 13:50:52 crc kubenswrapper[4762]: E1009 13:50:52.843950 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c01df130-cb51-49a8-bfc4-4df9b51ecae1" containerName="registry-server" Oct 09 13:50:52 crc kubenswrapper[4762]: I1009 13:50:52.843956 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="c01df130-cb51-49a8-bfc4-4df9b51ecae1" containerName="registry-server" Oct 09 13:50:52 crc kubenswrapper[4762]: E1009 13:50:52.843969 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e38c807b-a632-4a96-b228-c879b07fd461" containerName="galera" Oct 09 13:50:52 crc kubenswrapper[4762]: I1009 13:50:52.843976 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="e38c807b-a632-4a96-b228-c879b07fd461" containerName="galera" Oct 09 13:50:52 crc kubenswrapper[4762]: E1009 13:50:52.843986 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd7f6272-8875-4adb-b8d3-1cfe2651f738" containerName="neutron-httpd" Oct 09 13:50:52 crc kubenswrapper[4762]: I1009 13:50:52.843993 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd7f6272-8875-4adb-b8d3-1cfe2651f738" containerName="neutron-httpd" Oct 09 13:50:52 crc kubenswrapper[4762]: E1009 13:50:52.844008 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1477cefa-db6e-45d7-adb1-d7c1369a3610" containerName="container-auditor" Oct 09 13:50:52 crc kubenswrapper[4762]: I1009 13:50:52.844017 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="1477cefa-db6e-45d7-adb1-d7c1369a3610" containerName="container-auditor" Oct 09 13:50:52 crc kubenswrapper[4762]: E1009 13:50:52.844027 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1477cefa-db6e-45d7-adb1-d7c1369a3610" containerName="object-replicator" Oct 09 13:50:52 crc kubenswrapper[4762]: I1009 13:50:52.844037 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="1477cefa-db6e-45d7-adb1-d7c1369a3610" containerName="object-replicator" Oct 09 13:50:52 crc kubenswrapper[4762]: E1009 13:50:52.844056 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="851f6b4a-bac4-4c7e-8d7b-46c7513269d9" containerName="ovsdb-server-init" Oct 09 13:50:52 crc kubenswrapper[4762]: I1009 13:50:52.844068 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="851f6b4a-bac4-4c7e-8d7b-46c7513269d9" containerName="ovsdb-server-init" Oct 09 13:50:52 crc kubenswrapper[4762]: E1009 13:50:52.844081 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1477cefa-db6e-45d7-adb1-d7c1369a3610" containerName="container-server" Oct 09 13:50:52 crc kubenswrapper[4762]: I1009 13:50:52.844089 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="1477cefa-db6e-45d7-adb1-d7c1369a3610" containerName="container-server" Oct 09 13:50:52 crc kubenswrapper[4762]: E1009 13:50:52.844105 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1477cefa-db6e-45d7-adb1-d7c1369a3610" containerName="account-reaper" Oct 09 13:50:52 crc kubenswrapper[4762]: I1009 13:50:52.844112 4762 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="1477cefa-db6e-45d7-adb1-d7c1369a3610" containerName="account-reaper" Oct 09 13:50:52 crc kubenswrapper[4762]: E1009 13:50:52.844124 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="655c116e-99bf-4f34-a050-bbc1aa3e9b62" containerName="barbican-worker" Oct 09 13:50:52 crc kubenswrapper[4762]: I1009 13:50:52.844132 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="655c116e-99bf-4f34-a050-bbc1aa3e9b62" containerName="barbican-worker" Oct 09 13:50:52 crc kubenswrapper[4762]: E1009 13:50:52.844141 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2b85dbb0-642b-4f4e-a616-7904624b2e5a" containerName="nova-cell1-conductor-conductor" Oct 09 13:50:52 crc kubenswrapper[4762]: I1009 13:50:52.844147 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="2b85dbb0-642b-4f4e-a616-7904624b2e5a" containerName="nova-cell1-conductor-conductor" Oct 09 13:50:52 crc kubenswrapper[4762]: E1009 13:50:52.844155 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1477cefa-db6e-45d7-adb1-d7c1369a3610" containerName="account-server" Oct 09 13:50:52 crc kubenswrapper[4762]: I1009 13:50:52.844162 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="1477cefa-db6e-45d7-adb1-d7c1369a3610" containerName="account-server" Oct 09 13:50:52 crc kubenswrapper[4762]: E1009 13:50:52.844169 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1477cefa-db6e-45d7-adb1-d7c1369a3610" containerName="object-expirer" Oct 09 13:50:52 crc kubenswrapper[4762]: I1009 13:50:52.844175 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="1477cefa-db6e-45d7-adb1-d7c1369a3610" containerName="object-expirer" Oct 09 13:50:52 crc kubenswrapper[4762]: E1009 13:50:52.844186 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e38c807b-a632-4a96-b228-c879b07fd461" containerName="mysql-bootstrap" Oct 09 13:50:52 crc kubenswrapper[4762]: I1009 13:50:52.844192 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="e38c807b-a632-4a96-b228-c879b07fd461" containerName="mysql-bootstrap" Oct 09 13:50:52 crc kubenswrapper[4762]: E1009 13:50:52.844201 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c3841f71-5204-469f-b755-e030281725d1" containerName="rabbitmq" Oct 09 13:50:52 crc kubenswrapper[4762]: I1009 13:50:52.844208 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="c3841f71-5204-469f-b755-e030281725d1" containerName="rabbitmq" Oct 09 13:50:52 crc kubenswrapper[4762]: E1009 13:50:52.844218 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f119cc4-119d-4535-8abd-fe380d546595" containerName="barbican-keystone-listener" Oct 09 13:50:52 crc kubenswrapper[4762]: I1009 13:50:52.844224 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f119cc4-119d-4535-8abd-fe380d546595" containerName="barbican-keystone-listener" Oct 09 13:50:52 crc kubenswrapper[4762]: E1009 13:50:52.844234 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="655c116e-99bf-4f34-a050-bbc1aa3e9b62" containerName="barbican-worker-log" Oct 09 13:50:52 crc kubenswrapper[4762]: I1009 13:50:52.844240 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="655c116e-99bf-4f34-a050-bbc1aa3e9b62" containerName="barbican-worker-log" Oct 09 13:50:52 crc kubenswrapper[4762]: E1009 13:50:52.844250 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="851f6b4a-bac4-4c7e-8d7b-46c7513269d9" containerName="ovs-vswitchd" Oct 09 13:50:52 crc kubenswrapper[4762]: I1009 
13:50:52.844256 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="851f6b4a-bac4-4c7e-8d7b-46c7513269d9" containerName="ovs-vswitchd" Oct 09 13:50:52 crc kubenswrapper[4762]: E1009 13:50:52.844265 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c3841f71-5204-469f-b755-e030281725d1" containerName="setup-container" Oct 09 13:50:52 crc kubenswrapper[4762]: I1009 13:50:52.844272 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="c3841f71-5204-469f-b755-e030281725d1" containerName="setup-container" Oct 09 13:50:52 crc kubenswrapper[4762]: E1009 13:50:52.844281 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1477cefa-db6e-45d7-adb1-d7c1369a3610" containerName="object-updater" Oct 09 13:50:52 crc kubenswrapper[4762]: I1009 13:50:52.844288 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="1477cefa-db6e-45d7-adb1-d7c1369a3610" containerName="object-updater" Oct 09 13:50:52 crc kubenswrapper[4762]: I1009 13:50:52.844451 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="1477cefa-db6e-45d7-adb1-d7c1369a3610" containerName="container-updater" Oct 09 13:50:52 crc kubenswrapper[4762]: I1009 13:50:52.844464 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="1477cefa-db6e-45d7-adb1-d7c1369a3610" containerName="object-server" Oct 09 13:50:52 crc kubenswrapper[4762]: I1009 13:50:52.844477 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="1477cefa-db6e-45d7-adb1-d7c1369a3610" containerName="account-replicator" Oct 09 13:50:52 crc kubenswrapper[4762]: I1009 13:50:52.844485 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="655c116e-99bf-4f34-a050-bbc1aa3e9b62" containerName="barbican-worker-log" Oct 09 13:50:52 crc kubenswrapper[4762]: I1009 13:50:52.844495 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="1477cefa-db6e-45d7-adb1-d7c1369a3610" containerName="container-server" Oct 09 13:50:52 crc kubenswrapper[4762]: I1009 13:50:52.844507 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="1477cefa-db6e-45d7-adb1-d7c1369a3610" containerName="account-reaper" Oct 09 13:50:52 crc kubenswrapper[4762]: I1009 13:50:52.844516 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="1477cefa-db6e-45d7-adb1-d7c1369a3610" containerName="swift-recon-cron" Oct 09 13:50:52 crc kubenswrapper[4762]: I1009 13:50:52.844525 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="655c116e-99bf-4f34-a050-bbc1aa3e9b62" containerName="barbican-worker" Oct 09 13:50:52 crc kubenswrapper[4762]: I1009 13:50:52.844534 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="cebc2aaf-d953-4acb-a8cd-31119e6cd7fd" containerName="barbican-worker-log" Oct 09 13:50:52 crc kubenswrapper[4762]: I1009 13:50:52.844541 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="9f119cc4-119d-4535-8abd-fe380d546595" containerName="barbican-keystone-listener" Oct 09 13:50:52 crc kubenswrapper[4762]: I1009 13:50:52.844548 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="2b85dbb0-642b-4f4e-a616-7904624b2e5a" containerName="nova-cell1-conductor-conductor" Oct 09 13:50:52 crc kubenswrapper[4762]: I1009 13:50:52.844560 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="e38c807b-a632-4a96-b228-c879b07fd461" containerName="galera" Oct 09 13:50:52 crc kubenswrapper[4762]: I1009 13:50:52.844568 4762 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="1477cefa-db6e-45d7-adb1-d7c1369a3610" containerName="container-auditor" Oct 09 13:50:52 crc kubenswrapper[4762]: I1009 13:50:52.844575 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="1477cefa-db6e-45d7-adb1-d7c1369a3610" containerName="container-replicator" Oct 09 13:50:52 crc kubenswrapper[4762]: I1009 13:50:52.844582 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="1477cefa-db6e-45d7-adb1-d7c1369a3610" containerName="object-replicator" Oct 09 13:50:52 crc kubenswrapper[4762]: I1009 13:50:52.844591 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="dd7f6272-8875-4adb-b8d3-1cfe2651f738" containerName="neutron-httpd" Oct 09 13:50:52 crc kubenswrapper[4762]: I1009 13:50:52.844600 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="1477cefa-db6e-45d7-adb1-d7c1369a3610" containerName="object-auditor" Oct 09 13:50:52 crc kubenswrapper[4762]: I1009 13:50:52.844610 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="cebc2aaf-d953-4acb-a8cd-31119e6cd7fd" containerName="barbican-worker" Oct 09 13:50:52 crc kubenswrapper[4762]: I1009 13:50:52.844619 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="1477cefa-db6e-45d7-adb1-d7c1369a3610" containerName="account-server" Oct 09 13:50:52 crc kubenswrapper[4762]: I1009 13:50:52.844649 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="851f6b4a-bac4-4c7e-8d7b-46c7513269d9" containerName="ovs-vswitchd" Oct 09 13:50:52 crc kubenswrapper[4762]: I1009 13:50:52.844658 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="1477cefa-db6e-45d7-adb1-d7c1369a3610" containerName="rsync" Oct 09 13:50:52 crc kubenswrapper[4762]: I1009 13:50:52.844668 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="c3841f71-5204-469f-b755-e030281725d1" containerName="rabbitmq" Oct 09 13:50:52 crc kubenswrapper[4762]: I1009 13:50:52.844678 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="851f6b4a-bac4-4c7e-8d7b-46c7513269d9" containerName="ovsdb-server" Oct 09 13:50:52 crc kubenswrapper[4762]: I1009 13:50:52.844687 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="1477cefa-db6e-45d7-adb1-d7c1369a3610" containerName="object-expirer" Oct 09 13:50:52 crc kubenswrapper[4762]: I1009 13:50:52.844696 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="dd7f6272-8875-4adb-b8d3-1cfe2651f738" containerName="neutron-api" Oct 09 13:50:52 crc kubenswrapper[4762]: I1009 13:50:52.844706 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="9f119cc4-119d-4535-8abd-fe380d546595" containerName="barbican-keystone-listener-log" Oct 09 13:50:52 crc kubenswrapper[4762]: I1009 13:50:52.844720 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="c01df130-cb51-49a8-bfc4-4df9b51ecae1" containerName="registry-server" Oct 09 13:50:52 crc kubenswrapper[4762]: I1009 13:50:52.844729 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="1477cefa-db6e-45d7-adb1-d7c1369a3610" containerName="account-auditor" Oct 09 13:50:52 crc kubenswrapper[4762]: I1009 13:50:52.844736 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="1477cefa-db6e-45d7-adb1-d7c1369a3610" containerName="object-updater" Oct 09 13:50:52 crc kubenswrapper[4762]: I1009 13:50:52.846091 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wpvwt" Oct 09 13:50:52 crc kubenswrapper[4762]: I1009 13:50:52.854079 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-wpvwt"] Oct 09 13:50:53 crc kubenswrapper[4762]: I1009 13:50:53.025192 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/500265b9-e453-4c7e-8b37-f9cd51aa35e3-utilities\") pod \"redhat-marketplace-wpvwt\" (UID: \"500265b9-e453-4c7e-8b37-f9cd51aa35e3\") " pod="openshift-marketplace/redhat-marketplace-wpvwt" Oct 09 13:50:53 crc kubenswrapper[4762]: I1009 13:50:53.025253 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vpwz2\" (UniqueName: \"kubernetes.io/projected/500265b9-e453-4c7e-8b37-f9cd51aa35e3-kube-api-access-vpwz2\") pod \"redhat-marketplace-wpvwt\" (UID: \"500265b9-e453-4c7e-8b37-f9cd51aa35e3\") " pod="openshift-marketplace/redhat-marketplace-wpvwt" Oct 09 13:50:53 crc kubenswrapper[4762]: I1009 13:50:53.025420 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/500265b9-e453-4c7e-8b37-f9cd51aa35e3-catalog-content\") pod \"redhat-marketplace-wpvwt\" (UID: \"500265b9-e453-4c7e-8b37-f9cd51aa35e3\") " pod="openshift-marketplace/redhat-marketplace-wpvwt" Oct 09 13:50:53 crc kubenswrapper[4762]: I1009 13:50:53.126143 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/500265b9-e453-4c7e-8b37-f9cd51aa35e3-utilities\") pod \"redhat-marketplace-wpvwt\" (UID: \"500265b9-e453-4c7e-8b37-f9cd51aa35e3\") " pod="openshift-marketplace/redhat-marketplace-wpvwt" Oct 09 13:50:53 crc kubenswrapper[4762]: I1009 13:50:53.126185 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vpwz2\" (UniqueName: \"kubernetes.io/projected/500265b9-e453-4c7e-8b37-f9cd51aa35e3-kube-api-access-vpwz2\") pod \"redhat-marketplace-wpvwt\" (UID: \"500265b9-e453-4c7e-8b37-f9cd51aa35e3\") " pod="openshift-marketplace/redhat-marketplace-wpvwt" Oct 09 13:50:53 crc kubenswrapper[4762]: I1009 13:50:53.126252 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/500265b9-e453-4c7e-8b37-f9cd51aa35e3-catalog-content\") pod \"redhat-marketplace-wpvwt\" (UID: \"500265b9-e453-4c7e-8b37-f9cd51aa35e3\") " pod="openshift-marketplace/redhat-marketplace-wpvwt" Oct 09 13:50:53 crc kubenswrapper[4762]: I1009 13:50:53.126659 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/500265b9-e453-4c7e-8b37-f9cd51aa35e3-utilities\") pod \"redhat-marketplace-wpvwt\" (UID: \"500265b9-e453-4c7e-8b37-f9cd51aa35e3\") " pod="openshift-marketplace/redhat-marketplace-wpvwt" Oct 09 13:50:53 crc kubenswrapper[4762]: I1009 13:50:53.126718 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/500265b9-e453-4c7e-8b37-f9cd51aa35e3-catalog-content\") pod \"redhat-marketplace-wpvwt\" (UID: \"500265b9-e453-4c7e-8b37-f9cd51aa35e3\") " pod="openshift-marketplace/redhat-marketplace-wpvwt" Oct 09 13:50:53 crc kubenswrapper[4762]: I1009 13:50:53.146771 4762 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-vpwz2\" (UniqueName: \"kubernetes.io/projected/500265b9-e453-4c7e-8b37-f9cd51aa35e3-kube-api-access-vpwz2\") pod \"redhat-marketplace-wpvwt\" (UID: \"500265b9-e453-4c7e-8b37-f9cd51aa35e3\") " pod="openshift-marketplace/redhat-marketplace-wpvwt" Oct 09 13:50:53 crc kubenswrapper[4762]: I1009 13:50:53.170839 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wpvwt" Oct 09 13:50:53 crc kubenswrapper[4762]: I1009 13:50:53.415004 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-5d57d9d56c-xrhgn" Oct 09 13:50:53 crc kubenswrapper[4762]: I1009 13:50:53.442923 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-worker-5d57d9d56c-xrhgn"] Oct 09 13:50:53 crc kubenswrapper[4762]: I1009 13:50:53.449016 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-worker-5d57d9d56c-xrhgn"] Oct 09 13:50:53 crc kubenswrapper[4762]: I1009 13:50:53.677200 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-wpvwt"] Oct 09 13:50:53 crc kubenswrapper[4762]: W1009 13:50:53.684303 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod500265b9_e453_4c7e_8b37_f9cd51aa35e3.slice/crio-b74f33c8ff2ffb8797dbfccbf10fffe992d1d41e07749bcb448db2c59ff2caa9 WatchSource:0}: Error finding container b74f33c8ff2ffb8797dbfccbf10fffe992d1d41e07749bcb448db2c59ff2caa9: Status 404 returned error can't find the container with id b74f33c8ff2ffb8797dbfccbf10fffe992d1d41e07749bcb448db2c59ff2caa9 Oct 09 13:50:54 crc kubenswrapper[4762]: I1009 13:50:54.426278 4762 generic.go:334] "Generic (PLEG): container finished" podID="500265b9-e453-4c7e-8b37-f9cd51aa35e3" containerID="7d4795de4aa55f129de5c8a93191a892f41debaf35ab5cb3830a72fb58a99573" exitCode=0 Oct 09 13:50:54 crc kubenswrapper[4762]: I1009 13:50:54.426346 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wpvwt" event={"ID":"500265b9-e453-4c7e-8b37-f9cd51aa35e3","Type":"ContainerDied","Data":"7d4795de4aa55f129de5c8a93191a892f41debaf35ab5cb3830a72fb58a99573"} Oct 09 13:50:54 crc kubenswrapper[4762]: I1009 13:50:54.426748 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wpvwt" event={"ID":"500265b9-e453-4c7e-8b37-f9cd51aa35e3","Type":"ContainerStarted","Data":"b74f33c8ff2ffb8797dbfccbf10fffe992d1d41e07749bcb448db2c59ff2caa9"} Oct 09 13:50:54 crc kubenswrapper[4762]: I1009 13:50:54.975056 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="655c116e-99bf-4f34-a050-bbc1aa3e9b62" path="/var/lib/kubelet/pods/655c116e-99bf-4f34-a050-bbc1aa3e9b62/volumes" Oct 09 13:50:56 crc kubenswrapper[4762]: I1009 13:50:56.447220 4762 generic.go:334] "Generic (PLEG): container finished" podID="500265b9-e453-4c7e-8b37-f9cd51aa35e3" containerID="f6425b011d47ecaf42d99baf271d83e571dddab843a200badac7bfe9cb0d489a" exitCode=0 Oct 09 13:50:56 crc kubenswrapper[4762]: I1009 13:50:56.447324 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wpvwt" event={"ID":"500265b9-e453-4c7e-8b37-f9cd51aa35e3","Type":"ContainerDied","Data":"f6425b011d47ecaf42d99baf271d83e571dddab843a200badac7bfe9cb0d489a"} Oct 09 13:50:57 crc kubenswrapper[4762]: I1009 13:50:57.460836 4762 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openshift-marketplace/redhat-marketplace-wpvwt" event={"ID":"500265b9-e453-4c7e-8b37-f9cd51aa35e3","Type":"ContainerStarted","Data":"2b7202d4fa5c3aa48000d10a41317c9bfd365eb77df241c9e484d384f8ccf54f"} Oct 09 13:50:57 crc kubenswrapper[4762]: I1009 13:50:57.490444 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-wpvwt" podStartSLOduration=2.8279195230000003 podStartE2EDuration="5.49042469s" podCreationTimestamp="2025-10-09 13:50:52 +0000 UTC" firstStartedPulling="2025-10-09 13:50:54.42968884 +0000 UTC m=+1530.203479919" lastFinishedPulling="2025-10-09 13:50:57.092194047 +0000 UTC m=+1532.865985086" observedRunningTime="2025-10-09 13:50:57.483041016 +0000 UTC m=+1533.256832065" watchObservedRunningTime="2025-10-09 13:50:57.49042469 +0000 UTC m=+1533.264215749" Oct 09 13:51:00 crc kubenswrapper[4762]: I1009 13:51:00.223717 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-4tmlp"] Oct 09 13:51:00 crc kubenswrapper[4762]: I1009 13:51:00.225656 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-4tmlp" Oct 09 13:51:00 crc kubenswrapper[4762]: I1009 13:51:00.233260 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-4tmlp"] Oct 09 13:51:00 crc kubenswrapper[4762]: I1009 13:51:00.425270 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aa402dea-2658-4b4a-a448-e2280c1f3c98-catalog-content\") pod \"community-operators-4tmlp\" (UID: \"aa402dea-2658-4b4a-a448-e2280c1f3c98\") " pod="openshift-marketplace/community-operators-4tmlp" Oct 09 13:51:00 crc kubenswrapper[4762]: I1009 13:51:00.425343 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gsp74\" (UniqueName: \"kubernetes.io/projected/aa402dea-2658-4b4a-a448-e2280c1f3c98-kube-api-access-gsp74\") pod \"community-operators-4tmlp\" (UID: \"aa402dea-2658-4b4a-a448-e2280c1f3c98\") " pod="openshift-marketplace/community-operators-4tmlp" Oct 09 13:51:00 crc kubenswrapper[4762]: I1009 13:51:00.425483 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aa402dea-2658-4b4a-a448-e2280c1f3c98-utilities\") pod \"community-operators-4tmlp\" (UID: \"aa402dea-2658-4b4a-a448-e2280c1f3c98\") " pod="openshift-marketplace/community-operators-4tmlp" Oct 09 13:51:00 crc kubenswrapper[4762]: I1009 13:51:00.526536 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gsp74\" (UniqueName: \"kubernetes.io/projected/aa402dea-2658-4b4a-a448-e2280c1f3c98-kube-api-access-gsp74\") pod \"community-operators-4tmlp\" (UID: \"aa402dea-2658-4b4a-a448-e2280c1f3c98\") " pod="openshift-marketplace/community-operators-4tmlp" Oct 09 13:51:00 crc kubenswrapper[4762]: I1009 13:51:00.526654 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aa402dea-2658-4b4a-a448-e2280c1f3c98-utilities\") pod \"community-operators-4tmlp\" (UID: \"aa402dea-2658-4b4a-a448-e2280c1f3c98\") " pod="openshift-marketplace/community-operators-4tmlp" Oct 09 13:51:00 crc kubenswrapper[4762]: I1009 13:51:00.526709 4762 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aa402dea-2658-4b4a-a448-e2280c1f3c98-catalog-content\") pod \"community-operators-4tmlp\" (UID: \"aa402dea-2658-4b4a-a448-e2280c1f3c98\") " pod="openshift-marketplace/community-operators-4tmlp" Oct 09 13:51:00 crc kubenswrapper[4762]: I1009 13:51:00.527238 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aa402dea-2658-4b4a-a448-e2280c1f3c98-catalog-content\") pod \"community-operators-4tmlp\" (UID: \"aa402dea-2658-4b4a-a448-e2280c1f3c98\") " pod="openshift-marketplace/community-operators-4tmlp" Oct 09 13:51:00 crc kubenswrapper[4762]: I1009 13:51:00.527265 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aa402dea-2658-4b4a-a448-e2280c1f3c98-utilities\") pod \"community-operators-4tmlp\" (UID: \"aa402dea-2658-4b4a-a448-e2280c1f3c98\") " pod="openshift-marketplace/community-operators-4tmlp" Oct 09 13:51:00 crc kubenswrapper[4762]: I1009 13:51:00.552904 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gsp74\" (UniqueName: \"kubernetes.io/projected/aa402dea-2658-4b4a-a448-e2280c1f3c98-kube-api-access-gsp74\") pod \"community-operators-4tmlp\" (UID: \"aa402dea-2658-4b4a-a448-e2280c1f3c98\") " pod="openshift-marketplace/community-operators-4tmlp" Oct 09 13:51:00 crc kubenswrapper[4762]: I1009 13:51:00.851889 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-4tmlp" Oct 09 13:51:01 crc kubenswrapper[4762]: I1009 13:51:01.251844 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-4tmlp"] Oct 09 13:51:01 crc kubenswrapper[4762]: I1009 13:51:01.494104 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4tmlp" event={"ID":"aa402dea-2658-4b4a-a448-e2280c1f3c98","Type":"ContainerStarted","Data":"dcac6e52da6cd43bc931fa6b74f126deccb35d92eff4f31ef1c221dab07794b0"} Oct 09 13:51:01 crc kubenswrapper[4762]: I1009 13:51:01.494143 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4tmlp" event={"ID":"aa402dea-2658-4b4a-a448-e2280c1f3c98","Type":"ContainerStarted","Data":"f87cade34eb7a5da320ee2c52707076b65814c25547f9cfd19461718e84e0687"} Oct 09 13:51:02 crc kubenswrapper[4762]: I1009 13:51:02.503978 4762 generic.go:334] "Generic (PLEG): container finished" podID="aa402dea-2658-4b4a-a448-e2280c1f3c98" containerID="dcac6e52da6cd43bc931fa6b74f126deccb35d92eff4f31ef1c221dab07794b0" exitCode=0 Oct 09 13:51:02 crc kubenswrapper[4762]: I1009 13:51:02.504263 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4tmlp" event={"ID":"aa402dea-2658-4b4a-a448-e2280c1f3c98","Type":"ContainerDied","Data":"dcac6e52da6cd43bc931fa6b74f126deccb35d92eff4f31ef1c221dab07794b0"} Oct 09 13:51:03 crc kubenswrapper[4762]: I1009 13:51:03.172058 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-wpvwt" Oct 09 13:51:03 crc kubenswrapper[4762]: I1009 13:51:03.172397 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-wpvwt" Oct 09 13:51:03 crc kubenswrapper[4762]: I1009 13:51:03.215109 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openshift-marketplace/redhat-marketplace-wpvwt" Oct 09 13:51:03 crc kubenswrapper[4762]: I1009 13:51:03.556273 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-wpvwt" Oct 09 13:51:04 crc kubenswrapper[4762]: I1009 13:51:04.525473 4762 generic.go:334] "Generic (PLEG): container finished" podID="aa402dea-2658-4b4a-a448-e2280c1f3c98" containerID="003f40f1349e4e2042934cc080f92f8715a571547202446d63d8f26fb744a035" exitCode=0 Oct 09 13:51:04 crc kubenswrapper[4762]: I1009 13:51:04.525546 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4tmlp" event={"ID":"aa402dea-2658-4b4a-a448-e2280c1f3c98","Type":"ContainerDied","Data":"003f40f1349e4e2042934cc080f92f8715a571547202446d63d8f26fb744a035"} Oct 09 13:51:05 crc kubenswrapper[4762]: I1009 13:51:05.538409 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4tmlp" event={"ID":"aa402dea-2658-4b4a-a448-e2280c1f3c98","Type":"ContainerStarted","Data":"428ef9b85564350138e8359a66b8e43b6465dd43cbee50146d327d9ec5aa6046"} Oct 09 13:51:05 crc kubenswrapper[4762]: I1009 13:51:05.558004 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-4tmlp" podStartSLOduration=2.962966052 podStartE2EDuration="5.557977973s" podCreationTimestamp="2025-10-09 13:51:00 +0000 UTC" firstStartedPulling="2025-10-09 13:51:02.506525614 +0000 UTC m=+1538.280316653" lastFinishedPulling="2025-10-09 13:51:05.101537535 +0000 UTC m=+1540.875328574" observedRunningTime="2025-10-09 13:51:05.556044671 +0000 UTC m=+1541.329835750" watchObservedRunningTime="2025-10-09 13:51:05.557977973 +0000 UTC m=+1541.331769032" Oct 09 13:51:05 crc kubenswrapper[4762]: I1009 13:51:05.614147 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-wpvwt"] Oct 09 13:51:05 crc kubenswrapper[4762]: I1009 13:51:05.614357 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-wpvwt" podUID="500265b9-e453-4c7e-8b37-f9cd51aa35e3" containerName="registry-server" containerID="cri-o://2b7202d4fa5c3aa48000d10a41317c9bfd365eb77df241c9e484d384f8ccf54f" gracePeriod=2 Oct 09 13:51:06 crc kubenswrapper[4762]: I1009 13:51:06.009549 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wpvwt" Oct 09 13:51:06 crc kubenswrapper[4762]: I1009 13:51:06.112257 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/500265b9-e453-4c7e-8b37-f9cd51aa35e3-catalog-content\") pod \"500265b9-e453-4c7e-8b37-f9cd51aa35e3\" (UID: \"500265b9-e453-4c7e-8b37-f9cd51aa35e3\") " Oct 09 13:51:06 crc kubenswrapper[4762]: I1009 13:51:06.112420 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vpwz2\" (UniqueName: \"kubernetes.io/projected/500265b9-e453-4c7e-8b37-f9cd51aa35e3-kube-api-access-vpwz2\") pod \"500265b9-e453-4c7e-8b37-f9cd51aa35e3\" (UID: \"500265b9-e453-4c7e-8b37-f9cd51aa35e3\") " Oct 09 13:51:06 crc kubenswrapper[4762]: I1009 13:51:06.112487 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/500265b9-e453-4c7e-8b37-f9cd51aa35e3-utilities\") pod \"500265b9-e453-4c7e-8b37-f9cd51aa35e3\" (UID: \"500265b9-e453-4c7e-8b37-f9cd51aa35e3\") " Oct 09 13:51:06 crc kubenswrapper[4762]: I1009 13:51:06.113550 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/500265b9-e453-4c7e-8b37-f9cd51aa35e3-utilities" (OuterVolumeSpecName: "utilities") pod "500265b9-e453-4c7e-8b37-f9cd51aa35e3" (UID: "500265b9-e453-4c7e-8b37-f9cd51aa35e3"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 13:51:06 crc kubenswrapper[4762]: I1009 13:51:06.121843 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/500265b9-e453-4c7e-8b37-f9cd51aa35e3-kube-api-access-vpwz2" (OuterVolumeSpecName: "kube-api-access-vpwz2") pod "500265b9-e453-4c7e-8b37-f9cd51aa35e3" (UID: "500265b9-e453-4c7e-8b37-f9cd51aa35e3"). InnerVolumeSpecName "kube-api-access-vpwz2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:51:06 crc kubenswrapper[4762]: I1009 13:51:06.134253 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/500265b9-e453-4c7e-8b37-f9cd51aa35e3-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "500265b9-e453-4c7e-8b37-f9cd51aa35e3" (UID: "500265b9-e453-4c7e-8b37-f9cd51aa35e3"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 13:51:06 crc kubenswrapper[4762]: I1009 13:51:06.214103 4762 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/500265b9-e453-4c7e-8b37-f9cd51aa35e3-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 13:51:06 crc kubenswrapper[4762]: I1009 13:51:06.214138 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vpwz2\" (UniqueName: \"kubernetes.io/projected/500265b9-e453-4c7e-8b37-f9cd51aa35e3-kube-api-access-vpwz2\") on node \"crc\" DevicePath \"\"" Oct 09 13:51:06 crc kubenswrapper[4762]: I1009 13:51:06.214152 4762 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/500265b9-e453-4c7e-8b37-f9cd51aa35e3-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 13:51:06 crc kubenswrapper[4762]: I1009 13:51:06.549393 4762 generic.go:334] "Generic (PLEG): container finished" podID="500265b9-e453-4c7e-8b37-f9cd51aa35e3" containerID="2b7202d4fa5c3aa48000d10a41317c9bfd365eb77df241c9e484d384f8ccf54f" exitCode=0 Oct 09 13:51:06 crc kubenswrapper[4762]: I1009 13:51:06.549458 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wpvwt" event={"ID":"500265b9-e453-4c7e-8b37-f9cd51aa35e3","Type":"ContainerDied","Data":"2b7202d4fa5c3aa48000d10a41317c9bfd365eb77df241c9e484d384f8ccf54f"} Oct 09 13:51:06 crc kubenswrapper[4762]: I1009 13:51:06.550239 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wpvwt" event={"ID":"500265b9-e453-4c7e-8b37-f9cd51aa35e3","Type":"ContainerDied","Data":"b74f33c8ff2ffb8797dbfccbf10fffe992d1d41e07749bcb448db2c59ff2caa9"} Oct 09 13:51:06 crc kubenswrapper[4762]: I1009 13:51:06.550269 4762 scope.go:117] "RemoveContainer" containerID="2b7202d4fa5c3aa48000d10a41317c9bfd365eb77df241c9e484d384f8ccf54f" Oct 09 13:51:06 crc kubenswrapper[4762]: I1009 13:51:06.549511 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wpvwt" Oct 09 13:51:06 crc kubenswrapper[4762]: I1009 13:51:06.582850 4762 scope.go:117] "RemoveContainer" containerID="f6425b011d47ecaf42d99baf271d83e571dddab843a200badac7bfe9cb0d489a" Oct 09 13:51:06 crc kubenswrapper[4762]: I1009 13:51:06.589273 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-wpvwt"] Oct 09 13:51:06 crc kubenswrapper[4762]: I1009 13:51:06.598046 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-wpvwt"] Oct 09 13:51:06 crc kubenswrapper[4762]: I1009 13:51:06.614545 4762 scope.go:117] "RemoveContainer" containerID="7d4795de4aa55f129de5c8a93191a892f41debaf35ab5cb3830a72fb58a99573" Oct 09 13:51:06 crc kubenswrapper[4762]: I1009 13:51:06.636663 4762 scope.go:117] "RemoveContainer" containerID="2b7202d4fa5c3aa48000d10a41317c9bfd365eb77df241c9e484d384f8ccf54f" Oct 09 13:51:06 crc kubenswrapper[4762]: E1009 13:51:06.637402 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2b7202d4fa5c3aa48000d10a41317c9bfd365eb77df241c9e484d384f8ccf54f\": container with ID starting with 2b7202d4fa5c3aa48000d10a41317c9bfd365eb77df241c9e484d384f8ccf54f not found: ID does not exist" containerID="2b7202d4fa5c3aa48000d10a41317c9bfd365eb77df241c9e484d384f8ccf54f" Oct 09 13:51:06 crc kubenswrapper[4762]: I1009 13:51:06.637457 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2b7202d4fa5c3aa48000d10a41317c9bfd365eb77df241c9e484d384f8ccf54f"} err="failed to get container status \"2b7202d4fa5c3aa48000d10a41317c9bfd365eb77df241c9e484d384f8ccf54f\": rpc error: code = NotFound desc = could not find container \"2b7202d4fa5c3aa48000d10a41317c9bfd365eb77df241c9e484d384f8ccf54f\": container with ID starting with 2b7202d4fa5c3aa48000d10a41317c9bfd365eb77df241c9e484d384f8ccf54f not found: ID does not exist" Oct 09 13:51:06 crc kubenswrapper[4762]: I1009 13:51:06.637479 4762 scope.go:117] "RemoveContainer" containerID="f6425b011d47ecaf42d99baf271d83e571dddab843a200badac7bfe9cb0d489a" Oct 09 13:51:06 crc kubenswrapper[4762]: E1009 13:51:06.637911 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f6425b011d47ecaf42d99baf271d83e571dddab843a200badac7bfe9cb0d489a\": container with ID starting with f6425b011d47ecaf42d99baf271d83e571dddab843a200badac7bfe9cb0d489a not found: ID does not exist" containerID="f6425b011d47ecaf42d99baf271d83e571dddab843a200badac7bfe9cb0d489a" Oct 09 13:51:06 crc kubenswrapper[4762]: I1009 13:51:06.637972 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f6425b011d47ecaf42d99baf271d83e571dddab843a200badac7bfe9cb0d489a"} err="failed to get container status \"f6425b011d47ecaf42d99baf271d83e571dddab843a200badac7bfe9cb0d489a\": rpc error: code = NotFound desc = could not find container \"f6425b011d47ecaf42d99baf271d83e571dddab843a200badac7bfe9cb0d489a\": container with ID starting with f6425b011d47ecaf42d99baf271d83e571dddab843a200badac7bfe9cb0d489a not found: ID does not exist" Oct 09 13:51:06 crc kubenswrapper[4762]: I1009 13:51:06.638007 4762 scope.go:117] "RemoveContainer" containerID="7d4795de4aa55f129de5c8a93191a892f41debaf35ab5cb3830a72fb58a99573" Oct 09 13:51:06 crc kubenswrapper[4762]: E1009 13:51:06.638331 4762 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"7d4795de4aa55f129de5c8a93191a892f41debaf35ab5cb3830a72fb58a99573\": container with ID starting with 7d4795de4aa55f129de5c8a93191a892f41debaf35ab5cb3830a72fb58a99573 not found: ID does not exist" containerID="7d4795de4aa55f129de5c8a93191a892f41debaf35ab5cb3830a72fb58a99573" Oct 09 13:51:06 crc kubenswrapper[4762]: I1009 13:51:06.638354 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7d4795de4aa55f129de5c8a93191a892f41debaf35ab5cb3830a72fb58a99573"} err="failed to get container status \"7d4795de4aa55f129de5c8a93191a892f41debaf35ab5cb3830a72fb58a99573\": rpc error: code = NotFound desc = could not find container \"7d4795de4aa55f129de5c8a93191a892f41debaf35ab5cb3830a72fb58a99573\": container with ID starting with 7d4795de4aa55f129de5c8a93191a892f41debaf35ab5cb3830a72fb58a99573 not found: ID does not exist" Oct 09 13:51:06 crc kubenswrapper[4762]: I1009 13:51:06.974340 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="500265b9-e453-4c7e-8b37-f9cd51aa35e3" path="/var/lib/kubelet/pods/500265b9-e453-4c7e-8b37-f9cd51aa35e3/volumes" Oct 09 13:51:10 crc kubenswrapper[4762]: I1009 13:51:10.852506 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-4tmlp" Oct 09 13:51:10 crc kubenswrapper[4762]: I1009 13:51:10.852794 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-4tmlp" Oct 09 13:51:10 crc kubenswrapper[4762]: I1009 13:51:10.891400 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-4tmlp" Oct 09 13:51:11 crc kubenswrapper[4762]: I1009 13:51:11.637992 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-4tmlp" Oct 09 13:51:11 crc kubenswrapper[4762]: I1009 13:51:11.673856 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-4tmlp"] Oct 09 13:51:13 crc kubenswrapper[4762]: I1009 13:51:13.621316 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-4tmlp" podUID="aa402dea-2658-4b4a-a448-e2280c1f3c98" containerName="registry-server" containerID="cri-o://428ef9b85564350138e8359a66b8e43b6465dd43cbee50146d327d9ec5aa6046" gracePeriod=2 Oct 09 13:51:14 crc kubenswrapper[4762]: I1009 13:51:14.640581 4762 generic.go:334] "Generic (PLEG): container finished" podID="aa402dea-2658-4b4a-a448-e2280c1f3c98" containerID="428ef9b85564350138e8359a66b8e43b6465dd43cbee50146d327d9ec5aa6046" exitCode=0 Oct 09 13:51:14 crc kubenswrapper[4762]: I1009 13:51:14.640681 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4tmlp" event={"ID":"aa402dea-2658-4b4a-a448-e2280c1f3c98","Type":"ContainerDied","Data":"428ef9b85564350138e8359a66b8e43b6465dd43cbee50146d327d9ec5aa6046"} Oct 09 13:51:14 crc kubenswrapper[4762]: I1009 13:51:14.764253 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-4tmlp" Oct 09 13:51:14 crc kubenswrapper[4762]: I1009 13:51:14.834429 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gsp74\" (UniqueName: \"kubernetes.io/projected/aa402dea-2658-4b4a-a448-e2280c1f3c98-kube-api-access-gsp74\") pod \"aa402dea-2658-4b4a-a448-e2280c1f3c98\" (UID: \"aa402dea-2658-4b4a-a448-e2280c1f3c98\") " Oct 09 13:51:14 crc kubenswrapper[4762]: I1009 13:51:14.834551 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aa402dea-2658-4b4a-a448-e2280c1f3c98-utilities\") pod \"aa402dea-2658-4b4a-a448-e2280c1f3c98\" (UID: \"aa402dea-2658-4b4a-a448-e2280c1f3c98\") " Oct 09 13:51:14 crc kubenswrapper[4762]: I1009 13:51:14.834584 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aa402dea-2658-4b4a-a448-e2280c1f3c98-catalog-content\") pod \"aa402dea-2658-4b4a-a448-e2280c1f3c98\" (UID: \"aa402dea-2658-4b4a-a448-e2280c1f3c98\") " Oct 09 13:51:14 crc kubenswrapper[4762]: I1009 13:51:14.835698 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aa402dea-2658-4b4a-a448-e2280c1f3c98-utilities" (OuterVolumeSpecName: "utilities") pod "aa402dea-2658-4b4a-a448-e2280c1f3c98" (UID: "aa402dea-2658-4b4a-a448-e2280c1f3c98"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 13:51:14 crc kubenswrapper[4762]: I1009 13:51:14.840279 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aa402dea-2658-4b4a-a448-e2280c1f3c98-kube-api-access-gsp74" (OuterVolumeSpecName: "kube-api-access-gsp74") pod "aa402dea-2658-4b4a-a448-e2280c1f3c98" (UID: "aa402dea-2658-4b4a-a448-e2280c1f3c98"). InnerVolumeSpecName "kube-api-access-gsp74". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:51:14 crc kubenswrapper[4762]: I1009 13:51:14.892279 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aa402dea-2658-4b4a-a448-e2280c1f3c98-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "aa402dea-2658-4b4a-a448-e2280c1f3c98" (UID: "aa402dea-2658-4b4a-a448-e2280c1f3c98"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 13:51:14 crc kubenswrapper[4762]: I1009 13:51:14.936040 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gsp74\" (UniqueName: \"kubernetes.io/projected/aa402dea-2658-4b4a-a448-e2280c1f3c98-kube-api-access-gsp74\") on node \"crc\" DevicePath \"\"" Oct 09 13:51:14 crc kubenswrapper[4762]: I1009 13:51:14.936118 4762 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aa402dea-2658-4b4a-a448-e2280c1f3c98-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 13:51:14 crc kubenswrapper[4762]: I1009 13:51:14.936135 4762 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aa402dea-2658-4b4a-a448-e2280c1f3c98-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 13:51:15 crc kubenswrapper[4762]: I1009 13:51:15.651373 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4tmlp" event={"ID":"aa402dea-2658-4b4a-a448-e2280c1f3c98","Type":"ContainerDied","Data":"f87cade34eb7a5da320ee2c52707076b65814c25547f9cfd19461718e84e0687"} Oct 09 13:51:15 crc kubenswrapper[4762]: I1009 13:51:15.651428 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-4tmlp" Oct 09 13:51:15 crc kubenswrapper[4762]: I1009 13:51:15.651475 4762 scope.go:117] "RemoveContainer" containerID="428ef9b85564350138e8359a66b8e43b6465dd43cbee50146d327d9ec5aa6046" Oct 09 13:51:15 crc kubenswrapper[4762]: I1009 13:51:15.688404 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-4tmlp"] Oct 09 13:51:15 crc kubenswrapper[4762]: I1009 13:51:15.689630 4762 scope.go:117] "RemoveContainer" containerID="003f40f1349e4e2042934cc080f92f8715a571547202446d63d8f26fb744a035" Oct 09 13:51:15 crc kubenswrapper[4762]: I1009 13:51:15.700132 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-4tmlp"] Oct 09 13:51:15 crc kubenswrapper[4762]: I1009 13:51:15.725420 4762 scope.go:117] "RemoveContainer" containerID="dcac6e52da6cd43bc931fa6b74f126deccb35d92eff4f31ef1c221dab07794b0" Oct 09 13:51:16 crc kubenswrapper[4762]: I1009 13:51:16.977081 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aa402dea-2658-4b4a-a448-e2280c1f3c98" path="/var/lib/kubelet/pods/aa402dea-2658-4b4a-a448-e2280c1f3c98/volumes" Oct 09 13:51:27 crc kubenswrapper[4762]: I1009 13:51:27.541219 4762 scope.go:117] "RemoveContainer" containerID="07fd9344064d49467a79ce1c38657c51dcfa85dbdc1652c4a2c59fb99501a1e0" Oct 09 13:51:27 crc kubenswrapper[4762]: I1009 13:51:27.574348 4762 scope.go:117] "RemoveContainer" containerID="c8d8d117d7a7cc7c3b031a470b0ee1bca2341ab9daabe576a4e456084be8e7f2" Oct 09 13:52:27 crc kubenswrapper[4762]: I1009 13:52:27.867372 4762 scope.go:117] "RemoveContainer" containerID="39a0ee651bbcd5526e2c7c628c3e0a8cf4e760912db0d28876e7f3c2c1c52b75" Oct 09 13:52:27 crc kubenswrapper[4762]: I1009 13:52:27.913490 4762 scope.go:117] "RemoveContainer" containerID="5bec5512233bb7fefded6edab85945d57deb9974dd2dc7c5887d4c3c86987bd8" Oct 09 13:52:27 crc kubenswrapper[4762]: I1009 13:52:27.940923 4762 scope.go:117] "RemoveContainer" containerID="faba4f945d7fe718ff65fefc602b76d4fc0d02184c4e3eebec8453abeee78454" Oct 09 13:52:27 crc kubenswrapper[4762]: I1009 13:52:27.978987 4762 scope.go:117] "RemoveContainer" 
containerID="92ccbd0ec3ae6d70023bd3a2966dd8b1cf46d84acf99d75b2e1a9366e032a0b9" Oct 09 13:52:28 crc kubenswrapper[4762]: I1009 13:52:28.015646 4762 scope.go:117] "RemoveContainer" containerID="52514876727e3f8cb7f78f04e85f8828644db736dd64bbb55eb9ec12c84a6a78" Oct 09 13:52:28 crc kubenswrapper[4762]: I1009 13:52:28.034195 4762 scope.go:117] "RemoveContainer" containerID="2e385bd14e203e4ad5bed79e91a917e47f6669de00d6888a65bc2ff11aa9deea" Oct 09 13:52:28 crc kubenswrapper[4762]: I1009 13:52:28.070273 4762 scope.go:117] "RemoveContainer" containerID="cc91c2ff33d27f9343274041d04cb4122ff55b695f477c0add7a66b6376d852e" Oct 09 13:52:28 crc kubenswrapper[4762]: I1009 13:52:28.122319 4762 scope.go:117] "RemoveContainer" containerID="be7ce0db056b3871e268563364574b23f09b81e310d7357a309159870c23c165" Oct 09 13:52:28 crc kubenswrapper[4762]: I1009 13:52:28.165442 4762 scope.go:117] "RemoveContainer" containerID="fa1145d18f0fbec16d02d4bb83ae8cb23c753337e49c0597f6370eaa7a5f4d1e" Oct 09 13:52:28 crc kubenswrapper[4762]: I1009 13:52:28.182857 4762 scope.go:117] "RemoveContainer" containerID="1b7bfb3ca70dcf4718ff5b80adf5e2a0e948797af3bfb684e635ead97d672951" Oct 09 13:52:28 crc kubenswrapper[4762]: I1009 13:52:28.202883 4762 scope.go:117] "RemoveContainer" containerID="7c4c8ceaa0d333127b5a811c5ab2eab36a0ca07212c0bb9ecbf25c53b6126818" Oct 09 13:52:28 crc kubenswrapper[4762]: I1009 13:52:28.219173 4762 scope.go:117] "RemoveContainer" containerID="47711bb0d3cd74fc5f368a4255a6bbe6f5cd820a881368bd09d8039fe16b962a" Oct 09 13:52:28 crc kubenswrapper[4762]: I1009 13:52:28.247229 4762 scope.go:117] "RemoveContainer" containerID="641a0b1793de5d15fd2bf3ee9403e19a637a304d2521c7dd1ef6f5c4e8a5eee4" Oct 09 13:52:28 crc kubenswrapper[4762]: I1009 13:52:28.269021 4762 scope.go:117] "RemoveContainer" containerID="fd71be3f0a78bec16c0fab0dce3a39484a5e76d6011729edbd85cf32e87818f4" Oct 09 13:52:28 crc kubenswrapper[4762]: I1009 13:52:28.289273 4762 scope.go:117] "RemoveContainer" containerID="7083d406a3f63fd2886bc29f3375460879083ff10e1ca9d1fd6c743eeab66078" Oct 09 13:52:28 crc kubenswrapper[4762]: I1009 13:52:28.327419 4762 scope.go:117] "RemoveContainer" containerID="108eadbdff693fa49f902a1ef6d6e21c2f2ad61f0c58b328f8cfb670edd35d7c" Oct 09 13:52:28 crc kubenswrapper[4762]: I1009 13:52:28.360319 4762 scope.go:117] "RemoveContainer" containerID="381f6321a6d83ee16e8b9bd0f85a0bb37b99c646715a13175d6e9c46ecaae4c1" Oct 09 13:52:28 crc kubenswrapper[4762]: I1009 13:52:28.378513 4762 scope.go:117] "RemoveContainer" containerID="3eb5d46a0831bc9a671cd336434c6355d1e71a3f3e87d654901e51d659111b04" Oct 09 13:52:28 crc kubenswrapper[4762]: I1009 13:52:28.406721 4762 scope.go:117] "RemoveContainer" containerID="7f1165287ed36d82cdee80b95268e900e24a818c960dcdc79f1d39b4329d1267" Oct 09 13:52:41 crc kubenswrapper[4762]: I1009 13:52:41.970035 4762 patch_prober.go:28] interesting pod/machine-config-daemon-5v6hv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 13:52:41 crc kubenswrapper[4762]: I1009 13:52:41.970795 4762 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 13:53:11 crc kubenswrapper[4762]: 
I1009 13:53:11.969405 4762 patch_prober.go:28] interesting pod/machine-config-daemon-5v6hv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 13:53:11 crc kubenswrapper[4762]: I1009 13:53:11.970987 4762 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 13:53:28 crc kubenswrapper[4762]: I1009 13:53:28.765390 4762 scope.go:117] "RemoveContainer" containerID="5774719b0d337714838e1ccd38e6d6b0f915fade2d424da24558d6f97b1b7481" Oct 09 13:53:28 crc kubenswrapper[4762]: I1009 13:53:28.790391 4762 scope.go:117] "RemoveContainer" containerID="bb7094500d79c0b318ce5fd00a0464df84913dcb767170c8cc4e6662117a51d7" Oct 09 13:53:28 crc kubenswrapper[4762]: I1009 13:53:28.821406 4762 scope.go:117] "RemoveContainer" containerID="02ccd6d047a5024493f679929acd250cf01b71cda01eefb6f39d2c12d1c6caab" Oct 09 13:53:28 crc kubenswrapper[4762]: I1009 13:53:28.857690 4762 scope.go:117] "RemoveContainer" containerID="effb344f1dc334fbf893689c9197673d5eca5fb09afba6dd151e0ccd544b21f5" Oct 09 13:53:28 crc kubenswrapper[4762]: I1009 13:53:28.883549 4762 scope.go:117] "RemoveContainer" containerID="c1be68916a3f81c7b6abc9ef288db0ad9f260fbe6b28ab389555e8e399229389" Oct 09 13:53:28 crc kubenswrapper[4762]: I1009 13:53:28.919157 4762 scope.go:117] "RemoveContainer" containerID="5acb9f40d37976787a3176932ab61dc744bb85f63d3756a35a7aa17489a36b17" Oct 09 13:53:28 crc kubenswrapper[4762]: I1009 13:53:28.947291 4762 scope.go:117] "RemoveContainer" containerID="a841791cf90fa74f319eca5395a3688fe6dc37b0b882cf9492bb39552d6056fe" Oct 09 13:53:28 crc kubenswrapper[4762]: I1009 13:53:28.966658 4762 scope.go:117] "RemoveContainer" containerID="fc6d0ce7f1b2e1335bd5df1ffae809fadabf2813cdfe1e329a8458d677bea605" Oct 09 13:53:28 crc kubenswrapper[4762]: I1009 13:53:28.984588 4762 scope.go:117] "RemoveContainer" containerID="75fb11579ca65d227b26b52153b7cd447fed5fcfb5bfc704642e8c32edf4fd96" Oct 09 13:53:29 crc kubenswrapper[4762]: I1009 13:53:29.004335 4762 scope.go:117] "RemoveContainer" containerID="23815f5a561063d232341e5c1bdd222c0042c43c875ca3fd331afec3554cb11f" Oct 09 13:53:29 crc kubenswrapper[4762]: I1009 13:53:29.022412 4762 scope.go:117] "RemoveContainer" containerID="940bb6da909be1f785661405cf69093008229cb06790274277183a2ddc92df99" Oct 09 13:53:29 crc kubenswrapper[4762]: I1009 13:53:29.039857 4762 scope.go:117] "RemoveContainer" containerID="206fcc19fcfff363b71d18cc9c4ed2f9703f84ea849ddaea3eea31afd977e51e" Oct 09 13:53:41 crc kubenswrapper[4762]: I1009 13:53:41.969172 4762 patch_prober.go:28] interesting pod/machine-config-daemon-5v6hv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 13:53:41 crc kubenswrapper[4762]: I1009 13:53:41.969852 4762 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial 
tcp 127.0.0.1:8798: connect: connection refused" Oct 09 13:53:41 crc kubenswrapper[4762]: I1009 13:53:41.969908 4762 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" Oct 09 13:53:41 crc kubenswrapper[4762]: I1009 13:53:41.970594 4762 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"a2afb536cc344719717b53d10099260b84d6f0ea47ea651fc9cf7b2fe8a2b014"} pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 09 13:53:41 crc kubenswrapper[4762]: I1009 13:53:41.970665 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" containerID="cri-o://a2afb536cc344719717b53d10099260b84d6f0ea47ea651fc9cf7b2fe8a2b014" gracePeriod=600 Oct 09 13:53:42 crc kubenswrapper[4762]: E1009 13:53:42.753823 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 13:53:42 crc kubenswrapper[4762]: I1009 13:53:42.895303 4762 generic.go:334] "Generic (PLEG): container finished" podID="366049a3-acf6-488c-9f93-4557528d6d14" containerID="a2afb536cc344719717b53d10099260b84d6f0ea47ea651fc9cf7b2fe8a2b014" exitCode=0 Oct 09 13:53:42 crc kubenswrapper[4762]: I1009 13:53:42.895359 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" event={"ID":"366049a3-acf6-488c-9f93-4557528d6d14","Type":"ContainerDied","Data":"a2afb536cc344719717b53d10099260b84d6f0ea47ea651fc9cf7b2fe8a2b014"} Oct 09 13:53:42 crc kubenswrapper[4762]: I1009 13:53:42.895404 4762 scope.go:117] "RemoveContainer" containerID="d81bf816f9df0a4608111c018f22c6c88355ce8d892f536686af61e013f264fc" Oct 09 13:53:42 crc kubenswrapper[4762]: I1009 13:53:42.896130 4762 scope.go:117] "RemoveContainer" containerID="a2afb536cc344719717b53d10099260b84d6f0ea47ea651fc9cf7b2fe8a2b014" Oct 09 13:53:42 crc kubenswrapper[4762]: E1009 13:53:42.896501 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 13:53:56 crc kubenswrapper[4762]: I1009 13:53:56.965326 4762 scope.go:117] "RemoveContainer" containerID="a2afb536cc344719717b53d10099260b84d6f0ea47ea651fc9cf7b2fe8a2b014" Oct 09 13:53:56 crc kubenswrapper[4762]: E1009 13:53:56.966139 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 13:54:10 crc kubenswrapper[4762]: I1009 13:54:10.965363 4762 scope.go:117] "RemoveContainer" containerID="a2afb536cc344719717b53d10099260b84d6f0ea47ea651fc9cf7b2fe8a2b014" Oct 09 13:54:10 crc kubenswrapper[4762]: E1009 13:54:10.966314 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 13:54:23 crc kubenswrapper[4762]: I1009 13:54:23.965457 4762 scope.go:117] "RemoveContainer" containerID="a2afb536cc344719717b53d10099260b84d6f0ea47ea651fc9cf7b2fe8a2b014" Oct 09 13:54:23 crc kubenswrapper[4762]: E1009 13:54:23.966200 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 13:54:29 crc kubenswrapper[4762]: I1009 13:54:29.205688 4762 scope.go:117] "RemoveContainer" containerID="36299bd2c4061638873528aea794b7532cb17d870f20137ce2bcd06e839991cf" Oct 09 13:54:29 crc kubenswrapper[4762]: I1009 13:54:29.238761 4762 scope.go:117] "RemoveContainer" containerID="abaaec191e407bc97217c5f92e259a12b30db1309fdd44b36367c5b12adcc2c9" Oct 09 13:54:29 crc kubenswrapper[4762]: I1009 13:54:29.265905 4762 scope.go:117] "RemoveContainer" containerID="8f98e1f5816b4f5c7f33e6e177e326aadaed2d9b0b12f6488966a24db673528b" Oct 09 13:54:29 crc kubenswrapper[4762]: I1009 13:54:29.290255 4762 scope.go:117] "RemoveContainer" containerID="f76d55bec71856953e9258a33e1c4393f28326a0fc15237e8dc359904b57e3fa" Oct 09 13:54:29 crc kubenswrapper[4762]: I1009 13:54:29.321728 4762 scope.go:117] "RemoveContainer" containerID="e1cf42e1e55e57e01124a220f5977d775fad1808eb503cb4e273e0937ebaa12d" Oct 09 13:54:29 crc kubenswrapper[4762]: I1009 13:54:29.345498 4762 scope.go:117] "RemoveContainer" containerID="b490b4f39e6ea2a1af800e6ebc54b51119c9ebfcf3135bde77ab2dd308d6f0a1" Oct 09 13:54:29 crc kubenswrapper[4762]: I1009 13:54:29.364628 4762 scope.go:117] "RemoveContainer" containerID="a0e83bcda39754cb99b6d3b5627fe4751f64cf85b83b8ef9245ca2828a780a9f" Oct 09 13:54:29 crc kubenswrapper[4762]: I1009 13:54:29.404053 4762 scope.go:117] "RemoveContainer" containerID="be1b69b00d7fde9a938acf59f47549f46af90d5b80b3f6b68e63608f6eeeefc0" Oct 09 13:54:29 crc kubenswrapper[4762]: I1009 13:54:29.421964 4762 scope.go:117] "RemoveContainer" containerID="deec9ce762a4c213a0d8e47f10bd04dbcd4e036735519ad0afeea374a17c4aed" Oct 09 13:54:29 crc kubenswrapper[4762]: I1009 13:54:29.439170 4762 scope.go:117] "RemoveContainer" containerID="62e66f771f074a66fee0a95196e2984ab7d958da1caa8b50ceaee3f6fd946317" Oct 09 13:54:29 crc kubenswrapper[4762]: I1009 13:54:29.471231 4762 scope.go:117] "RemoveContainer" containerID="33532cdc1385d8d48d5a4f7e806770037c4ebb7b34636a19d9790e0370215882" Oct 09 13:54:29 crc 
kubenswrapper[4762]: I1009 13:54:29.488668 4762 scope.go:117] "RemoveContainer" containerID="5134850a825fe3c5d0245bded97f8dc054bc2356ca74ae7f948894a5bc3d4c48" Oct 09 13:54:29 crc kubenswrapper[4762]: I1009 13:54:29.517475 4762 scope.go:117] "RemoveContainer" containerID="f3b57929bf460bb7882b9c839b7ea6b53b8415ab3d700492d992cd90071380dc" Oct 09 13:54:34 crc kubenswrapper[4762]: I1009 13:54:34.172141 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-rr2wk"] Oct 09 13:54:34 crc kubenswrapper[4762]: E1009 13:54:34.173117 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa402dea-2658-4b4a-a448-e2280c1f3c98" containerName="registry-server" Oct 09 13:54:34 crc kubenswrapper[4762]: I1009 13:54:34.173137 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa402dea-2658-4b4a-a448-e2280c1f3c98" containerName="registry-server" Oct 09 13:54:34 crc kubenswrapper[4762]: E1009 13:54:34.173163 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="500265b9-e453-4c7e-8b37-f9cd51aa35e3" containerName="extract-utilities" Oct 09 13:54:34 crc kubenswrapper[4762]: I1009 13:54:34.173172 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="500265b9-e453-4c7e-8b37-f9cd51aa35e3" containerName="extract-utilities" Oct 09 13:54:34 crc kubenswrapper[4762]: E1009 13:54:34.173194 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="500265b9-e453-4c7e-8b37-f9cd51aa35e3" containerName="registry-server" Oct 09 13:54:34 crc kubenswrapper[4762]: I1009 13:54:34.173203 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="500265b9-e453-4c7e-8b37-f9cd51aa35e3" containerName="registry-server" Oct 09 13:54:34 crc kubenswrapper[4762]: E1009 13:54:34.173220 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa402dea-2658-4b4a-a448-e2280c1f3c98" containerName="extract-content" Oct 09 13:54:34 crc kubenswrapper[4762]: I1009 13:54:34.173228 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa402dea-2658-4b4a-a448-e2280c1f3c98" containerName="extract-content" Oct 09 13:54:34 crc kubenswrapper[4762]: E1009 13:54:34.173292 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa402dea-2658-4b4a-a448-e2280c1f3c98" containerName="extract-utilities" Oct 09 13:54:34 crc kubenswrapper[4762]: I1009 13:54:34.173302 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa402dea-2658-4b4a-a448-e2280c1f3c98" containerName="extract-utilities" Oct 09 13:54:34 crc kubenswrapper[4762]: E1009 13:54:34.173314 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="500265b9-e453-4c7e-8b37-f9cd51aa35e3" containerName="extract-content" Oct 09 13:54:34 crc kubenswrapper[4762]: I1009 13:54:34.173321 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="500265b9-e453-4c7e-8b37-f9cd51aa35e3" containerName="extract-content" Oct 09 13:54:34 crc kubenswrapper[4762]: I1009 13:54:34.173648 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="aa402dea-2658-4b4a-a448-e2280c1f3c98" containerName="registry-server" Oct 09 13:54:34 crc kubenswrapper[4762]: I1009 13:54:34.173677 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="500265b9-e453-4c7e-8b37-f9cd51aa35e3" containerName="registry-server" Oct 09 13:54:34 crc kubenswrapper[4762]: I1009 13:54:34.178475 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-rr2wk" Oct 09 13:54:34 crc kubenswrapper[4762]: I1009 13:54:34.182449 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-rr2wk"] Oct 09 13:54:34 crc kubenswrapper[4762]: I1009 13:54:34.370315 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-krrnq\" (UniqueName: \"kubernetes.io/projected/47c50d74-b4b0-4137-b46b-fb061b97a782-kube-api-access-krrnq\") pod \"redhat-operators-rr2wk\" (UID: \"47c50d74-b4b0-4137-b46b-fb061b97a782\") " pod="openshift-marketplace/redhat-operators-rr2wk" Oct 09 13:54:34 crc kubenswrapper[4762]: I1009 13:54:34.370425 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/47c50d74-b4b0-4137-b46b-fb061b97a782-utilities\") pod \"redhat-operators-rr2wk\" (UID: \"47c50d74-b4b0-4137-b46b-fb061b97a782\") " pod="openshift-marketplace/redhat-operators-rr2wk" Oct 09 13:54:34 crc kubenswrapper[4762]: I1009 13:54:34.370469 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/47c50d74-b4b0-4137-b46b-fb061b97a782-catalog-content\") pod \"redhat-operators-rr2wk\" (UID: \"47c50d74-b4b0-4137-b46b-fb061b97a782\") " pod="openshift-marketplace/redhat-operators-rr2wk" Oct 09 13:54:34 crc kubenswrapper[4762]: I1009 13:54:34.471696 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/47c50d74-b4b0-4137-b46b-fb061b97a782-utilities\") pod \"redhat-operators-rr2wk\" (UID: \"47c50d74-b4b0-4137-b46b-fb061b97a782\") " pod="openshift-marketplace/redhat-operators-rr2wk" Oct 09 13:54:34 crc kubenswrapper[4762]: I1009 13:54:34.471753 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/47c50d74-b4b0-4137-b46b-fb061b97a782-catalog-content\") pod \"redhat-operators-rr2wk\" (UID: \"47c50d74-b4b0-4137-b46b-fb061b97a782\") " pod="openshift-marketplace/redhat-operators-rr2wk" Oct 09 13:54:34 crc kubenswrapper[4762]: I1009 13:54:34.471825 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-krrnq\" (UniqueName: \"kubernetes.io/projected/47c50d74-b4b0-4137-b46b-fb061b97a782-kube-api-access-krrnq\") pod \"redhat-operators-rr2wk\" (UID: \"47c50d74-b4b0-4137-b46b-fb061b97a782\") " pod="openshift-marketplace/redhat-operators-rr2wk" Oct 09 13:54:34 crc kubenswrapper[4762]: I1009 13:54:34.473130 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/47c50d74-b4b0-4137-b46b-fb061b97a782-utilities\") pod \"redhat-operators-rr2wk\" (UID: \"47c50d74-b4b0-4137-b46b-fb061b97a782\") " pod="openshift-marketplace/redhat-operators-rr2wk" Oct 09 13:54:34 crc kubenswrapper[4762]: I1009 13:54:34.473218 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/47c50d74-b4b0-4137-b46b-fb061b97a782-catalog-content\") pod \"redhat-operators-rr2wk\" (UID: \"47c50d74-b4b0-4137-b46b-fb061b97a782\") " pod="openshift-marketplace/redhat-operators-rr2wk" Oct 09 13:54:34 crc kubenswrapper[4762]: I1009 13:54:34.499360 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-krrnq\" (UniqueName: \"kubernetes.io/projected/47c50d74-b4b0-4137-b46b-fb061b97a782-kube-api-access-krrnq\") pod \"redhat-operators-rr2wk\" (UID: \"47c50d74-b4b0-4137-b46b-fb061b97a782\") " pod="openshift-marketplace/redhat-operators-rr2wk" Oct 09 13:54:34 crc kubenswrapper[4762]: I1009 13:54:34.513439 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rr2wk" Oct 09 13:54:34 crc kubenswrapper[4762]: I1009 13:54:34.975359 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-rr2wk"] Oct 09 13:54:35 crc kubenswrapper[4762]: I1009 13:54:35.315370 4762 generic.go:334] "Generic (PLEG): container finished" podID="47c50d74-b4b0-4137-b46b-fb061b97a782" containerID="2be743a2aeb321804dc6d3512861c91051e81dc6f12312e6ea18f69f6f8b8b41" exitCode=0 Oct 09 13:54:35 crc kubenswrapper[4762]: I1009 13:54:35.315412 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rr2wk" event={"ID":"47c50d74-b4b0-4137-b46b-fb061b97a782","Type":"ContainerDied","Data":"2be743a2aeb321804dc6d3512861c91051e81dc6f12312e6ea18f69f6f8b8b41"} Oct 09 13:54:35 crc kubenswrapper[4762]: I1009 13:54:35.315436 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rr2wk" event={"ID":"47c50d74-b4b0-4137-b46b-fb061b97a782","Type":"ContainerStarted","Data":"b6157031d389d4342935b2d1e13ab5509fefa5e994d7b5c0ad6b694c6e339830"} Oct 09 13:54:35 crc kubenswrapper[4762]: I1009 13:54:35.318294 4762 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 09 13:54:35 crc kubenswrapper[4762]: I1009 13:54:35.964970 4762 scope.go:117] "RemoveContainer" containerID="a2afb536cc344719717b53d10099260b84d6f0ea47ea651fc9cf7b2fe8a2b014" Oct 09 13:54:35 crc kubenswrapper[4762]: E1009 13:54:35.965539 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 13:54:37 crc kubenswrapper[4762]: I1009 13:54:37.333679 4762 generic.go:334] "Generic (PLEG): container finished" podID="47c50d74-b4b0-4137-b46b-fb061b97a782" containerID="526926aefc2579f2df68f6f8a838b052eec6f9457cf2f93fc2b37fd153b07189" exitCode=0 Oct 09 13:54:37 crc kubenswrapper[4762]: I1009 13:54:37.333855 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rr2wk" event={"ID":"47c50d74-b4b0-4137-b46b-fb061b97a782","Type":"ContainerDied","Data":"526926aefc2579f2df68f6f8a838b052eec6f9457cf2f93fc2b37fd153b07189"} Oct 09 13:54:38 crc kubenswrapper[4762]: I1009 13:54:38.348423 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rr2wk" event={"ID":"47c50d74-b4b0-4137-b46b-fb061b97a782","Type":"ContainerStarted","Data":"e70fe935ba15b106516826635da05dabdf6e013b255dd8eb43ed7907e26c58c9"} Oct 09 13:54:38 crc kubenswrapper[4762]: I1009 13:54:38.373125 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-rr2wk" podStartSLOduration=1.798522661 podStartE2EDuration="4.373107824s" podCreationTimestamp="2025-10-09 13:54:34 +0000 UTC" 
firstStartedPulling="2025-10-09 13:54:35.31790296 +0000 UTC m=+1751.091693999" lastFinishedPulling="2025-10-09 13:54:37.892488123 +0000 UTC m=+1753.666279162" observedRunningTime="2025-10-09 13:54:38.367412066 +0000 UTC m=+1754.141203125" watchObservedRunningTime="2025-10-09 13:54:38.373107824 +0000 UTC m=+1754.146898863" Oct 09 13:54:44 crc kubenswrapper[4762]: I1009 13:54:44.514545 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-rr2wk" Oct 09 13:54:44 crc kubenswrapper[4762]: I1009 13:54:44.515095 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-rr2wk" Oct 09 13:54:44 crc kubenswrapper[4762]: I1009 13:54:44.554546 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-rr2wk" Oct 09 13:54:45 crc kubenswrapper[4762]: I1009 13:54:45.447993 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-rr2wk" Oct 09 13:54:45 crc kubenswrapper[4762]: I1009 13:54:45.497017 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-rr2wk"] Oct 09 13:54:47 crc kubenswrapper[4762]: I1009 13:54:47.418585 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-rr2wk" podUID="47c50d74-b4b0-4137-b46b-fb061b97a782" containerName="registry-server" containerID="cri-o://e70fe935ba15b106516826635da05dabdf6e013b255dd8eb43ed7907e26c58c9" gracePeriod=2 Oct 09 13:54:48 crc kubenswrapper[4762]: I1009 13:54:48.429680 4762 generic.go:334] "Generic (PLEG): container finished" podID="47c50d74-b4b0-4137-b46b-fb061b97a782" containerID="e70fe935ba15b106516826635da05dabdf6e013b255dd8eb43ed7907e26c58c9" exitCode=0 Oct 09 13:54:48 crc kubenswrapper[4762]: I1009 13:54:48.429845 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rr2wk" event={"ID":"47c50d74-b4b0-4137-b46b-fb061b97a782","Type":"ContainerDied","Data":"e70fe935ba15b106516826635da05dabdf6e013b255dd8eb43ed7907e26c58c9"} Oct 09 13:54:48 crc kubenswrapper[4762]: I1009 13:54:48.696248 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-rr2wk" Oct 09 13:54:48 crc kubenswrapper[4762]: I1009 13:54:48.779690 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-krrnq\" (UniqueName: \"kubernetes.io/projected/47c50d74-b4b0-4137-b46b-fb061b97a782-kube-api-access-krrnq\") pod \"47c50d74-b4b0-4137-b46b-fb061b97a782\" (UID: \"47c50d74-b4b0-4137-b46b-fb061b97a782\") " Oct 09 13:54:48 crc kubenswrapper[4762]: I1009 13:54:48.779867 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/47c50d74-b4b0-4137-b46b-fb061b97a782-catalog-content\") pod \"47c50d74-b4b0-4137-b46b-fb061b97a782\" (UID: \"47c50d74-b4b0-4137-b46b-fb061b97a782\") " Oct 09 13:54:48 crc kubenswrapper[4762]: I1009 13:54:48.779988 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/47c50d74-b4b0-4137-b46b-fb061b97a782-utilities\") pod \"47c50d74-b4b0-4137-b46b-fb061b97a782\" (UID: \"47c50d74-b4b0-4137-b46b-fb061b97a782\") " Oct 09 13:54:48 crc kubenswrapper[4762]: I1009 13:54:48.780928 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/47c50d74-b4b0-4137-b46b-fb061b97a782-utilities" (OuterVolumeSpecName: "utilities") pod "47c50d74-b4b0-4137-b46b-fb061b97a782" (UID: "47c50d74-b4b0-4137-b46b-fb061b97a782"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 13:54:48 crc kubenswrapper[4762]: I1009 13:54:48.785864 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/47c50d74-b4b0-4137-b46b-fb061b97a782-kube-api-access-krrnq" (OuterVolumeSpecName: "kube-api-access-krrnq") pod "47c50d74-b4b0-4137-b46b-fb061b97a782" (UID: "47c50d74-b4b0-4137-b46b-fb061b97a782"). InnerVolumeSpecName "kube-api-access-krrnq". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 13:54:48 crc kubenswrapper[4762]: I1009 13:54:48.882198 4762 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/47c50d74-b4b0-4137-b46b-fb061b97a782-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 13:54:48 crc kubenswrapper[4762]: I1009 13:54:48.882244 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-krrnq\" (UniqueName: \"kubernetes.io/projected/47c50d74-b4b0-4137-b46b-fb061b97a782-kube-api-access-krrnq\") on node \"crc\" DevicePath \"\"" Oct 09 13:54:48 crc kubenswrapper[4762]: I1009 13:54:48.965778 4762 scope.go:117] "RemoveContainer" containerID="a2afb536cc344719717b53d10099260b84d6f0ea47ea651fc9cf7b2fe8a2b014" Oct 09 13:54:48 crc kubenswrapper[4762]: E1009 13:54:48.966137 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 13:54:49 crc kubenswrapper[4762]: I1009 13:54:49.439690 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rr2wk" event={"ID":"47c50d74-b4b0-4137-b46b-fb061b97a782","Type":"ContainerDied","Data":"b6157031d389d4342935b2d1e13ab5509fefa5e994d7b5c0ad6b694c6e339830"} Oct 09 13:54:49 crc kubenswrapper[4762]: I1009 13:54:49.439749 4762 scope.go:117] "RemoveContainer" containerID="e70fe935ba15b106516826635da05dabdf6e013b255dd8eb43ed7907e26c58c9" Oct 09 13:54:49 crc kubenswrapper[4762]: I1009 13:54:49.439860 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rr2wk" Oct 09 13:54:49 crc kubenswrapper[4762]: I1009 13:54:49.460654 4762 scope.go:117] "RemoveContainer" containerID="526926aefc2579f2df68f6f8a838b052eec6f9457cf2f93fc2b37fd153b07189" Oct 09 13:54:49 crc kubenswrapper[4762]: I1009 13:54:49.488748 4762 scope.go:117] "RemoveContainer" containerID="2be743a2aeb321804dc6d3512861c91051e81dc6f12312e6ea18f69f6f8b8b41" Oct 09 13:54:49 crc kubenswrapper[4762]: I1009 13:54:49.927142 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/47c50d74-b4b0-4137-b46b-fb061b97a782-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "47c50d74-b4b0-4137-b46b-fb061b97a782" (UID: "47c50d74-b4b0-4137-b46b-fb061b97a782"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 13:54:49 crc kubenswrapper[4762]: I1009 13:54:49.996979 4762 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/47c50d74-b4b0-4137-b46b-fb061b97a782-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 13:54:50 crc kubenswrapper[4762]: I1009 13:54:50.079706 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-rr2wk"] Oct 09 13:54:50 crc kubenswrapper[4762]: I1009 13:54:50.086767 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-rr2wk"] Oct 09 13:54:50 crc kubenswrapper[4762]: I1009 13:54:50.974356 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="47c50d74-b4b0-4137-b46b-fb061b97a782" path="/var/lib/kubelet/pods/47c50d74-b4b0-4137-b46b-fb061b97a782/volumes" Oct 09 13:55:02 crc kubenswrapper[4762]: I1009 13:55:02.966158 4762 scope.go:117] "RemoveContainer" containerID="a2afb536cc344719717b53d10099260b84d6f0ea47ea651fc9cf7b2fe8a2b014" Oct 09 13:55:02 crc kubenswrapper[4762]: E1009 13:55:02.967047 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 13:55:15 crc kubenswrapper[4762]: I1009 13:55:15.965588 4762 scope.go:117] "RemoveContainer" containerID="a2afb536cc344719717b53d10099260b84d6f0ea47ea651fc9cf7b2fe8a2b014" Oct 09 13:55:15 crc kubenswrapper[4762]: E1009 13:55:15.966349 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 13:55:26 crc kubenswrapper[4762]: I1009 13:55:26.965089 4762 scope.go:117] "RemoveContainer" containerID="a2afb536cc344719717b53d10099260b84d6f0ea47ea651fc9cf7b2fe8a2b014" Oct 09 13:55:27 crc kubenswrapper[4762]: E1009 13:55:26.965722 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 13:55:29 crc kubenswrapper[4762]: I1009 13:55:29.658764 4762 scope.go:117] "RemoveContainer" containerID="eb7446c781a3fc26e94e1a5d7268c99a91fefd167fad020d62497a265f46becd" Oct 09 13:55:29 crc kubenswrapper[4762]: I1009 13:55:29.685845 4762 scope.go:117] "RemoveContainer" containerID="5a2c557d48c0ddb55e75b1d77d66c330f8602fb5590b5f44bfac8794c89b9776" Oct 09 13:55:29 crc kubenswrapper[4762]: I1009 13:55:29.702346 4762 scope.go:117] "RemoveContainer" containerID="85c1dff79b65efbb1d0077d4017bfcb9b84c3b899d4399ade4316429900aab80" Oct 09 13:55:29 crc kubenswrapper[4762]: I1009 13:55:29.718857 4762 scope.go:117] 
"RemoveContainer" containerID="71077d84bc79017d2c9e27c25dd96dee36c7b4da89cb52d241107b28fbe63c61" Oct 09 13:55:29 crc kubenswrapper[4762]: I1009 13:55:29.762823 4762 scope.go:117] "RemoveContainer" containerID="feaef4de6cce1bbb2d4303079a7194f8a547f09def8c966e05793d81fbd5d515" Oct 09 13:55:29 crc kubenswrapper[4762]: I1009 13:55:29.778934 4762 scope.go:117] "RemoveContainer" containerID="1d3806fa170fdbcbf424aff078a59eb080a2c478ec242116e7bd89a246b96fe7" Oct 09 13:55:29 crc kubenswrapper[4762]: I1009 13:55:29.814561 4762 scope.go:117] "RemoveContainer" containerID="92507a238bded5661e7b90d68dece979b10a9b1d40801f032f668531f54c7d94" Oct 09 13:55:29 crc kubenswrapper[4762]: I1009 13:55:29.855001 4762 scope.go:117] "RemoveContainer" containerID="fc250e3d796c373b1c56af7e1cae32a615f042162866abd13fd13f6d9adf108b" Oct 09 13:55:29 crc kubenswrapper[4762]: I1009 13:55:29.881265 4762 scope.go:117] "RemoveContainer" containerID="f4d38c90d6503eb2b13feadda565a6997a41189134eb578e55d77d77cecf5418" Oct 09 13:55:29 crc kubenswrapper[4762]: I1009 13:55:29.900359 4762 scope.go:117] "RemoveContainer" containerID="82d9cb1e06b181f7531ea03f76bb4438570b9f73b7cd80ea245f725456672933" Oct 09 13:55:38 crc kubenswrapper[4762]: I1009 13:55:38.965730 4762 scope.go:117] "RemoveContainer" containerID="a2afb536cc344719717b53d10099260b84d6f0ea47ea651fc9cf7b2fe8a2b014" Oct 09 13:55:38 crc kubenswrapper[4762]: E1009 13:55:38.966311 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 13:55:49 crc kubenswrapper[4762]: I1009 13:55:49.965594 4762 scope.go:117] "RemoveContainer" containerID="a2afb536cc344719717b53d10099260b84d6f0ea47ea651fc9cf7b2fe8a2b014" Oct 09 13:55:49 crc kubenswrapper[4762]: E1009 13:55:49.966329 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 13:56:04 crc kubenswrapper[4762]: I1009 13:56:04.969387 4762 scope.go:117] "RemoveContainer" containerID="a2afb536cc344719717b53d10099260b84d6f0ea47ea651fc9cf7b2fe8a2b014" Oct 09 13:56:04 crc kubenswrapper[4762]: E1009 13:56:04.970122 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 13:56:15 crc kubenswrapper[4762]: I1009 13:56:15.965155 4762 scope.go:117] "RemoveContainer" containerID="a2afb536cc344719717b53d10099260b84d6f0ea47ea651fc9cf7b2fe8a2b014" Oct 09 13:56:15 crc kubenswrapper[4762]: E1009 13:56:15.965886 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with 
CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 13:56:30 crc kubenswrapper[4762]: I1009 13:56:30.027487 4762 scope.go:117] "RemoveContainer" containerID="200af99e4282b55cc024e1a91875d1cc9fe045ed4509ce54d68f1108ab53eb24" Oct 09 13:56:30 crc kubenswrapper[4762]: I1009 13:56:30.060856 4762 scope.go:117] "RemoveContainer" containerID="e126396d1ff84e0d194527868fb00994e52bf6887b97b8770716230bf623f284" Oct 09 13:56:30 crc kubenswrapper[4762]: I1009 13:56:30.094770 4762 scope.go:117] "RemoveContainer" containerID="80e01643f8cc645bb4e16ae578ab16970af08ccbd3f364e20a9e06ba888c7cb9" Oct 09 13:56:30 crc kubenswrapper[4762]: I1009 13:56:30.115513 4762 scope.go:117] "RemoveContainer" containerID="20697adc678f9236f713b97e2f91788f65671a0be29a90e8b44e60c50b457ffb" Oct 09 13:56:30 crc kubenswrapper[4762]: I1009 13:56:30.146758 4762 scope.go:117] "RemoveContainer" containerID="c97286ccff2b823606d77c2401198e8d96d31e2569c5791d38c297fc823abe2b" Oct 09 13:56:30 crc kubenswrapper[4762]: I1009 13:56:30.169112 4762 scope.go:117] "RemoveContainer" containerID="36cb5740bfd3757c1db9d2f58e72c98daf1730cccfbc6e4f523e38cdf12df964" Oct 09 13:56:30 crc kubenswrapper[4762]: I1009 13:56:30.196959 4762 scope.go:117] "RemoveContainer" containerID="9df87f9b1f0f83b44995075ef8017f82960207c724b59d5dcdfd9780e5f0641c" Oct 09 13:56:30 crc kubenswrapper[4762]: I1009 13:56:30.213353 4762 scope.go:117] "RemoveContainer" containerID="629e7b66e2c426e986e44b3ca1a61491ba782b0c77b974188f502e357ca696f7" Oct 09 13:56:30 crc kubenswrapper[4762]: I1009 13:56:30.241204 4762 scope.go:117] "RemoveContainer" containerID="42df5b43b4196e95eec2dbdedd25a25e5ecb0e0d6a17528ae0a9b5af3b08a452" Oct 09 13:56:30 crc kubenswrapper[4762]: I1009 13:56:30.268974 4762 scope.go:117] "RemoveContainer" containerID="2d7aba0bb4082b2fef3387ae71a6137c2f97c32898d88c293c1602e2961e85cb" Oct 09 13:56:30 crc kubenswrapper[4762]: I1009 13:56:30.964874 4762 scope.go:117] "RemoveContainer" containerID="a2afb536cc344719717b53d10099260b84d6f0ea47ea651fc9cf7b2fe8a2b014" Oct 09 13:56:30 crc kubenswrapper[4762]: E1009 13:56:30.965562 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 13:56:44 crc kubenswrapper[4762]: I1009 13:56:44.969024 4762 scope.go:117] "RemoveContainer" containerID="a2afb536cc344719717b53d10099260b84d6f0ea47ea651fc9cf7b2fe8a2b014" Oct 09 13:56:44 crc kubenswrapper[4762]: E1009 13:56:44.969860 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 13:56:58 crc kubenswrapper[4762]: I1009 13:56:58.964489 4762 scope.go:117] "RemoveContainer" 
containerID="a2afb536cc344719717b53d10099260b84d6f0ea47ea651fc9cf7b2fe8a2b014" Oct 09 13:56:58 crc kubenswrapper[4762]: E1009 13:56:58.964960 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 13:57:09 crc kubenswrapper[4762]: I1009 13:57:09.964618 4762 scope.go:117] "RemoveContainer" containerID="a2afb536cc344719717b53d10099260b84d6f0ea47ea651fc9cf7b2fe8a2b014" Oct 09 13:57:09 crc kubenswrapper[4762]: E1009 13:57:09.965427 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 13:57:24 crc kubenswrapper[4762]: I1009 13:57:24.968364 4762 scope.go:117] "RemoveContainer" containerID="a2afb536cc344719717b53d10099260b84d6f0ea47ea651fc9cf7b2fe8a2b014" Oct 09 13:57:24 crc kubenswrapper[4762]: E1009 13:57:24.969117 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 13:57:39 crc kubenswrapper[4762]: I1009 13:57:39.965725 4762 scope.go:117] "RemoveContainer" containerID="a2afb536cc344719717b53d10099260b84d6f0ea47ea651fc9cf7b2fe8a2b014" Oct 09 13:57:39 crc kubenswrapper[4762]: E1009 13:57:39.966474 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 13:57:50 crc kubenswrapper[4762]: I1009 13:57:50.965145 4762 scope.go:117] "RemoveContainer" containerID="a2afb536cc344719717b53d10099260b84d6f0ea47ea651fc9cf7b2fe8a2b014" Oct 09 13:57:50 crc kubenswrapper[4762]: E1009 13:57:50.965890 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 13:58:05 crc kubenswrapper[4762]: I1009 13:58:05.965569 4762 scope.go:117] "RemoveContainer" containerID="a2afb536cc344719717b53d10099260b84d6f0ea47ea651fc9cf7b2fe8a2b014" Oct 09 13:58:05 crc kubenswrapper[4762]: E1009 13:58:05.966584 4762 pod_workers.go:1301] "Error syncing pod, 
skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 13:58:20 crc kubenswrapper[4762]: I1009 13:58:20.965101 4762 scope.go:117] "RemoveContainer" containerID="a2afb536cc344719717b53d10099260b84d6f0ea47ea651fc9cf7b2fe8a2b014" Oct 09 13:58:20 crc kubenswrapper[4762]: E1009 13:58:20.967233 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 13:58:35 crc kubenswrapper[4762]: I1009 13:58:35.965396 4762 scope.go:117] "RemoveContainer" containerID="a2afb536cc344719717b53d10099260b84d6f0ea47ea651fc9cf7b2fe8a2b014" Oct 09 13:58:35 crc kubenswrapper[4762]: E1009 13:58:35.966116 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 13:58:50 crc kubenswrapper[4762]: I1009 13:58:50.964928 4762 scope.go:117] "RemoveContainer" containerID="a2afb536cc344719717b53d10099260b84d6f0ea47ea651fc9cf7b2fe8a2b014" Oct 09 13:58:51 crc kubenswrapper[4762]: I1009 13:58:51.294595 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" event={"ID":"366049a3-acf6-488c-9f93-4557528d6d14","Type":"ContainerStarted","Data":"f559e83ba03b87fe2b68fa8071d2d50ca4f05b52624252aa5d64ab5a305f1ae9"} Oct 09 14:00:00 crc kubenswrapper[4762]: I1009 14:00:00.148988 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29333640-cg4gt"] Oct 09 14:00:00 crc kubenswrapper[4762]: E1009 14:00:00.149857 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47c50d74-b4b0-4137-b46b-fb061b97a782" containerName="extract-content" Oct 09 14:00:00 crc kubenswrapper[4762]: I1009 14:00:00.149873 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="47c50d74-b4b0-4137-b46b-fb061b97a782" containerName="extract-content" Oct 09 14:00:00 crc kubenswrapper[4762]: E1009 14:00:00.149889 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47c50d74-b4b0-4137-b46b-fb061b97a782" containerName="registry-server" Oct 09 14:00:00 crc kubenswrapper[4762]: I1009 14:00:00.149896 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="47c50d74-b4b0-4137-b46b-fb061b97a782" containerName="registry-server" Oct 09 14:00:00 crc kubenswrapper[4762]: E1009 14:00:00.149931 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47c50d74-b4b0-4137-b46b-fb061b97a782" containerName="extract-utilities" Oct 09 14:00:00 crc kubenswrapper[4762]: I1009 14:00:00.149937 4762 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="47c50d74-b4b0-4137-b46b-fb061b97a782" containerName="extract-utilities" Oct 09 14:00:00 crc kubenswrapper[4762]: I1009 14:00:00.150081 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="47c50d74-b4b0-4137-b46b-fb061b97a782" containerName="registry-server" Oct 09 14:00:00 crc kubenswrapper[4762]: I1009 14:00:00.150539 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29333640-cg4gt" Oct 09 14:00:00 crc kubenswrapper[4762]: I1009 14:00:00.153909 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 09 14:00:00 crc kubenswrapper[4762]: I1009 14:00:00.153909 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 09 14:00:00 crc kubenswrapper[4762]: I1009 14:00:00.168424 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29333640-cg4gt"] Oct 09 14:00:00 crc kubenswrapper[4762]: I1009 14:00:00.284178 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/dfaa232b-5c62-4c60-ad6c-486aa735578e-config-volume\") pod \"collect-profiles-29333640-cg4gt\" (UID: \"dfaa232b-5c62-4c60-ad6c-486aa735578e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333640-cg4gt" Oct 09 14:00:00 crc kubenswrapper[4762]: I1009 14:00:00.284233 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/dfaa232b-5c62-4c60-ad6c-486aa735578e-secret-volume\") pod \"collect-profiles-29333640-cg4gt\" (UID: \"dfaa232b-5c62-4c60-ad6c-486aa735578e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333640-cg4gt" Oct 09 14:00:00 crc kubenswrapper[4762]: I1009 14:00:00.284271 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-stgvm\" (UniqueName: \"kubernetes.io/projected/dfaa232b-5c62-4c60-ad6c-486aa735578e-kube-api-access-stgvm\") pod \"collect-profiles-29333640-cg4gt\" (UID: \"dfaa232b-5c62-4c60-ad6c-486aa735578e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333640-cg4gt" Oct 09 14:00:00 crc kubenswrapper[4762]: I1009 14:00:00.385125 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/dfaa232b-5c62-4c60-ad6c-486aa735578e-config-volume\") pod \"collect-profiles-29333640-cg4gt\" (UID: \"dfaa232b-5c62-4c60-ad6c-486aa735578e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333640-cg4gt" Oct 09 14:00:00 crc kubenswrapper[4762]: I1009 14:00:00.385190 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/dfaa232b-5c62-4c60-ad6c-486aa735578e-secret-volume\") pod \"collect-profiles-29333640-cg4gt\" (UID: \"dfaa232b-5c62-4c60-ad6c-486aa735578e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333640-cg4gt" Oct 09 14:00:00 crc kubenswrapper[4762]: I1009 14:00:00.385229 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-stgvm\" (UniqueName: \"kubernetes.io/projected/dfaa232b-5c62-4c60-ad6c-486aa735578e-kube-api-access-stgvm\") pod 
\"collect-profiles-29333640-cg4gt\" (UID: \"dfaa232b-5c62-4c60-ad6c-486aa735578e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333640-cg4gt" Oct 09 14:00:00 crc kubenswrapper[4762]: I1009 14:00:00.386076 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/dfaa232b-5c62-4c60-ad6c-486aa735578e-config-volume\") pod \"collect-profiles-29333640-cg4gt\" (UID: \"dfaa232b-5c62-4c60-ad6c-486aa735578e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333640-cg4gt" Oct 09 14:00:00 crc kubenswrapper[4762]: I1009 14:00:00.390817 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/dfaa232b-5c62-4c60-ad6c-486aa735578e-secret-volume\") pod \"collect-profiles-29333640-cg4gt\" (UID: \"dfaa232b-5c62-4c60-ad6c-486aa735578e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333640-cg4gt" Oct 09 14:00:00 crc kubenswrapper[4762]: I1009 14:00:00.401912 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-stgvm\" (UniqueName: \"kubernetes.io/projected/dfaa232b-5c62-4c60-ad6c-486aa735578e-kube-api-access-stgvm\") pod \"collect-profiles-29333640-cg4gt\" (UID: \"dfaa232b-5c62-4c60-ad6c-486aa735578e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333640-cg4gt" Oct 09 14:00:00 crc kubenswrapper[4762]: I1009 14:00:00.473867 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29333640-cg4gt" Oct 09 14:00:00 crc kubenswrapper[4762]: I1009 14:00:00.899738 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29333640-cg4gt"] Oct 09 14:00:01 crc kubenswrapper[4762]: I1009 14:00:01.819699 4762 generic.go:334] "Generic (PLEG): container finished" podID="dfaa232b-5c62-4c60-ad6c-486aa735578e" containerID="e9a77030f4a5d7eb7299e1a5560aef53355916c12293e8c7c5208bdb41c12b34" exitCode=0 Oct 09 14:00:01 crc kubenswrapper[4762]: I1009 14:00:01.819938 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29333640-cg4gt" event={"ID":"dfaa232b-5c62-4c60-ad6c-486aa735578e","Type":"ContainerDied","Data":"e9a77030f4a5d7eb7299e1a5560aef53355916c12293e8c7c5208bdb41c12b34"} Oct 09 14:00:01 crc kubenswrapper[4762]: I1009 14:00:01.819998 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29333640-cg4gt" event={"ID":"dfaa232b-5c62-4c60-ad6c-486aa735578e","Type":"ContainerStarted","Data":"da11c5ee08ef4f82a7e60553aba53f48210260a26493fec3b82d1c53adab593b"} Oct 09 14:00:03 crc kubenswrapper[4762]: I1009 14:00:03.101505 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29333640-cg4gt" Oct 09 14:00:03 crc kubenswrapper[4762]: I1009 14:00:03.129764 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/dfaa232b-5c62-4c60-ad6c-486aa735578e-config-volume\") pod \"dfaa232b-5c62-4c60-ad6c-486aa735578e\" (UID: \"dfaa232b-5c62-4c60-ad6c-486aa735578e\") " Oct 09 14:00:03 crc kubenswrapper[4762]: I1009 14:00:03.129908 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/dfaa232b-5c62-4c60-ad6c-486aa735578e-secret-volume\") pod \"dfaa232b-5c62-4c60-ad6c-486aa735578e\" (UID: \"dfaa232b-5c62-4c60-ad6c-486aa735578e\") " Oct 09 14:00:03 crc kubenswrapper[4762]: I1009 14:00:03.129938 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-stgvm\" (UniqueName: \"kubernetes.io/projected/dfaa232b-5c62-4c60-ad6c-486aa735578e-kube-api-access-stgvm\") pod \"dfaa232b-5c62-4c60-ad6c-486aa735578e\" (UID: \"dfaa232b-5c62-4c60-ad6c-486aa735578e\") " Oct 09 14:00:03 crc kubenswrapper[4762]: I1009 14:00:03.130654 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dfaa232b-5c62-4c60-ad6c-486aa735578e-config-volume" (OuterVolumeSpecName: "config-volume") pod "dfaa232b-5c62-4c60-ad6c-486aa735578e" (UID: "dfaa232b-5c62-4c60-ad6c-486aa735578e"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 14:00:03 crc kubenswrapper[4762]: I1009 14:00:03.136409 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dfaa232b-5c62-4c60-ad6c-486aa735578e-kube-api-access-stgvm" (OuterVolumeSpecName: "kube-api-access-stgvm") pod "dfaa232b-5c62-4c60-ad6c-486aa735578e" (UID: "dfaa232b-5c62-4c60-ad6c-486aa735578e"). InnerVolumeSpecName "kube-api-access-stgvm". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 14:00:03 crc kubenswrapper[4762]: I1009 14:00:03.136547 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dfaa232b-5c62-4c60-ad6c-486aa735578e-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "dfaa232b-5c62-4c60-ad6c-486aa735578e" (UID: "dfaa232b-5c62-4c60-ad6c-486aa735578e"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 14:00:03 crc kubenswrapper[4762]: I1009 14:00:03.231866 4762 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/dfaa232b-5c62-4c60-ad6c-486aa735578e-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 09 14:00:03 crc kubenswrapper[4762]: I1009 14:00:03.231946 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-stgvm\" (UniqueName: \"kubernetes.io/projected/dfaa232b-5c62-4c60-ad6c-486aa735578e-kube-api-access-stgvm\") on node \"crc\" DevicePath \"\"" Oct 09 14:00:03 crc kubenswrapper[4762]: I1009 14:00:03.231960 4762 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/dfaa232b-5c62-4c60-ad6c-486aa735578e-config-volume\") on node \"crc\" DevicePath \"\"" Oct 09 14:00:03 crc kubenswrapper[4762]: I1009 14:00:03.834606 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29333640-cg4gt" event={"ID":"dfaa232b-5c62-4c60-ad6c-486aa735578e","Type":"ContainerDied","Data":"da11c5ee08ef4f82a7e60553aba53f48210260a26493fec3b82d1c53adab593b"} Oct 09 14:00:03 crc kubenswrapper[4762]: I1009 14:00:03.834662 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29333640-cg4gt" Oct 09 14:00:03 crc kubenswrapper[4762]: I1009 14:00:03.834666 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="da11c5ee08ef4f82a7e60553aba53f48210260a26493fec3b82d1c53adab593b" Oct 09 14:00:04 crc kubenswrapper[4762]: I1009 14:00:04.172818 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29333595-d52ht"] Oct 09 14:00:04 crc kubenswrapper[4762]: I1009 14:00:04.178525 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29333595-d52ht"] Oct 09 14:00:04 crc kubenswrapper[4762]: I1009 14:00:04.973764 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3b828691-c7c6-4899-b524-d867e7f151a4" path="/var/lib/kubelet/pods/3b828691-c7c6-4899-b524-d867e7f151a4/volumes" Oct 09 14:00:30 crc kubenswrapper[4762]: I1009 14:00:30.468683 4762 scope.go:117] "RemoveContainer" containerID="55560df7338145e9510a0524ec0899fa8addbb1ac720d828e8d646a0e0178c4f" Oct 09 14:01:11 crc kubenswrapper[4762]: I1009 14:01:11.969074 4762 patch_prober.go:28] interesting pod/machine-config-daemon-5v6hv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 14:01:11 crc kubenswrapper[4762]: I1009 14:01:11.969753 4762 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 14:01:41 crc kubenswrapper[4762]: I1009 14:01:41.413333 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-fsmv7"] Oct 09 14:01:41 crc kubenswrapper[4762]: E1009 14:01:41.415282 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dfaa232b-5c62-4c60-ad6c-486aa735578e" 
containerName="collect-profiles" Oct 09 14:01:41 crc kubenswrapper[4762]: I1009 14:01:41.415321 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="dfaa232b-5c62-4c60-ad6c-486aa735578e" containerName="collect-profiles" Oct 09 14:01:41 crc kubenswrapper[4762]: I1009 14:01:41.415502 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="dfaa232b-5c62-4c60-ad6c-486aa735578e" containerName="collect-profiles" Oct 09 14:01:41 crc kubenswrapper[4762]: I1009 14:01:41.417068 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fsmv7" Oct 09 14:01:41 crc kubenswrapper[4762]: I1009 14:01:41.430016 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-fsmv7"] Oct 09 14:01:41 crc kubenswrapper[4762]: I1009 14:01:41.597794 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9f28v\" (UniqueName: \"kubernetes.io/projected/1da277dd-aa67-4416-a506-43ddb7566520-kube-api-access-9f28v\") pod \"redhat-marketplace-fsmv7\" (UID: \"1da277dd-aa67-4416-a506-43ddb7566520\") " pod="openshift-marketplace/redhat-marketplace-fsmv7" Oct 09 14:01:41 crc kubenswrapper[4762]: I1009 14:01:41.597864 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1da277dd-aa67-4416-a506-43ddb7566520-utilities\") pod \"redhat-marketplace-fsmv7\" (UID: \"1da277dd-aa67-4416-a506-43ddb7566520\") " pod="openshift-marketplace/redhat-marketplace-fsmv7" Oct 09 14:01:41 crc kubenswrapper[4762]: I1009 14:01:41.597925 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1da277dd-aa67-4416-a506-43ddb7566520-catalog-content\") pod \"redhat-marketplace-fsmv7\" (UID: \"1da277dd-aa67-4416-a506-43ddb7566520\") " pod="openshift-marketplace/redhat-marketplace-fsmv7" Oct 09 14:01:41 crc kubenswrapper[4762]: I1009 14:01:41.698915 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1da277dd-aa67-4416-a506-43ddb7566520-catalog-content\") pod \"redhat-marketplace-fsmv7\" (UID: \"1da277dd-aa67-4416-a506-43ddb7566520\") " pod="openshift-marketplace/redhat-marketplace-fsmv7" Oct 09 14:01:41 crc kubenswrapper[4762]: I1009 14:01:41.699041 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9f28v\" (UniqueName: \"kubernetes.io/projected/1da277dd-aa67-4416-a506-43ddb7566520-kube-api-access-9f28v\") pod \"redhat-marketplace-fsmv7\" (UID: \"1da277dd-aa67-4416-a506-43ddb7566520\") " pod="openshift-marketplace/redhat-marketplace-fsmv7" Oct 09 14:01:41 crc kubenswrapper[4762]: I1009 14:01:41.699436 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1da277dd-aa67-4416-a506-43ddb7566520-utilities\") pod \"redhat-marketplace-fsmv7\" (UID: \"1da277dd-aa67-4416-a506-43ddb7566520\") " pod="openshift-marketplace/redhat-marketplace-fsmv7" Oct 09 14:01:41 crc kubenswrapper[4762]: I1009 14:01:41.699494 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1da277dd-aa67-4416-a506-43ddb7566520-catalog-content\") pod \"redhat-marketplace-fsmv7\" (UID: \"1da277dd-aa67-4416-a506-43ddb7566520\") " 
pod="openshift-marketplace/redhat-marketplace-fsmv7" Oct 09 14:01:41 crc kubenswrapper[4762]: I1009 14:01:41.699540 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1da277dd-aa67-4416-a506-43ddb7566520-utilities\") pod \"redhat-marketplace-fsmv7\" (UID: \"1da277dd-aa67-4416-a506-43ddb7566520\") " pod="openshift-marketplace/redhat-marketplace-fsmv7" Oct 09 14:01:41 crc kubenswrapper[4762]: I1009 14:01:41.727420 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9f28v\" (UniqueName: \"kubernetes.io/projected/1da277dd-aa67-4416-a506-43ddb7566520-kube-api-access-9f28v\") pod \"redhat-marketplace-fsmv7\" (UID: \"1da277dd-aa67-4416-a506-43ddb7566520\") " pod="openshift-marketplace/redhat-marketplace-fsmv7" Oct 09 14:01:41 crc kubenswrapper[4762]: I1009 14:01:41.738025 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fsmv7" Oct 09 14:01:41 crc kubenswrapper[4762]: I1009 14:01:41.969418 4762 patch_prober.go:28] interesting pod/machine-config-daemon-5v6hv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 14:01:41 crc kubenswrapper[4762]: I1009 14:01:41.969835 4762 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 14:01:42 crc kubenswrapper[4762]: I1009 14:01:42.225206 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-fsmv7"] Oct 09 14:01:42 crc kubenswrapper[4762]: I1009 14:01:42.538997 4762 generic.go:334] "Generic (PLEG): container finished" podID="1da277dd-aa67-4416-a506-43ddb7566520" containerID="81e063efe960e362ae6fe600e3b290130c98869591d4b49204a73853359a57ce" exitCode=0 Oct 09 14:01:42 crc kubenswrapper[4762]: I1009 14:01:42.539230 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fsmv7" event={"ID":"1da277dd-aa67-4416-a506-43ddb7566520","Type":"ContainerDied","Data":"81e063efe960e362ae6fe600e3b290130c98869591d4b49204a73853359a57ce"} Oct 09 14:01:42 crc kubenswrapper[4762]: I1009 14:01:42.539293 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fsmv7" event={"ID":"1da277dd-aa67-4416-a506-43ddb7566520","Type":"ContainerStarted","Data":"866d9b51ca43202e946808f60b15da922d7b7e8195ce613d8983d6fbec0c455d"} Oct 09 14:01:42 crc kubenswrapper[4762]: I1009 14:01:42.540624 4762 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 09 14:01:44 crc kubenswrapper[4762]: I1009 14:01:44.554895 4762 generic.go:334] "Generic (PLEG): container finished" podID="1da277dd-aa67-4416-a506-43ddb7566520" containerID="96ba38606c9e984b4b60f4a598123cfcbd0432a913750df3a5626de52a0161e8" exitCode=0 Oct 09 14:01:44 crc kubenswrapper[4762]: I1009 14:01:44.555025 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fsmv7" 
event={"ID":"1da277dd-aa67-4416-a506-43ddb7566520","Type":"ContainerDied","Data":"96ba38606c9e984b4b60f4a598123cfcbd0432a913750df3a5626de52a0161e8"} Oct 09 14:01:45 crc kubenswrapper[4762]: I1009 14:01:45.563717 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fsmv7" event={"ID":"1da277dd-aa67-4416-a506-43ddb7566520","Type":"ContainerStarted","Data":"f592dba595e8b3e247ab1e7ecfdbd59aa1a80e75c159028c61136e17b716ecb0"} Oct 09 14:01:45 crc kubenswrapper[4762]: I1009 14:01:45.586110 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-fsmv7" podStartSLOduration=2.134438726 podStartE2EDuration="4.586077796s" podCreationTimestamp="2025-10-09 14:01:41 +0000 UTC" firstStartedPulling="2025-10-09 14:01:42.540353766 +0000 UTC m=+2178.314144805" lastFinishedPulling="2025-10-09 14:01:44.991992836 +0000 UTC m=+2180.765783875" observedRunningTime="2025-10-09 14:01:45.580908151 +0000 UTC m=+2181.354699190" watchObservedRunningTime="2025-10-09 14:01:45.586077796 +0000 UTC m=+2181.359868835" Oct 09 14:01:51 crc kubenswrapper[4762]: I1009 14:01:51.738540 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-fsmv7" Oct 09 14:01:51 crc kubenswrapper[4762]: I1009 14:01:51.738946 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-fsmv7" Oct 09 14:01:51 crc kubenswrapper[4762]: I1009 14:01:51.782381 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-fsmv7" Oct 09 14:01:52 crc kubenswrapper[4762]: I1009 14:01:52.659984 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-fsmv7" Oct 09 14:01:52 crc kubenswrapper[4762]: I1009 14:01:52.715208 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-fsmv7"] Oct 09 14:01:54 crc kubenswrapper[4762]: I1009 14:01:54.630944 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-fsmv7" podUID="1da277dd-aa67-4416-a506-43ddb7566520" containerName="registry-server" containerID="cri-o://f592dba595e8b3e247ab1e7ecfdbd59aa1a80e75c159028c61136e17b716ecb0" gracePeriod=2 Oct 09 14:01:55 crc kubenswrapper[4762]: I1009 14:01:55.006807 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fsmv7" Oct 09 14:01:55 crc kubenswrapper[4762]: I1009 14:01:55.204967 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1da277dd-aa67-4416-a506-43ddb7566520-catalog-content\") pod \"1da277dd-aa67-4416-a506-43ddb7566520\" (UID: \"1da277dd-aa67-4416-a506-43ddb7566520\") " Oct 09 14:01:55 crc kubenswrapper[4762]: I1009 14:01:55.205116 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1da277dd-aa67-4416-a506-43ddb7566520-utilities\") pod \"1da277dd-aa67-4416-a506-43ddb7566520\" (UID: \"1da277dd-aa67-4416-a506-43ddb7566520\") " Oct 09 14:01:55 crc kubenswrapper[4762]: I1009 14:01:55.205215 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9f28v\" (UniqueName: \"kubernetes.io/projected/1da277dd-aa67-4416-a506-43ddb7566520-kube-api-access-9f28v\") pod \"1da277dd-aa67-4416-a506-43ddb7566520\" (UID: \"1da277dd-aa67-4416-a506-43ddb7566520\") " Oct 09 14:01:55 crc kubenswrapper[4762]: I1009 14:01:55.206252 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1da277dd-aa67-4416-a506-43ddb7566520-utilities" (OuterVolumeSpecName: "utilities") pod "1da277dd-aa67-4416-a506-43ddb7566520" (UID: "1da277dd-aa67-4416-a506-43ddb7566520"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 14:01:55 crc kubenswrapper[4762]: I1009 14:01:55.216423 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1da277dd-aa67-4416-a506-43ddb7566520-kube-api-access-9f28v" (OuterVolumeSpecName: "kube-api-access-9f28v") pod "1da277dd-aa67-4416-a506-43ddb7566520" (UID: "1da277dd-aa67-4416-a506-43ddb7566520"). InnerVolumeSpecName "kube-api-access-9f28v". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 14:01:55 crc kubenswrapper[4762]: I1009 14:01:55.219352 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1da277dd-aa67-4416-a506-43ddb7566520-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1da277dd-aa67-4416-a506-43ddb7566520" (UID: "1da277dd-aa67-4416-a506-43ddb7566520"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 14:01:55 crc kubenswrapper[4762]: I1009 14:01:55.306952 4762 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1da277dd-aa67-4416-a506-43ddb7566520-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 14:01:55 crc kubenswrapper[4762]: I1009 14:01:55.306999 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9f28v\" (UniqueName: \"kubernetes.io/projected/1da277dd-aa67-4416-a506-43ddb7566520-kube-api-access-9f28v\") on node \"crc\" DevicePath \"\"" Oct 09 14:01:55 crc kubenswrapper[4762]: I1009 14:01:55.307013 4762 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1da277dd-aa67-4416-a506-43ddb7566520-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 14:01:55 crc kubenswrapper[4762]: I1009 14:01:55.639461 4762 generic.go:334] "Generic (PLEG): container finished" podID="1da277dd-aa67-4416-a506-43ddb7566520" containerID="f592dba595e8b3e247ab1e7ecfdbd59aa1a80e75c159028c61136e17b716ecb0" exitCode=0 Oct 09 14:01:55 crc kubenswrapper[4762]: I1009 14:01:55.639504 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fsmv7" event={"ID":"1da277dd-aa67-4416-a506-43ddb7566520","Type":"ContainerDied","Data":"f592dba595e8b3e247ab1e7ecfdbd59aa1a80e75c159028c61136e17b716ecb0"} Oct 09 14:01:55 crc kubenswrapper[4762]: I1009 14:01:55.639530 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fsmv7" event={"ID":"1da277dd-aa67-4416-a506-43ddb7566520","Type":"ContainerDied","Data":"866d9b51ca43202e946808f60b15da922d7b7e8195ce613d8983d6fbec0c455d"} Oct 09 14:01:55 crc kubenswrapper[4762]: I1009 14:01:55.639524 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fsmv7" Oct 09 14:01:55 crc kubenswrapper[4762]: I1009 14:01:55.639542 4762 scope.go:117] "RemoveContainer" containerID="f592dba595e8b3e247ab1e7ecfdbd59aa1a80e75c159028c61136e17b716ecb0" Oct 09 14:01:55 crc kubenswrapper[4762]: I1009 14:01:55.670922 4762 scope.go:117] "RemoveContainer" containerID="96ba38606c9e984b4b60f4a598123cfcbd0432a913750df3a5626de52a0161e8" Oct 09 14:01:55 crc kubenswrapper[4762]: I1009 14:01:55.675082 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-fsmv7"] Oct 09 14:01:55 crc kubenswrapper[4762]: I1009 14:01:55.684773 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-fsmv7"] Oct 09 14:01:55 crc kubenswrapper[4762]: I1009 14:01:55.692780 4762 scope.go:117] "RemoveContainer" containerID="81e063efe960e362ae6fe600e3b290130c98869591d4b49204a73853359a57ce" Oct 09 14:01:55 crc kubenswrapper[4762]: I1009 14:01:55.722528 4762 scope.go:117] "RemoveContainer" containerID="f592dba595e8b3e247ab1e7ecfdbd59aa1a80e75c159028c61136e17b716ecb0" Oct 09 14:01:55 crc kubenswrapper[4762]: E1009 14:01:55.722993 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f592dba595e8b3e247ab1e7ecfdbd59aa1a80e75c159028c61136e17b716ecb0\": container with ID starting with f592dba595e8b3e247ab1e7ecfdbd59aa1a80e75c159028c61136e17b716ecb0 not found: ID does not exist" containerID="f592dba595e8b3e247ab1e7ecfdbd59aa1a80e75c159028c61136e17b716ecb0" Oct 09 14:01:55 crc kubenswrapper[4762]: I1009 14:01:55.723032 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f592dba595e8b3e247ab1e7ecfdbd59aa1a80e75c159028c61136e17b716ecb0"} err="failed to get container status \"f592dba595e8b3e247ab1e7ecfdbd59aa1a80e75c159028c61136e17b716ecb0\": rpc error: code = NotFound desc = could not find container \"f592dba595e8b3e247ab1e7ecfdbd59aa1a80e75c159028c61136e17b716ecb0\": container with ID starting with f592dba595e8b3e247ab1e7ecfdbd59aa1a80e75c159028c61136e17b716ecb0 not found: ID does not exist" Oct 09 14:01:55 crc kubenswrapper[4762]: I1009 14:01:55.723064 4762 scope.go:117] "RemoveContainer" containerID="96ba38606c9e984b4b60f4a598123cfcbd0432a913750df3a5626de52a0161e8" Oct 09 14:01:55 crc kubenswrapper[4762]: E1009 14:01:55.723414 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"96ba38606c9e984b4b60f4a598123cfcbd0432a913750df3a5626de52a0161e8\": container with ID starting with 96ba38606c9e984b4b60f4a598123cfcbd0432a913750df3a5626de52a0161e8 not found: ID does not exist" containerID="96ba38606c9e984b4b60f4a598123cfcbd0432a913750df3a5626de52a0161e8" Oct 09 14:01:55 crc kubenswrapper[4762]: I1009 14:01:55.723448 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"96ba38606c9e984b4b60f4a598123cfcbd0432a913750df3a5626de52a0161e8"} err="failed to get container status \"96ba38606c9e984b4b60f4a598123cfcbd0432a913750df3a5626de52a0161e8\": rpc error: code = NotFound desc = could not find container \"96ba38606c9e984b4b60f4a598123cfcbd0432a913750df3a5626de52a0161e8\": container with ID starting with 96ba38606c9e984b4b60f4a598123cfcbd0432a913750df3a5626de52a0161e8 not found: ID does not exist" Oct 09 14:01:55 crc kubenswrapper[4762]: I1009 14:01:55.723500 4762 scope.go:117] "RemoveContainer" 
containerID="81e063efe960e362ae6fe600e3b290130c98869591d4b49204a73853359a57ce" Oct 09 14:01:55 crc kubenswrapper[4762]: E1009 14:01:55.724173 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"81e063efe960e362ae6fe600e3b290130c98869591d4b49204a73853359a57ce\": container with ID starting with 81e063efe960e362ae6fe600e3b290130c98869591d4b49204a73853359a57ce not found: ID does not exist" containerID="81e063efe960e362ae6fe600e3b290130c98869591d4b49204a73853359a57ce" Oct 09 14:01:55 crc kubenswrapper[4762]: I1009 14:01:55.724203 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"81e063efe960e362ae6fe600e3b290130c98869591d4b49204a73853359a57ce"} err="failed to get container status \"81e063efe960e362ae6fe600e3b290130c98869591d4b49204a73853359a57ce\": rpc error: code = NotFound desc = could not find container \"81e063efe960e362ae6fe600e3b290130c98869591d4b49204a73853359a57ce\": container with ID starting with 81e063efe960e362ae6fe600e3b290130c98869591d4b49204a73853359a57ce not found: ID does not exist" Oct 09 14:01:56 crc kubenswrapper[4762]: I1009 14:01:56.976577 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1da277dd-aa67-4416-a506-43ddb7566520" path="/var/lib/kubelet/pods/1da277dd-aa67-4416-a506-43ddb7566520/volumes" Oct 09 14:02:11 crc kubenswrapper[4762]: I1009 14:02:11.969693 4762 patch_prober.go:28] interesting pod/machine-config-daemon-5v6hv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 14:02:11 crc kubenswrapper[4762]: I1009 14:02:11.970295 4762 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 14:02:11 crc kubenswrapper[4762]: I1009 14:02:11.970345 4762 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" Oct 09 14:02:11 crc kubenswrapper[4762]: I1009 14:02:11.970937 4762 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"f559e83ba03b87fe2b68fa8071d2d50ca4f05b52624252aa5d64ab5a305f1ae9"} pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 09 14:02:11 crc kubenswrapper[4762]: I1009 14:02:11.970998 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" containerID="cri-o://f559e83ba03b87fe2b68fa8071d2d50ca4f05b52624252aa5d64ab5a305f1ae9" gracePeriod=600 Oct 09 14:02:12 crc kubenswrapper[4762]: I1009 14:02:12.763202 4762 generic.go:334] "Generic (PLEG): container finished" podID="366049a3-acf6-488c-9f93-4557528d6d14" containerID="f559e83ba03b87fe2b68fa8071d2d50ca4f05b52624252aa5d64ab5a305f1ae9" exitCode=0 Oct 09 14:02:12 crc kubenswrapper[4762]: I1009 14:02:12.763284 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" event={"ID":"366049a3-acf6-488c-9f93-4557528d6d14","Type":"ContainerDied","Data":"f559e83ba03b87fe2b68fa8071d2d50ca4f05b52624252aa5d64ab5a305f1ae9"} Oct 09 14:02:12 crc kubenswrapper[4762]: I1009 14:02:12.764089 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" event={"ID":"366049a3-acf6-488c-9f93-4557528d6d14","Type":"ContainerStarted","Data":"18b07ac7676d431f8aee7bc53ffc9428ba164c2f728c0a70147f633b4f219a73"} Oct 09 14:02:12 crc kubenswrapper[4762]: I1009 14:02:12.764128 4762 scope.go:117] "RemoveContainer" containerID="a2afb536cc344719717b53d10099260b84d6f0ea47ea651fc9cf7b2fe8a2b014" Oct 09 14:03:33 crc kubenswrapper[4762]: I1009 14:03:33.988918 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-lqwpt"] Oct 09 14:03:33 crc kubenswrapper[4762]: E1009 14:03:33.989776 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1da277dd-aa67-4416-a506-43ddb7566520" containerName="extract-content" Oct 09 14:03:33 crc kubenswrapper[4762]: I1009 14:03:33.989789 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="1da277dd-aa67-4416-a506-43ddb7566520" containerName="extract-content" Oct 09 14:03:33 crc kubenswrapper[4762]: E1009 14:03:33.989815 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1da277dd-aa67-4416-a506-43ddb7566520" containerName="registry-server" Oct 09 14:03:33 crc kubenswrapper[4762]: I1009 14:03:33.989821 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="1da277dd-aa67-4416-a506-43ddb7566520" containerName="registry-server" Oct 09 14:03:33 crc kubenswrapper[4762]: E1009 14:03:33.989841 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1da277dd-aa67-4416-a506-43ddb7566520" containerName="extract-utilities" Oct 09 14:03:33 crc kubenswrapper[4762]: I1009 14:03:33.989847 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="1da277dd-aa67-4416-a506-43ddb7566520" containerName="extract-utilities" Oct 09 14:03:33 crc kubenswrapper[4762]: I1009 14:03:33.989979 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="1da277dd-aa67-4416-a506-43ddb7566520" containerName="registry-server" Oct 09 14:03:33 crc kubenswrapper[4762]: I1009 14:03:33.990944 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-lqwpt" Oct 09 14:03:34 crc kubenswrapper[4762]: I1009 14:03:33.999978 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-lqwpt"] Oct 09 14:03:34 crc kubenswrapper[4762]: I1009 14:03:34.029223 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/58be84fe-0f61-4f11-804c-4ae6eec26a66-utilities\") pod \"certified-operators-lqwpt\" (UID: \"58be84fe-0f61-4f11-804c-4ae6eec26a66\") " pod="openshift-marketplace/certified-operators-lqwpt" Oct 09 14:03:34 crc kubenswrapper[4762]: I1009 14:03:34.029680 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-brh9w\" (UniqueName: \"kubernetes.io/projected/58be84fe-0f61-4f11-804c-4ae6eec26a66-kube-api-access-brh9w\") pod \"certified-operators-lqwpt\" (UID: \"58be84fe-0f61-4f11-804c-4ae6eec26a66\") " pod="openshift-marketplace/certified-operators-lqwpt" Oct 09 14:03:34 crc kubenswrapper[4762]: I1009 14:03:34.029824 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/58be84fe-0f61-4f11-804c-4ae6eec26a66-catalog-content\") pod \"certified-operators-lqwpt\" (UID: \"58be84fe-0f61-4f11-804c-4ae6eec26a66\") " pod="openshift-marketplace/certified-operators-lqwpt" Oct 09 14:03:34 crc kubenswrapper[4762]: I1009 14:03:34.130840 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/58be84fe-0f61-4f11-804c-4ae6eec26a66-catalog-content\") pod \"certified-operators-lqwpt\" (UID: \"58be84fe-0f61-4f11-804c-4ae6eec26a66\") " pod="openshift-marketplace/certified-operators-lqwpt" Oct 09 14:03:34 crc kubenswrapper[4762]: I1009 14:03:34.130943 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/58be84fe-0f61-4f11-804c-4ae6eec26a66-utilities\") pod \"certified-operators-lqwpt\" (UID: \"58be84fe-0f61-4f11-804c-4ae6eec26a66\") " pod="openshift-marketplace/certified-operators-lqwpt" Oct 09 14:03:34 crc kubenswrapper[4762]: I1009 14:03:34.130977 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-brh9w\" (UniqueName: \"kubernetes.io/projected/58be84fe-0f61-4f11-804c-4ae6eec26a66-kube-api-access-brh9w\") pod \"certified-operators-lqwpt\" (UID: \"58be84fe-0f61-4f11-804c-4ae6eec26a66\") " pod="openshift-marketplace/certified-operators-lqwpt" Oct 09 14:03:34 crc kubenswrapper[4762]: I1009 14:03:34.131765 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/58be84fe-0f61-4f11-804c-4ae6eec26a66-utilities\") pod \"certified-operators-lqwpt\" (UID: \"58be84fe-0f61-4f11-804c-4ae6eec26a66\") " pod="openshift-marketplace/certified-operators-lqwpt" Oct 09 14:03:34 crc kubenswrapper[4762]: I1009 14:03:34.132094 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/58be84fe-0f61-4f11-804c-4ae6eec26a66-catalog-content\") pod \"certified-operators-lqwpt\" (UID: \"58be84fe-0f61-4f11-804c-4ae6eec26a66\") " pod="openshift-marketplace/certified-operators-lqwpt" Oct 09 14:03:34 crc kubenswrapper[4762]: I1009 14:03:34.151905 4762 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-brh9w\" (UniqueName: \"kubernetes.io/projected/58be84fe-0f61-4f11-804c-4ae6eec26a66-kube-api-access-brh9w\") pod \"certified-operators-lqwpt\" (UID: \"58be84fe-0f61-4f11-804c-4ae6eec26a66\") " pod="openshift-marketplace/certified-operators-lqwpt" Oct 09 14:03:34 crc kubenswrapper[4762]: I1009 14:03:34.181838 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-h6std"] Oct 09 14:03:34 crc kubenswrapper[4762]: I1009 14:03:34.185591 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-h6std" Oct 09 14:03:34 crc kubenswrapper[4762]: I1009 14:03:34.190289 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-h6std"] Oct 09 14:03:34 crc kubenswrapper[4762]: I1009 14:03:34.315144 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-lqwpt" Oct 09 14:03:34 crc kubenswrapper[4762]: I1009 14:03:34.333104 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3adc77c2-2e6f-456c-a8bb-c57a385e4a31-utilities\") pod \"community-operators-h6std\" (UID: \"3adc77c2-2e6f-456c-a8bb-c57a385e4a31\") " pod="openshift-marketplace/community-operators-h6std" Oct 09 14:03:34 crc kubenswrapper[4762]: I1009 14:03:34.333214 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3adc77c2-2e6f-456c-a8bb-c57a385e4a31-catalog-content\") pod \"community-operators-h6std\" (UID: \"3adc77c2-2e6f-456c-a8bb-c57a385e4a31\") " pod="openshift-marketplace/community-operators-h6std" Oct 09 14:03:34 crc kubenswrapper[4762]: I1009 14:03:34.333257 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w7qbp\" (UniqueName: \"kubernetes.io/projected/3adc77c2-2e6f-456c-a8bb-c57a385e4a31-kube-api-access-w7qbp\") pod \"community-operators-h6std\" (UID: \"3adc77c2-2e6f-456c-a8bb-c57a385e4a31\") " pod="openshift-marketplace/community-operators-h6std" Oct 09 14:03:34 crc kubenswrapper[4762]: I1009 14:03:34.436839 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w7qbp\" (UniqueName: \"kubernetes.io/projected/3adc77c2-2e6f-456c-a8bb-c57a385e4a31-kube-api-access-w7qbp\") pod \"community-operators-h6std\" (UID: \"3adc77c2-2e6f-456c-a8bb-c57a385e4a31\") " pod="openshift-marketplace/community-operators-h6std" Oct 09 14:03:34 crc kubenswrapper[4762]: I1009 14:03:34.436921 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3adc77c2-2e6f-456c-a8bb-c57a385e4a31-utilities\") pod \"community-operators-h6std\" (UID: \"3adc77c2-2e6f-456c-a8bb-c57a385e4a31\") " pod="openshift-marketplace/community-operators-h6std" Oct 09 14:03:34 crc kubenswrapper[4762]: I1009 14:03:34.437015 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3adc77c2-2e6f-456c-a8bb-c57a385e4a31-catalog-content\") pod \"community-operators-h6std\" (UID: \"3adc77c2-2e6f-456c-a8bb-c57a385e4a31\") " pod="openshift-marketplace/community-operators-h6std" Oct 09 14:03:34 crc kubenswrapper[4762]: I1009 14:03:34.437563 4762 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3adc77c2-2e6f-456c-a8bb-c57a385e4a31-catalog-content\") pod \"community-operators-h6std\" (UID: \"3adc77c2-2e6f-456c-a8bb-c57a385e4a31\") " pod="openshift-marketplace/community-operators-h6std" Oct 09 14:03:34 crc kubenswrapper[4762]: I1009 14:03:34.439340 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3adc77c2-2e6f-456c-a8bb-c57a385e4a31-utilities\") pod \"community-operators-h6std\" (UID: \"3adc77c2-2e6f-456c-a8bb-c57a385e4a31\") " pod="openshift-marketplace/community-operators-h6std" Oct 09 14:03:34 crc kubenswrapper[4762]: I1009 14:03:34.473754 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w7qbp\" (UniqueName: \"kubernetes.io/projected/3adc77c2-2e6f-456c-a8bb-c57a385e4a31-kube-api-access-w7qbp\") pod \"community-operators-h6std\" (UID: \"3adc77c2-2e6f-456c-a8bb-c57a385e4a31\") " pod="openshift-marketplace/community-operators-h6std" Oct 09 14:03:34 crc kubenswrapper[4762]: I1009 14:03:34.509957 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-h6std" Oct 09 14:03:34 crc kubenswrapper[4762]: I1009 14:03:34.809287 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-h6std"] Oct 09 14:03:34 crc kubenswrapper[4762]: I1009 14:03:34.860160 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-lqwpt"] Oct 09 14:03:35 crc kubenswrapper[4762]: I1009 14:03:35.366168 4762 generic.go:334] "Generic (PLEG): container finished" podID="3adc77c2-2e6f-456c-a8bb-c57a385e4a31" containerID="7da5f3357d127bcf601d777333655a993d3366d5a2b69d9ad3bc8b34ff70b43b" exitCode=0 Oct 09 14:03:35 crc kubenswrapper[4762]: I1009 14:03:35.366227 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-h6std" event={"ID":"3adc77c2-2e6f-456c-a8bb-c57a385e4a31","Type":"ContainerDied","Data":"7da5f3357d127bcf601d777333655a993d3366d5a2b69d9ad3bc8b34ff70b43b"} Oct 09 14:03:35 crc kubenswrapper[4762]: I1009 14:03:35.366534 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-h6std" event={"ID":"3adc77c2-2e6f-456c-a8bb-c57a385e4a31","Type":"ContainerStarted","Data":"68f4484ef2d9f561c02124d60d901e9fc878b375e0c67953775bc96884baadd0"} Oct 09 14:03:35 crc kubenswrapper[4762]: I1009 14:03:35.368336 4762 generic.go:334] "Generic (PLEG): container finished" podID="58be84fe-0f61-4f11-804c-4ae6eec26a66" containerID="a4d4f6e35eb3262a990fe77bce1ca29b4b3ea94ab5c7237b76b6339c9de188b5" exitCode=0 Oct 09 14:03:35 crc kubenswrapper[4762]: I1009 14:03:35.368401 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lqwpt" event={"ID":"58be84fe-0f61-4f11-804c-4ae6eec26a66","Type":"ContainerDied","Data":"a4d4f6e35eb3262a990fe77bce1ca29b4b3ea94ab5c7237b76b6339c9de188b5"} Oct 09 14:03:35 crc kubenswrapper[4762]: I1009 14:03:35.368422 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lqwpt" event={"ID":"58be84fe-0f61-4f11-804c-4ae6eec26a66","Type":"ContainerStarted","Data":"a2219738f6af92bc7d0e7635e4f47db2d222453a05a6f5e182360f78d7cafd7b"} Oct 09 14:03:37 crc kubenswrapper[4762]: I1009 14:03:37.385284 4762 generic.go:334] "Generic (PLEG): container 
finished" podID="3adc77c2-2e6f-456c-a8bb-c57a385e4a31" containerID="6c509b85c0c67ff7fd6618953d8fbd235569deaefb04829ed283ea1f97ba655f" exitCode=0 Oct 09 14:03:37 crc kubenswrapper[4762]: I1009 14:03:37.385352 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-h6std" event={"ID":"3adc77c2-2e6f-456c-a8bb-c57a385e4a31","Type":"ContainerDied","Data":"6c509b85c0c67ff7fd6618953d8fbd235569deaefb04829ed283ea1f97ba655f"} Oct 09 14:03:38 crc kubenswrapper[4762]: I1009 14:03:38.394927 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lqwpt" event={"ID":"58be84fe-0f61-4f11-804c-4ae6eec26a66","Type":"ContainerStarted","Data":"50b0f4cb6c3239621c2cbddda20cafda06cfdd60463c0796be1a5908c63466c9"} Oct 09 14:03:39 crc kubenswrapper[4762]: I1009 14:03:39.404203 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-h6std" event={"ID":"3adc77c2-2e6f-456c-a8bb-c57a385e4a31","Type":"ContainerStarted","Data":"302e8e6f23defb82ca7d2ec022c82f22b2abf9aa3e409c3d58c0020346331cf3"} Oct 09 14:03:39 crc kubenswrapper[4762]: I1009 14:03:39.407388 4762 generic.go:334] "Generic (PLEG): container finished" podID="58be84fe-0f61-4f11-804c-4ae6eec26a66" containerID="50b0f4cb6c3239621c2cbddda20cafda06cfdd60463c0796be1a5908c63466c9" exitCode=0 Oct 09 14:03:39 crc kubenswrapper[4762]: I1009 14:03:39.407424 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lqwpt" event={"ID":"58be84fe-0f61-4f11-804c-4ae6eec26a66","Type":"ContainerDied","Data":"50b0f4cb6c3239621c2cbddda20cafda06cfdd60463c0796be1a5908c63466c9"} Oct 09 14:03:39 crc kubenswrapper[4762]: I1009 14:03:39.426253 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-h6std" podStartSLOduration=2.345210785 podStartE2EDuration="5.426235908s" podCreationTimestamp="2025-10-09 14:03:34 +0000 UTC" firstStartedPulling="2025-10-09 14:03:35.368050366 +0000 UTC m=+2291.141841405" lastFinishedPulling="2025-10-09 14:03:38.449075489 +0000 UTC m=+2294.222866528" observedRunningTime="2025-10-09 14:03:39.424537774 +0000 UTC m=+2295.198328813" watchObservedRunningTime="2025-10-09 14:03:39.426235908 +0000 UTC m=+2295.200026947" Oct 09 14:03:42 crc kubenswrapper[4762]: I1009 14:03:42.431700 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lqwpt" event={"ID":"58be84fe-0f61-4f11-804c-4ae6eec26a66","Type":"ContainerStarted","Data":"8c954d2dc1c50cd95b1893279080ce903d4f8ebfbd17982368e70f8cdb67a19a"} Oct 09 14:03:42 crc kubenswrapper[4762]: I1009 14:03:42.452835 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-lqwpt" podStartSLOduration=3.583588683 podStartE2EDuration="9.452818185s" podCreationTimestamp="2025-10-09 14:03:33 +0000 UTC" firstStartedPulling="2025-10-09 14:03:35.370130561 +0000 UTC m=+2291.143921600" lastFinishedPulling="2025-10-09 14:03:41.239360063 +0000 UTC m=+2297.013151102" observedRunningTime="2025-10-09 14:03:42.448301387 +0000 UTC m=+2298.222092426" watchObservedRunningTime="2025-10-09 14:03:42.452818185 +0000 UTC m=+2298.226609224" Oct 09 14:03:44 crc kubenswrapper[4762]: I1009 14:03:44.316428 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-lqwpt" Oct 09 14:03:44 crc kubenswrapper[4762]: I1009 14:03:44.316781 4762 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-lqwpt" Oct 09 14:03:44 crc kubenswrapper[4762]: I1009 14:03:44.359709 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-lqwpt" Oct 09 14:03:44 crc kubenswrapper[4762]: I1009 14:03:44.511094 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-h6std" Oct 09 14:03:44 crc kubenswrapper[4762]: I1009 14:03:44.511149 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-h6std" Oct 09 14:03:44 crc kubenswrapper[4762]: I1009 14:03:44.551841 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-h6std" Oct 09 14:03:45 crc kubenswrapper[4762]: I1009 14:03:45.491582 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-h6std" Oct 09 14:03:47 crc kubenswrapper[4762]: I1009 14:03:47.576723 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-h6std"] Oct 09 14:03:47 crc kubenswrapper[4762]: I1009 14:03:47.576961 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-h6std" podUID="3adc77c2-2e6f-456c-a8bb-c57a385e4a31" containerName="registry-server" containerID="cri-o://302e8e6f23defb82ca7d2ec022c82f22b2abf9aa3e409c3d58c0020346331cf3" gracePeriod=2 Oct 09 14:03:48 crc kubenswrapper[4762]: I1009 14:03:48.476767 4762 generic.go:334] "Generic (PLEG): container finished" podID="3adc77c2-2e6f-456c-a8bb-c57a385e4a31" containerID="302e8e6f23defb82ca7d2ec022c82f22b2abf9aa3e409c3d58c0020346331cf3" exitCode=0 Oct 09 14:03:48 crc kubenswrapper[4762]: I1009 14:03:48.476853 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-h6std" event={"ID":"3adc77c2-2e6f-456c-a8bb-c57a385e4a31","Type":"ContainerDied","Data":"302e8e6f23defb82ca7d2ec022c82f22b2abf9aa3e409c3d58c0020346331cf3"} Oct 09 14:03:49 crc kubenswrapper[4762]: I1009 14:03:49.406514 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-h6std" Oct 09 14:03:49 crc kubenswrapper[4762]: I1009 14:03:49.485496 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-h6std" event={"ID":"3adc77c2-2e6f-456c-a8bb-c57a385e4a31","Type":"ContainerDied","Data":"68f4484ef2d9f561c02124d60d901e9fc878b375e0c67953775bc96884baadd0"} Oct 09 14:03:49 crc kubenswrapper[4762]: I1009 14:03:49.485575 4762 scope.go:117] "RemoveContainer" containerID="302e8e6f23defb82ca7d2ec022c82f22b2abf9aa3e409c3d58c0020346331cf3" Oct 09 14:03:49 crc kubenswrapper[4762]: I1009 14:03:49.485573 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-h6std" Oct 09 14:03:49 crc kubenswrapper[4762]: I1009 14:03:49.504250 4762 scope.go:117] "RemoveContainer" containerID="6c509b85c0c67ff7fd6618953d8fbd235569deaefb04829ed283ea1f97ba655f" Oct 09 14:03:49 crc kubenswrapper[4762]: I1009 14:03:49.524453 4762 scope.go:117] "RemoveContainer" containerID="7da5f3357d127bcf601d777333655a993d3366d5a2b69d9ad3bc8b34ff70b43b" Oct 09 14:03:49 crc kubenswrapper[4762]: I1009 14:03:49.560236 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7qbp\" (UniqueName: \"kubernetes.io/projected/3adc77c2-2e6f-456c-a8bb-c57a385e4a31-kube-api-access-w7qbp\") pod \"3adc77c2-2e6f-456c-a8bb-c57a385e4a31\" (UID: \"3adc77c2-2e6f-456c-a8bb-c57a385e4a31\") " Oct 09 14:03:49 crc kubenswrapper[4762]: I1009 14:03:49.560383 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3adc77c2-2e6f-456c-a8bb-c57a385e4a31-utilities\") pod \"3adc77c2-2e6f-456c-a8bb-c57a385e4a31\" (UID: \"3adc77c2-2e6f-456c-a8bb-c57a385e4a31\") " Oct 09 14:03:49 crc kubenswrapper[4762]: I1009 14:03:49.560425 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3adc77c2-2e6f-456c-a8bb-c57a385e4a31-catalog-content\") pod \"3adc77c2-2e6f-456c-a8bb-c57a385e4a31\" (UID: \"3adc77c2-2e6f-456c-a8bb-c57a385e4a31\") " Oct 09 14:03:49 crc kubenswrapper[4762]: I1009 14:03:49.562298 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3adc77c2-2e6f-456c-a8bb-c57a385e4a31-utilities" (OuterVolumeSpecName: "utilities") pod "3adc77c2-2e6f-456c-a8bb-c57a385e4a31" (UID: "3adc77c2-2e6f-456c-a8bb-c57a385e4a31"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 14:03:49 crc kubenswrapper[4762]: I1009 14:03:49.567718 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3adc77c2-2e6f-456c-a8bb-c57a385e4a31-kube-api-access-w7qbp" (OuterVolumeSpecName: "kube-api-access-w7qbp") pod "3adc77c2-2e6f-456c-a8bb-c57a385e4a31" (UID: "3adc77c2-2e6f-456c-a8bb-c57a385e4a31"). InnerVolumeSpecName "kube-api-access-w7qbp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 14:03:49 crc kubenswrapper[4762]: I1009 14:03:49.662487 4762 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3adc77c2-2e6f-456c-a8bb-c57a385e4a31-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 14:03:49 crc kubenswrapper[4762]: I1009 14:03:49.663198 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7qbp\" (UniqueName: \"kubernetes.io/projected/3adc77c2-2e6f-456c-a8bb-c57a385e4a31-kube-api-access-w7qbp\") on node \"crc\" DevicePath \"\"" Oct 09 14:03:49 crc kubenswrapper[4762]: I1009 14:03:49.937833 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3adc77c2-2e6f-456c-a8bb-c57a385e4a31-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3adc77c2-2e6f-456c-a8bb-c57a385e4a31" (UID: "3adc77c2-2e6f-456c-a8bb-c57a385e4a31"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 14:03:49 crc kubenswrapper[4762]: I1009 14:03:49.968077 4762 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3adc77c2-2e6f-456c-a8bb-c57a385e4a31-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 14:03:50 crc kubenswrapper[4762]: I1009 14:03:50.118246 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-h6std"] Oct 09 14:03:50 crc kubenswrapper[4762]: I1009 14:03:50.124394 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-h6std"] Oct 09 14:03:50 crc kubenswrapper[4762]: I1009 14:03:50.975297 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3adc77c2-2e6f-456c-a8bb-c57a385e4a31" path="/var/lib/kubelet/pods/3adc77c2-2e6f-456c-a8bb-c57a385e4a31/volumes" Oct 09 14:03:54 crc kubenswrapper[4762]: I1009 14:03:54.362817 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-lqwpt" Oct 09 14:03:54 crc kubenswrapper[4762]: I1009 14:03:54.421211 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-lqwpt"] Oct 09 14:03:54 crc kubenswrapper[4762]: I1009 14:03:54.524129 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-lqwpt" podUID="58be84fe-0f61-4f11-804c-4ae6eec26a66" containerName="registry-server" containerID="cri-o://8c954d2dc1c50cd95b1893279080ce903d4f8ebfbd17982368e70f8cdb67a19a" gracePeriod=2 Oct 09 14:03:55 crc kubenswrapper[4762]: I1009 14:03:55.503299 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-lqwpt" Oct 09 14:03:55 crc kubenswrapper[4762]: I1009 14:03:55.537143 4762 generic.go:334] "Generic (PLEG): container finished" podID="58be84fe-0f61-4f11-804c-4ae6eec26a66" containerID="8c954d2dc1c50cd95b1893279080ce903d4f8ebfbd17982368e70f8cdb67a19a" exitCode=0 Oct 09 14:03:55 crc kubenswrapper[4762]: I1009 14:03:55.537216 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lqwpt" event={"ID":"58be84fe-0f61-4f11-804c-4ae6eec26a66","Type":"ContainerDied","Data":"8c954d2dc1c50cd95b1893279080ce903d4f8ebfbd17982368e70f8cdb67a19a"} Oct 09 14:03:55 crc kubenswrapper[4762]: I1009 14:03:55.537223 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-lqwpt" Oct 09 14:03:55 crc kubenswrapper[4762]: I1009 14:03:55.537267 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lqwpt" event={"ID":"58be84fe-0f61-4f11-804c-4ae6eec26a66","Type":"ContainerDied","Data":"a2219738f6af92bc7d0e7635e4f47db2d222453a05a6f5e182360f78d7cafd7b"} Oct 09 14:03:55 crc kubenswrapper[4762]: I1009 14:03:55.537290 4762 scope.go:117] "RemoveContainer" containerID="8c954d2dc1c50cd95b1893279080ce903d4f8ebfbd17982368e70f8cdb67a19a" Oct 09 14:03:55 crc kubenswrapper[4762]: I1009 14:03:55.558315 4762 scope.go:117] "RemoveContainer" containerID="50b0f4cb6c3239621c2cbddda20cafda06cfdd60463c0796be1a5908c63466c9" Oct 09 14:03:55 crc kubenswrapper[4762]: I1009 14:03:55.577844 4762 scope.go:117] "RemoveContainer" containerID="a4d4f6e35eb3262a990fe77bce1ca29b4b3ea94ab5c7237b76b6339c9de188b5" Oct 09 14:03:55 crc kubenswrapper[4762]: I1009 14:03:55.601600 4762 scope.go:117] "RemoveContainer" containerID="8c954d2dc1c50cd95b1893279080ce903d4f8ebfbd17982368e70f8cdb67a19a" Oct 09 14:03:55 crc kubenswrapper[4762]: E1009 14:03:55.602277 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8c954d2dc1c50cd95b1893279080ce903d4f8ebfbd17982368e70f8cdb67a19a\": container with ID starting with 8c954d2dc1c50cd95b1893279080ce903d4f8ebfbd17982368e70f8cdb67a19a not found: ID does not exist" containerID="8c954d2dc1c50cd95b1893279080ce903d4f8ebfbd17982368e70f8cdb67a19a" Oct 09 14:03:55 crc kubenswrapper[4762]: I1009 14:03:55.602337 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8c954d2dc1c50cd95b1893279080ce903d4f8ebfbd17982368e70f8cdb67a19a"} err="failed to get container status \"8c954d2dc1c50cd95b1893279080ce903d4f8ebfbd17982368e70f8cdb67a19a\": rpc error: code = NotFound desc = could not find container \"8c954d2dc1c50cd95b1893279080ce903d4f8ebfbd17982368e70f8cdb67a19a\": container with ID starting with 8c954d2dc1c50cd95b1893279080ce903d4f8ebfbd17982368e70f8cdb67a19a not found: ID does not exist" Oct 09 14:03:55 crc kubenswrapper[4762]: I1009 14:03:55.602370 4762 scope.go:117] "RemoveContainer" containerID="50b0f4cb6c3239621c2cbddda20cafda06cfdd60463c0796be1a5908c63466c9" Oct 09 14:03:55 crc kubenswrapper[4762]: E1009 14:03:55.602708 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"50b0f4cb6c3239621c2cbddda20cafda06cfdd60463c0796be1a5908c63466c9\": container with ID starting with 50b0f4cb6c3239621c2cbddda20cafda06cfdd60463c0796be1a5908c63466c9 not found: ID does not exist" containerID="50b0f4cb6c3239621c2cbddda20cafda06cfdd60463c0796be1a5908c63466c9" Oct 09 14:03:55 crc kubenswrapper[4762]: I1009 14:03:55.602746 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"50b0f4cb6c3239621c2cbddda20cafda06cfdd60463c0796be1a5908c63466c9"} err="failed to get container status \"50b0f4cb6c3239621c2cbddda20cafda06cfdd60463c0796be1a5908c63466c9\": rpc error: code = NotFound desc = could not find container \"50b0f4cb6c3239621c2cbddda20cafda06cfdd60463c0796be1a5908c63466c9\": container with ID starting with 50b0f4cb6c3239621c2cbddda20cafda06cfdd60463c0796be1a5908c63466c9 not found: ID does not exist" Oct 09 14:03:55 crc kubenswrapper[4762]: I1009 14:03:55.602766 4762 scope.go:117] "RemoveContainer" 
containerID="a4d4f6e35eb3262a990fe77bce1ca29b4b3ea94ab5c7237b76b6339c9de188b5" Oct 09 14:03:55 crc kubenswrapper[4762]: E1009 14:03:55.603010 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a4d4f6e35eb3262a990fe77bce1ca29b4b3ea94ab5c7237b76b6339c9de188b5\": container with ID starting with a4d4f6e35eb3262a990fe77bce1ca29b4b3ea94ab5c7237b76b6339c9de188b5 not found: ID does not exist" containerID="a4d4f6e35eb3262a990fe77bce1ca29b4b3ea94ab5c7237b76b6339c9de188b5" Oct 09 14:03:55 crc kubenswrapper[4762]: I1009 14:03:55.603044 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a4d4f6e35eb3262a990fe77bce1ca29b4b3ea94ab5c7237b76b6339c9de188b5"} err="failed to get container status \"a4d4f6e35eb3262a990fe77bce1ca29b4b3ea94ab5c7237b76b6339c9de188b5\": rpc error: code = NotFound desc = could not find container \"a4d4f6e35eb3262a990fe77bce1ca29b4b3ea94ab5c7237b76b6339c9de188b5\": container with ID starting with a4d4f6e35eb3262a990fe77bce1ca29b4b3ea94ab5c7237b76b6339c9de188b5 not found: ID does not exist" Oct 09 14:03:55 crc kubenswrapper[4762]: I1009 14:03:55.647195 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/58be84fe-0f61-4f11-804c-4ae6eec26a66-catalog-content\") pod \"58be84fe-0f61-4f11-804c-4ae6eec26a66\" (UID: \"58be84fe-0f61-4f11-804c-4ae6eec26a66\") " Oct 09 14:03:55 crc kubenswrapper[4762]: I1009 14:03:55.649266 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-brh9w\" (UniqueName: \"kubernetes.io/projected/58be84fe-0f61-4f11-804c-4ae6eec26a66-kube-api-access-brh9w\") pod \"58be84fe-0f61-4f11-804c-4ae6eec26a66\" (UID: \"58be84fe-0f61-4f11-804c-4ae6eec26a66\") " Oct 09 14:03:55 crc kubenswrapper[4762]: I1009 14:03:55.649335 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/58be84fe-0f61-4f11-804c-4ae6eec26a66-utilities\") pod \"58be84fe-0f61-4f11-804c-4ae6eec26a66\" (UID: \"58be84fe-0f61-4f11-804c-4ae6eec26a66\") " Oct 09 14:03:55 crc kubenswrapper[4762]: I1009 14:03:55.650315 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/58be84fe-0f61-4f11-804c-4ae6eec26a66-utilities" (OuterVolumeSpecName: "utilities") pod "58be84fe-0f61-4f11-804c-4ae6eec26a66" (UID: "58be84fe-0f61-4f11-804c-4ae6eec26a66"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 14:03:55 crc kubenswrapper[4762]: I1009 14:03:55.655827 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/58be84fe-0f61-4f11-804c-4ae6eec26a66-kube-api-access-brh9w" (OuterVolumeSpecName: "kube-api-access-brh9w") pod "58be84fe-0f61-4f11-804c-4ae6eec26a66" (UID: "58be84fe-0f61-4f11-804c-4ae6eec26a66"). InnerVolumeSpecName "kube-api-access-brh9w". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 14:03:55 crc kubenswrapper[4762]: I1009 14:03:55.697057 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/58be84fe-0f61-4f11-804c-4ae6eec26a66-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "58be84fe-0f61-4f11-804c-4ae6eec26a66" (UID: "58be84fe-0f61-4f11-804c-4ae6eec26a66"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 14:03:55 crc kubenswrapper[4762]: I1009 14:03:55.751388 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-brh9w\" (UniqueName: \"kubernetes.io/projected/58be84fe-0f61-4f11-804c-4ae6eec26a66-kube-api-access-brh9w\") on node \"crc\" DevicePath \"\"" Oct 09 14:03:55 crc kubenswrapper[4762]: I1009 14:03:55.751427 4762 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/58be84fe-0f61-4f11-804c-4ae6eec26a66-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 14:03:55 crc kubenswrapper[4762]: I1009 14:03:55.751440 4762 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/58be84fe-0f61-4f11-804c-4ae6eec26a66-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 14:03:55 crc kubenswrapper[4762]: I1009 14:03:55.876318 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-lqwpt"] Oct 09 14:03:55 crc kubenswrapper[4762]: I1009 14:03:55.882298 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-lqwpt"] Oct 09 14:03:56 crc kubenswrapper[4762]: I1009 14:03:56.974583 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="58be84fe-0f61-4f11-804c-4ae6eec26a66" path="/var/lib/kubelet/pods/58be84fe-0f61-4f11-804c-4ae6eec26a66/volumes" Oct 09 14:04:36 crc kubenswrapper[4762]: I1009 14:04:36.009417 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-9m2q8"] Oct 09 14:04:36 crc kubenswrapper[4762]: E1009 14:04:36.010331 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="58be84fe-0f61-4f11-804c-4ae6eec26a66" containerName="extract-utilities" Oct 09 14:04:36 crc kubenswrapper[4762]: I1009 14:04:36.010346 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="58be84fe-0f61-4f11-804c-4ae6eec26a66" containerName="extract-utilities" Oct 09 14:04:36 crc kubenswrapper[4762]: E1009 14:04:36.010364 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3adc77c2-2e6f-456c-a8bb-c57a385e4a31" containerName="extract-content" Oct 09 14:04:36 crc kubenswrapper[4762]: I1009 14:04:36.010372 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="3adc77c2-2e6f-456c-a8bb-c57a385e4a31" containerName="extract-content" Oct 09 14:04:36 crc kubenswrapper[4762]: E1009 14:04:36.010400 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="58be84fe-0f61-4f11-804c-4ae6eec26a66" containerName="registry-server" Oct 09 14:04:36 crc kubenswrapper[4762]: I1009 14:04:36.010408 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="58be84fe-0f61-4f11-804c-4ae6eec26a66" containerName="registry-server" Oct 09 14:04:36 crc kubenswrapper[4762]: E1009 14:04:36.010417 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3adc77c2-2e6f-456c-a8bb-c57a385e4a31" containerName="extract-utilities" Oct 09 14:04:36 crc kubenswrapper[4762]: I1009 14:04:36.010425 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="3adc77c2-2e6f-456c-a8bb-c57a385e4a31" containerName="extract-utilities" Oct 09 14:04:36 crc kubenswrapper[4762]: E1009 14:04:36.010440 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3adc77c2-2e6f-456c-a8bb-c57a385e4a31" containerName="registry-server" Oct 09 14:04:36 crc kubenswrapper[4762]: I1009 14:04:36.010447 4762 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="3adc77c2-2e6f-456c-a8bb-c57a385e4a31" containerName="registry-server" Oct 09 14:04:36 crc kubenswrapper[4762]: E1009 14:04:36.010463 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="58be84fe-0f61-4f11-804c-4ae6eec26a66" containerName="extract-content" Oct 09 14:04:36 crc kubenswrapper[4762]: I1009 14:04:36.010470 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="58be84fe-0f61-4f11-804c-4ae6eec26a66" containerName="extract-content" Oct 09 14:04:36 crc kubenswrapper[4762]: I1009 14:04:36.010625 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="3adc77c2-2e6f-456c-a8bb-c57a385e4a31" containerName="registry-server" Oct 09 14:04:36 crc kubenswrapper[4762]: I1009 14:04:36.010661 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="58be84fe-0f61-4f11-804c-4ae6eec26a66" containerName="registry-server" Oct 09 14:04:36 crc kubenswrapper[4762]: I1009 14:04:36.011891 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-9m2q8" Oct 09 14:04:36 crc kubenswrapper[4762]: I1009 14:04:36.023311 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-9m2q8"] Oct 09 14:04:36 crc kubenswrapper[4762]: I1009 14:04:36.109956 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a278433d-4701-4cf9-9fc0-354d836e108a-catalog-content\") pod \"redhat-operators-9m2q8\" (UID: \"a278433d-4701-4cf9-9fc0-354d836e108a\") " pod="openshift-marketplace/redhat-operators-9m2q8" Oct 09 14:04:36 crc kubenswrapper[4762]: I1009 14:04:36.110099 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a278433d-4701-4cf9-9fc0-354d836e108a-utilities\") pod \"redhat-operators-9m2q8\" (UID: \"a278433d-4701-4cf9-9fc0-354d836e108a\") " pod="openshift-marketplace/redhat-operators-9m2q8" Oct 09 14:04:36 crc kubenswrapper[4762]: I1009 14:04:36.110177 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4lr4f\" (UniqueName: \"kubernetes.io/projected/a278433d-4701-4cf9-9fc0-354d836e108a-kube-api-access-4lr4f\") pod \"redhat-operators-9m2q8\" (UID: \"a278433d-4701-4cf9-9fc0-354d836e108a\") " pod="openshift-marketplace/redhat-operators-9m2q8" Oct 09 14:04:36 crc kubenswrapper[4762]: I1009 14:04:36.210973 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a278433d-4701-4cf9-9fc0-354d836e108a-catalog-content\") pod \"redhat-operators-9m2q8\" (UID: \"a278433d-4701-4cf9-9fc0-354d836e108a\") " pod="openshift-marketplace/redhat-operators-9m2q8" Oct 09 14:04:36 crc kubenswrapper[4762]: I1009 14:04:36.211042 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a278433d-4701-4cf9-9fc0-354d836e108a-utilities\") pod \"redhat-operators-9m2q8\" (UID: \"a278433d-4701-4cf9-9fc0-354d836e108a\") " pod="openshift-marketplace/redhat-operators-9m2q8" Oct 09 14:04:36 crc kubenswrapper[4762]: I1009 14:04:36.211112 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4lr4f\" (UniqueName: \"kubernetes.io/projected/a278433d-4701-4cf9-9fc0-354d836e108a-kube-api-access-4lr4f\") pod \"redhat-operators-9m2q8\" (UID: 
\"a278433d-4701-4cf9-9fc0-354d836e108a\") " pod="openshift-marketplace/redhat-operators-9m2q8" Oct 09 14:04:36 crc kubenswrapper[4762]: I1009 14:04:36.211452 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a278433d-4701-4cf9-9fc0-354d836e108a-catalog-content\") pod \"redhat-operators-9m2q8\" (UID: \"a278433d-4701-4cf9-9fc0-354d836e108a\") " pod="openshift-marketplace/redhat-operators-9m2q8" Oct 09 14:04:36 crc kubenswrapper[4762]: I1009 14:04:36.212147 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a278433d-4701-4cf9-9fc0-354d836e108a-utilities\") pod \"redhat-operators-9m2q8\" (UID: \"a278433d-4701-4cf9-9fc0-354d836e108a\") " pod="openshift-marketplace/redhat-operators-9m2q8" Oct 09 14:04:36 crc kubenswrapper[4762]: I1009 14:04:36.247930 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4lr4f\" (UniqueName: \"kubernetes.io/projected/a278433d-4701-4cf9-9fc0-354d836e108a-kube-api-access-4lr4f\") pod \"redhat-operators-9m2q8\" (UID: \"a278433d-4701-4cf9-9fc0-354d836e108a\") " pod="openshift-marketplace/redhat-operators-9m2q8" Oct 09 14:04:36 crc kubenswrapper[4762]: I1009 14:04:36.334012 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-9m2q8" Oct 09 14:04:36 crc kubenswrapper[4762]: I1009 14:04:36.784431 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-9m2q8"] Oct 09 14:04:36 crc kubenswrapper[4762]: I1009 14:04:36.835459 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9m2q8" event={"ID":"a278433d-4701-4cf9-9fc0-354d836e108a","Type":"ContainerStarted","Data":"4070feb2f3828c9e563bd3ac25d910dbe2fe4ab273ac9640d13054264082cad8"} Oct 09 14:04:37 crc kubenswrapper[4762]: I1009 14:04:37.845957 4762 generic.go:334] "Generic (PLEG): container finished" podID="a278433d-4701-4cf9-9fc0-354d836e108a" containerID="6054bf266e3b0f40308bf7f446a706cc22bc6d6f0bc829387911a40f82c9dcf3" exitCode=0 Oct 09 14:04:37 crc kubenswrapper[4762]: I1009 14:04:37.846055 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9m2q8" event={"ID":"a278433d-4701-4cf9-9fc0-354d836e108a","Type":"ContainerDied","Data":"6054bf266e3b0f40308bf7f446a706cc22bc6d6f0bc829387911a40f82c9dcf3"} Oct 09 14:04:39 crc kubenswrapper[4762]: I1009 14:04:39.865980 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9m2q8" event={"ID":"a278433d-4701-4cf9-9fc0-354d836e108a","Type":"ContainerStarted","Data":"2d4bfb13c77d2d99be40050dda5417bc6058163ce31ae5b71a3f2124fa9453bc"} Oct 09 14:04:40 crc kubenswrapper[4762]: I1009 14:04:40.874705 4762 generic.go:334] "Generic (PLEG): container finished" podID="a278433d-4701-4cf9-9fc0-354d836e108a" containerID="2d4bfb13c77d2d99be40050dda5417bc6058163ce31ae5b71a3f2124fa9453bc" exitCode=0 Oct 09 14:04:40 crc kubenswrapper[4762]: I1009 14:04:40.874760 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9m2q8" event={"ID":"a278433d-4701-4cf9-9fc0-354d836e108a","Type":"ContainerDied","Data":"2d4bfb13c77d2d99be40050dda5417bc6058163ce31ae5b71a3f2124fa9453bc"} Oct 09 14:04:41 crc kubenswrapper[4762]: I1009 14:04:41.883084 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/redhat-operators-9m2q8" event={"ID":"a278433d-4701-4cf9-9fc0-354d836e108a","Type":"ContainerStarted","Data":"625df75b99d647e0b616a863dd66181c63357d1f2b103ecf468e826064b11221"} Oct 09 14:04:41 crc kubenswrapper[4762]: I1009 14:04:41.902950 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-9m2q8" podStartSLOduration=3.070604926 podStartE2EDuration="6.902926411s" podCreationTimestamp="2025-10-09 14:04:35 +0000 UTC" firstStartedPulling="2025-10-09 14:04:37.848403703 +0000 UTC m=+2353.622194752" lastFinishedPulling="2025-10-09 14:04:41.680725198 +0000 UTC m=+2357.454516237" observedRunningTime="2025-10-09 14:04:41.902300955 +0000 UTC m=+2357.676091994" watchObservedRunningTime="2025-10-09 14:04:41.902926411 +0000 UTC m=+2357.676717450" Oct 09 14:04:41 crc kubenswrapper[4762]: I1009 14:04:41.970263 4762 patch_prober.go:28] interesting pod/machine-config-daemon-5v6hv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 14:04:41 crc kubenswrapper[4762]: I1009 14:04:41.970838 4762 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 14:04:46 crc kubenswrapper[4762]: I1009 14:04:46.335178 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-9m2q8" Oct 09 14:04:46 crc kubenswrapper[4762]: I1009 14:04:46.336516 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-9m2q8" Oct 09 14:04:46 crc kubenswrapper[4762]: I1009 14:04:46.385581 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-9m2q8" Oct 09 14:04:46 crc kubenswrapper[4762]: I1009 14:04:46.958405 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-9m2q8" Oct 09 14:04:47 crc kubenswrapper[4762]: I1009 14:04:47.011469 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-9m2q8"] Oct 09 14:04:48 crc kubenswrapper[4762]: I1009 14:04:48.929295 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-9m2q8" podUID="a278433d-4701-4cf9-9fc0-354d836e108a" containerName="registry-server" containerID="cri-o://625df75b99d647e0b616a863dd66181c63357d1f2b103ecf468e826064b11221" gracePeriod=2 Oct 09 14:04:49 crc kubenswrapper[4762]: I1009 14:04:49.308729 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-9m2q8" Oct 09 14:04:49 crc kubenswrapper[4762]: I1009 14:04:49.404227 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4lr4f\" (UniqueName: \"kubernetes.io/projected/a278433d-4701-4cf9-9fc0-354d836e108a-kube-api-access-4lr4f\") pod \"a278433d-4701-4cf9-9fc0-354d836e108a\" (UID: \"a278433d-4701-4cf9-9fc0-354d836e108a\") " Oct 09 14:04:49 crc kubenswrapper[4762]: I1009 14:04:49.404362 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a278433d-4701-4cf9-9fc0-354d836e108a-catalog-content\") pod \"a278433d-4701-4cf9-9fc0-354d836e108a\" (UID: \"a278433d-4701-4cf9-9fc0-354d836e108a\") " Oct 09 14:04:49 crc kubenswrapper[4762]: I1009 14:04:49.404395 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a278433d-4701-4cf9-9fc0-354d836e108a-utilities\") pod \"a278433d-4701-4cf9-9fc0-354d836e108a\" (UID: \"a278433d-4701-4cf9-9fc0-354d836e108a\") " Oct 09 14:04:49 crc kubenswrapper[4762]: I1009 14:04:49.405517 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a278433d-4701-4cf9-9fc0-354d836e108a-utilities" (OuterVolumeSpecName: "utilities") pod "a278433d-4701-4cf9-9fc0-354d836e108a" (UID: "a278433d-4701-4cf9-9fc0-354d836e108a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 14:04:49 crc kubenswrapper[4762]: I1009 14:04:49.409777 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a278433d-4701-4cf9-9fc0-354d836e108a-kube-api-access-4lr4f" (OuterVolumeSpecName: "kube-api-access-4lr4f") pod "a278433d-4701-4cf9-9fc0-354d836e108a" (UID: "a278433d-4701-4cf9-9fc0-354d836e108a"). InnerVolumeSpecName "kube-api-access-4lr4f". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 14:04:49 crc kubenswrapper[4762]: I1009 14:04:49.506527 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a278433d-4701-4cf9-9fc0-354d836e108a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a278433d-4701-4cf9-9fc0-354d836e108a" (UID: "a278433d-4701-4cf9-9fc0-354d836e108a"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 14:04:49 crc kubenswrapper[4762]: I1009 14:04:49.507090 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a278433d-4701-4cf9-9fc0-354d836e108a-catalog-content\") pod \"a278433d-4701-4cf9-9fc0-354d836e108a\" (UID: \"a278433d-4701-4cf9-9fc0-354d836e108a\") " Oct 09 14:04:49 crc kubenswrapper[4762]: W1009 14:04:49.507211 4762 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/a278433d-4701-4cf9-9fc0-354d836e108a/volumes/kubernetes.io~empty-dir/catalog-content Oct 09 14:04:49 crc kubenswrapper[4762]: I1009 14:04:49.507235 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a278433d-4701-4cf9-9fc0-354d836e108a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a278433d-4701-4cf9-9fc0-354d836e108a" (UID: "a278433d-4701-4cf9-9fc0-354d836e108a"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 14:04:49 crc kubenswrapper[4762]: I1009 14:04:49.507941 4762 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a278433d-4701-4cf9-9fc0-354d836e108a-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 14:04:49 crc kubenswrapper[4762]: I1009 14:04:49.507990 4762 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a278433d-4701-4cf9-9fc0-354d836e108a-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 14:04:49 crc kubenswrapper[4762]: I1009 14:04:49.508011 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4lr4f\" (UniqueName: \"kubernetes.io/projected/a278433d-4701-4cf9-9fc0-354d836e108a-kube-api-access-4lr4f\") on node \"crc\" DevicePath \"\"" Oct 09 14:04:49 crc kubenswrapper[4762]: I1009 14:04:49.937095 4762 generic.go:334] "Generic (PLEG): container finished" podID="a278433d-4701-4cf9-9fc0-354d836e108a" containerID="625df75b99d647e0b616a863dd66181c63357d1f2b103ecf468e826064b11221" exitCode=0 Oct 09 14:04:49 crc kubenswrapper[4762]: I1009 14:04:49.937143 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9m2q8" event={"ID":"a278433d-4701-4cf9-9fc0-354d836e108a","Type":"ContainerDied","Data":"625df75b99d647e0b616a863dd66181c63357d1f2b103ecf468e826064b11221"} Oct 09 14:04:49 crc kubenswrapper[4762]: I1009 14:04:49.937169 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9m2q8" event={"ID":"a278433d-4701-4cf9-9fc0-354d836e108a","Type":"ContainerDied","Data":"4070feb2f3828c9e563bd3ac25d910dbe2fe4ab273ac9640d13054264082cad8"} Oct 09 14:04:49 crc kubenswrapper[4762]: I1009 14:04:49.937186 4762 scope.go:117] "RemoveContainer" containerID="625df75b99d647e0b616a863dd66181c63357d1f2b103ecf468e826064b11221" Oct 09 14:04:49 crc kubenswrapper[4762]: I1009 14:04:49.937219 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-9m2q8" Oct 09 14:04:49 crc kubenswrapper[4762]: I1009 14:04:49.957027 4762 scope.go:117] "RemoveContainer" containerID="2d4bfb13c77d2d99be40050dda5417bc6058163ce31ae5b71a3f2124fa9453bc" Oct 09 14:04:49 crc kubenswrapper[4762]: I1009 14:04:49.970191 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-9m2q8"] Oct 09 14:04:49 crc kubenswrapper[4762]: I1009 14:04:49.975981 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-9m2q8"] Oct 09 14:04:49 crc kubenswrapper[4762]: I1009 14:04:49.991021 4762 scope.go:117] "RemoveContainer" containerID="6054bf266e3b0f40308bf7f446a706cc22bc6d6f0bc829387911a40f82c9dcf3" Oct 09 14:04:50 crc kubenswrapper[4762]: I1009 14:04:50.024085 4762 scope.go:117] "RemoveContainer" containerID="625df75b99d647e0b616a863dd66181c63357d1f2b103ecf468e826064b11221" Oct 09 14:04:50 crc kubenswrapper[4762]: E1009 14:04:50.024653 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"625df75b99d647e0b616a863dd66181c63357d1f2b103ecf468e826064b11221\": container with ID starting with 625df75b99d647e0b616a863dd66181c63357d1f2b103ecf468e826064b11221 not found: ID does not exist" containerID="625df75b99d647e0b616a863dd66181c63357d1f2b103ecf468e826064b11221" Oct 09 14:04:50 crc kubenswrapper[4762]: I1009 14:04:50.024700 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"625df75b99d647e0b616a863dd66181c63357d1f2b103ecf468e826064b11221"} err="failed to get container status \"625df75b99d647e0b616a863dd66181c63357d1f2b103ecf468e826064b11221\": rpc error: code = NotFound desc = could not find container \"625df75b99d647e0b616a863dd66181c63357d1f2b103ecf468e826064b11221\": container with ID starting with 625df75b99d647e0b616a863dd66181c63357d1f2b103ecf468e826064b11221 not found: ID does not exist" Oct 09 14:04:50 crc kubenswrapper[4762]: I1009 14:04:50.024720 4762 scope.go:117] "RemoveContainer" containerID="2d4bfb13c77d2d99be40050dda5417bc6058163ce31ae5b71a3f2124fa9453bc" Oct 09 14:04:50 crc kubenswrapper[4762]: E1009 14:04:50.025107 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2d4bfb13c77d2d99be40050dda5417bc6058163ce31ae5b71a3f2124fa9453bc\": container with ID starting with 2d4bfb13c77d2d99be40050dda5417bc6058163ce31ae5b71a3f2124fa9453bc not found: ID does not exist" containerID="2d4bfb13c77d2d99be40050dda5417bc6058163ce31ae5b71a3f2124fa9453bc" Oct 09 14:04:50 crc kubenswrapper[4762]: I1009 14:04:50.025166 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2d4bfb13c77d2d99be40050dda5417bc6058163ce31ae5b71a3f2124fa9453bc"} err="failed to get container status \"2d4bfb13c77d2d99be40050dda5417bc6058163ce31ae5b71a3f2124fa9453bc\": rpc error: code = NotFound desc = could not find container \"2d4bfb13c77d2d99be40050dda5417bc6058163ce31ae5b71a3f2124fa9453bc\": container with ID starting with 2d4bfb13c77d2d99be40050dda5417bc6058163ce31ae5b71a3f2124fa9453bc not found: ID does not exist" Oct 09 14:04:50 crc kubenswrapper[4762]: I1009 14:04:50.025205 4762 scope.go:117] "RemoveContainer" containerID="6054bf266e3b0f40308bf7f446a706cc22bc6d6f0bc829387911a40f82c9dcf3" Oct 09 14:04:50 crc kubenswrapper[4762]: E1009 14:04:50.025659 4762 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"6054bf266e3b0f40308bf7f446a706cc22bc6d6f0bc829387911a40f82c9dcf3\": container with ID starting with 6054bf266e3b0f40308bf7f446a706cc22bc6d6f0bc829387911a40f82c9dcf3 not found: ID does not exist" containerID="6054bf266e3b0f40308bf7f446a706cc22bc6d6f0bc829387911a40f82c9dcf3" Oct 09 14:04:50 crc kubenswrapper[4762]: I1009 14:04:50.025686 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6054bf266e3b0f40308bf7f446a706cc22bc6d6f0bc829387911a40f82c9dcf3"} err="failed to get container status \"6054bf266e3b0f40308bf7f446a706cc22bc6d6f0bc829387911a40f82c9dcf3\": rpc error: code = NotFound desc = could not find container \"6054bf266e3b0f40308bf7f446a706cc22bc6d6f0bc829387911a40f82c9dcf3\": container with ID starting with 6054bf266e3b0f40308bf7f446a706cc22bc6d6f0bc829387911a40f82c9dcf3 not found: ID does not exist" Oct 09 14:04:50 crc kubenswrapper[4762]: I1009 14:04:50.974106 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a278433d-4701-4cf9-9fc0-354d836e108a" path="/var/lib/kubelet/pods/a278433d-4701-4cf9-9fc0-354d836e108a/volumes" Oct 09 14:05:11 crc kubenswrapper[4762]: I1009 14:05:11.969827 4762 patch_prober.go:28] interesting pod/machine-config-daemon-5v6hv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 14:05:11 crc kubenswrapper[4762]: I1009 14:05:11.970380 4762 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 14:05:41 crc kubenswrapper[4762]: I1009 14:05:41.969491 4762 patch_prober.go:28] interesting pod/machine-config-daemon-5v6hv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 14:05:41 crc kubenswrapper[4762]: I1009 14:05:41.969906 4762 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 14:05:41 crc kubenswrapper[4762]: I1009 14:05:41.969939 4762 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" Oct 09 14:05:41 crc kubenswrapper[4762]: I1009 14:05:41.970260 4762 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"18b07ac7676d431f8aee7bc53ffc9428ba164c2f728c0a70147f633b4f219a73"} pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 09 14:05:41 crc kubenswrapper[4762]: I1009 14:05:41.970311 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" 
podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" containerID="cri-o://18b07ac7676d431f8aee7bc53ffc9428ba164c2f728c0a70147f633b4f219a73" gracePeriod=600 Oct 09 14:05:42 crc kubenswrapper[4762]: E1009 14:05:42.101213 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 14:05:42 crc kubenswrapper[4762]: I1009 14:05:42.333729 4762 generic.go:334] "Generic (PLEG): container finished" podID="366049a3-acf6-488c-9f93-4557528d6d14" containerID="18b07ac7676d431f8aee7bc53ffc9428ba164c2f728c0a70147f633b4f219a73" exitCode=0 Oct 09 14:05:42 crc kubenswrapper[4762]: I1009 14:05:42.333778 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" event={"ID":"366049a3-acf6-488c-9f93-4557528d6d14","Type":"ContainerDied","Data":"18b07ac7676d431f8aee7bc53ffc9428ba164c2f728c0a70147f633b4f219a73"} Oct 09 14:05:42 crc kubenswrapper[4762]: I1009 14:05:42.333816 4762 scope.go:117] "RemoveContainer" containerID="f559e83ba03b87fe2b68fa8071d2d50ca4f05b52624252aa5d64ab5a305f1ae9" Oct 09 14:05:42 crc kubenswrapper[4762]: I1009 14:05:42.334510 4762 scope.go:117] "RemoveContainer" containerID="18b07ac7676d431f8aee7bc53ffc9428ba164c2f728c0a70147f633b4f219a73" Oct 09 14:05:42 crc kubenswrapper[4762]: E1009 14:05:42.334814 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 14:05:53 crc kubenswrapper[4762]: I1009 14:05:53.965463 4762 scope.go:117] "RemoveContainer" containerID="18b07ac7676d431f8aee7bc53ffc9428ba164c2f728c0a70147f633b4f219a73" Oct 09 14:05:53 crc kubenswrapper[4762]: E1009 14:05:53.966240 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 14:06:04 crc kubenswrapper[4762]: I1009 14:06:04.969174 4762 scope.go:117] "RemoveContainer" containerID="18b07ac7676d431f8aee7bc53ffc9428ba164c2f728c0a70147f633b4f219a73" Oct 09 14:06:04 crc kubenswrapper[4762]: E1009 14:06:04.969940 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 14:06:15 crc kubenswrapper[4762]: I1009 14:06:15.964698 4762 scope.go:117] 
"RemoveContainer" containerID="18b07ac7676d431f8aee7bc53ffc9428ba164c2f728c0a70147f633b4f219a73" Oct 09 14:06:15 crc kubenswrapper[4762]: E1009 14:06:15.965188 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 14:06:29 crc kubenswrapper[4762]: I1009 14:06:29.965562 4762 scope.go:117] "RemoveContainer" containerID="18b07ac7676d431f8aee7bc53ffc9428ba164c2f728c0a70147f633b4f219a73" Oct 09 14:06:29 crc kubenswrapper[4762]: E1009 14:06:29.966342 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 14:06:41 crc kubenswrapper[4762]: I1009 14:06:41.965339 4762 scope.go:117] "RemoveContainer" containerID="18b07ac7676d431f8aee7bc53ffc9428ba164c2f728c0a70147f633b4f219a73" Oct 09 14:06:41 crc kubenswrapper[4762]: E1009 14:06:41.966277 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 14:06:55 crc kubenswrapper[4762]: I1009 14:06:55.965064 4762 scope.go:117] "RemoveContainer" containerID="18b07ac7676d431f8aee7bc53ffc9428ba164c2f728c0a70147f633b4f219a73" Oct 09 14:06:55 crc kubenswrapper[4762]: E1009 14:06:55.965792 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 14:07:07 crc kubenswrapper[4762]: I1009 14:07:07.965947 4762 scope.go:117] "RemoveContainer" containerID="18b07ac7676d431f8aee7bc53ffc9428ba164c2f728c0a70147f633b4f219a73" Oct 09 14:07:07 crc kubenswrapper[4762]: E1009 14:07:07.968042 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 14:07:19 crc kubenswrapper[4762]: I1009 14:07:19.964955 4762 scope.go:117] "RemoveContainer" containerID="18b07ac7676d431f8aee7bc53ffc9428ba164c2f728c0a70147f633b4f219a73" Oct 09 14:07:19 crc kubenswrapper[4762]: E1009 14:07:19.966915 4762 pod_workers.go:1301] "Error 
syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 14:07:31 crc kubenswrapper[4762]: I1009 14:07:31.966115 4762 scope.go:117] "RemoveContainer" containerID="18b07ac7676d431f8aee7bc53ffc9428ba164c2f728c0a70147f633b4f219a73" Oct 09 14:07:31 crc kubenswrapper[4762]: E1009 14:07:31.967931 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 14:07:45 crc kubenswrapper[4762]: I1009 14:07:45.965192 4762 scope.go:117] "RemoveContainer" containerID="18b07ac7676d431f8aee7bc53ffc9428ba164c2f728c0a70147f633b4f219a73" Oct 09 14:07:45 crc kubenswrapper[4762]: E1009 14:07:45.965924 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 14:07:56 crc kubenswrapper[4762]: I1009 14:07:56.965330 4762 scope.go:117] "RemoveContainer" containerID="18b07ac7676d431f8aee7bc53ffc9428ba164c2f728c0a70147f633b4f219a73" Oct 09 14:07:56 crc kubenswrapper[4762]: E1009 14:07:56.966149 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 14:08:11 crc kubenswrapper[4762]: I1009 14:08:11.966147 4762 scope.go:117] "RemoveContainer" containerID="18b07ac7676d431f8aee7bc53ffc9428ba164c2f728c0a70147f633b4f219a73" Oct 09 14:08:11 crc kubenswrapper[4762]: E1009 14:08:11.966977 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 14:08:26 crc kubenswrapper[4762]: I1009 14:08:26.965278 4762 scope.go:117] "RemoveContainer" containerID="18b07ac7676d431f8aee7bc53ffc9428ba164c2f728c0a70147f633b4f219a73" Oct 09 14:08:26 crc kubenswrapper[4762]: E1009 14:08:26.966092 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 14:08:37 crc kubenswrapper[4762]: I1009 14:08:37.965598 4762 scope.go:117] "RemoveContainer" containerID="18b07ac7676d431f8aee7bc53ffc9428ba164c2f728c0a70147f633b4f219a73" Oct 09 14:08:37 crc kubenswrapper[4762]: E1009 14:08:37.966326 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 14:08:49 crc kubenswrapper[4762]: I1009 14:08:49.965245 4762 scope.go:117] "RemoveContainer" containerID="18b07ac7676d431f8aee7bc53ffc9428ba164c2f728c0a70147f633b4f219a73" Oct 09 14:08:49 crc kubenswrapper[4762]: E1009 14:08:49.965934 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 14:09:00 crc kubenswrapper[4762]: I1009 14:09:00.965580 4762 scope.go:117] "RemoveContainer" containerID="18b07ac7676d431f8aee7bc53ffc9428ba164c2f728c0a70147f633b4f219a73" Oct 09 14:09:00 crc kubenswrapper[4762]: E1009 14:09:00.966267 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 14:09:15 crc kubenswrapper[4762]: I1009 14:09:15.965378 4762 scope.go:117] "RemoveContainer" containerID="18b07ac7676d431f8aee7bc53ffc9428ba164c2f728c0a70147f633b4f219a73" Oct 09 14:09:15 crc kubenswrapper[4762]: E1009 14:09:15.965986 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 14:09:29 crc kubenswrapper[4762]: I1009 14:09:29.964962 4762 scope.go:117] "RemoveContainer" containerID="18b07ac7676d431f8aee7bc53ffc9428ba164c2f728c0a70147f633b4f219a73" Oct 09 14:09:29 crc kubenswrapper[4762]: E1009 14:09:29.965782 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" 
podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 14:09:41 crc kubenswrapper[4762]: I1009 14:09:41.965899 4762 scope.go:117] "RemoveContainer" containerID="18b07ac7676d431f8aee7bc53ffc9428ba164c2f728c0a70147f633b4f219a73" Oct 09 14:09:41 crc kubenswrapper[4762]: E1009 14:09:41.966492 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 14:09:55 crc kubenswrapper[4762]: I1009 14:09:55.965149 4762 scope.go:117] "RemoveContainer" containerID="18b07ac7676d431f8aee7bc53ffc9428ba164c2f728c0a70147f633b4f219a73" Oct 09 14:09:55 crc kubenswrapper[4762]: E1009 14:09:55.965951 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 14:10:09 crc kubenswrapper[4762]: I1009 14:10:09.966127 4762 scope.go:117] "RemoveContainer" containerID="18b07ac7676d431f8aee7bc53ffc9428ba164c2f728c0a70147f633b4f219a73" Oct 09 14:10:09 crc kubenswrapper[4762]: E1009 14:10:09.967016 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 14:10:20 crc kubenswrapper[4762]: I1009 14:10:20.965831 4762 scope.go:117] "RemoveContainer" containerID="18b07ac7676d431f8aee7bc53ffc9428ba164c2f728c0a70147f633b4f219a73" Oct 09 14:10:20 crc kubenswrapper[4762]: E1009 14:10:20.966873 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 14:10:34 crc kubenswrapper[4762]: I1009 14:10:34.971000 4762 scope.go:117] "RemoveContainer" containerID="18b07ac7676d431f8aee7bc53ffc9428ba164c2f728c0a70147f633b4f219a73" Oct 09 14:10:34 crc kubenswrapper[4762]: E1009 14:10:34.972204 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 14:10:46 crc kubenswrapper[4762]: I1009 14:10:46.965859 4762 scope.go:117] "RemoveContainer" 
containerID="18b07ac7676d431f8aee7bc53ffc9428ba164c2f728c0a70147f633b4f219a73" Oct 09 14:10:47 crc kubenswrapper[4762]: I1009 14:10:47.600700 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" event={"ID":"366049a3-acf6-488c-9f93-4557528d6d14","Type":"ContainerStarted","Data":"8574429399b1c113b0ce6086afa7c562d5900a772553dc00a80ebe2219a037ab"} Oct 09 14:11:49 crc kubenswrapper[4762]: I1009 14:11:49.500214 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-j9dnc"] Oct 09 14:11:49 crc kubenswrapper[4762]: E1009 14:11:49.501224 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a278433d-4701-4cf9-9fc0-354d836e108a" containerName="extract-utilities" Oct 09 14:11:49 crc kubenswrapper[4762]: I1009 14:11:49.501244 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="a278433d-4701-4cf9-9fc0-354d836e108a" containerName="extract-utilities" Oct 09 14:11:49 crc kubenswrapper[4762]: E1009 14:11:49.501270 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a278433d-4701-4cf9-9fc0-354d836e108a" containerName="extract-content" Oct 09 14:11:49 crc kubenswrapper[4762]: I1009 14:11:49.501279 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="a278433d-4701-4cf9-9fc0-354d836e108a" containerName="extract-content" Oct 09 14:11:49 crc kubenswrapper[4762]: E1009 14:11:49.501297 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a278433d-4701-4cf9-9fc0-354d836e108a" containerName="registry-server" Oct 09 14:11:49 crc kubenswrapper[4762]: I1009 14:11:49.501307 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="a278433d-4701-4cf9-9fc0-354d836e108a" containerName="registry-server" Oct 09 14:11:49 crc kubenswrapper[4762]: I1009 14:11:49.501480 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="a278433d-4701-4cf9-9fc0-354d836e108a" containerName="registry-server" Oct 09 14:11:49 crc kubenswrapper[4762]: I1009 14:11:49.502888 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-j9dnc" Oct 09 14:11:49 crc kubenswrapper[4762]: I1009 14:11:49.515690 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-j9dnc"] Oct 09 14:11:49 crc kubenswrapper[4762]: I1009 14:11:49.528886 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j78qx\" (UniqueName: \"kubernetes.io/projected/5f01f6a8-c246-420a-bb0c-acd150f6e4fb-kube-api-access-j78qx\") pod \"redhat-marketplace-j9dnc\" (UID: \"5f01f6a8-c246-420a-bb0c-acd150f6e4fb\") " pod="openshift-marketplace/redhat-marketplace-j9dnc" Oct 09 14:11:49 crc kubenswrapper[4762]: I1009 14:11:49.528955 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5f01f6a8-c246-420a-bb0c-acd150f6e4fb-utilities\") pod \"redhat-marketplace-j9dnc\" (UID: \"5f01f6a8-c246-420a-bb0c-acd150f6e4fb\") " pod="openshift-marketplace/redhat-marketplace-j9dnc" Oct 09 14:11:49 crc kubenswrapper[4762]: I1009 14:11:49.529097 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5f01f6a8-c246-420a-bb0c-acd150f6e4fb-catalog-content\") pod \"redhat-marketplace-j9dnc\" (UID: \"5f01f6a8-c246-420a-bb0c-acd150f6e4fb\") " pod="openshift-marketplace/redhat-marketplace-j9dnc" Oct 09 14:11:49 crc kubenswrapper[4762]: I1009 14:11:49.629928 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j78qx\" (UniqueName: \"kubernetes.io/projected/5f01f6a8-c246-420a-bb0c-acd150f6e4fb-kube-api-access-j78qx\") pod \"redhat-marketplace-j9dnc\" (UID: \"5f01f6a8-c246-420a-bb0c-acd150f6e4fb\") " pod="openshift-marketplace/redhat-marketplace-j9dnc" Oct 09 14:11:49 crc kubenswrapper[4762]: I1009 14:11:49.629979 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5f01f6a8-c246-420a-bb0c-acd150f6e4fb-utilities\") pod \"redhat-marketplace-j9dnc\" (UID: \"5f01f6a8-c246-420a-bb0c-acd150f6e4fb\") " pod="openshift-marketplace/redhat-marketplace-j9dnc" Oct 09 14:11:49 crc kubenswrapper[4762]: I1009 14:11:49.630082 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5f01f6a8-c246-420a-bb0c-acd150f6e4fb-catalog-content\") pod \"redhat-marketplace-j9dnc\" (UID: \"5f01f6a8-c246-420a-bb0c-acd150f6e4fb\") " pod="openshift-marketplace/redhat-marketplace-j9dnc" Oct 09 14:11:49 crc kubenswrapper[4762]: I1009 14:11:49.630568 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5f01f6a8-c246-420a-bb0c-acd150f6e4fb-catalog-content\") pod \"redhat-marketplace-j9dnc\" (UID: \"5f01f6a8-c246-420a-bb0c-acd150f6e4fb\") " pod="openshift-marketplace/redhat-marketplace-j9dnc" Oct 09 14:11:49 crc kubenswrapper[4762]: I1009 14:11:49.631094 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5f01f6a8-c246-420a-bb0c-acd150f6e4fb-utilities\") pod \"redhat-marketplace-j9dnc\" (UID: \"5f01f6a8-c246-420a-bb0c-acd150f6e4fb\") " pod="openshift-marketplace/redhat-marketplace-j9dnc" Oct 09 14:11:49 crc kubenswrapper[4762]: I1009 14:11:49.648478 4762 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-j78qx\" (UniqueName: \"kubernetes.io/projected/5f01f6a8-c246-420a-bb0c-acd150f6e4fb-kube-api-access-j78qx\") pod \"redhat-marketplace-j9dnc\" (UID: \"5f01f6a8-c246-420a-bb0c-acd150f6e4fb\") " pod="openshift-marketplace/redhat-marketplace-j9dnc" Oct 09 14:11:49 crc kubenswrapper[4762]: I1009 14:11:49.822408 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-j9dnc" Oct 09 14:11:50 crc kubenswrapper[4762]: I1009 14:11:50.029896 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-j9dnc"] Oct 09 14:11:50 crc kubenswrapper[4762]: I1009 14:11:50.073975 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-j9dnc" event={"ID":"5f01f6a8-c246-420a-bb0c-acd150f6e4fb","Type":"ContainerStarted","Data":"3843aae2999f8027c52afdb1ae103a0084ea4792a800acad6c0f0e826e515276"} Oct 09 14:11:51 crc kubenswrapper[4762]: I1009 14:11:51.083565 4762 generic.go:334] "Generic (PLEG): container finished" podID="5f01f6a8-c246-420a-bb0c-acd150f6e4fb" containerID="2b8ccd3855dc1ba63614f60a4e24343ecfeb903ff827ec176d3409fc397de1a3" exitCode=0 Oct 09 14:11:51 crc kubenswrapper[4762]: I1009 14:11:51.083667 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-j9dnc" event={"ID":"5f01f6a8-c246-420a-bb0c-acd150f6e4fb","Type":"ContainerDied","Data":"2b8ccd3855dc1ba63614f60a4e24343ecfeb903ff827ec176d3409fc397de1a3"} Oct 09 14:11:51 crc kubenswrapper[4762]: I1009 14:11:51.085816 4762 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 09 14:11:52 crc kubenswrapper[4762]: I1009 14:11:52.098926 4762 generic.go:334] "Generic (PLEG): container finished" podID="5f01f6a8-c246-420a-bb0c-acd150f6e4fb" containerID="291578140a5937d7401393eb10ff3ac01318aa3dbe0ea1c836d6cd5948a990cf" exitCode=0 Oct 09 14:11:52 crc kubenswrapper[4762]: I1009 14:11:52.098979 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-j9dnc" event={"ID":"5f01f6a8-c246-420a-bb0c-acd150f6e4fb","Type":"ContainerDied","Data":"291578140a5937d7401393eb10ff3ac01318aa3dbe0ea1c836d6cd5948a990cf"} Oct 09 14:11:53 crc kubenswrapper[4762]: I1009 14:11:53.108194 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-j9dnc" event={"ID":"5f01f6a8-c246-420a-bb0c-acd150f6e4fb","Type":"ContainerStarted","Data":"4eedcfde9155b021f639185d01ab4dba67de7ca088b3cd562bf14160db820a28"} Oct 09 14:11:53 crc kubenswrapper[4762]: I1009 14:11:53.130621 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-j9dnc" podStartSLOduration=2.664070401 podStartE2EDuration="4.13059724s" podCreationTimestamp="2025-10-09 14:11:49 +0000 UTC" firstStartedPulling="2025-10-09 14:11:51.08552471 +0000 UTC m=+2786.859315759" lastFinishedPulling="2025-10-09 14:11:52.552051559 +0000 UTC m=+2788.325842598" observedRunningTime="2025-10-09 14:11:53.127887499 +0000 UTC m=+2788.901678538" watchObservedRunningTime="2025-10-09 14:11:53.13059724 +0000 UTC m=+2788.904388279" Oct 09 14:11:59 crc kubenswrapper[4762]: I1009 14:11:59.823376 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-j9dnc" Oct 09 14:11:59 crc kubenswrapper[4762]: I1009 14:11:59.824165 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="" pod="openshift-marketplace/redhat-marketplace-j9dnc" Oct 09 14:11:59 crc kubenswrapper[4762]: I1009 14:11:59.866266 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-j9dnc" Oct 09 14:12:00 crc kubenswrapper[4762]: I1009 14:12:00.204300 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-j9dnc" Oct 09 14:12:00 crc kubenswrapper[4762]: I1009 14:12:00.252387 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-j9dnc"] Oct 09 14:12:02 crc kubenswrapper[4762]: I1009 14:12:02.179694 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-j9dnc" podUID="5f01f6a8-c246-420a-bb0c-acd150f6e4fb" containerName="registry-server" containerID="cri-o://4eedcfde9155b021f639185d01ab4dba67de7ca088b3cd562bf14160db820a28" gracePeriod=2 Oct 09 14:12:02 crc kubenswrapper[4762]: I1009 14:12:02.574269 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-j9dnc" Oct 09 14:12:02 crc kubenswrapper[4762]: I1009 14:12:02.731759 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5f01f6a8-c246-420a-bb0c-acd150f6e4fb-catalog-content\") pod \"5f01f6a8-c246-420a-bb0c-acd150f6e4fb\" (UID: \"5f01f6a8-c246-420a-bb0c-acd150f6e4fb\") " Oct 09 14:12:02 crc kubenswrapper[4762]: I1009 14:12:02.731826 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j78qx\" (UniqueName: \"kubernetes.io/projected/5f01f6a8-c246-420a-bb0c-acd150f6e4fb-kube-api-access-j78qx\") pod \"5f01f6a8-c246-420a-bb0c-acd150f6e4fb\" (UID: \"5f01f6a8-c246-420a-bb0c-acd150f6e4fb\") " Oct 09 14:12:02 crc kubenswrapper[4762]: I1009 14:12:02.731959 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5f01f6a8-c246-420a-bb0c-acd150f6e4fb-utilities\") pod \"5f01f6a8-c246-420a-bb0c-acd150f6e4fb\" (UID: \"5f01f6a8-c246-420a-bb0c-acd150f6e4fb\") " Oct 09 14:12:02 crc kubenswrapper[4762]: I1009 14:12:02.733255 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5f01f6a8-c246-420a-bb0c-acd150f6e4fb-utilities" (OuterVolumeSpecName: "utilities") pod "5f01f6a8-c246-420a-bb0c-acd150f6e4fb" (UID: "5f01f6a8-c246-420a-bb0c-acd150f6e4fb"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 14:12:02 crc kubenswrapper[4762]: I1009 14:12:02.745601 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5f01f6a8-c246-420a-bb0c-acd150f6e4fb-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5f01f6a8-c246-420a-bb0c-acd150f6e4fb" (UID: "5f01f6a8-c246-420a-bb0c-acd150f6e4fb"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 14:12:02 crc kubenswrapper[4762]: I1009 14:12:02.759863 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5f01f6a8-c246-420a-bb0c-acd150f6e4fb-kube-api-access-j78qx" (OuterVolumeSpecName: "kube-api-access-j78qx") pod "5f01f6a8-c246-420a-bb0c-acd150f6e4fb" (UID: "5f01f6a8-c246-420a-bb0c-acd150f6e4fb"). InnerVolumeSpecName "kube-api-access-j78qx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 14:12:02 crc kubenswrapper[4762]: I1009 14:12:02.833890 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j78qx\" (UniqueName: \"kubernetes.io/projected/5f01f6a8-c246-420a-bb0c-acd150f6e4fb-kube-api-access-j78qx\") on node \"crc\" DevicePath \"\"" Oct 09 14:12:02 crc kubenswrapper[4762]: I1009 14:12:02.833937 4762 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5f01f6a8-c246-420a-bb0c-acd150f6e4fb-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 14:12:02 crc kubenswrapper[4762]: I1009 14:12:02.833946 4762 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5f01f6a8-c246-420a-bb0c-acd150f6e4fb-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 14:12:03 crc kubenswrapper[4762]: I1009 14:12:03.189188 4762 generic.go:334] "Generic (PLEG): container finished" podID="5f01f6a8-c246-420a-bb0c-acd150f6e4fb" containerID="4eedcfde9155b021f639185d01ab4dba67de7ca088b3cd562bf14160db820a28" exitCode=0 Oct 09 14:12:03 crc kubenswrapper[4762]: I1009 14:12:03.189256 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-j9dnc" Oct 09 14:12:03 crc kubenswrapper[4762]: I1009 14:12:03.189246 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-j9dnc" event={"ID":"5f01f6a8-c246-420a-bb0c-acd150f6e4fb","Type":"ContainerDied","Data":"4eedcfde9155b021f639185d01ab4dba67de7ca088b3cd562bf14160db820a28"} Oct 09 14:12:03 crc kubenswrapper[4762]: I1009 14:12:03.189419 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-j9dnc" event={"ID":"5f01f6a8-c246-420a-bb0c-acd150f6e4fb","Type":"ContainerDied","Data":"3843aae2999f8027c52afdb1ae103a0084ea4792a800acad6c0f0e826e515276"} Oct 09 14:12:03 crc kubenswrapper[4762]: I1009 14:12:03.189446 4762 scope.go:117] "RemoveContainer" containerID="4eedcfde9155b021f639185d01ab4dba67de7ca088b3cd562bf14160db820a28" Oct 09 14:12:03 crc kubenswrapper[4762]: I1009 14:12:03.211765 4762 scope.go:117] "RemoveContainer" containerID="291578140a5937d7401393eb10ff3ac01318aa3dbe0ea1c836d6cd5948a990cf" Oct 09 14:12:03 crc kubenswrapper[4762]: I1009 14:12:03.212473 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-j9dnc"] Oct 09 14:12:03 crc kubenswrapper[4762]: I1009 14:12:03.219837 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-j9dnc"] Oct 09 14:12:03 crc kubenswrapper[4762]: I1009 14:12:03.230351 4762 scope.go:117] "RemoveContainer" containerID="2b8ccd3855dc1ba63614f60a4e24343ecfeb903ff827ec176d3409fc397de1a3" Oct 09 14:12:03 crc kubenswrapper[4762]: I1009 14:12:03.259365 4762 scope.go:117] "RemoveContainer" containerID="4eedcfde9155b021f639185d01ab4dba67de7ca088b3cd562bf14160db820a28" Oct 09 14:12:03 crc kubenswrapper[4762]: E1009 14:12:03.259712 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4eedcfde9155b021f639185d01ab4dba67de7ca088b3cd562bf14160db820a28\": container with ID starting with 4eedcfde9155b021f639185d01ab4dba67de7ca088b3cd562bf14160db820a28 not found: ID does not exist" containerID="4eedcfde9155b021f639185d01ab4dba67de7ca088b3cd562bf14160db820a28" Oct 09 14:12:03 crc kubenswrapper[4762]: I1009 14:12:03.259744 4762 
Oct 09 14:12:03 crc kubenswrapper[4762]: I1009 14:12:03.259744 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4eedcfde9155b021f639185d01ab4dba67de7ca088b3cd562bf14160db820a28"} err="failed to get container status \"4eedcfde9155b021f639185d01ab4dba67de7ca088b3cd562bf14160db820a28\": rpc error: code = NotFound desc = could not find container \"4eedcfde9155b021f639185d01ab4dba67de7ca088b3cd562bf14160db820a28\": container with ID starting with 4eedcfde9155b021f639185d01ab4dba67de7ca088b3cd562bf14160db820a28 not found: ID does not exist"
Oct 09 14:12:03 crc kubenswrapper[4762]: I1009 14:12:03.259765 4762 scope.go:117] "RemoveContainer" containerID="291578140a5937d7401393eb10ff3ac01318aa3dbe0ea1c836d6cd5948a990cf"
Oct 09 14:12:03 crc kubenswrapper[4762]: E1009 14:12:03.259977 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"291578140a5937d7401393eb10ff3ac01318aa3dbe0ea1c836d6cd5948a990cf\": container with ID starting with 291578140a5937d7401393eb10ff3ac01318aa3dbe0ea1c836d6cd5948a990cf not found: ID does not exist" containerID="291578140a5937d7401393eb10ff3ac01318aa3dbe0ea1c836d6cd5948a990cf"
Oct 09 14:12:03 crc kubenswrapper[4762]: I1009 14:12:03.260000 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"291578140a5937d7401393eb10ff3ac01318aa3dbe0ea1c836d6cd5948a990cf"} err="failed to get container status \"291578140a5937d7401393eb10ff3ac01318aa3dbe0ea1c836d6cd5948a990cf\": rpc error: code = NotFound desc = could not find container \"291578140a5937d7401393eb10ff3ac01318aa3dbe0ea1c836d6cd5948a990cf\": container with ID starting with 291578140a5937d7401393eb10ff3ac01318aa3dbe0ea1c836d6cd5948a990cf not found: ID does not exist"
Oct 09 14:12:03 crc kubenswrapper[4762]: I1009 14:12:03.260017 4762 scope.go:117] "RemoveContainer" containerID="2b8ccd3855dc1ba63614f60a4e24343ecfeb903ff827ec176d3409fc397de1a3"
Oct 09 14:12:03 crc kubenswrapper[4762]: E1009 14:12:03.260280 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2b8ccd3855dc1ba63614f60a4e24343ecfeb903ff827ec176d3409fc397de1a3\": container with ID starting with 2b8ccd3855dc1ba63614f60a4e24343ecfeb903ff827ec176d3409fc397de1a3 not found: ID does not exist" containerID="2b8ccd3855dc1ba63614f60a4e24343ecfeb903ff827ec176d3409fc397de1a3"
Oct 09 14:12:03 crc kubenswrapper[4762]: I1009 14:12:03.260368 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2b8ccd3855dc1ba63614f60a4e24343ecfeb903ff827ec176d3409fc397de1a3"} err="failed to get container status \"2b8ccd3855dc1ba63614f60a4e24343ecfeb903ff827ec176d3409fc397de1a3\": rpc error: code = NotFound desc = could not find container \"2b8ccd3855dc1ba63614f60a4e24343ecfeb903ff827ec176d3409fc397de1a3\": container with ID starting with 2b8ccd3855dc1ba63614f60a4e24343ecfeb903ff827ec176d3409fc397de1a3 not found: ID does not exist"
Oct 09 14:12:04 crc kubenswrapper[4762]: I1009 14:12:04.974136 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5f01f6a8-c246-420a-bb0c-acd150f6e4fb" path="/var/lib/kubelet/pods/5f01f6a8-c246-420a-bb0c-acd150f6e4fb/volumes"
Oct 09 14:13:11 crc kubenswrapper[4762]: I1009 14:13:11.970059 4762 patch_prober.go:28] interesting pod/machine-config-daemon-5v6hv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 09 14:13:11 crc kubenswrapper[4762]: I1009 14:13:11.971003 4762 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 09 14:13:41 crc kubenswrapper[4762]: I1009 14:13:41.969255 4762 patch_prober.go:28] interesting pod/machine-config-daemon-5v6hv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 09 14:13:41 crc kubenswrapper[4762]: I1009 14:13:41.970074 4762 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 09 14:13:46 crc kubenswrapper[4762]: I1009 14:13:46.039342 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-x6lgd"]
Oct 09 14:13:46 crc kubenswrapper[4762]: E1009 14:13:46.040084 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f01f6a8-c246-420a-bb0c-acd150f6e4fb" containerName="extract-content"
Oct 09 14:13:46 crc kubenswrapper[4762]: I1009 14:13:46.040099 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f01f6a8-c246-420a-bb0c-acd150f6e4fb" containerName="extract-content"
Oct 09 14:13:46 crc kubenswrapper[4762]: E1009 14:13:46.040112 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f01f6a8-c246-420a-bb0c-acd150f6e4fb" containerName="extract-utilities"
Oct 09 14:13:46 crc kubenswrapper[4762]: I1009 14:13:46.040120 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f01f6a8-c246-420a-bb0c-acd150f6e4fb" containerName="extract-utilities"
Oct 09 14:13:46 crc kubenswrapper[4762]: E1009 14:13:46.040138 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f01f6a8-c246-420a-bb0c-acd150f6e4fb" containerName="registry-server"
Oct 09 14:13:46 crc kubenswrapper[4762]: I1009 14:13:46.040144 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f01f6a8-c246-420a-bb0c-acd150f6e4fb" containerName="registry-server"
Oct 09 14:13:46 crc kubenswrapper[4762]: I1009 14:13:46.040305 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="5f01f6a8-c246-420a-bb0c-acd150f6e4fb" containerName="registry-server"
Oct 09 14:13:46 crc kubenswrapper[4762]: I1009 14:13:46.041314 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-x6lgd"
Oct 09 14:13:46 crc kubenswrapper[4762]: I1009 14:13:46.049451 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-x6lgd"]
Oct 09 14:13:46 crc kubenswrapper[4762]: I1009 14:13:46.142627 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/48a69398-4728-443f-bb61-f5c287d9313b-utilities\") pod \"community-operators-x6lgd\" (UID: \"48a69398-4728-443f-bb61-f5c287d9313b\") " pod="openshift-marketplace/community-operators-x6lgd"
Oct 09 14:13:46 crc kubenswrapper[4762]: I1009 14:13:46.142971 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-96rcb\" (UniqueName: \"kubernetes.io/projected/48a69398-4728-443f-bb61-f5c287d9313b-kube-api-access-96rcb\") pod \"community-operators-x6lgd\" (UID: \"48a69398-4728-443f-bb61-f5c287d9313b\") " pod="openshift-marketplace/community-operators-x6lgd"
Oct 09 14:13:46 crc kubenswrapper[4762]: I1009 14:13:46.143203 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/48a69398-4728-443f-bb61-f5c287d9313b-catalog-content\") pod \"community-operators-x6lgd\" (UID: \"48a69398-4728-443f-bb61-f5c287d9313b\") " pod="openshift-marketplace/community-operators-x6lgd"
Oct 09 14:13:46 crc kubenswrapper[4762]: I1009 14:13:46.244438 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/48a69398-4728-443f-bb61-f5c287d9313b-utilities\") pod \"community-operators-x6lgd\" (UID: \"48a69398-4728-443f-bb61-f5c287d9313b\") " pod="openshift-marketplace/community-operators-x6lgd"
Oct 09 14:13:46 crc kubenswrapper[4762]: I1009 14:13:46.244511 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-96rcb\" (UniqueName: \"kubernetes.io/projected/48a69398-4728-443f-bb61-f5c287d9313b-kube-api-access-96rcb\") pod \"community-operators-x6lgd\" (UID: \"48a69398-4728-443f-bb61-f5c287d9313b\") " pod="openshift-marketplace/community-operators-x6lgd"
Oct 09 14:13:46 crc kubenswrapper[4762]: I1009 14:13:46.244605 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/48a69398-4728-443f-bb61-f5c287d9313b-catalog-content\") pod \"community-operators-x6lgd\" (UID: \"48a69398-4728-443f-bb61-f5c287d9313b\") " pod="openshift-marketplace/community-operators-x6lgd"
Oct 09 14:13:46 crc kubenswrapper[4762]: I1009 14:13:46.245021 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/48a69398-4728-443f-bb61-f5c287d9313b-utilities\") pod \"community-operators-x6lgd\" (UID: \"48a69398-4728-443f-bb61-f5c287d9313b\") " pod="openshift-marketplace/community-operators-x6lgd"
Oct 09 14:13:46 crc kubenswrapper[4762]: I1009 14:13:46.245109 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/48a69398-4728-443f-bb61-f5c287d9313b-catalog-content\") pod \"community-operators-x6lgd\" (UID: \"48a69398-4728-443f-bb61-f5c287d9313b\") " pod="openshift-marketplace/community-operators-x6lgd"
Oct 09 14:13:46 crc kubenswrapper[4762]: I1009 14:13:46.266448 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-96rcb\" (UniqueName: \"kubernetes.io/projected/48a69398-4728-443f-bb61-f5c287d9313b-kube-api-access-96rcb\") pod \"community-operators-x6lgd\" (UID: \"48a69398-4728-443f-bb61-f5c287d9313b\") " pod="openshift-marketplace/community-operators-x6lgd"
Oct 09 14:13:46 crc kubenswrapper[4762]: I1009 14:13:46.369853 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-x6lgd"
Oct 09 14:13:46 crc kubenswrapper[4762]: I1009 14:13:46.636035 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-x6lgd"]
Oct 09 14:13:46 crc kubenswrapper[4762]: W1009 14:13:46.646832 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod48a69398_4728_443f_bb61_f5c287d9313b.slice/crio-daa3984902d75ee712fe58714fffc13b258abe4c4f47cfc40229a41a94a23b61 WatchSource:0}: Error finding container daa3984902d75ee712fe58714fffc13b258abe4c4f47cfc40229a41a94a23b61: Status 404 returned error can't find the container with id daa3984902d75ee712fe58714fffc13b258abe4c4f47cfc40229a41a94a23b61
Oct 09 14:13:46 crc kubenswrapper[4762]: I1009 14:13:46.966792 4762 generic.go:334] "Generic (PLEG): container finished" podID="48a69398-4728-443f-bb61-f5c287d9313b" containerID="1d57c0035e2924bd1a857895618db56ecaa806cbe0e7efe4aea456784e2caa5d" exitCode=0
Oct 09 14:13:46 crc kubenswrapper[4762]: I1009 14:13:46.980386 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-x6lgd" event={"ID":"48a69398-4728-443f-bb61-f5c287d9313b","Type":"ContainerDied","Data":"1d57c0035e2924bd1a857895618db56ecaa806cbe0e7efe4aea456784e2caa5d"}
Oct 09 14:13:46 crc kubenswrapper[4762]: I1009 14:13:46.980454 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-x6lgd" event={"ID":"48a69398-4728-443f-bb61-f5c287d9313b","Type":"ContainerStarted","Data":"daa3984902d75ee712fe58714fffc13b258abe4c4f47cfc40229a41a94a23b61"}
Oct 09 14:13:48 crc kubenswrapper[4762]: I1009 14:13:48.986460 4762 generic.go:334] "Generic (PLEG): container finished" podID="48a69398-4728-443f-bb61-f5c287d9313b" containerID="29dd791187dbc60b3c9691da31d140b238627b4c93eb704201d1d27912d68d47" exitCode=0
Oct 09 14:13:48 crc kubenswrapper[4762]: I1009 14:13:48.986588 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-x6lgd" event={"ID":"48a69398-4728-443f-bb61-f5c287d9313b","Type":"ContainerDied","Data":"29dd791187dbc60b3c9691da31d140b238627b4c93eb704201d1d27912d68d47"}
Oct 09 14:13:49 crc kubenswrapper[4762]: I1009 14:13:49.994905 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-x6lgd" event={"ID":"48a69398-4728-443f-bb61-f5c287d9313b","Type":"ContainerStarted","Data":"79cd7d4bc6eaa5a7abfec137fa303072744fb3732088f9a7605544804f985ed0"}
Oct 09 14:13:50 crc kubenswrapper[4762]: I1009 14:13:50.015801 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-x6lgd" podStartSLOduration=1.4137987490000001 podStartE2EDuration="4.015781665s" podCreationTimestamp="2025-10-09 14:13:46 +0000 UTC" firstStartedPulling="2025-10-09 14:13:46.968330806 +0000 UTC m=+2902.742121845" lastFinishedPulling="2025-10-09 14:13:49.570313722 +0000 UTC m=+2905.344104761" observedRunningTime="2025-10-09 14:13:50.013588127 +0000 UTC m=+2905.787379166" watchObservedRunningTime="2025-10-09 14:13:50.015781665 +0000 UTC m=+2905.789572704"
m=+2905.787379166" watchObservedRunningTime="2025-10-09 14:13:50.015781665 +0000 UTC m=+2905.789572704" Oct 09 14:13:56 crc kubenswrapper[4762]: I1009 14:13:56.370475 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-x6lgd" Oct 09 14:13:56 crc kubenswrapper[4762]: I1009 14:13:56.371365 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-x6lgd" Oct 09 14:13:56 crc kubenswrapper[4762]: I1009 14:13:56.417330 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-x6lgd" Oct 09 14:13:57 crc kubenswrapper[4762]: I1009 14:13:57.101434 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-x6lgd" Oct 09 14:13:57 crc kubenswrapper[4762]: I1009 14:13:57.147202 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-x6lgd"] Oct 09 14:13:59 crc kubenswrapper[4762]: I1009 14:13:59.073242 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-x6lgd" podUID="48a69398-4728-443f-bb61-f5c287d9313b" containerName="registry-server" containerID="cri-o://79cd7d4bc6eaa5a7abfec137fa303072744fb3732088f9a7605544804f985ed0" gracePeriod=2 Oct 09 14:13:59 crc kubenswrapper[4762]: I1009 14:13:59.960619 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-x6lgd" Oct 09 14:14:00 crc kubenswrapper[4762]: I1009 14:14:00.081790 4762 generic.go:334] "Generic (PLEG): container finished" podID="48a69398-4728-443f-bb61-f5c287d9313b" containerID="79cd7d4bc6eaa5a7abfec137fa303072744fb3732088f9a7605544804f985ed0" exitCode=0 Oct 09 14:14:00 crc kubenswrapper[4762]: I1009 14:14:00.081840 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-x6lgd" event={"ID":"48a69398-4728-443f-bb61-f5c287d9313b","Type":"ContainerDied","Data":"79cd7d4bc6eaa5a7abfec137fa303072744fb3732088f9a7605544804f985ed0"} Oct 09 14:14:00 crc kubenswrapper[4762]: I1009 14:14:00.081872 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-x6lgd" event={"ID":"48a69398-4728-443f-bb61-f5c287d9313b","Type":"ContainerDied","Data":"daa3984902d75ee712fe58714fffc13b258abe4c4f47cfc40229a41a94a23b61"} Oct 09 14:14:00 crc kubenswrapper[4762]: I1009 14:14:00.081875 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-x6lgd" Oct 09 14:14:00 crc kubenswrapper[4762]: I1009 14:14:00.081892 4762 scope.go:117] "RemoveContainer" containerID="79cd7d4bc6eaa5a7abfec137fa303072744fb3732088f9a7605544804f985ed0" Oct 09 14:14:00 crc kubenswrapper[4762]: I1009 14:14:00.099530 4762 scope.go:117] "RemoveContainer" containerID="29dd791187dbc60b3c9691da31d140b238627b4c93eb704201d1d27912d68d47" Oct 09 14:14:00 crc kubenswrapper[4762]: I1009 14:14:00.118518 4762 scope.go:117] "RemoveContainer" containerID="1d57c0035e2924bd1a857895618db56ecaa806cbe0e7efe4aea456784e2caa5d" Oct 09 14:14:00 crc kubenswrapper[4762]: I1009 14:14:00.146618 4762 scope.go:117] "RemoveContainer" containerID="79cd7d4bc6eaa5a7abfec137fa303072744fb3732088f9a7605544804f985ed0" Oct 09 14:14:00 crc kubenswrapper[4762]: E1009 14:14:00.147169 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"79cd7d4bc6eaa5a7abfec137fa303072744fb3732088f9a7605544804f985ed0\": container with ID starting with 79cd7d4bc6eaa5a7abfec137fa303072744fb3732088f9a7605544804f985ed0 not found: ID does not exist" containerID="79cd7d4bc6eaa5a7abfec137fa303072744fb3732088f9a7605544804f985ed0" Oct 09 14:14:00 crc kubenswrapper[4762]: I1009 14:14:00.147221 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"79cd7d4bc6eaa5a7abfec137fa303072744fb3732088f9a7605544804f985ed0"} err="failed to get container status \"79cd7d4bc6eaa5a7abfec137fa303072744fb3732088f9a7605544804f985ed0\": rpc error: code = NotFound desc = could not find container \"79cd7d4bc6eaa5a7abfec137fa303072744fb3732088f9a7605544804f985ed0\": container with ID starting with 79cd7d4bc6eaa5a7abfec137fa303072744fb3732088f9a7605544804f985ed0 not found: ID does not exist" Oct 09 14:14:00 crc kubenswrapper[4762]: I1009 14:14:00.147249 4762 scope.go:117] "RemoveContainer" containerID="29dd791187dbc60b3c9691da31d140b238627b4c93eb704201d1d27912d68d47" Oct 09 14:14:00 crc kubenswrapper[4762]: I1009 14:14:00.147260 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/48a69398-4728-443f-bb61-f5c287d9313b-utilities\") pod \"48a69398-4728-443f-bb61-f5c287d9313b\" (UID: \"48a69398-4728-443f-bb61-f5c287d9313b\") " Oct 09 14:14:00 crc kubenswrapper[4762]: I1009 14:14:00.147288 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/48a69398-4728-443f-bb61-f5c287d9313b-catalog-content\") pod \"48a69398-4728-443f-bb61-f5c287d9313b\" (UID: \"48a69398-4728-443f-bb61-f5c287d9313b\") " Oct 09 14:14:00 crc kubenswrapper[4762]: I1009 14:14:00.147429 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-96rcb\" (UniqueName: \"kubernetes.io/projected/48a69398-4728-443f-bb61-f5c287d9313b-kube-api-access-96rcb\") pod \"48a69398-4728-443f-bb61-f5c287d9313b\" (UID: \"48a69398-4728-443f-bb61-f5c287d9313b\") " Oct 09 14:14:00 crc kubenswrapper[4762]: I1009 14:14:00.148208 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/48a69398-4728-443f-bb61-f5c287d9313b-utilities" (OuterVolumeSpecName: "utilities") pod "48a69398-4728-443f-bb61-f5c287d9313b" (UID: "48a69398-4728-443f-bb61-f5c287d9313b"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 14:14:00 crc kubenswrapper[4762]: E1009 14:14:00.148587 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"29dd791187dbc60b3c9691da31d140b238627b4c93eb704201d1d27912d68d47\": container with ID starting with 29dd791187dbc60b3c9691da31d140b238627b4c93eb704201d1d27912d68d47 not found: ID does not exist" containerID="29dd791187dbc60b3c9691da31d140b238627b4c93eb704201d1d27912d68d47" Oct 09 14:14:00 crc kubenswrapper[4762]: I1009 14:14:00.148796 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"29dd791187dbc60b3c9691da31d140b238627b4c93eb704201d1d27912d68d47"} err="failed to get container status \"29dd791187dbc60b3c9691da31d140b238627b4c93eb704201d1d27912d68d47\": rpc error: code = NotFound desc = could not find container \"29dd791187dbc60b3c9691da31d140b238627b4c93eb704201d1d27912d68d47\": container with ID starting with 29dd791187dbc60b3c9691da31d140b238627b4c93eb704201d1d27912d68d47 not found: ID does not exist" Oct 09 14:14:00 crc kubenswrapper[4762]: I1009 14:14:00.148840 4762 scope.go:117] "RemoveContainer" containerID="1d57c0035e2924bd1a857895618db56ecaa806cbe0e7efe4aea456784e2caa5d" Oct 09 14:14:00 crc kubenswrapper[4762]: E1009 14:14:00.149303 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1d57c0035e2924bd1a857895618db56ecaa806cbe0e7efe4aea456784e2caa5d\": container with ID starting with 1d57c0035e2924bd1a857895618db56ecaa806cbe0e7efe4aea456784e2caa5d not found: ID does not exist" containerID="1d57c0035e2924bd1a857895618db56ecaa806cbe0e7efe4aea456784e2caa5d" Oct 09 14:14:00 crc kubenswrapper[4762]: I1009 14:14:00.149345 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1d57c0035e2924bd1a857895618db56ecaa806cbe0e7efe4aea456784e2caa5d"} err="failed to get container status \"1d57c0035e2924bd1a857895618db56ecaa806cbe0e7efe4aea456784e2caa5d\": rpc error: code = NotFound desc = could not find container \"1d57c0035e2924bd1a857895618db56ecaa806cbe0e7efe4aea456784e2caa5d\": container with ID starting with 1d57c0035e2924bd1a857895618db56ecaa806cbe0e7efe4aea456784e2caa5d not found: ID does not exist" Oct 09 14:14:00 crc kubenswrapper[4762]: I1009 14:14:00.156012 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/48a69398-4728-443f-bb61-f5c287d9313b-kube-api-access-96rcb" (OuterVolumeSpecName: "kube-api-access-96rcb") pod "48a69398-4728-443f-bb61-f5c287d9313b" (UID: "48a69398-4728-443f-bb61-f5c287d9313b"). InnerVolumeSpecName "kube-api-access-96rcb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 14:14:00 crc kubenswrapper[4762]: I1009 14:14:00.207928 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/48a69398-4728-443f-bb61-f5c287d9313b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "48a69398-4728-443f-bb61-f5c287d9313b" (UID: "48a69398-4728-443f-bb61-f5c287d9313b"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 14:14:00 crc kubenswrapper[4762]: I1009 14:14:00.250095 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-96rcb\" (UniqueName: \"kubernetes.io/projected/48a69398-4728-443f-bb61-f5c287d9313b-kube-api-access-96rcb\") on node \"crc\" DevicePath \"\"" Oct 09 14:14:00 crc kubenswrapper[4762]: I1009 14:14:00.250148 4762 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/48a69398-4728-443f-bb61-f5c287d9313b-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 14:14:00 crc kubenswrapper[4762]: I1009 14:14:00.250160 4762 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/48a69398-4728-443f-bb61-f5c287d9313b-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 14:14:00 crc kubenswrapper[4762]: I1009 14:14:00.412314 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-x6lgd"] Oct 09 14:14:00 crc kubenswrapper[4762]: I1009 14:14:00.417589 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-x6lgd"] Oct 09 14:14:00 crc kubenswrapper[4762]: I1009 14:14:00.974450 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="48a69398-4728-443f-bb61-f5c287d9313b" path="/var/lib/kubelet/pods/48a69398-4728-443f-bb61-f5c287d9313b/volumes" Oct 09 14:14:11 crc kubenswrapper[4762]: I1009 14:14:11.969106 4762 patch_prober.go:28] interesting pod/machine-config-daemon-5v6hv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 14:14:11 crc kubenswrapper[4762]: I1009 14:14:11.969727 4762 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 14:14:11 crc kubenswrapper[4762]: I1009 14:14:11.969782 4762 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" Oct 09 14:14:11 crc kubenswrapper[4762]: I1009 14:14:11.970233 4762 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"8574429399b1c113b0ce6086afa7c562d5900a772553dc00a80ebe2219a037ab"} pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 09 14:14:11 crc kubenswrapper[4762]: I1009 14:14:11.970304 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" containerID="cri-o://8574429399b1c113b0ce6086afa7c562d5900a772553dc00a80ebe2219a037ab" gracePeriod=600 Oct 09 14:14:12 crc kubenswrapper[4762]: I1009 14:14:12.175423 4762 generic.go:334] "Generic (PLEG): container finished" podID="366049a3-acf6-488c-9f93-4557528d6d14" containerID="8574429399b1c113b0ce6086afa7c562d5900a772553dc00a80ebe2219a037ab" exitCode=0 Oct 09 14:14:12 crc kubenswrapper[4762]: I1009 
Oct 09 14:14:12 crc kubenswrapper[4762]: I1009 14:14:12.175520 4762 scope.go:117] "RemoveContainer" containerID="18b07ac7676d431f8aee7bc53ffc9428ba164c2f728c0a70147f633b4f219a73"
Oct 09 14:14:13 crc kubenswrapper[4762]: I1009 14:14:13.185081 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" event={"ID":"366049a3-acf6-488c-9f93-4557528d6d14","Type":"ContainerStarted","Data":"5bd398d0aad3464db677ef65fde2ba9c81e274fc4099a8035b14053dc80af0a4"}
Oct 09 14:14:15 crc kubenswrapper[4762]: I1009 14:14:15.046265 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-8cv2d"]
Oct 09 14:14:15 crc kubenswrapper[4762]: E1009 14:14:15.046848 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="48a69398-4728-443f-bb61-f5c287d9313b" containerName="extract-utilities"
Oct 09 14:14:15 crc kubenswrapper[4762]: I1009 14:14:15.046862 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="48a69398-4728-443f-bb61-f5c287d9313b" containerName="extract-utilities"
Oct 09 14:14:15 crc kubenswrapper[4762]: E1009 14:14:15.046876 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="48a69398-4728-443f-bb61-f5c287d9313b" containerName="extract-content"
Oct 09 14:14:15 crc kubenswrapper[4762]: I1009 14:14:15.046883 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="48a69398-4728-443f-bb61-f5c287d9313b" containerName="extract-content"
Oct 09 14:14:15 crc kubenswrapper[4762]: E1009 14:14:15.046903 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="48a69398-4728-443f-bb61-f5c287d9313b" containerName="registry-server"
Oct 09 14:14:15 crc kubenswrapper[4762]: I1009 14:14:15.046909 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="48a69398-4728-443f-bb61-f5c287d9313b" containerName="registry-server"
Oct 09 14:14:15 crc kubenswrapper[4762]: I1009 14:14:15.047081 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="48a69398-4728-443f-bb61-f5c287d9313b" containerName="registry-server"
Oct 09 14:14:15 crc kubenswrapper[4762]: I1009 14:14:15.048643 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-8cv2d"
Oct 09 14:14:15 crc kubenswrapper[4762]: I1009 14:14:15.060322 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-8cv2d"]
Oct 09 14:14:15 crc kubenswrapper[4762]: I1009 14:14:15.173351 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c32bd705-52e2-4797-af75-42c2cb050cb2-catalog-content\") pod \"certified-operators-8cv2d\" (UID: \"c32bd705-52e2-4797-af75-42c2cb050cb2\") " pod="openshift-marketplace/certified-operators-8cv2d"
Oct 09 14:14:15 crc kubenswrapper[4762]: I1009 14:14:15.173456 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mrq4f\" (UniqueName: \"kubernetes.io/projected/c32bd705-52e2-4797-af75-42c2cb050cb2-kube-api-access-mrq4f\") pod \"certified-operators-8cv2d\" (UID: \"c32bd705-52e2-4797-af75-42c2cb050cb2\") " pod="openshift-marketplace/certified-operators-8cv2d"
Oct 09 14:14:15 crc kubenswrapper[4762]: I1009 14:14:15.173623 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c32bd705-52e2-4797-af75-42c2cb050cb2-utilities\") pod \"certified-operators-8cv2d\" (UID: \"c32bd705-52e2-4797-af75-42c2cb050cb2\") " pod="openshift-marketplace/certified-operators-8cv2d"
Oct 09 14:14:15 crc kubenswrapper[4762]: I1009 14:14:15.275041 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c32bd705-52e2-4797-af75-42c2cb050cb2-catalog-content\") pod \"certified-operators-8cv2d\" (UID: \"c32bd705-52e2-4797-af75-42c2cb050cb2\") " pod="openshift-marketplace/certified-operators-8cv2d"
Oct 09 14:14:15 crc kubenswrapper[4762]: I1009 14:14:15.275361 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mrq4f\" (UniqueName: \"kubernetes.io/projected/c32bd705-52e2-4797-af75-42c2cb050cb2-kube-api-access-mrq4f\") pod \"certified-operators-8cv2d\" (UID: \"c32bd705-52e2-4797-af75-42c2cb050cb2\") " pod="openshift-marketplace/certified-operators-8cv2d"
Oct 09 14:14:15 crc kubenswrapper[4762]: I1009 14:14:15.275460 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c32bd705-52e2-4797-af75-42c2cb050cb2-utilities\") pod \"certified-operators-8cv2d\" (UID: \"c32bd705-52e2-4797-af75-42c2cb050cb2\") " pod="openshift-marketplace/certified-operators-8cv2d"
Oct 09 14:14:15 crc kubenswrapper[4762]: I1009 14:14:15.275613 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c32bd705-52e2-4797-af75-42c2cb050cb2-catalog-content\") pod \"certified-operators-8cv2d\" (UID: \"c32bd705-52e2-4797-af75-42c2cb050cb2\") " pod="openshift-marketplace/certified-operators-8cv2d"
Oct 09 14:14:15 crc kubenswrapper[4762]: I1009 14:14:15.275867 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c32bd705-52e2-4797-af75-42c2cb050cb2-utilities\") pod \"certified-operators-8cv2d\" (UID: \"c32bd705-52e2-4797-af75-42c2cb050cb2\") " pod="openshift-marketplace/certified-operators-8cv2d"
Oct 09 14:14:15 crc kubenswrapper[4762]: I1009 14:14:15.308845 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mrq4f\" (UniqueName: \"kubernetes.io/projected/c32bd705-52e2-4797-af75-42c2cb050cb2-kube-api-access-mrq4f\") pod \"certified-operators-8cv2d\" (UID: \"c32bd705-52e2-4797-af75-42c2cb050cb2\") " pod="openshift-marketplace/certified-operators-8cv2d"
Oct 09 14:14:15 crc kubenswrapper[4762]: I1009 14:14:15.367840 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-8cv2d"
Oct 09 14:14:15 crc kubenswrapper[4762]: I1009 14:14:15.869448 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-8cv2d"]
Oct 09 14:14:16 crc kubenswrapper[4762]: I1009 14:14:16.205208 4762 generic.go:334] "Generic (PLEG): container finished" podID="c32bd705-52e2-4797-af75-42c2cb050cb2" containerID="e5ddc97d91c88c1dba4996193f00f85cb3f6ba3b4947b8d8b035fa1d1b52851e" exitCode=0
Oct 09 14:14:16 crc kubenswrapper[4762]: I1009 14:14:16.205253 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8cv2d" event={"ID":"c32bd705-52e2-4797-af75-42c2cb050cb2","Type":"ContainerDied","Data":"e5ddc97d91c88c1dba4996193f00f85cb3f6ba3b4947b8d8b035fa1d1b52851e"}
Oct 09 14:14:16 crc kubenswrapper[4762]: I1009 14:14:16.205278 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8cv2d" event={"ID":"c32bd705-52e2-4797-af75-42c2cb050cb2","Type":"ContainerStarted","Data":"a4325edfe90c1b459b6db27d1eb2be791c6dcdb52573500ba088be137b0c03d1"}
Oct 09 14:14:17 crc kubenswrapper[4762]: I1009 14:14:17.214732 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8cv2d" event={"ID":"c32bd705-52e2-4797-af75-42c2cb050cb2","Type":"ContainerStarted","Data":"0be61f25745a0d7b53de84ea3e8b59062e93f966e3530ed7bb8fb9555b904613"}
Oct 09 14:14:18 crc kubenswrapper[4762]: I1009 14:14:18.225214 4762 generic.go:334] "Generic (PLEG): container finished" podID="c32bd705-52e2-4797-af75-42c2cb050cb2" containerID="0be61f25745a0d7b53de84ea3e8b59062e93f966e3530ed7bb8fb9555b904613" exitCode=0
Oct 09 14:14:18 crc kubenswrapper[4762]: I1009 14:14:18.225264 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8cv2d" event={"ID":"c32bd705-52e2-4797-af75-42c2cb050cb2","Type":"ContainerDied","Data":"0be61f25745a0d7b53de84ea3e8b59062e93f966e3530ed7bb8fb9555b904613"}
Oct 09 14:14:19 crc kubenswrapper[4762]: I1009 14:14:19.235662 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8cv2d" event={"ID":"c32bd705-52e2-4797-af75-42c2cb050cb2","Type":"ContainerStarted","Data":"fb47426200ff5f3cb3c3567fe79e7c96e22e777eb983a1f463b47e8e239e1d57"}
Oct 09 14:14:19 crc kubenswrapper[4762]: I1009 14:14:19.256406 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-8cv2d" podStartSLOduration=1.7531000890000001 podStartE2EDuration="4.256383653s" podCreationTimestamp="2025-10-09 14:14:15 +0000 UTC" firstStartedPulling="2025-10-09 14:14:16.206921492 +0000 UTC m=+2931.980712531" lastFinishedPulling="2025-10-09 14:14:18.710205046 +0000 UTC m=+2934.483996095" observedRunningTime="2025-10-09 14:14:19.254692798 +0000 UTC m=+2935.028483837" watchObservedRunningTime="2025-10-09 14:14:19.256383653 +0000 UTC m=+2935.030174712"
Oct 09 14:14:25 crc kubenswrapper[4762]: I1009 14:14:25.368743 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-8cv2d"
Oct 09 14:14:25 crc kubenswrapper[4762]: I1009 14:14:25.370796 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-8cv2d"
Oct 09 14:14:25 crc kubenswrapper[4762]: I1009 14:14:25.419129 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-8cv2d"
Oct 09 14:14:26 crc kubenswrapper[4762]: I1009 14:14:26.337996 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-8cv2d"
Oct 09 14:14:26 crc kubenswrapper[4762]: I1009 14:14:26.387684 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-8cv2d"]
Oct 09 14:14:28 crc kubenswrapper[4762]: I1009 14:14:28.307918 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-8cv2d" podUID="c32bd705-52e2-4797-af75-42c2cb050cb2" containerName="registry-server" containerID="cri-o://fb47426200ff5f3cb3c3567fe79e7c96e22e777eb983a1f463b47e8e239e1d57" gracePeriod=2
Oct 09 14:14:28 crc kubenswrapper[4762]: I1009 14:14:28.791797 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-8cv2d"
Oct 09 14:14:28 crc kubenswrapper[4762]: I1009 14:14:28.979785 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c32bd705-52e2-4797-af75-42c2cb050cb2-utilities\") pod \"c32bd705-52e2-4797-af75-42c2cb050cb2\" (UID: \"c32bd705-52e2-4797-af75-42c2cb050cb2\") "
Oct 09 14:14:28 crc kubenswrapper[4762]: I1009 14:14:28.980135 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c32bd705-52e2-4797-af75-42c2cb050cb2-catalog-content\") pod \"c32bd705-52e2-4797-af75-42c2cb050cb2\" (UID: \"c32bd705-52e2-4797-af75-42c2cb050cb2\") "
Oct 09 14:14:28 crc kubenswrapper[4762]: I1009 14:14:28.980166 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mrq4f\" (UniqueName: \"kubernetes.io/projected/c32bd705-52e2-4797-af75-42c2cb050cb2-kube-api-access-mrq4f\") pod \"c32bd705-52e2-4797-af75-42c2cb050cb2\" (UID: \"c32bd705-52e2-4797-af75-42c2cb050cb2\") "
Oct 09 14:14:28 crc kubenswrapper[4762]: I1009 14:14:28.980963 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c32bd705-52e2-4797-af75-42c2cb050cb2-utilities" (OuterVolumeSpecName: "utilities") pod "c32bd705-52e2-4797-af75-42c2cb050cb2" (UID: "c32bd705-52e2-4797-af75-42c2cb050cb2"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 09 14:14:28 crc kubenswrapper[4762]: I1009 14:14:28.986861 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c32bd705-52e2-4797-af75-42c2cb050cb2-kube-api-access-mrq4f" (OuterVolumeSpecName: "kube-api-access-mrq4f") pod "c32bd705-52e2-4797-af75-42c2cb050cb2" (UID: "c32bd705-52e2-4797-af75-42c2cb050cb2"). InnerVolumeSpecName "kube-api-access-mrq4f". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 09 14:14:29 crc kubenswrapper[4762]: I1009 14:14:29.028565 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c32bd705-52e2-4797-af75-42c2cb050cb2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c32bd705-52e2-4797-af75-42c2cb050cb2" (UID: "c32bd705-52e2-4797-af75-42c2cb050cb2"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 09 14:14:29 crc kubenswrapper[4762]: I1009 14:14:29.081987 4762 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c32bd705-52e2-4797-af75-42c2cb050cb2-utilities\") on node \"crc\" DevicePath \"\""
Oct 09 14:14:29 crc kubenswrapper[4762]: I1009 14:14:29.082019 4762 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c32bd705-52e2-4797-af75-42c2cb050cb2-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 09 14:14:29 crc kubenswrapper[4762]: I1009 14:14:29.082030 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mrq4f\" (UniqueName: \"kubernetes.io/projected/c32bd705-52e2-4797-af75-42c2cb050cb2-kube-api-access-mrq4f\") on node \"crc\" DevicePath \"\""
Oct 09 14:14:29 crc kubenswrapper[4762]: I1009 14:14:29.308827 4762 generic.go:334] "Generic (PLEG): container finished" podID="c32bd705-52e2-4797-af75-42c2cb050cb2" containerID="fb47426200ff5f3cb3c3567fe79e7c96e22e777eb983a1f463b47e8e239e1d57" exitCode=0
Oct 09 14:14:29 crc kubenswrapper[4762]: I1009 14:14:29.308870 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8cv2d" event={"ID":"c32bd705-52e2-4797-af75-42c2cb050cb2","Type":"ContainerDied","Data":"fb47426200ff5f3cb3c3567fe79e7c96e22e777eb983a1f463b47e8e239e1d57"}
Oct 09 14:14:29 crc kubenswrapper[4762]: I1009 14:14:29.308895 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8cv2d" event={"ID":"c32bd705-52e2-4797-af75-42c2cb050cb2","Type":"ContainerDied","Data":"a4325edfe90c1b459b6db27d1eb2be791c6dcdb52573500ba088be137b0c03d1"}
Oct 09 14:14:29 crc kubenswrapper[4762]: I1009 14:14:29.308913 4762 scope.go:117] "RemoveContainer" containerID="fb47426200ff5f3cb3c3567fe79e7c96e22e777eb983a1f463b47e8e239e1d57"
Oct 09 14:14:29 crc kubenswrapper[4762]: I1009 14:14:29.309024 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-8cv2d" Oct 09 14:14:29 crc kubenswrapper[4762]: I1009 14:14:29.328822 4762 scope.go:117] "RemoveContainer" containerID="0be61f25745a0d7b53de84ea3e8b59062e93f966e3530ed7bb8fb9555b904613" Oct 09 14:14:29 crc kubenswrapper[4762]: I1009 14:14:29.339799 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-8cv2d"] Oct 09 14:14:29 crc kubenswrapper[4762]: I1009 14:14:29.344947 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-8cv2d"] Oct 09 14:14:29 crc kubenswrapper[4762]: I1009 14:14:29.368839 4762 scope.go:117] "RemoveContainer" containerID="e5ddc97d91c88c1dba4996193f00f85cb3f6ba3b4947b8d8b035fa1d1b52851e" Oct 09 14:14:29 crc kubenswrapper[4762]: I1009 14:14:29.387612 4762 scope.go:117] "RemoveContainer" containerID="fb47426200ff5f3cb3c3567fe79e7c96e22e777eb983a1f463b47e8e239e1d57" Oct 09 14:14:29 crc kubenswrapper[4762]: E1009 14:14:29.388371 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fb47426200ff5f3cb3c3567fe79e7c96e22e777eb983a1f463b47e8e239e1d57\": container with ID starting with fb47426200ff5f3cb3c3567fe79e7c96e22e777eb983a1f463b47e8e239e1d57 not found: ID does not exist" containerID="fb47426200ff5f3cb3c3567fe79e7c96e22e777eb983a1f463b47e8e239e1d57" Oct 09 14:14:29 crc kubenswrapper[4762]: I1009 14:14:29.388439 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fb47426200ff5f3cb3c3567fe79e7c96e22e777eb983a1f463b47e8e239e1d57"} err="failed to get container status \"fb47426200ff5f3cb3c3567fe79e7c96e22e777eb983a1f463b47e8e239e1d57\": rpc error: code = NotFound desc = could not find container \"fb47426200ff5f3cb3c3567fe79e7c96e22e777eb983a1f463b47e8e239e1d57\": container with ID starting with fb47426200ff5f3cb3c3567fe79e7c96e22e777eb983a1f463b47e8e239e1d57 not found: ID does not exist" Oct 09 14:14:29 crc kubenswrapper[4762]: I1009 14:14:29.388465 4762 scope.go:117] "RemoveContainer" containerID="0be61f25745a0d7b53de84ea3e8b59062e93f966e3530ed7bb8fb9555b904613" Oct 09 14:14:29 crc kubenswrapper[4762]: E1009 14:14:29.388899 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0be61f25745a0d7b53de84ea3e8b59062e93f966e3530ed7bb8fb9555b904613\": container with ID starting with 0be61f25745a0d7b53de84ea3e8b59062e93f966e3530ed7bb8fb9555b904613 not found: ID does not exist" containerID="0be61f25745a0d7b53de84ea3e8b59062e93f966e3530ed7bb8fb9555b904613" Oct 09 14:14:29 crc kubenswrapper[4762]: I1009 14:14:29.388930 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0be61f25745a0d7b53de84ea3e8b59062e93f966e3530ed7bb8fb9555b904613"} err="failed to get container status \"0be61f25745a0d7b53de84ea3e8b59062e93f966e3530ed7bb8fb9555b904613\": rpc error: code = NotFound desc = could not find container \"0be61f25745a0d7b53de84ea3e8b59062e93f966e3530ed7bb8fb9555b904613\": container with ID starting with 0be61f25745a0d7b53de84ea3e8b59062e93f966e3530ed7bb8fb9555b904613 not found: ID does not exist" Oct 09 14:14:29 crc kubenswrapper[4762]: I1009 14:14:29.388944 4762 scope.go:117] "RemoveContainer" containerID="e5ddc97d91c88c1dba4996193f00f85cb3f6ba3b4947b8d8b035fa1d1b52851e" Oct 09 14:14:29 crc kubenswrapper[4762]: E1009 14:14:29.389219 4762 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"e5ddc97d91c88c1dba4996193f00f85cb3f6ba3b4947b8d8b035fa1d1b52851e\": container with ID starting with e5ddc97d91c88c1dba4996193f00f85cb3f6ba3b4947b8d8b035fa1d1b52851e not found: ID does not exist" containerID="e5ddc97d91c88c1dba4996193f00f85cb3f6ba3b4947b8d8b035fa1d1b52851e" Oct 09 14:14:29 crc kubenswrapper[4762]: I1009 14:14:29.389269 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e5ddc97d91c88c1dba4996193f00f85cb3f6ba3b4947b8d8b035fa1d1b52851e"} err="failed to get container status \"e5ddc97d91c88c1dba4996193f00f85cb3f6ba3b4947b8d8b035fa1d1b52851e\": rpc error: code = NotFound desc = could not find container \"e5ddc97d91c88c1dba4996193f00f85cb3f6ba3b4947b8d8b035fa1d1b52851e\": container with ID starting with e5ddc97d91c88c1dba4996193f00f85cb3f6ba3b4947b8d8b035fa1d1b52851e not found: ID does not exist" Oct 09 14:14:30 crc kubenswrapper[4762]: I1009 14:14:30.974042 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c32bd705-52e2-4797-af75-42c2cb050cb2" path="/var/lib/kubelet/pods/c32bd705-52e2-4797-af75-42c2cb050cb2/volumes" Oct 09 14:15:00 crc kubenswrapper[4762]: I1009 14:15:00.180854 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29333655-rpr8k"] Oct 09 14:15:00 crc kubenswrapper[4762]: E1009 14:15:00.183231 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c32bd705-52e2-4797-af75-42c2cb050cb2" containerName="registry-server" Oct 09 14:15:00 crc kubenswrapper[4762]: I1009 14:15:00.183955 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="c32bd705-52e2-4797-af75-42c2cb050cb2" containerName="registry-server" Oct 09 14:15:00 crc kubenswrapper[4762]: E1009 14:15:00.184080 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c32bd705-52e2-4797-af75-42c2cb050cb2" containerName="extract-utilities" Oct 09 14:15:00 crc kubenswrapper[4762]: I1009 14:15:00.184155 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="c32bd705-52e2-4797-af75-42c2cb050cb2" containerName="extract-utilities" Oct 09 14:15:00 crc kubenswrapper[4762]: E1009 14:15:00.184254 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c32bd705-52e2-4797-af75-42c2cb050cb2" containerName="extract-content" Oct 09 14:15:00 crc kubenswrapper[4762]: I1009 14:15:00.184323 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="c32bd705-52e2-4797-af75-42c2cb050cb2" containerName="extract-content" Oct 09 14:15:00 crc kubenswrapper[4762]: I1009 14:15:00.184923 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="c32bd705-52e2-4797-af75-42c2cb050cb2" containerName="registry-server" Oct 09 14:15:00 crc kubenswrapper[4762]: I1009 14:15:00.185752 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29333655-rpr8k" Oct 09 14:15:00 crc kubenswrapper[4762]: I1009 14:15:00.188053 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 09 14:15:00 crc kubenswrapper[4762]: I1009 14:15:00.188053 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 09 14:15:00 crc kubenswrapper[4762]: I1009 14:15:00.190428 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29333655-rpr8k"] Oct 09 14:15:00 crc kubenswrapper[4762]: I1009 14:15:00.214360 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0bf72a90-6a69-4588-beea-759828586d14-secret-volume\") pod \"collect-profiles-29333655-rpr8k\" (UID: \"0bf72a90-6a69-4588-beea-759828586d14\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333655-rpr8k" Oct 09 14:15:00 crc kubenswrapper[4762]: I1009 14:15:00.214423 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0bf72a90-6a69-4588-beea-759828586d14-config-volume\") pod \"collect-profiles-29333655-rpr8k\" (UID: \"0bf72a90-6a69-4588-beea-759828586d14\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333655-rpr8k" Oct 09 14:15:00 crc kubenswrapper[4762]: I1009 14:15:00.214516 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vqwk4\" (UniqueName: \"kubernetes.io/projected/0bf72a90-6a69-4588-beea-759828586d14-kube-api-access-vqwk4\") pod \"collect-profiles-29333655-rpr8k\" (UID: \"0bf72a90-6a69-4588-beea-759828586d14\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333655-rpr8k" Oct 09 14:15:00 crc kubenswrapper[4762]: I1009 14:15:00.316139 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0bf72a90-6a69-4588-beea-759828586d14-secret-volume\") pod \"collect-profiles-29333655-rpr8k\" (UID: \"0bf72a90-6a69-4588-beea-759828586d14\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333655-rpr8k" Oct 09 14:15:00 crc kubenswrapper[4762]: I1009 14:15:00.316466 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0bf72a90-6a69-4588-beea-759828586d14-config-volume\") pod \"collect-profiles-29333655-rpr8k\" (UID: \"0bf72a90-6a69-4588-beea-759828586d14\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333655-rpr8k" Oct 09 14:15:00 crc kubenswrapper[4762]: I1009 14:15:00.316511 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vqwk4\" (UniqueName: \"kubernetes.io/projected/0bf72a90-6a69-4588-beea-759828586d14-kube-api-access-vqwk4\") pod \"collect-profiles-29333655-rpr8k\" (UID: \"0bf72a90-6a69-4588-beea-759828586d14\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333655-rpr8k" Oct 09 14:15:00 crc kubenswrapper[4762]: I1009 14:15:00.317522 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0bf72a90-6a69-4588-beea-759828586d14-config-volume\") pod 
\"collect-profiles-29333655-rpr8k\" (UID: \"0bf72a90-6a69-4588-beea-759828586d14\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333655-rpr8k" Oct 09 14:15:00 crc kubenswrapper[4762]: I1009 14:15:00.322559 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0bf72a90-6a69-4588-beea-759828586d14-secret-volume\") pod \"collect-profiles-29333655-rpr8k\" (UID: \"0bf72a90-6a69-4588-beea-759828586d14\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333655-rpr8k" Oct 09 14:15:00 crc kubenswrapper[4762]: I1009 14:15:00.332470 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vqwk4\" (UniqueName: \"kubernetes.io/projected/0bf72a90-6a69-4588-beea-759828586d14-kube-api-access-vqwk4\") pod \"collect-profiles-29333655-rpr8k\" (UID: \"0bf72a90-6a69-4588-beea-759828586d14\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333655-rpr8k" Oct 09 14:15:00 crc kubenswrapper[4762]: I1009 14:15:00.508375 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29333655-rpr8k" Oct 09 14:15:00 crc kubenswrapper[4762]: I1009 14:15:00.720374 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29333655-rpr8k"] Oct 09 14:15:01 crc kubenswrapper[4762]: I1009 14:15:01.565367 4762 generic.go:334] "Generic (PLEG): container finished" podID="0bf72a90-6a69-4588-beea-759828586d14" containerID="8a7e64f2a56179a0c804eec034b1b6d3bb99d69afc72701cbde82f446d02f993" exitCode=0 Oct 09 14:15:01 crc kubenswrapper[4762]: I1009 14:15:01.565427 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29333655-rpr8k" event={"ID":"0bf72a90-6a69-4588-beea-759828586d14","Type":"ContainerDied","Data":"8a7e64f2a56179a0c804eec034b1b6d3bb99d69afc72701cbde82f446d02f993"} Oct 09 14:15:01 crc kubenswrapper[4762]: I1009 14:15:01.565664 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29333655-rpr8k" event={"ID":"0bf72a90-6a69-4588-beea-759828586d14","Type":"ContainerStarted","Data":"d2000cb96b5e8626538be4496a3511b57479476c0a98401a7f753b7640d43224"} Oct 09 14:15:02 crc kubenswrapper[4762]: I1009 14:15:02.850913 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29333655-rpr8k" Oct 09 14:15:02 crc kubenswrapper[4762]: I1009 14:15:02.855463 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vqwk4\" (UniqueName: \"kubernetes.io/projected/0bf72a90-6a69-4588-beea-759828586d14-kube-api-access-vqwk4\") pod \"0bf72a90-6a69-4588-beea-759828586d14\" (UID: \"0bf72a90-6a69-4588-beea-759828586d14\") " Oct 09 14:15:02 crc kubenswrapper[4762]: I1009 14:15:02.855603 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0bf72a90-6a69-4588-beea-759828586d14-secret-volume\") pod \"0bf72a90-6a69-4588-beea-759828586d14\" (UID: \"0bf72a90-6a69-4588-beea-759828586d14\") " Oct 09 14:15:02 crc kubenswrapper[4762]: I1009 14:15:02.855666 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0bf72a90-6a69-4588-beea-759828586d14-config-volume\") pod \"0bf72a90-6a69-4588-beea-759828586d14\" (UID: \"0bf72a90-6a69-4588-beea-759828586d14\") " Oct 09 14:15:02 crc kubenswrapper[4762]: I1009 14:15:02.856265 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0bf72a90-6a69-4588-beea-759828586d14-config-volume" (OuterVolumeSpecName: "config-volume") pod "0bf72a90-6a69-4588-beea-759828586d14" (UID: "0bf72a90-6a69-4588-beea-759828586d14"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 14:15:02 crc kubenswrapper[4762]: I1009 14:15:02.860326 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0bf72a90-6a69-4588-beea-759828586d14-kube-api-access-vqwk4" (OuterVolumeSpecName: "kube-api-access-vqwk4") pod "0bf72a90-6a69-4588-beea-759828586d14" (UID: "0bf72a90-6a69-4588-beea-759828586d14"). InnerVolumeSpecName "kube-api-access-vqwk4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 14:15:02 crc kubenswrapper[4762]: I1009 14:15:02.860334 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0bf72a90-6a69-4588-beea-759828586d14-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "0bf72a90-6a69-4588-beea-759828586d14" (UID: "0bf72a90-6a69-4588-beea-759828586d14"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 14:15:02 crc kubenswrapper[4762]: I1009 14:15:02.956610 4762 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0bf72a90-6a69-4588-beea-759828586d14-config-volume\") on node \"crc\" DevicePath \"\"" Oct 09 14:15:02 crc kubenswrapper[4762]: I1009 14:15:02.956662 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vqwk4\" (UniqueName: \"kubernetes.io/projected/0bf72a90-6a69-4588-beea-759828586d14-kube-api-access-vqwk4\") on node \"crc\" DevicePath \"\"" Oct 09 14:15:02 crc kubenswrapper[4762]: I1009 14:15:02.956673 4762 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0bf72a90-6a69-4588-beea-759828586d14-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 09 14:15:03 crc kubenswrapper[4762]: I1009 14:15:03.588326 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29333655-rpr8k" event={"ID":"0bf72a90-6a69-4588-beea-759828586d14","Type":"ContainerDied","Data":"d2000cb96b5e8626538be4496a3511b57479476c0a98401a7f753b7640d43224"} Oct 09 14:15:03 crc kubenswrapper[4762]: I1009 14:15:03.588657 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d2000cb96b5e8626538be4496a3511b57479476c0a98401a7f753b7640d43224" Oct 09 14:15:03 crc kubenswrapper[4762]: I1009 14:15:03.588398 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29333655-rpr8k" Oct 09 14:15:03 crc kubenswrapper[4762]: I1009 14:15:03.918990 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29333610-fjr7w"] Oct 09 14:15:03 crc kubenswrapper[4762]: I1009 14:15:03.923619 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29333610-fjr7w"] Oct 09 14:15:04 crc kubenswrapper[4762]: I1009 14:15:04.977388 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b86678f-707c-438a-afc6-a6475c9a46c3" path="/var/lib/kubelet/pods/5b86678f-707c-438a-afc6-a6475c9a46c3/volumes" Oct 09 14:15:18 crc kubenswrapper[4762]: I1009 14:15:18.927130 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-225qc"] Oct 09 14:15:18 crc kubenswrapper[4762]: E1009 14:15:18.927933 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0bf72a90-6a69-4588-beea-759828586d14" containerName="collect-profiles" Oct 09 14:15:18 crc kubenswrapper[4762]: I1009 14:15:18.927945 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="0bf72a90-6a69-4588-beea-759828586d14" containerName="collect-profiles" Oct 09 14:15:18 crc kubenswrapper[4762]: I1009 14:15:18.928082 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="0bf72a90-6a69-4588-beea-759828586d14" containerName="collect-profiles" Oct 09 14:15:18 crc kubenswrapper[4762]: I1009 14:15:18.929046 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-225qc" Oct 09 14:15:18 crc kubenswrapper[4762]: I1009 14:15:18.954419 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-225qc"] Oct 09 14:15:19 crc kubenswrapper[4762]: I1009 14:15:19.083017 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nzzsh\" (UniqueName: \"kubernetes.io/projected/d94d3ba3-7419-45b8-b4b2-f92ab1f98ab9-kube-api-access-nzzsh\") pod \"redhat-operators-225qc\" (UID: \"d94d3ba3-7419-45b8-b4b2-f92ab1f98ab9\") " pod="openshift-marketplace/redhat-operators-225qc" Oct 09 14:15:19 crc kubenswrapper[4762]: I1009 14:15:19.083193 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d94d3ba3-7419-45b8-b4b2-f92ab1f98ab9-catalog-content\") pod \"redhat-operators-225qc\" (UID: \"d94d3ba3-7419-45b8-b4b2-f92ab1f98ab9\") " pod="openshift-marketplace/redhat-operators-225qc" Oct 09 14:15:19 crc kubenswrapper[4762]: I1009 14:15:19.083231 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d94d3ba3-7419-45b8-b4b2-f92ab1f98ab9-utilities\") pod \"redhat-operators-225qc\" (UID: \"d94d3ba3-7419-45b8-b4b2-f92ab1f98ab9\") " pod="openshift-marketplace/redhat-operators-225qc" Oct 09 14:15:19 crc kubenswrapper[4762]: I1009 14:15:19.184688 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nzzsh\" (UniqueName: \"kubernetes.io/projected/d94d3ba3-7419-45b8-b4b2-f92ab1f98ab9-kube-api-access-nzzsh\") pod \"redhat-operators-225qc\" (UID: \"d94d3ba3-7419-45b8-b4b2-f92ab1f98ab9\") " pod="openshift-marketplace/redhat-operators-225qc" Oct 09 14:15:19 crc kubenswrapper[4762]: I1009 14:15:19.184845 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d94d3ba3-7419-45b8-b4b2-f92ab1f98ab9-catalog-content\") pod \"redhat-operators-225qc\" (UID: \"d94d3ba3-7419-45b8-b4b2-f92ab1f98ab9\") " pod="openshift-marketplace/redhat-operators-225qc" Oct 09 14:15:19 crc kubenswrapper[4762]: I1009 14:15:19.184869 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d94d3ba3-7419-45b8-b4b2-f92ab1f98ab9-utilities\") pod \"redhat-operators-225qc\" (UID: \"d94d3ba3-7419-45b8-b4b2-f92ab1f98ab9\") " pod="openshift-marketplace/redhat-operators-225qc" Oct 09 14:15:19 crc kubenswrapper[4762]: I1009 14:15:19.185328 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d94d3ba3-7419-45b8-b4b2-f92ab1f98ab9-utilities\") pod \"redhat-operators-225qc\" (UID: \"d94d3ba3-7419-45b8-b4b2-f92ab1f98ab9\") " pod="openshift-marketplace/redhat-operators-225qc" Oct 09 14:15:19 crc kubenswrapper[4762]: I1009 14:15:19.185551 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d94d3ba3-7419-45b8-b4b2-f92ab1f98ab9-catalog-content\") pod \"redhat-operators-225qc\" (UID: \"d94d3ba3-7419-45b8-b4b2-f92ab1f98ab9\") " pod="openshift-marketplace/redhat-operators-225qc" Oct 09 14:15:19 crc kubenswrapper[4762]: I1009 14:15:19.208673 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-nzzsh\" (UniqueName: \"kubernetes.io/projected/d94d3ba3-7419-45b8-b4b2-f92ab1f98ab9-kube-api-access-nzzsh\") pod \"redhat-operators-225qc\" (UID: \"d94d3ba3-7419-45b8-b4b2-f92ab1f98ab9\") " pod="openshift-marketplace/redhat-operators-225qc" Oct 09 14:15:19 crc kubenswrapper[4762]: I1009 14:15:19.253589 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-225qc" Oct 09 14:15:19 crc kubenswrapper[4762]: I1009 14:15:19.675451 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-225qc"] Oct 09 14:15:19 crc kubenswrapper[4762]: I1009 14:15:19.717058 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-225qc" event={"ID":"d94d3ba3-7419-45b8-b4b2-f92ab1f98ab9","Type":"ContainerStarted","Data":"a06ba483251d8cf38253f2fe65fc1869c295a72218a8a4afb447edabda8fdad8"} Oct 09 14:15:20 crc kubenswrapper[4762]: I1009 14:15:20.724230 4762 generic.go:334] "Generic (PLEG): container finished" podID="d94d3ba3-7419-45b8-b4b2-f92ab1f98ab9" containerID="bfa925f7c04e5e362d32af1d7ecb1151197dc2df13dd7c6894af08844ffedc5b" exitCode=0 Oct 09 14:15:20 crc kubenswrapper[4762]: I1009 14:15:20.724275 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-225qc" event={"ID":"d94d3ba3-7419-45b8-b4b2-f92ab1f98ab9","Type":"ContainerDied","Data":"bfa925f7c04e5e362d32af1d7ecb1151197dc2df13dd7c6894af08844ffedc5b"} Oct 09 14:15:21 crc kubenswrapper[4762]: I1009 14:15:21.732655 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-225qc" event={"ID":"d94d3ba3-7419-45b8-b4b2-f92ab1f98ab9","Type":"ContainerStarted","Data":"7570e91fce631e254f60895fa66ecebdabbcfdf26c521ca7e2c652b49d0fd062"} Oct 09 14:15:22 crc kubenswrapper[4762]: I1009 14:15:22.742141 4762 generic.go:334] "Generic (PLEG): container finished" podID="d94d3ba3-7419-45b8-b4b2-f92ab1f98ab9" containerID="7570e91fce631e254f60895fa66ecebdabbcfdf26c521ca7e2c652b49d0fd062" exitCode=0 Oct 09 14:15:22 crc kubenswrapper[4762]: I1009 14:15:22.742192 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-225qc" event={"ID":"d94d3ba3-7419-45b8-b4b2-f92ab1f98ab9","Type":"ContainerDied","Data":"7570e91fce631e254f60895fa66ecebdabbcfdf26c521ca7e2c652b49d0fd062"} Oct 09 14:15:23 crc kubenswrapper[4762]: I1009 14:15:23.751282 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-225qc" event={"ID":"d94d3ba3-7419-45b8-b4b2-f92ab1f98ab9","Type":"ContainerStarted","Data":"76cc637e232ce52ea7b1314283053df8488a3fafa9b8d21f9f87b95c7f6d19eb"} Oct 09 14:15:23 crc kubenswrapper[4762]: I1009 14:15:23.777581 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-225qc" podStartSLOduration=3.298707542 podStartE2EDuration="5.777562278s" podCreationTimestamp="2025-10-09 14:15:18 +0000 UTC" firstStartedPulling="2025-10-09 14:15:20.725713696 +0000 UTC m=+2996.499504735" lastFinishedPulling="2025-10-09 14:15:23.204568432 +0000 UTC m=+2998.978359471" observedRunningTime="2025-10-09 14:15:23.777316002 +0000 UTC m=+2999.551107041" watchObservedRunningTime="2025-10-09 14:15:23.777562278 +0000 UTC m=+2999.551353317" Oct 09 14:15:29 crc kubenswrapper[4762]: I1009 14:15:29.253800 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-225qc" 
Oct 09 14:15:29 crc kubenswrapper[4762]: I1009 14:15:29.254725 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-225qc" Oct 09 14:15:29 crc kubenswrapper[4762]: I1009 14:15:29.298860 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-225qc" Oct 09 14:15:29 crc kubenswrapper[4762]: I1009 14:15:29.831742 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-225qc" Oct 09 14:15:29 crc kubenswrapper[4762]: I1009 14:15:29.875165 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-225qc"] Oct 09 14:15:30 crc kubenswrapper[4762]: I1009 14:15:30.814751 4762 scope.go:117] "RemoveContainer" containerID="e882a2019eef9efc9666557c161ad28de6d8e3ca8387165480230311abb796fb" Oct 09 14:15:31 crc kubenswrapper[4762]: I1009 14:15:31.808663 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-225qc" podUID="d94d3ba3-7419-45b8-b4b2-f92ab1f98ab9" containerName="registry-server" containerID="cri-o://76cc637e232ce52ea7b1314283053df8488a3fafa9b8d21f9f87b95c7f6d19eb" gracePeriod=2 Oct 09 14:15:32 crc kubenswrapper[4762]: I1009 14:15:32.194007 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-225qc" Oct 09 14:15:32 crc kubenswrapper[4762]: I1009 14:15:32.377349 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzzsh\" (UniqueName: \"kubernetes.io/projected/d94d3ba3-7419-45b8-b4b2-f92ab1f98ab9-kube-api-access-nzzsh\") pod \"d94d3ba3-7419-45b8-b4b2-f92ab1f98ab9\" (UID: \"d94d3ba3-7419-45b8-b4b2-f92ab1f98ab9\") " Oct 09 14:15:32 crc kubenswrapper[4762]: I1009 14:15:32.377491 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d94d3ba3-7419-45b8-b4b2-f92ab1f98ab9-utilities\") pod \"d94d3ba3-7419-45b8-b4b2-f92ab1f98ab9\" (UID: \"d94d3ba3-7419-45b8-b4b2-f92ab1f98ab9\") " Oct 09 14:15:32 crc kubenswrapper[4762]: I1009 14:15:32.377765 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d94d3ba3-7419-45b8-b4b2-f92ab1f98ab9-catalog-content\") pod \"d94d3ba3-7419-45b8-b4b2-f92ab1f98ab9\" (UID: \"d94d3ba3-7419-45b8-b4b2-f92ab1f98ab9\") " Oct 09 14:15:32 crc kubenswrapper[4762]: I1009 14:15:32.380026 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d94d3ba3-7419-45b8-b4b2-f92ab1f98ab9-utilities" (OuterVolumeSpecName: "utilities") pod "d94d3ba3-7419-45b8-b4b2-f92ab1f98ab9" (UID: "d94d3ba3-7419-45b8-b4b2-f92ab1f98ab9"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 14:15:32 crc kubenswrapper[4762]: I1009 14:15:32.386681 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d94d3ba3-7419-45b8-b4b2-f92ab1f98ab9-kube-api-access-nzzsh" (OuterVolumeSpecName: "kube-api-access-nzzsh") pod "d94d3ba3-7419-45b8-b4b2-f92ab1f98ab9" (UID: "d94d3ba3-7419-45b8-b4b2-f92ab1f98ab9"). InnerVolumeSpecName "kube-api-access-nzzsh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 14:15:32 crc kubenswrapper[4762]: I1009 14:15:32.480587 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzzsh\" (UniqueName: \"kubernetes.io/projected/d94d3ba3-7419-45b8-b4b2-f92ab1f98ab9-kube-api-access-nzzsh\") on node \"crc\" DevicePath \"\"" Oct 09 14:15:32 crc kubenswrapper[4762]: I1009 14:15:32.480621 4762 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d94d3ba3-7419-45b8-b4b2-f92ab1f98ab9-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 14:15:32 crc kubenswrapper[4762]: I1009 14:15:32.818366 4762 generic.go:334] "Generic (PLEG): container finished" podID="d94d3ba3-7419-45b8-b4b2-f92ab1f98ab9" containerID="76cc637e232ce52ea7b1314283053df8488a3fafa9b8d21f9f87b95c7f6d19eb" exitCode=0 Oct 09 14:15:32 crc kubenswrapper[4762]: I1009 14:15:32.818406 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-225qc" event={"ID":"d94d3ba3-7419-45b8-b4b2-f92ab1f98ab9","Type":"ContainerDied","Data":"76cc637e232ce52ea7b1314283053df8488a3fafa9b8d21f9f87b95c7f6d19eb"} Oct 09 14:15:32 crc kubenswrapper[4762]: I1009 14:15:32.818447 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-225qc" event={"ID":"d94d3ba3-7419-45b8-b4b2-f92ab1f98ab9","Type":"ContainerDied","Data":"a06ba483251d8cf38253f2fe65fc1869c295a72218a8a4afb447edabda8fdad8"} Oct 09 14:15:32 crc kubenswrapper[4762]: I1009 14:15:32.818464 4762 scope.go:117] "RemoveContainer" containerID="76cc637e232ce52ea7b1314283053df8488a3fafa9b8d21f9f87b95c7f6d19eb" Oct 09 14:15:32 crc kubenswrapper[4762]: I1009 14:15:32.818580 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-225qc" Oct 09 14:15:32 crc kubenswrapper[4762]: I1009 14:15:32.843987 4762 scope.go:117] "RemoveContainer" containerID="7570e91fce631e254f60895fa66ecebdabbcfdf26c521ca7e2c652b49d0fd062" Oct 09 14:15:32 crc kubenswrapper[4762]: I1009 14:15:32.859696 4762 scope.go:117] "RemoveContainer" containerID="bfa925f7c04e5e362d32af1d7ecb1151197dc2df13dd7c6894af08844ffedc5b" Oct 09 14:15:32 crc kubenswrapper[4762]: I1009 14:15:32.886571 4762 scope.go:117] "RemoveContainer" containerID="76cc637e232ce52ea7b1314283053df8488a3fafa9b8d21f9f87b95c7f6d19eb" Oct 09 14:15:32 crc kubenswrapper[4762]: E1009 14:15:32.887157 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"76cc637e232ce52ea7b1314283053df8488a3fafa9b8d21f9f87b95c7f6d19eb\": container with ID starting with 76cc637e232ce52ea7b1314283053df8488a3fafa9b8d21f9f87b95c7f6d19eb not found: ID does not exist" containerID="76cc637e232ce52ea7b1314283053df8488a3fafa9b8d21f9f87b95c7f6d19eb" Oct 09 14:15:32 crc kubenswrapper[4762]: I1009 14:15:32.887217 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"76cc637e232ce52ea7b1314283053df8488a3fafa9b8d21f9f87b95c7f6d19eb"} err="failed to get container status \"76cc637e232ce52ea7b1314283053df8488a3fafa9b8d21f9f87b95c7f6d19eb\": rpc error: code = NotFound desc = could not find container \"76cc637e232ce52ea7b1314283053df8488a3fafa9b8d21f9f87b95c7f6d19eb\": container with ID starting with 76cc637e232ce52ea7b1314283053df8488a3fafa9b8d21f9f87b95c7f6d19eb not found: ID does not exist" Oct 09 14:15:32 crc kubenswrapper[4762]: I1009 14:15:32.887253 4762 scope.go:117] "RemoveContainer" containerID="7570e91fce631e254f60895fa66ecebdabbcfdf26c521ca7e2c652b49d0fd062" Oct 09 14:15:32 crc kubenswrapper[4762]: I1009 14:15:32.887374 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d94d3ba3-7419-45b8-b4b2-f92ab1f98ab9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d94d3ba3-7419-45b8-b4b2-f92ab1f98ab9" (UID: "d94d3ba3-7419-45b8-b4b2-f92ab1f98ab9"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 14:15:32 crc kubenswrapper[4762]: E1009 14:15:32.887724 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7570e91fce631e254f60895fa66ecebdabbcfdf26c521ca7e2c652b49d0fd062\": container with ID starting with 7570e91fce631e254f60895fa66ecebdabbcfdf26c521ca7e2c652b49d0fd062 not found: ID does not exist" containerID="7570e91fce631e254f60895fa66ecebdabbcfdf26c521ca7e2c652b49d0fd062" Oct 09 14:15:32 crc kubenswrapper[4762]: I1009 14:15:32.887774 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7570e91fce631e254f60895fa66ecebdabbcfdf26c521ca7e2c652b49d0fd062"} err="failed to get container status \"7570e91fce631e254f60895fa66ecebdabbcfdf26c521ca7e2c652b49d0fd062\": rpc error: code = NotFound desc = could not find container \"7570e91fce631e254f60895fa66ecebdabbcfdf26c521ca7e2c652b49d0fd062\": container with ID starting with 7570e91fce631e254f60895fa66ecebdabbcfdf26c521ca7e2c652b49d0fd062 not found: ID does not exist" Oct 09 14:15:32 crc kubenswrapper[4762]: I1009 14:15:32.887802 4762 scope.go:117] "RemoveContainer" containerID="bfa925f7c04e5e362d32af1d7ecb1151197dc2df13dd7c6894af08844ffedc5b" Oct 09 14:15:32 crc kubenswrapper[4762]: E1009 14:15:32.888167 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bfa925f7c04e5e362d32af1d7ecb1151197dc2df13dd7c6894af08844ffedc5b\": container with ID starting with bfa925f7c04e5e362d32af1d7ecb1151197dc2df13dd7c6894af08844ffedc5b not found: ID does not exist" containerID="bfa925f7c04e5e362d32af1d7ecb1151197dc2df13dd7c6894af08844ffedc5b" Oct 09 14:15:32 crc kubenswrapper[4762]: I1009 14:15:32.888214 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bfa925f7c04e5e362d32af1d7ecb1151197dc2df13dd7c6894af08844ffedc5b"} err="failed to get container status \"bfa925f7c04e5e362d32af1d7ecb1151197dc2df13dd7c6894af08844ffedc5b\": rpc error: code = NotFound desc = could not find container \"bfa925f7c04e5e362d32af1d7ecb1151197dc2df13dd7c6894af08844ffedc5b\": container with ID starting with bfa925f7c04e5e362d32af1d7ecb1151197dc2df13dd7c6894af08844ffedc5b not found: ID does not exist" Oct 09 14:15:32 crc kubenswrapper[4762]: I1009 14:15:32.985953 4762 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d94d3ba3-7419-45b8-b4b2-f92ab1f98ab9-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 14:15:33 crc kubenswrapper[4762]: I1009 14:15:33.145562 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-225qc"] Oct 09 14:15:33 crc kubenswrapper[4762]: I1009 14:15:33.154249 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-225qc"] Oct 09 14:15:34 crc kubenswrapper[4762]: I1009 14:15:34.974578 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d94d3ba3-7419-45b8-b4b2-f92ab1f98ab9" path="/var/lib/kubelet/pods/d94d3ba3-7419-45b8-b4b2-f92ab1f98ab9/volumes" Oct 09 14:16:41 crc kubenswrapper[4762]: I1009 14:16:41.969445 4762 patch_prober.go:28] interesting pod/machine-config-daemon-5v6hv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" 
start-of-body= Oct 09 14:16:41 crc kubenswrapper[4762]: I1009 14:16:41.970197 4762 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 14:17:11 crc kubenswrapper[4762]: I1009 14:17:11.969966 4762 patch_prober.go:28] interesting pod/machine-config-daemon-5v6hv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 14:17:11 crc kubenswrapper[4762]: I1009 14:17:11.970569 4762 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 14:17:41 crc kubenswrapper[4762]: I1009 14:17:41.969997 4762 patch_prober.go:28] interesting pod/machine-config-daemon-5v6hv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 14:17:41 crc kubenswrapper[4762]: I1009 14:17:41.971008 4762 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 14:17:41 crc kubenswrapper[4762]: I1009 14:17:41.971107 4762 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" Oct 09 14:17:41 crc kubenswrapper[4762]: I1009 14:17:41.972213 4762 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"5bd398d0aad3464db677ef65fde2ba9c81e274fc4099a8035b14053dc80af0a4"} pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 09 14:17:41 crc kubenswrapper[4762]: I1009 14:17:41.972306 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" containerID="cri-o://5bd398d0aad3464db677ef65fde2ba9c81e274fc4099a8035b14053dc80af0a4" gracePeriod=600 Oct 09 14:17:42 crc kubenswrapper[4762]: E1009 14:17:42.109252 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 14:17:42 crc kubenswrapper[4762]: E1009 14:17:42.202139 4762 cadvisor_stats_provider.go:516] "Partial failure issuing 
cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod366049a3_acf6_488c_9f93_4557528d6d14.slice/crio-conmon-5bd398d0aad3464db677ef65fde2ba9c81e274fc4099a8035b14053dc80af0a4.scope\": RecentStats: unable to find data in memory cache]" Oct 09 14:17:42 crc kubenswrapper[4762]: I1009 14:17:42.740188 4762 generic.go:334] "Generic (PLEG): container finished" podID="366049a3-acf6-488c-9f93-4557528d6d14" containerID="5bd398d0aad3464db677ef65fde2ba9c81e274fc4099a8035b14053dc80af0a4" exitCode=0 Oct 09 14:17:42 crc kubenswrapper[4762]: I1009 14:17:42.740253 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" event={"ID":"366049a3-acf6-488c-9f93-4557528d6d14","Type":"ContainerDied","Data":"5bd398d0aad3464db677ef65fde2ba9c81e274fc4099a8035b14053dc80af0a4"} Oct 09 14:17:42 crc kubenswrapper[4762]: I1009 14:17:42.740481 4762 scope.go:117] "RemoveContainer" containerID="8574429399b1c113b0ce6086afa7c562d5900a772553dc00a80ebe2219a037ab" Oct 09 14:17:42 crc kubenswrapper[4762]: I1009 14:17:42.741412 4762 scope.go:117] "RemoveContainer" containerID="5bd398d0aad3464db677ef65fde2ba9c81e274fc4099a8035b14053dc80af0a4" Oct 09 14:17:42 crc kubenswrapper[4762]: E1009 14:17:42.741950 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 14:17:54 crc kubenswrapper[4762]: I1009 14:17:54.982188 4762 scope.go:117] "RemoveContainer" containerID="5bd398d0aad3464db677ef65fde2ba9c81e274fc4099a8035b14053dc80af0a4" Oct 09 14:17:54 crc kubenswrapper[4762]: E1009 14:17:54.982965 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 14:18:06 crc kubenswrapper[4762]: I1009 14:18:06.965464 4762 scope.go:117] "RemoveContainer" containerID="5bd398d0aad3464db677ef65fde2ba9c81e274fc4099a8035b14053dc80af0a4" Oct 09 14:18:06 crc kubenswrapper[4762]: E1009 14:18:06.966205 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 14:18:20 crc kubenswrapper[4762]: I1009 14:18:20.964664 4762 scope.go:117] "RemoveContainer" containerID="5bd398d0aad3464db677ef65fde2ba9c81e274fc4099a8035b14053dc80af0a4" Oct 09 14:18:20 crc kubenswrapper[4762]: E1009 14:18:20.966653 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 14:18:31 crc kubenswrapper[4762]: I1009 14:18:31.965545 4762 scope.go:117] "RemoveContainer" containerID="5bd398d0aad3464db677ef65fde2ba9c81e274fc4099a8035b14053dc80af0a4" Oct 09 14:18:31 crc kubenswrapper[4762]: E1009 14:18:31.967411 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 14:18:45 crc kubenswrapper[4762]: I1009 14:18:45.964768 4762 scope.go:117] "RemoveContainer" containerID="5bd398d0aad3464db677ef65fde2ba9c81e274fc4099a8035b14053dc80af0a4" Oct 09 14:18:45 crc kubenswrapper[4762]: E1009 14:18:45.965475 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 14:18:56 crc kubenswrapper[4762]: I1009 14:18:56.965652 4762 scope.go:117] "RemoveContainer" containerID="5bd398d0aad3464db677ef65fde2ba9c81e274fc4099a8035b14053dc80af0a4" Oct 09 14:18:56 crc kubenswrapper[4762]: E1009 14:18:56.966367 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 14:19:07 crc kubenswrapper[4762]: I1009 14:19:07.965136 4762 scope.go:117] "RemoveContainer" containerID="5bd398d0aad3464db677ef65fde2ba9c81e274fc4099a8035b14053dc80af0a4" Oct 09 14:19:07 crc kubenswrapper[4762]: E1009 14:19:07.965916 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 14:19:18 crc kubenswrapper[4762]: I1009 14:19:18.965125 4762 scope.go:117] "RemoveContainer" containerID="5bd398d0aad3464db677ef65fde2ba9c81e274fc4099a8035b14053dc80af0a4" Oct 09 14:19:18 crc kubenswrapper[4762]: E1009 14:19:18.965963 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" 
podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 14:19:30 crc kubenswrapper[4762]: I1009 14:19:30.965943 4762 scope.go:117] "RemoveContainer" containerID="5bd398d0aad3464db677ef65fde2ba9c81e274fc4099a8035b14053dc80af0a4" Oct 09 14:19:30 crc kubenswrapper[4762]: E1009 14:19:30.966699 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 14:19:43 crc kubenswrapper[4762]: I1009 14:19:43.965734 4762 scope.go:117] "RemoveContainer" containerID="5bd398d0aad3464db677ef65fde2ba9c81e274fc4099a8035b14053dc80af0a4" Oct 09 14:19:43 crc kubenswrapper[4762]: E1009 14:19:43.966332 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 14:19:58 crc kubenswrapper[4762]: I1009 14:19:58.965441 4762 scope.go:117] "RemoveContainer" containerID="5bd398d0aad3464db677ef65fde2ba9c81e274fc4099a8035b14053dc80af0a4" Oct 09 14:19:58 crc kubenswrapper[4762]: E1009 14:19:58.966235 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 14:20:10 crc kubenswrapper[4762]: I1009 14:20:10.965735 4762 scope.go:117] "RemoveContainer" containerID="5bd398d0aad3464db677ef65fde2ba9c81e274fc4099a8035b14053dc80af0a4" Oct 09 14:20:10 crc kubenswrapper[4762]: E1009 14:20:10.966544 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 14:20:21 crc kubenswrapper[4762]: I1009 14:20:21.966519 4762 scope.go:117] "RemoveContainer" containerID="5bd398d0aad3464db677ef65fde2ba9c81e274fc4099a8035b14053dc80af0a4" Oct 09 14:20:21 crc kubenswrapper[4762]: E1009 14:20:21.968452 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 14:20:33 crc kubenswrapper[4762]: I1009 14:20:33.965609 4762 scope.go:117] "RemoveContainer" 
containerID="5bd398d0aad3464db677ef65fde2ba9c81e274fc4099a8035b14053dc80af0a4" Oct 09 14:20:33 crc kubenswrapper[4762]: E1009 14:20:33.966243 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 14:20:48 crc kubenswrapper[4762]: I1009 14:20:48.966257 4762 scope.go:117] "RemoveContainer" containerID="5bd398d0aad3464db677ef65fde2ba9c81e274fc4099a8035b14053dc80af0a4" Oct 09 14:20:48 crc kubenswrapper[4762]: E1009 14:20:48.971822 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 14:21:03 crc kubenswrapper[4762]: I1009 14:21:03.965178 4762 scope.go:117] "RemoveContainer" containerID="5bd398d0aad3464db677ef65fde2ba9c81e274fc4099a8035b14053dc80af0a4" Oct 09 14:21:03 crc kubenswrapper[4762]: E1009 14:21:03.967023 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 14:21:18 crc kubenswrapper[4762]: I1009 14:21:18.964857 4762 scope.go:117] "RemoveContainer" containerID="5bd398d0aad3464db677ef65fde2ba9c81e274fc4099a8035b14053dc80af0a4" Oct 09 14:21:18 crc kubenswrapper[4762]: E1009 14:21:18.965617 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 14:21:31 crc kubenswrapper[4762]: I1009 14:21:31.965066 4762 scope.go:117] "RemoveContainer" containerID="5bd398d0aad3464db677ef65fde2ba9c81e274fc4099a8035b14053dc80af0a4" Oct 09 14:21:31 crc kubenswrapper[4762]: E1009 14:21:31.965789 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 14:21:42 crc kubenswrapper[4762]: I1009 14:21:42.965075 4762 scope.go:117] "RemoveContainer" containerID="5bd398d0aad3464db677ef65fde2ba9c81e274fc4099a8035b14053dc80af0a4" Oct 09 14:21:42 crc kubenswrapper[4762]: E1009 14:21:42.965805 4762 pod_workers.go:1301] "Error syncing pod, 
skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 14:21:57 crc kubenswrapper[4762]: I1009 14:21:57.965752 4762 scope.go:117] "RemoveContainer" containerID="5bd398d0aad3464db677ef65fde2ba9c81e274fc4099a8035b14053dc80af0a4" Oct 09 14:21:57 crc kubenswrapper[4762]: E1009 14:21:57.966941 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 14:22:12 crc kubenswrapper[4762]: I1009 14:22:12.966215 4762 scope.go:117] "RemoveContainer" containerID="5bd398d0aad3464db677ef65fde2ba9c81e274fc4099a8035b14053dc80af0a4" Oct 09 14:22:12 crc kubenswrapper[4762]: E1009 14:22:12.967540 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 14:22:23 crc kubenswrapper[4762]: I1009 14:22:23.964714 4762 scope.go:117] "RemoveContainer" containerID="5bd398d0aad3464db677ef65fde2ba9c81e274fc4099a8035b14053dc80af0a4" Oct 09 14:22:23 crc kubenswrapper[4762]: E1009 14:22:23.965288 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 14:22:35 crc kubenswrapper[4762]: I1009 14:22:35.966070 4762 scope.go:117] "RemoveContainer" containerID="5bd398d0aad3464db677ef65fde2ba9c81e274fc4099a8035b14053dc80af0a4" Oct 09 14:22:35 crc kubenswrapper[4762]: E1009 14:22:35.966898 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 14:22:46 crc kubenswrapper[4762]: I1009 14:22:46.966185 4762 scope.go:117] "RemoveContainer" containerID="5bd398d0aad3464db677ef65fde2ba9c81e274fc4099a8035b14053dc80af0a4" Oct 09 14:22:47 crc kubenswrapper[4762]: I1009 14:22:47.953237 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" 
event={"ID":"366049a3-acf6-488c-9f93-4557528d6d14","Type":"ContainerStarted","Data":"174b6f1c2316a03feb878d1b3dd36d419c917068d0b4437f72d6439d83ad9914"} Oct 09 14:22:50 crc kubenswrapper[4762]: I1009 14:22:50.671439 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-nk7tx"] Oct 09 14:22:50 crc kubenswrapper[4762]: E1009 14:22:50.674385 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d94d3ba3-7419-45b8-b4b2-f92ab1f98ab9" containerName="extract-utilities" Oct 09 14:22:50 crc kubenswrapper[4762]: I1009 14:22:50.674577 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="d94d3ba3-7419-45b8-b4b2-f92ab1f98ab9" containerName="extract-utilities" Oct 09 14:22:50 crc kubenswrapper[4762]: E1009 14:22:50.674752 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d94d3ba3-7419-45b8-b4b2-f92ab1f98ab9" containerName="registry-server" Oct 09 14:22:50 crc kubenswrapper[4762]: I1009 14:22:50.674848 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="d94d3ba3-7419-45b8-b4b2-f92ab1f98ab9" containerName="registry-server" Oct 09 14:22:50 crc kubenswrapper[4762]: E1009 14:22:50.674944 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d94d3ba3-7419-45b8-b4b2-f92ab1f98ab9" containerName="extract-content" Oct 09 14:22:50 crc kubenswrapper[4762]: I1009 14:22:50.675010 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="d94d3ba3-7419-45b8-b4b2-f92ab1f98ab9" containerName="extract-content" Oct 09 14:22:50 crc kubenswrapper[4762]: I1009 14:22:50.675319 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="d94d3ba3-7419-45b8-b4b2-f92ab1f98ab9" containerName="registry-server" Oct 09 14:22:50 crc kubenswrapper[4762]: I1009 14:22:50.677342 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nk7tx" Oct 09 14:22:50 crc kubenswrapper[4762]: I1009 14:22:50.700137 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-nk7tx"] Oct 09 14:22:50 crc kubenswrapper[4762]: I1009 14:22:50.791031 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/44aea371-a4a8-4054-a473-4cdda585ef42-utilities\") pod \"redhat-marketplace-nk7tx\" (UID: \"44aea371-a4a8-4054-a473-4cdda585ef42\") " pod="openshift-marketplace/redhat-marketplace-nk7tx" Oct 09 14:22:50 crc kubenswrapper[4762]: I1009 14:22:50.791124 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l9srj\" (UniqueName: \"kubernetes.io/projected/44aea371-a4a8-4054-a473-4cdda585ef42-kube-api-access-l9srj\") pod \"redhat-marketplace-nk7tx\" (UID: \"44aea371-a4a8-4054-a473-4cdda585ef42\") " pod="openshift-marketplace/redhat-marketplace-nk7tx" Oct 09 14:22:50 crc kubenswrapper[4762]: I1009 14:22:50.791195 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/44aea371-a4a8-4054-a473-4cdda585ef42-catalog-content\") pod \"redhat-marketplace-nk7tx\" (UID: \"44aea371-a4a8-4054-a473-4cdda585ef42\") " pod="openshift-marketplace/redhat-marketplace-nk7tx" Oct 09 14:22:50 crc kubenswrapper[4762]: I1009 14:22:50.892774 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l9srj\" (UniqueName: \"kubernetes.io/projected/44aea371-a4a8-4054-a473-4cdda585ef42-kube-api-access-l9srj\") pod \"redhat-marketplace-nk7tx\" (UID: \"44aea371-a4a8-4054-a473-4cdda585ef42\") " pod="openshift-marketplace/redhat-marketplace-nk7tx" Oct 09 14:22:50 crc kubenswrapper[4762]: I1009 14:22:50.892872 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/44aea371-a4a8-4054-a473-4cdda585ef42-catalog-content\") pod \"redhat-marketplace-nk7tx\" (UID: \"44aea371-a4a8-4054-a473-4cdda585ef42\") " pod="openshift-marketplace/redhat-marketplace-nk7tx" Oct 09 14:22:50 crc kubenswrapper[4762]: I1009 14:22:50.892918 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/44aea371-a4a8-4054-a473-4cdda585ef42-utilities\") pod \"redhat-marketplace-nk7tx\" (UID: \"44aea371-a4a8-4054-a473-4cdda585ef42\") " pod="openshift-marketplace/redhat-marketplace-nk7tx" Oct 09 14:22:50 crc kubenswrapper[4762]: I1009 14:22:50.893505 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/44aea371-a4a8-4054-a473-4cdda585ef42-utilities\") pod \"redhat-marketplace-nk7tx\" (UID: \"44aea371-a4a8-4054-a473-4cdda585ef42\") " pod="openshift-marketplace/redhat-marketplace-nk7tx" Oct 09 14:22:50 crc kubenswrapper[4762]: I1009 14:22:50.894081 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/44aea371-a4a8-4054-a473-4cdda585ef42-catalog-content\") pod \"redhat-marketplace-nk7tx\" (UID: \"44aea371-a4a8-4054-a473-4cdda585ef42\") " pod="openshift-marketplace/redhat-marketplace-nk7tx" Oct 09 14:22:50 crc kubenswrapper[4762]: I1009 14:22:50.913883 4762 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-l9srj\" (UniqueName: \"kubernetes.io/projected/44aea371-a4a8-4054-a473-4cdda585ef42-kube-api-access-l9srj\") pod \"redhat-marketplace-nk7tx\" (UID: \"44aea371-a4a8-4054-a473-4cdda585ef42\") " pod="openshift-marketplace/redhat-marketplace-nk7tx" Oct 09 14:22:50 crc kubenswrapper[4762]: I1009 14:22:50.996535 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nk7tx" Oct 09 14:22:51 crc kubenswrapper[4762]: I1009 14:22:51.202082 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-nk7tx"] Oct 09 14:22:51 crc kubenswrapper[4762]: I1009 14:22:51.996732 4762 generic.go:334] "Generic (PLEG): container finished" podID="44aea371-a4a8-4054-a473-4cdda585ef42" containerID="2ddd7704884128758a0d4ce41fbecfc864f80e340a98de75276b9c384bbe0676" exitCode=0 Oct 09 14:22:51 crc kubenswrapper[4762]: I1009 14:22:51.996807 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nk7tx" event={"ID":"44aea371-a4a8-4054-a473-4cdda585ef42","Type":"ContainerDied","Data":"2ddd7704884128758a0d4ce41fbecfc864f80e340a98de75276b9c384bbe0676"} Oct 09 14:22:51 crc kubenswrapper[4762]: I1009 14:22:51.997083 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nk7tx" event={"ID":"44aea371-a4a8-4054-a473-4cdda585ef42","Type":"ContainerStarted","Data":"5c2ac824b5830d5bfd24f16b167a90424101c49e4d3fda7d51f313ba6470a1b8"} Oct 09 14:22:51 crc kubenswrapper[4762]: I1009 14:22:51.998958 4762 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 09 14:22:54 crc kubenswrapper[4762]: I1009 14:22:54.011619 4762 generic.go:334] "Generic (PLEG): container finished" podID="44aea371-a4a8-4054-a473-4cdda585ef42" containerID="dd0576d2176781ce857ec04c2fcf404b8bf6f03883f21faa5b3c401530986bda" exitCode=0 Oct 09 14:22:54 crc kubenswrapper[4762]: I1009 14:22:54.012138 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nk7tx" event={"ID":"44aea371-a4a8-4054-a473-4cdda585ef42","Type":"ContainerDied","Data":"dd0576d2176781ce857ec04c2fcf404b8bf6f03883f21faa5b3c401530986bda"} Oct 09 14:22:55 crc kubenswrapper[4762]: I1009 14:22:55.020192 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nk7tx" event={"ID":"44aea371-a4a8-4054-a473-4cdda585ef42","Type":"ContainerStarted","Data":"44a923783f39af9076e8a41270cbfa6a88f9e35a7277630844519c929a247431"} Oct 09 14:22:55 crc kubenswrapper[4762]: I1009 14:22:55.042910 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-nk7tx" podStartSLOduration=2.62974529 podStartE2EDuration="5.042891468s" podCreationTimestamp="2025-10-09 14:22:50 +0000 UTC" firstStartedPulling="2025-10-09 14:22:51.998737195 +0000 UTC m=+3447.772528234" lastFinishedPulling="2025-10-09 14:22:54.411883373 +0000 UTC m=+3450.185674412" observedRunningTime="2025-10-09 14:22:55.037836035 +0000 UTC m=+3450.811627094" watchObservedRunningTime="2025-10-09 14:22:55.042891468 +0000 UTC m=+3450.816682507" Oct 09 14:23:00 crc kubenswrapper[4762]: I1009 14:23:00.997135 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-nk7tx" Oct 09 14:23:00 crc kubenswrapper[4762]: I1009 14:23:00.997755 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="unhealthy" pod="openshift-marketplace/redhat-marketplace-nk7tx" Oct 09 14:23:01 crc kubenswrapper[4762]: I1009 14:23:01.040147 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-nk7tx" Oct 09 14:23:01 crc kubenswrapper[4762]: I1009 14:23:01.119731 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-nk7tx" Oct 09 14:23:01 crc kubenswrapper[4762]: I1009 14:23:01.272393 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-nk7tx"] Oct 09 14:23:03 crc kubenswrapper[4762]: I1009 14:23:03.084198 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-nk7tx" podUID="44aea371-a4a8-4054-a473-4cdda585ef42" containerName="registry-server" containerID="cri-o://44a923783f39af9076e8a41270cbfa6a88f9e35a7277630844519c929a247431" gracePeriod=2 Oct 09 14:23:03 crc kubenswrapper[4762]: I1009 14:23:03.472729 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nk7tx" Oct 09 14:23:03 crc kubenswrapper[4762]: I1009 14:23:03.578468 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l9srj\" (UniqueName: \"kubernetes.io/projected/44aea371-a4a8-4054-a473-4cdda585ef42-kube-api-access-l9srj\") pod \"44aea371-a4a8-4054-a473-4cdda585ef42\" (UID: \"44aea371-a4a8-4054-a473-4cdda585ef42\") " Oct 09 14:23:03 crc kubenswrapper[4762]: I1009 14:23:03.578616 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/44aea371-a4a8-4054-a473-4cdda585ef42-catalog-content\") pod \"44aea371-a4a8-4054-a473-4cdda585ef42\" (UID: \"44aea371-a4a8-4054-a473-4cdda585ef42\") " Oct 09 14:23:03 crc kubenswrapper[4762]: I1009 14:23:03.578733 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/44aea371-a4a8-4054-a473-4cdda585ef42-utilities\") pod \"44aea371-a4a8-4054-a473-4cdda585ef42\" (UID: \"44aea371-a4a8-4054-a473-4cdda585ef42\") " Oct 09 14:23:03 crc kubenswrapper[4762]: I1009 14:23:03.579557 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/44aea371-a4a8-4054-a473-4cdda585ef42-utilities" (OuterVolumeSpecName: "utilities") pod "44aea371-a4a8-4054-a473-4cdda585ef42" (UID: "44aea371-a4a8-4054-a473-4cdda585ef42"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 14:23:03 crc kubenswrapper[4762]: I1009 14:23:03.590921 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44aea371-a4a8-4054-a473-4cdda585ef42-kube-api-access-l9srj" (OuterVolumeSpecName: "kube-api-access-l9srj") pod "44aea371-a4a8-4054-a473-4cdda585ef42" (UID: "44aea371-a4a8-4054-a473-4cdda585ef42"). InnerVolumeSpecName "kube-api-access-l9srj". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 14:23:03 crc kubenswrapper[4762]: I1009 14:23:03.594986 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/44aea371-a4a8-4054-a473-4cdda585ef42-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "44aea371-a4a8-4054-a473-4cdda585ef42" (UID: "44aea371-a4a8-4054-a473-4cdda585ef42"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 14:23:03 crc kubenswrapper[4762]: I1009 14:23:03.680146 4762 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/44aea371-a4a8-4054-a473-4cdda585ef42-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 14:23:03 crc kubenswrapper[4762]: I1009 14:23:03.680183 4762 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/44aea371-a4a8-4054-a473-4cdda585ef42-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 14:23:03 crc kubenswrapper[4762]: I1009 14:23:03.680220 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l9srj\" (UniqueName: \"kubernetes.io/projected/44aea371-a4a8-4054-a473-4cdda585ef42-kube-api-access-l9srj\") on node \"crc\" DevicePath \"\"" Oct 09 14:23:04 crc kubenswrapper[4762]: I1009 14:23:04.106127 4762 generic.go:334] "Generic (PLEG): container finished" podID="44aea371-a4a8-4054-a473-4cdda585ef42" containerID="44a923783f39af9076e8a41270cbfa6a88f9e35a7277630844519c929a247431" exitCode=0 Oct 09 14:23:04 crc kubenswrapper[4762]: I1009 14:23:04.106216 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nk7tx" event={"ID":"44aea371-a4a8-4054-a473-4cdda585ef42","Type":"ContainerDied","Data":"44a923783f39af9076e8a41270cbfa6a88f9e35a7277630844519c929a247431"} Oct 09 14:23:04 crc kubenswrapper[4762]: I1009 14:23:04.106271 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nk7tx" event={"ID":"44aea371-a4a8-4054-a473-4cdda585ef42","Type":"ContainerDied","Data":"5c2ac824b5830d5bfd24f16b167a90424101c49e4d3fda7d51f313ba6470a1b8"} Oct 09 14:23:04 crc kubenswrapper[4762]: I1009 14:23:04.106300 4762 scope.go:117] "RemoveContainer" containerID="44a923783f39af9076e8a41270cbfa6a88f9e35a7277630844519c929a247431" Oct 09 14:23:04 crc kubenswrapper[4762]: I1009 14:23:04.106400 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nk7tx" Oct 09 14:23:04 crc kubenswrapper[4762]: I1009 14:23:04.137020 4762 scope.go:117] "RemoveContainer" containerID="dd0576d2176781ce857ec04c2fcf404b8bf6f03883f21faa5b3c401530986bda" Oct 09 14:23:04 crc kubenswrapper[4762]: I1009 14:23:04.144549 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-nk7tx"] Oct 09 14:23:04 crc kubenswrapper[4762]: I1009 14:23:04.155913 4762 scope.go:117] "RemoveContainer" containerID="2ddd7704884128758a0d4ce41fbecfc864f80e340a98de75276b9c384bbe0676" Oct 09 14:23:04 crc kubenswrapper[4762]: I1009 14:23:04.156511 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-nk7tx"] Oct 09 14:23:04 crc kubenswrapper[4762]: I1009 14:23:04.177554 4762 scope.go:117] "RemoveContainer" containerID="44a923783f39af9076e8a41270cbfa6a88f9e35a7277630844519c929a247431" Oct 09 14:23:04 crc kubenswrapper[4762]: E1009 14:23:04.178059 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"44a923783f39af9076e8a41270cbfa6a88f9e35a7277630844519c929a247431\": container with ID starting with 44a923783f39af9076e8a41270cbfa6a88f9e35a7277630844519c929a247431 not found: ID does not exist" containerID="44a923783f39af9076e8a41270cbfa6a88f9e35a7277630844519c929a247431" Oct 09 14:23:04 crc kubenswrapper[4762]: I1009 14:23:04.178092 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"44a923783f39af9076e8a41270cbfa6a88f9e35a7277630844519c929a247431"} err="failed to get container status \"44a923783f39af9076e8a41270cbfa6a88f9e35a7277630844519c929a247431\": rpc error: code = NotFound desc = could not find container \"44a923783f39af9076e8a41270cbfa6a88f9e35a7277630844519c929a247431\": container with ID starting with 44a923783f39af9076e8a41270cbfa6a88f9e35a7277630844519c929a247431 not found: ID does not exist" Oct 09 14:23:04 crc kubenswrapper[4762]: I1009 14:23:04.178118 4762 scope.go:117] "RemoveContainer" containerID="dd0576d2176781ce857ec04c2fcf404b8bf6f03883f21faa5b3c401530986bda" Oct 09 14:23:04 crc kubenswrapper[4762]: E1009 14:23:04.178514 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dd0576d2176781ce857ec04c2fcf404b8bf6f03883f21faa5b3c401530986bda\": container with ID starting with dd0576d2176781ce857ec04c2fcf404b8bf6f03883f21faa5b3c401530986bda not found: ID does not exist" containerID="dd0576d2176781ce857ec04c2fcf404b8bf6f03883f21faa5b3c401530986bda" Oct 09 14:23:04 crc kubenswrapper[4762]: I1009 14:23:04.178537 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dd0576d2176781ce857ec04c2fcf404b8bf6f03883f21faa5b3c401530986bda"} err="failed to get container status \"dd0576d2176781ce857ec04c2fcf404b8bf6f03883f21faa5b3c401530986bda\": rpc error: code = NotFound desc = could not find container \"dd0576d2176781ce857ec04c2fcf404b8bf6f03883f21faa5b3c401530986bda\": container with ID starting with dd0576d2176781ce857ec04c2fcf404b8bf6f03883f21faa5b3c401530986bda not found: ID does not exist" Oct 09 14:23:04 crc kubenswrapper[4762]: I1009 14:23:04.178575 4762 scope.go:117] "RemoveContainer" containerID="2ddd7704884128758a0d4ce41fbecfc864f80e340a98de75276b9c384bbe0676" Oct 09 14:23:04 crc kubenswrapper[4762]: E1009 14:23:04.178843 4762 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"2ddd7704884128758a0d4ce41fbecfc864f80e340a98de75276b9c384bbe0676\": container with ID starting with 2ddd7704884128758a0d4ce41fbecfc864f80e340a98de75276b9c384bbe0676 not found: ID does not exist" containerID="2ddd7704884128758a0d4ce41fbecfc864f80e340a98de75276b9c384bbe0676" Oct 09 14:23:04 crc kubenswrapper[4762]: I1009 14:23:04.178874 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2ddd7704884128758a0d4ce41fbecfc864f80e340a98de75276b9c384bbe0676"} err="failed to get container status \"2ddd7704884128758a0d4ce41fbecfc864f80e340a98de75276b9c384bbe0676\": rpc error: code = NotFound desc = could not find container \"2ddd7704884128758a0d4ce41fbecfc864f80e340a98de75276b9c384bbe0676\": container with ID starting with 2ddd7704884128758a0d4ce41fbecfc864f80e340a98de75276b9c384bbe0676 not found: ID does not exist" Oct 09 14:23:04 crc kubenswrapper[4762]: I1009 14:23:04.975211 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44aea371-a4a8-4054-a473-4cdda585ef42" path="/var/lib/kubelet/pods/44aea371-a4a8-4054-a473-4cdda585ef42/volumes" Oct 09 14:24:26 crc kubenswrapper[4762]: I1009 14:24:26.502766 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-wqjpr"] Oct 09 14:24:26 crc kubenswrapper[4762]: E1009 14:24:26.503544 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="44aea371-a4a8-4054-a473-4cdda585ef42" containerName="extract-utilities" Oct 09 14:24:26 crc kubenswrapper[4762]: I1009 14:24:26.503556 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="44aea371-a4a8-4054-a473-4cdda585ef42" containerName="extract-utilities" Oct 09 14:24:26 crc kubenswrapper[4762]: E1009 14:24:26.503566 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="44aea371-a4a8-4054-a473-4cdda585ef42" containerName="registry-server" Oct 09 14:24:26 crc kubenswrapper[4762]: I1009 14:24:26.503572 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="44aea371-a4a8-4054-a473-4cdda585ef42" containerName="registry-server" Oct 09 14:24:26 crc kubenswrapper[4762]: E1009 14:24:26.503591 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="44aea371-a4a8-4054-a473-4cdda585ef42" containerName="extract-content" Oct 09 14:24:26 crc kubenswrapper[4762]: I1009 14:24:26.503596 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="44aea371-a4a8-4054-a473-4cdda585ef42" containerName="extract-content" Oct 09 14:24:26 crc kubenswrapper[4762]: I1009 14:24:26.503778 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="44aea371-a4a8-4054-a473-4cdda585ef42" containerName="registry-server" Oct 09 14:24:26 crc kubenswrapper[4762]: I1009 14:24:26.504754 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-wqjpr" Oct 09 14:24:26 crc kubenswrapper[4762]: I1009 14:24:26.513566 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-wqjpr"] Oct 09 14:24:26 crc kubenswrapper[4762]: I1009 14:24:26.573094 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wt2tp\" (UniqueName: \"kubernetes.io/projected/84a40cd6-2c88-4d8a-acfb-d530aed7efc6-kube-api-access-wt2tp\") pod \"community-operators-wqjpr\" (UID: \"84a40cd6-2c88-4d8a-acfb-d530aed7efc6\") " pod="openshift-marketplace/community-operators-wqjpr" Oct 09 14:24:26 crc kubenswrapper[4762]: I1009 14:24:26.573173 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/84a40cd6-2c88-4d8a-acfb-d530aed7efc6-catalog-content\") pod \"community-operators-wqjpr\" (UID: \"84a40cd6-2c88-4d8a-acfb-d530aed7efc6\") " pod="openshift-marketplace/community-operators-wqjpr" Oct 09 14:24:26 crc kubenswrapper[4762]: I1009 14:24:26.573204 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/84a40cd6-2c88-4d8a-acfb-d530aed7efc6-utilities\") pod \"community-operators-wqjpr\" (UID: \"84a40cd6-2c88-4d8a-acfb-d530aed7efc6\") " pod="openshift-marketplace/community-operators-wqjpr" Oct 09 14:24:26 crc kubenswrapper[4762]: I1009 14:24:26.674067 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/84a40cd6-2c88-4d8a-acfb-d530aed7efc6-catalog-content\") pod \"community-operators-wqjpr\" (UID: \"84a40cd6-2c88-4d8a-acfb-d530aed7efc6\") " pod="openshift-marketplace/community-operators-wqjpr" Oct 09 14:24:26 crc kubenswrapper[4762]: I1009 14:24:26.674122 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/84a40cd6-2c88-4d8a-acfb-d530aed7efc6-utilities\") pod \"community-operators-wqjpr\" (UID: \"84a40cd6-2c88-4d8a-acfb-d530aed7efc6\") " pod="openshift-marketplace/community-operators-wqjpr" Oct 09 14:24:26 crc kubenswrapper[4762]: I1009 14:24:26.674214 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wt2tp\" (UniqueName: \"kubernetes.io/projected/84a40cd6-2c88-4d8a-acfb-d530aed7efc6-kube-api-access-wt2tp\") pod \"community-operators-wqjpr\" (UID: \"84a40cd6-2c88-4d8a-acfb-d530aed7efc6\") " pod="openshift-marketplace/community-operators-wqjpr" Oct 09 14:24:26 crc kubenswrapper[4762]: I1009 14:24:26.674808 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/84a40cd6-2c88-4d8a-acfb-d530aed7efc6-utilities\") pod \"community-operators-wqjpr\" (UID: \"84a40cd6-2c88-4d8a-acfb-d530aed7efc6\") " pod="openshift-marketplace/community-operators-wqjpr" Oct 09 14:24:26 crc kubenswrapper[4762]: I1009 14:24:26.674901 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/84a40cd6-2c88-4d8a-acfb-d530aed7efc6-catalog-content\") pod \"community-operators-wqjpr\" (UID: \"84a40cd6-2c88-4d8a-acfb-d530aed7efc6\") " pod="openshift-marketplace/community-operators-wqjpr" Oct 09 14:24:26 crc kubenswrapper[4762]: I1009 14:24:26.699799 4762 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-wt2tp\" (UniqueName: \"kubernetes.io/projected/84a40cd6-2c88-4d8a-acfb-d530aed7efc6-kube-api-access-wt2tp\") pod \"community-operators-wqjpr\" (UID: \"84a40cd6-2c88-4d8a-acfb-d530aed7efc6\") " pod="openshift-marketplace/community-operators-wqjpr" Oct 09 14:24:26 crc kubenswrapper[4762]: I1009 14:24:26.824765 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-wqjpr" Oct 09 14:24:27 crc kubenswrapper[4762]: I1009 14:24:27.251060 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-wqjpr"] Oct 09 14:24:27 crc kubenswrapper[4762]: I1009 14:24:27.731827 4762 generic.go:334] "Generic (PLEG): container finished" podID="84a40cd6-2c88-4d8a-acfb-d530aed7efc6" containerID="38f83e64c938eb97a654b43c17b18a5bbb27cf4934b226ea90c563531d9870cb" exitCode=0 Oct 09 14:24:27 crc kubenswrapper[4762]: I1009 14:24:27.731875 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wqjpr" event={"ID":"84a40cd6-2c88-4d8a-acfb-d530aed7efc6","Type":"ContainerDied","Data":"38f83e64c938eb97a654b43c17b18a5bbb27cf4934b226ea90c563531d9870cb"} Oct 09 14:24:27 crc kubenswrapper[4762]: I1009 14:24:27.731907 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wqjpr" event={"ID":"84a40cd6-2c88-4d8a-acfb-d530aed7efc6","Type":"ContainerStarted","Data":"4b04bf74f3590c4f3f46b95e8d070148ea30b8df9a5ade91059765d4a0995053"} Oct 09 14:24:29 crc kubenswrapper[4762]: I1009 14:24:29.760004 4762 generic.go:334] "Generic (PLEG): container finished" podID="84a40cd6-2c88-4d8a-acfb-d530aed7efc6" containerID="7421f15419b490c04853f37f9820ebdb2313d0f76b8ec510217ba546e4d46550" exitCode=0 Oct 09 14:24:29 crc kubenswrapper[4762]: I1009 14:24:29.760571 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wqjpr" event={"ID":"84a40cd6-2c88-4d8a-acfb-d530aed7efc6","Type":"ContainerDied","Data":"7421f15419b490c04853f37f9820ebdb2313d0f76b8ec510217ba546e4d46550"} Oct 09 14:24:30 crc kubenswrapper[4762]: I1009 14:24:30.770141 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wqjpr" event={"ID":"84a40cd6-2c88-4d8a-acfb-d530aed7efc6","Type":"ContainerStarted","Data":"7384788c37e53e15f7c2c3f1b299f2663f51e8de5c05023d48edfc185f02ee15"} Oct 09 14:24:30 crc kubenswrapper[4762]: I1009 14:24:30.793973 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-wqjpr" podStartSLOduration=2.247335361 podStartE2EDuration="4.79394936s" podCreationTimestamp="2025-10-09 14:24:26 +0000 UTC" firstStartedPulling="2025-10-09 14:24:27.733754408 +0000 UTC m=+3543.507545467" lastFinishedPulling="2025-10-09 14:24:30.280368427 +0000 UTC m=+3546.054159466" observedRunningTime="2025-10-09 14:24:30.791583209 +0000 UTC m=+3546.565374268" watchObservedRunningTime="2025-10-09 14:24:30.79394936 +0000 UTC m=+3546.567740409" Oct 09 14:24:36 crc kubenswrapper[4762]: I1009 14:24:36.825356 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-wqjpr" Oct 09 14:24:36 crc kubenswrapper[4762]: I1009 14:24:36.825979 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-wqjpr" Oct 09 14:24:36 crc kubenswrapper[4762]: I1009 14:24:36.874100 4762 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-wqjpr" Oct 09 14:24:37 crc kubenswrapper[4762]: I1009 14:24:37.876993 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-wqjpr" Oct 09 14:24:37 crc kubenswrapper[4762]: I1009 14:24:37.923592 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-wqjpr"] Oct 09 14:24:39 crc kubenswrapper[4762]: I1009 14:24:39.835932 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-wqjpr" podUID="84a40cd6-2c88-4d8a-acfb-d530aed7efc6" containerName="registry-server" containerID="cri-o://7384788c37e53e15f7c2c3f1b299f2663f51e8de5c05023d48edfc185f02ee15" gracePeriod=2 Oct 09 14:24:40 crc kubenswrapper[4762]: I1009 14:24:40.844997 4762 generic.go:334] "Generic (PLEG): container finished" podID="84a40cd6-2c88-4d8a-acfb-d530aed7efc6" containerID="7384788c37e53e15f7c2c3f1b299f2663f51e8de5c05023d48edfc185f02ee15" exitCode=0 Oct 09 14:24:40 crc kubenswrapper[4762]: I1009 14:24:40.845077 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wqjpr" event={"ID":"84a40cd6-2c88-4d8a-acfb-d530aed7efc6","Type":"ContainerDied","Data":"7384788c37e53e15f7c2c3f1b299f2663f51e8de5c05023d48edfc185f02ee15"} Oct 09 14:24:40 crc kubenswrapper[4762]: I1009 14:24:40.902997 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-wqjpr" Oct 09 14:24:40 crc kubenswrapper[4762]: I1009 14:24:40.976096 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/84a40cd6-2c88-4d8a-acfb-d530aed7efc6-catalog-content\") pod \"84a40cd6-2c88-4d8a-acfb-d530aed7efc6\" (UID: \"84a40cd6-2c88-4d8a-acfb-d530aed7efc6\") " Oct 09 14:24:40 crc kubenswrapper[4762]: I1009 14:24:40.976171 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wt2tp\" (UniqueName: \"kubernetes.io/projected/84a40cd6-2c88-4d8a-acfb-d530aed7efc6-kube-api-access-wt2tp\") pod \"84a40cd6-2c88-4d8a-acfb-d530aed7efc6\" (UID: \"84a40cd6-2c88-4d8a-acfb-d530aed7efc6\") " Oct 09 14:24:40 crc kubenswrapper[4762]: I1009 14:24:40.976205 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/84a40cd6-2c88-4d8a-acfb-d530aed7efc6-utilities\") pod \"84a40cd6-2c88-4d8a-acfb-d530aed7efc6\" (UID: \"84a40cd6-2c88-4d8a-acfb-d530aed7efc6\") " Oct 09 14:24:40 crc kubenswrapper[4762]: I1009 14:24:40.977154 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/84a40cd6-2c88-4d8a-acfb-d530aed7efc6-utilities" (OuterVolumeSpecName: "utilities") pod "84a40cd6-2c88-4d8a-acfb-d530aed7efc6" (UID: "84a40cd6-2c88-4d8a-acfb-d530aed7efc6"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 14:24:40 crc kubenswrapper[4762]: I1009 14:24:40.981208 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/84a40cd6-2c88-4d8a-acfb-d530aed7efc6-kube-api-access-wt2tp" (OuterVolumeSpecName: "kube-api-access-wt2tp") pod "84a40cd6-2c88-4d8a-acfb-d530aed7efc6" (UID: "84a40cd6-2c88-4d8a-acfb-d530aed7efc6"). InnerVolumeSpecName "kube-api-access-wt2tp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 14:24:41 crc kubenswrapper[4762]: I1009 14:24:41.022581 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/84a40cd6-2c88-4d8a-acfb-d530aed7efc6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "84a40cd6-2c88-4d8a-acfb-d530aed7efc6" (UID: "84a40cd6-2c88-4d8a-acfb-d530aed7efc6"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 14:24:41 crc kubenswrapper[4762]: I1009 14:24:41.077383 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wt2tp\" (UniqueName: \"kubernetes.io/projected/84a40cd6-2c88-4d8a-acfb-d530aed7efc6-kube-api-access-wt2tp\") on node \"crc\" DevicePath \"\"" Oct 09 14:24:41 crc kubenswrapper[4762]: I1009 14:24:41.077411 4762 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/84a40cd6-2c88-4d8a-acfb-d530aed7efc6-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 14:24:41 crc kubenswrapper[4762]: I1009 14:24:41.077420 4762 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/84a40cd6-2c88-4d8a-acfb-d530aed7efc6-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 14:24:41 crc kubenswrapper[4762]: I1009 14:24:41.854185 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wqjpr" event={"ID":"84a40cd6-2c88-4d8a-acfb-d530aed7efc6","Type":"ContainerDied","Data":"4b04bf74f3590c4f3f46b95e8d070148ea30b8df9a5ade91059765d4a0995053"} Oct 09 14:24:41 crc kubenswrapper[4762]: I1009 14:24:41.854270 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-wqjpr" Oct 09 14:24:41 crc kubenswrapper[4762]: I1009 14:24:41.854461 4762 scope.go:117] "RemoveContainer" containerID="7384788c37e53e15f7c2c3f1b299f2663f51e8de5c05023d48edfc185f02ee15" Oct 09 14:24:41 crc kubenswrapper[4762]: I1009 14:24:41.874881 4762 scope.go:117] "RemoveContainer" containerID="7421f15419b490c04853f37f9820ebdb2313d0f76b8ec510217ba546e4d46550" Oct 09 14:24:41 crc kubenswrapper[4762]: I1009 14:24:41.889918 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-wqjpr"] Oct 09 14:24:41 crc kubenswrapper[4762]: I1009 14:24:41.898002 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-wqjpr"] Oct 09 14:24:41 crc kubenswrapper[4762]: I1009 14:24:41.916395 4762 scope.go:117] "RemoveContainer" containerID="38f83e64c938eb97a654b43c17b18a5bbb27cf4934b226ea90c563531d9870cb" Oct 09 14:24:42 crc kubenswrapper[4762]: I1009 14:24:42.975439 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="84a40cd6-2c88-4d8a-acfb-d530aed7efc6" path="/var/lib/kubelet/pods/84a40cd6-2c88-4d8a-acfb-d530aed7efc6/volumes" Oct 09 14:25:11 crc kubenswrapper[4762]: I1009 14:25:11.969219 4762 patch_prober.go:28] interesting pod/machine-config-daemon-5v6hv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 14:25:11 crc kubenswrapper[4762]: I1009 14:25:11.969764 4762 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" 
containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 14:25:35 crc kubenswrapper[4762]: I1009 14:25:35.878753 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-94ssx"] Oct 09 14:25:35 crc kubenswrapper[4762]: E1009 14:25:35.879721 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84a40cd6-2c88-4d8a-acfb-d530aed7efc6" containerName="extract-content" Oct 09 14:25:35 crc kubenswrapper[4762]: I1009 14:25:35.879736 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="84a40cd6-2c88-4d8a-acfb-d530aed7efc6" containerName="extract-content" Oct 09 14:25:35 crc kubenswrapper[4762]: E1009 14:25:35.879754 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84a40cd6-2c88-4d8a-acfb-d530aed7efc6" containerName="extract-utilities" Oct 09 14:25:35 crc kubenswrapper[4762]: I1009 14:25:35.879761 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="84a40cd6-2c88-4d8a-acfb-d530aed7efc6" containerName="extract-utilities" Oct 09 14:25:35 crc kubenswrapper[4762]: E1009 14:25:35.879786 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84a40cd6-2c88-4d8a-acfb-d530aed7efc6" containerName="registry-server" Oct 09 14:25:35 crc kubenswrapper[4762]: I1009 14:25:35.879795 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="84a40cd6-2c88-4d8a-acfb-d530aed7efc6" containerName="registry-server" Oct 09 14:25:35 crc kubenswrapper[4762]: I1009 14:25:35.879974 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="84a40cd6-2c88-4d8a-acfb-d530aed7efc6" containerName="registry-server" Oct 09 14:25:35 crc kubenswrapper[4762]: I1009 14:25:35.881249 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-94ssx" Oct 09 14:25:35 crc kubenswrapper[4762]: I1009 14:25:35.894044 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-94ssx"] Oct 09 14:25:35 crc kubenswrapper[4762]: I1009 14:25:35.972307 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3fdec5af-596d-4f0f-ae5d-5f4bd8991e5e-utilities\") pod \"redhat-operators-94ssx\" (UID: \"3fdec5af-596d-4f0f-ae5d-5f4bd8991e5e\") " pod="openshift-marketplace/redhat-operators-94ssx" Oct 09 14:25:35 crc kubenswrapper[4762]: I1009 14:25:35.972860 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3fdec5af-596d-4f0f-ae5d-5f4bd8991e5e-catalog-content\") pod \"redhat-operators-94ssx\" (UID: \"3fdec5af-596d-4f0f-ae5d-5f4bd8991e5e\") " pod="openshift-marketplace/redhat-operators-94ssx" Oct 09 14:25:35 crc kubenswrapper[4762]: I1009 14:25:35.972895 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-88wkd\" (UniqueName: \"kubernetes.io/projected/3fdec5af-596d-4f0f-ae5d-5f4bd8991e5e-kube-api-access-88wkd\") pod \"redhat-operators-94ssx\" (UID: \"3fdec5af-596d-4f0f-ae5d-5f4bd8991e5e\") " pod="openshift-marketplace/redhat-operators-94ssx" Oct 09 14:25:36 crc kubenswrapper[4762]: I1009 14:25:36.074377 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3fdec5af-596d-4f0f-ae5d-5f4bd8991e5e-catalog-content\") pod \"redhat-operators-94ssx\" (UID: \"3fdec5af-596d-4f0f-ae5d-5f4bd8991e5e\") " pod="openshift-marketplace/redhat-operators-94ssx" Oct 09 14:25:36 crc kubenswrapper[4762]: I1009 14:25:36.074432 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-88wkd\" (UniqueName: \"kubernetes.io/projected/3fdec5af-596d-4f0f-ae5d-5f4bd8991e5e-kube-api-access-88wkd\") pod \"redhat-operators-94ssx\" (UID: \"3fdec5af-596d-4f0f-ae5d-5f4bd8991e5e\") " pod="openshift-marketplace/redhat-operators-94ssx" Oct 09 14:25:36 crc kubenswrapper[4762]: I1009 14:25:36.074468 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3fdec5af-596d-4f0f-ae5d-5f4bd8991e5e-utilities\") pod \"redhat-operators-94ssx\" (UID: \"3fdec5af-596d-4f0f-ae5d-5f4bd8991e5e\") " pod="openshift-marketplace/redhat-operators-94ssx" Oct 09 14:25:36 crc kubenswrapper[4762]: I1009 14:25:36.074971 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3fdec5af-596d-4f0f-ae5d-5f4bd8991e5e-utilities\") pod \"redhat-operators-94ssx\" (UID: \"3fdec5af-596d-4f0f-ae5d-5f4bd8991e5e\") " pod="openshift-marketplace/redhat-operators-94ssx" Oct 09 14:25:36 crc kubenswrapper[4762]: I1009 14:25:36.074974 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3fdec5af-596d-4f0f-ae5d-5f4bd8991e5e-catalog-content\") pod \"redhat-operators-94ssx\" (UID: \"3fdec5af-596d-4f0f-ae5d-5f4bd8991e5e\") " pod="openshift-marketplace/redhat-operators-94ssx" Oct 09 14:25:36 crc kubenswrapper[4762]: I1009 14:25:36.097462 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-88wkd\" (UniqueName: \"kubernetes.io/projected/3fdec5af-596d-4f0f-ae5d-5f4bd8991e5e-kube-api-access-88wkd\") pod \"redhat-operators-94ssx\" (UID: \"3fdec5af-596d-4f0f-ae5d-5f4bd8991e5e\") " pod="openshift-marketplace/redhat-operators-94ssx" Oct 09 14:25:36 crc kubenswrapper[4762]: I1009 14:25:36.211361 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-94ssx" Oct 09 14:25:36 crc kubenswrapper[4762]: I1009 14:25:36.699362 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-94ssx"] Oct 09 14:25:37 crc kubenswrapper[4762]: I1009 14:25:37.279252 4762 generic.go:334] "Generic (PLEG): container finished" podID="3fdec5af-596d-4f0f-ae5d-5f4bd8991e5e" containerID="3b0f58393ec5b6a40ca78710b9cb462c617d8e8de7815ea010b32d5a1b30cc8a" exitCode=0 Oct 09 14:25:37 crc kubenswrapper[4762]: I1009 14:25:37.279564 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-94ssx" event={"ID":"3fdec5af-596d-4f0f-ae5d-5f4bd8991e5e","Type":"ContainerDied","Data":"3b0f58393ec5b6a40ca78710b9cb462c617d8e8de7815ea010b32d5a1b30cc8a"} Oct 09 14:25:37 crc kubenswrapper[4762]: I1009 14:25:37.279595 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-94ssx" event={"ID":"3fdec5af-596d-4f0f-ae5d-5f4bd8991e5e","Type":"ContainerStarted","Data":"2d2010fdc1537783c45eb605109714fce876e74d5cb7a486ae49f6388d491bd0"} Oct 09 14:25:38 crc kubenswrapper[4762]: I1009 14:25:38.288782 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-94ssx" event={"ID":"3fdec5af-596d-4f0f-ae5d-5f4bd8991e5e","Type":"ContainerStarted","Data":"7916359ebdede31b3a041337dfadb43b423ff6d0ac5d9b817f364a01c62a3b32"} Oct 09 14:25:39 crc kubenswrapper[4762]: I1009 14:25:39.301672 4762 generic.go:334] "Generic (PLEG): container finished" podID="3fdec5af-596d-4f0f-ae5d-5f4bd8991e5e" containerID="7916359ebdede31b3a041337dfadb43b423ff6d0ac5d9b817f364a01c62a3b32" exitCode=0 Oct 09 14:25:39 crc kubenswrapper[4762]: I1009 14:25:39.301735 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-94ssx" event={"ID":"3fdec5af-596d-4f0f-ae5d-5f4bd8991e5e","Type":"ContainerDied","Data":"7916359ebdede31b3a041337dfadb43b423ff6d0ac5d9b817f364a01c62a3b32"} Oct 09 14:25:40 crc kubenswrapper[4762]: I1009 14:25:40.309921 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-94ssx" event={"ID":"3fdec5af-596d-4f0f-ae5d-5f4bd8991e5e","Type":"ContainerStarted","Data":"60df2760850414b3b40ed5bc0238dcfafcb2333d851425ec5b7741b06b20bbb2"} Oct 09 14:25:40 crc kubenswrapper[4762]: I1009 14:25:40.330991 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-94ssx" podStartSLOduration=2.774884651 podStartE2EDuration="5.330969438s" podCreationTimestamp="2025-10-09 14:25:35 +0000 UTC" firstStartedPulling="2025-10-09 14:25:37.280946692 +0000 UTC m=+3613.054737741" lastFinishedPulling="2025-10-09 14:25:39.837031499 +0000 UTC m=+3615.610822528" observedRunningTime="2025-10-09 14:25:40.325247499 +0000 UTC m=+3616.099038558" watchObservedRunningTime="2025-10-09 14:25:40.330969438 +0000 UTC m=+3616.104760467" Oct 09 14:25:41 crc kubenswrapper[4762]: I1009 14:25:41.969528 4762 patch_prober.go:28] interesting pod/machine-config-daemon-5v6hv container/machine-config-daemon 
namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 14:25:41 crc kubenswrapper[4762]: I1009 14:25:41.969920 4762 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 14:25:46 crc kubenswrapper[4762]: I1009 14:25:46.211473 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-94ssx" Oct 09 14:25:46 crc kubenswrapper[4762]: I1009 14:25:46.212865 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-94ssx" Oct 09 14:25:46 crc kubenswrapper[4762]: I1009 14:25:46.254465 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-94ssx" Oct 09 14:25:46 crc kubenswrapper[4762]: I1009 14:25:46.391794 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-94ssx" Oct 09 14:25:46 crc kubenswrapper[4762]: I1009 14:25:46.508130 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-94ssx"] Oct 09 14:25:48 crc kubenswrapper[4762]: I1009 14:25:48.364678 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-94ssx" podUID="3fdec5af-596d-4f0f-ae5d-5f4bd8991e5e" containerName="registry-server" containerID="cri-o://60df2760850414b3b40ed5bc0238dcfafcb2333d851425ec5b7741b06b20bbb2" gracePeriod=2 Oct 09 14:25:48 crc kubenswrapper[4762]: I1009 14:25:48.836207 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-94ssx" Oct 09 14:25:49 crc kubenswrapper[4762]: I1009 14:25:49.012728 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3fdec5af-596d-4f0f-ae5d-5f4bd8991e5e-catalog-content\") pod \"3fdec5af-596d-4f0f-ae5d-5f4bd8991e5e\" (UID: \"3fdec5af-596d-4f0f-ae5d-5f4bd8991e5e\") " Oct 09 14:25:49 crc kubenswrapper[4762]: I1009 14:25:49.012835 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3fdec5af-596d-4f0f-ae5d-5f4bd8991e5e-utilities\") pod \"3fdec5af-596d-4f0f-ae5d-5f4bd8991e5e\" (UID: \"3fdec5af-596d-4f0f-ae5d-5f4bd8991e5e\") " Oct 09 14:25:49 crc kubenswrapper[4762]: I1009 14:25:49.013073 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-88wkd\" (UniqueName: \"kubernetes.io/projected/3fdec5af-596d-4f0f-ae5d-5f4bd8991e5e-kube-api-access-88wkd\") pod \"3fdec5af-596d-4f0f-ae5d-5f4bd8991e5e\" (UID: \"3fdec5af-596d-4f0f-ae5d-5f4bd8991e5e\") " Oct 09 14:25:49 crc kubenswrapper[4762]: I1009 14:25:49.013583 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3fdec5af-596d-4f0f-ae5d-5f4bd8991e5e-utilities" (OuterVolumeSpecName: "utilities") pod "3fdec5af-596d-4f0f-ae5d-5f4bd8991e5e" (UID: "3fdec5af-596d-4f0f-ae5d-5f4bd8991e5e"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 14:25:49 crc kubenswrapper[4762]: I1009 14:25:49.013792 4762 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3fdec5af-596d-4f0f-ae5d-5f4bd8991e5e-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 14:25:49 crc kubenswrapper[4762]: I1009 14:25:49.031218 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3fdec5af-596d-4f0f-ae5d-5f4bd8991e5e-kube-api-access-88wkd" (OuterVolumeSpecName: "kube-api-access-88wkd") pod "3fdec5af-596d-4f0f-ae5d-5f4bd8991e5e" (UID: "3fdec5af-596d-4f0f-ae5d-5f4bd8991e5e"). InnerVolumeSpecName "kube-api-access-88wkd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 14:25:49 crc kubenswrapper[4762]: I1009 14:25:49.115940 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-88wkd\" (UniqueName: \"kubernetes.io/projected/3fdec5af-596d-4f0f-ae5d-5f4bd8991e5e-kube-api-access-88wkd\") on node \"crc\" DevicePath \"\"" Oct 09 14:25:49 crc kubenswrapper[4762]: I1009 14:25:49.200653 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3fdec5af-596d-4f0f-ae5d-5f4bd8991e5e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3fdec5af-596d-4f0f-ae5d-5f4bd8991e5e" (UID: "3fdec5af-596d-4f0f-ae5d-5f4bd8991e5e"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 14:25:49 crc kubenswrapper[4762]: I1009 14:25:49.216486 4762 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3fdec5af-596d-4f0f-ae5d-5f4bd8991e5e-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 14:25:49 crc kubenswrapper[4762]: I1009 14:25:49.374403 4762 generic.go:334] "Generic (PLEG): container finished" podID="3fdec5af-596d-4f0f-ae5d-5f4bd8991e5e" containerID="60df2760850414b3b40ed5bc0238dcfafcb2333d851425ec5b7741b06b20bbb2" exitCode=0 Oct 09 14:25:49 crc kubenswrapper[4762]: I1009 14:25:49.374471 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-94ssx" event={"ID":"3fdec5af-596d-4f0f-ae5d-5f4bd8991e5e","Type":"ContainerDied","Data":"60df2760850414b3b40ed5bc0238dcfafcb2333d851425ec5b7741b06b20bbb2"} Oct 09 14:25:49 crc kubenswrapper[4762]: I1009 14:25:49.374541 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-94ssx" Oct 09 14:25:49 crc kubenswrapper[4762]: I1009 14:25:49.374778 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-94ssx" event={"ID":"3fdec5af-596d-4f0f-ae5d-5f4bd8991e5e","Type":"ContainerDied","Data":"2d2010fdc1537783c45eb605109714fce876e74d5cb7a486ae49f6388d491bd0"} Oct 09 14:25:49 crc kubenswrapper[4762]: I1009 14:25:49.374805 4762 scope.go:117] "RemoveContainer" containerID="60df2760850414b3b40ed5bc0238dcfafcb2333d851425ec5b7741b06b20bbb2" Oct 09 14:25:49 crc kubenswrapper[4762]: I1009 14:25:49.399769 4762 scope.go:117] "RemoveContainer" containerID="7916359ebdede31b3a041337dfadb43b423ff6d0ac5d9b817f364a01c62a3b32" Oct 09 14:25:49 crc kubenswrapper[4762]: I1009 14:25:49.414687 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-94ssx"] Oct 09 14:25:49 crc kubenswrapper[4762]: I1009 14:25:49.420286 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-94ssx"] Oct 09 14:25:49 crc kubenswrapper[4762]: I1009 14:25:49.444779 4762 scope.go:117] "RemoveContainer" containerID="3b0f58393ec5b6a40ca78710b9cb462c617d8e8de7815ea010b32d5a1b30cc8a" Oct 09 14:25:49 crc kubenswrapper[4762]: I1009 14:25:49.462962 4762 scope.go:117] "RemoveContainer" containerID="60df2760850414b3b40ed5bc0238dcfafcb2333d851425ec5b7741b06b20bbb2" Oct 09 14:25:49 crc kubenswrapper[4762]: E1009 14:25:49.463308 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"60df2760850414b3b40ed5bc0238dcfafcb2333d851425ec5b7741b06b20bbb2\": container with ID starting with 60df2760850414b3b40ed5bc0238dcfafcb2333d851425ec5b7741b06b20bbb2 not found: ID does not exist" containerID="60df2760850414b3b40ed5bc0238dcfafcb2333d851425ec5b7741b06b20bbb2" Oct 09 14:25:49 crc kubenswrapper[4762]: I1009 14:25:49.463335 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"60df2760850414b3b40ed5bc0238dcfafcb2333d851425ec5b7741b06b20bbb2"} err="failed to get container status \"60df2760850414b3b40ed5bc0238dcfafcb2333d851425ec5b7741b06b20bbb2\": rpc error: code = NotFound desc = could not find container \"60df2760850414b3b40ed5bc0238dcfafcb2333d851425ec5b7741b06b20bbb2\": container with ID starting with 60df2760850414b3b40ed5bc0238dcfafcb2333d851425ec5b7741b06b20bbb2 not found: ID does not exist" Oct 09 14:25:49 crc kubenswrapper[4762]: I1009 14:25:49.463356 4762 scope.go:117] "RemoveContainer" containerID="7916359ebdede31b3a041337dfadb43b423ff6d0ac5d9b817f364a01c62a3b32" Oct 09 14:25:49 crc kubenswrapper[4762]: E1009 14:25:49.463728 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7916359ebdede31b3a041337dfadb43b423ff6d0ac5d9b817f364a01c62a3b32\": container with ID starting with 7916359ebdede31b3a041337dfadb43b423ff6d0ac5d9b817f364a01c62a3b32 not found: ID does not exist" containerID="7916359ebdede31b3a041337dfadb43b423ff6d0ac5d9b817f364a01c62a3b32" Oct 09 14:25:49 crc kubenswrapper[4762]: I1009 14:25:49.463753 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7916359ebdede31b3a041337dfadb43b423ff6d0ac5d9b817f364a01c62a3b32"} err="failed to get container status \"7916359ebdede31b3a041337dfadb43b423ff6d0ac5d9b817f364a01c62a3b32\": rpc error: code = NotFound desc = could not find container 
\"7916359ebdede31b3a041337dfadb43b423ff6d0ac5d9b817f364a01c62a3b32\": container with ID starting with 7916359ebdede31b3a041337dfadb43b423ff6d0ac5d9b817f364a01c62a3b32 not found: ID does not exist" Oct 09 14:25:49 crc kubenswrapper[4762]: I1009 14:25:49.463767 4762 scope.go:117] "RemoveContainer" containerID="3b0f58393ec5b6a40ca78710b9cb462c617d8e8de7815ea010b32d5a1b30cc8a" Oct 09 14:25:49 crc kubenswrapper[4762]: E1009 14:25:49.464089 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3b0f58393ec5b6a40ca78710b9cb462c617d8e8de7815ea010b32d5a1b30cc8a\": container with ID starting with 3b0f58393ec5b6a40ca78710b9cb462c617d8e8de7815ea010b32d5a1b30cc8a not found: ID does not exist" containerID="3b0f58393ec5b6a40ca78710b9cb462c617d8e8de7815ea010b32d5a1b30cc8a" Oct 09 14:25:49 crc kubenswrapper[4762]: I1009 14:25:49.464113 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3b0f58393ec5b6a40ca78710b9cb462c617d8e8de7815ea010b32d5a1b30cc8a"} err="failed to get container status \"3b0f58393ec5b6a40ca78710b9cb462c617d8e8de7815ea010b32d5a1b30cc8a\": rpc error: code = NotFound desc = could not find container \"3b0f58393ec5b6a40ca78710b9cb462c617d8e8de7815ea010b32d5a1b30cc8a\": container with ID starting with 3b0f58393ec5b6a40ca78710b9cb462c617d8e8de7815ea010b32d5a1b30cc8a not found: ID does not exist" Oct 09 14:25:50 crc kubenswrapper[4762]: I1009 14:25:50.973719 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3fdec5af-596d-4f0f-ae5d-5f4bd8991e5e" path="/var/lib/kubelet/pods/3fdec5af-596d-4f0f-ae5d-5f4bd8991e5e/volumes" Oct 09 14:26:11 crc kubenswrapper[4762]: I1009 14:26:11.969812 4762 patch_prober.go:28] interesting pod/machine-config-daemon-5v6hv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 14:26:11 crc kubenswrapper[4762]: I1009 14:26:11.970308 4762 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 14:26:11 crc kubenswrapper[4762]: I1009 14:26:11.970351 4762 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" Oct 09 14:26:11 crc kubenswrapper[4762]: I1009 14:26:11.970805 4762 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"174b6f1c2316a03feb878d1b3dd36d419c917068d0b4437f72d6439d83ad9914"} pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 09 14:26:11 crc kubenswrapper[4762]: I1009 14:26:11.970864 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" containerID="cri-o://174b6f1c2316a03feb878d1b3dd36d419c917068d0b4437f72d6439d83ad9914" gracePeriod=600 Oct 09 14:26:12 crc kubenswrapper[4762]: I1009 14:26:12.550305 4762 generic.go:334] 
"Generic (PLEG): container finished" podID="366049a3-acf6-488c-9f93-4557528d6d14" containerID="174b6f1c2316a03feb878d1b3dd36d419c917068d0b4437f72d6439d83ad9914" exitCode=0 Oct 09 14:26:12 crc kubenswrapper[4762]: I1009 14:26:12.550403 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" event={"ID":"366049a3-acf6-488c-9f93-4557528d6d14","Type":"ContainerDied","Data":"174b6f1c2316a03feb878d1b3dd36d419c917068d0b4437f72d6439d83ad9914"} Oct 09 14:26:12 crc kubenswrapper[4762]: I1009 14:26:12.550934 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" event={"ID":"366049a3-acf6-488c-9f93-4557528d6d14","Type":"ContainerStarted","Data":"4d1df28214271a51e41c9293c0c5b00b7686991cb22ae7b5b11ff19723d63a96"} Oct 09 14:26:12 crc kubenswrapper[4762]: I1009 14:26:12.550959 4762 scope.go:117] "RemoveContainer" containerID="5bd398d0aad3464db677ef65fde2ba9c81e274fc4099a8035b14053dc80af0a4" Oct 09 14:28:41 crc kubenswrapper[4762]: I1009 14:28:41.969100 4762 patch_prober.go:28] interesting pod/machine-config-daemon-5v6hv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 14:28:41 crc kubenswrapper[4762]: I1009 14:28:41.970818 4762 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 14:29:11 crc kubenswrapper[4762]: I1009 14:29:11.969513 4762 patch_prober.go:28] interesting pod/machine-config-daemon-5v6hv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 14:29:11 crc kubenswrapper[4762]: I1009 14:29:11.970126 4762 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 14:29:41 crc kubenswrapper[4762]: I1009 14:29:41.969285 4762 patch_prober.go:28] interesting pod/machine-config-daemon-5v6hv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 14:29:41 crc kubenswrapper[4762]: I1009 14:29:41.969824 4762 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 14:29:41 crc kubenswrapper[4762]: I1009 14:29:41.969911 4762 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" Oct 09 14:29:41 crc kubenswrapper[4762]: I1009 14:29:41.970449 4762 
kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"4d1df28214271a51e41c9293c0c5b00b7686991cb22ae7b5b11ff19723d63a96"} pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 09 14:29:41 crc kubenswrapper[4762]: I1009 14:29:41.970494 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" containerID="cri-o://4d1df28214271a51e41c9293c0c5b00b7686991cb22ae7b5b11ff19723d63a96" gracePeriod=600 Oct 09 14:29:42 crc kubenswrapper[4762]: E1009 14:29:42.094948 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 14:29:42 crc kubenswrapper[4762]: I1009 14:29:42.099236 4762 generic.go:334] "Generic (PLEG): container finished" podID="366049a3-acf6-488c-9f93-4557528d6d14" containerID="4d1df28214271a51e41c9293c0c5b00b7686991cb22ae7b5b11ff19723d63a96" exitCode=0 Oct 09 14:29:42 crc kubenswrapper[4762]: I1009 14:29:42.099284 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" event={"ID":"366049a3-acf6-488c-9f93-4557528d6d14","Type":"ContainerDied","Data":"4d1df28214271a51e41c9293c0c5b00b7686991cb22ae7b5b11ff19723d63a96"} Oct 09 14:29:42 crc kubenswrapper[4762]: I1009 14:29:42.099316 4762 scope.go:117] "RemoveContainer" containerID="174b6f1c2316a03feb878d1b3dd36d419c917068d0b4437f72d6439d83ad9914" Oct 09 14:29:42 crc kubenswrapper[4762]: I1009 14:29:42.099959 4762 scope.go:117] "RemoveContainer" containerID="4d1df28214271a51e41c9293c0c5b00b7686991cb22ae7b5b11ff19723d63a96" Oct 09 14:29:42 crc kubenswrapper[4762]: E1009 14:29:42.100205 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 14:29:53 crc kubenswrapper[4762]: I1009 14:29:53.966310 4762 scope.go:117] "RemoveContainer" containerID="4d1df28214271a51e41c9293c0c5b00b7686991cb22ae7b5b11ff19723d63a96" Oct 09 14:29:53 crc kubenswrapper[4762]: E1009 14:29:53.967862 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 14:30:00 crc kubenswrapper[4762]: I1009 14:30:00.146357 4762 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-operator-lifecycle-manager/collect-profiles-29333670-6nhf7"] Oct 09 14:30:00 crc kubenswrapper[4762]: E1009 14:30:00.148437 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3fdec5af-596d-4f0f-ae5d-5f4bd8991e5e" containerName="extract-content" Oct 09 14:30:00 crc kubenswrapper[4762]: I1009 14:30:00.148662 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="3fdec5af-596d-4f0f-ae5d-5f4bd8991e5e" containerName="extract-content" Oct 09 14:30:00 crc kubenswrapper[4762]: E1009 14:30:00.148764 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3fdec5af-596d-4f0f-ae5d-5f4bd8991e5e" containerName="registry-server" Oct 09 14:30:00 crc kubenswrapper[4762]: I1009 14:30:00.148826 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="3fdec5af-596d-4f0f-ae5d-5f4bd8991e5e" containerName="registry-server" Oct 09 14:30:00 crc kubenswrapper[4762]: E1009 14:30:00.148902 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3fdec5af-596d-4f0f-ae5d-5f4bd8991e5e" containerName="extract-utilities" Oct 09 14:30:00 crc kubenswrapper[4762]: I1009 14:30:00.148959 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="3fdec5af-596d-4f0f-ae5d-5f4bd8991e5e" containerName="extract-utilities" Oct 09 14:30:00 crc kubenswrapper[4762]: I1009 14:30:00.149160 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="3fdec5af-596d-4f0f-ae5d-5f4bd8991e5e" containerName="registry-server" Oct 09 14:30:00 crc kubenswrapper[4762]: I1009 14:30:00.150035 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29333670-6nhf7" Oct 09 14:30:00 crc kubenswrapper[4762]: I1009 14:30:00.153686 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 09 14:30:00 crc kubenswrapper[4762]: I1009 14:30:00.154109 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 09 14:30:00 crc kubenswrapper[4762]: I1009 14:30:00.190532 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29333670-6nhf7"] Oct 09 14:30:00 crc kubenswrapper[4762]: I1009 14:30:00.322677 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ptk59\" (UniqueName: \"kubernetes.io/projected/52bffb38-0b4e-4e17-a6b2-4be46c24d322-kube-api-access-ptk59\") pod \"collect-profiles-29333670-6nhf7\" (UID: \"52bffb38-0b4e-4e17-a6b2-4be46c24d322\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333670-6nhf7" Oct 09 14:30:00 crc kubenswrapper[4762]: I1009 14:30:00.322778 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/52bffb38-0b4e-4e17-a6b2-4be46c24d322-secret-volume\") pod \"collect-profiles-29333670-6nhf7\" (UID: \"52bffb38-0b4e-4e17-a6b2-4be46c24d322\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333670-6nhf7" Oct 09 14:30:00 crc kubenswrapper[4762]: I1009 14:30:00.322828 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/52bffb38-0b4e-4e17-a6b2-4be46c24d322-config-volume\") pod \"collect-profiles-29333670-6nhf7\" (UID: \"52bffb38-0b4e-4e17-a6b2-4be46c24d322\") " 
pod="openshift-operator-lifecycle-manager/collect-profiles-29333670-6nhf7" Oct 09 14:30:00 crc kubenswrapper[4762]: I1009 14:30:00.424309 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/52bffb38-0b4e-4e17-a6b2-4be46c24d322-secret-volume\") pod \"collect-profiles-29333670-6nhf7\" (UID: \"52bffb38-0b4e-4e17-a6b2-4be46c24d322\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333670-6nhf7" Oct 09 14:30:00 crc kubenswrapper[4762]: I1009 14:30:00.424433 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/52bffb38-0b4e-4e17-a6b2-4be46c24d322-config-volume\") pod \"collect-profiles-29333670-6nhf7\" (UID: \"52bffb38-0b4e-4e17-a6b2-4be46c24d322\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333670-6nhf7" Oct 09 14:30:00 crc kubenswrapper[4762]: I1009 14:30:00.424527 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ptk59\" (UniqueName: \"kubernetes.io/projected/52bffb38-0b4e-4e17-a6b2-4be46c24d322-kube-api-access-ptk59\") pod \"collect-profiles-29333670-6nhf7\" (UID: \"52bffb38-0b4e-4e17-a6b2-4be46c24d322\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333670-6nhf7" Oct 09 14:30:00 crc kubenswrapper[4762]: I1009 14:30:00.425348 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/52bffb38-0b4e-4e17-a6b2-4be46c24d322-config-volume\") pod \"collect-profiles-29333670-6nhf7\" (UID: \"52bffb38-0b4e-4e17-a6b2-4be46c24d322\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333670-6nhf7" Oct 09 14:30:00 crc kubenswrapper[4762]: I1009 14:30:00.430225 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/52bffb38-0b4e-4e17-a6b2-4be46c24d322-secret-volume\") pod \"collect-profiles-29333670-6nhf7\" (UID: \"52bffb38-0b4e-4e17-a6b2-4be46c24d322\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333670-6nhf7" Oct 09 14:30:00 crc kubenswrapper[4762]: I1009 14:30:00.440805 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ptk59\" (UniqueName: \"kubernetes.io/projected/52bffb38-0b4e-4e17-a6b2-4be46c24d322-kube-api-access-ptk59\") pod \"collect-profiles-29333670-6nhf7\" (UID: \"52bffb38-0b4e-4e17-a6b2-4be46c24d322\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333670-6nhf7" Oct 09 14:30:00 crc kubenswrapper[4762]: I1009 14:30:00.480313 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29333670-6nhf7" Oct 09 14:30:00 crc kubenswrapper[4762]: I1009 14:30:00.932896 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29333670-6nhf7"] Oct 09 14:30:01 crc kubenswrapper[4762]: I1009 14:30:01.253324 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29333670-6nhf7" event={"ID":"52bffb38-0b4e-4e17-a6b2-4be46c24d322","Type":"ContainerStarted","Data":"a9a90843032f9a4e0ff176622afafcaaed68b48ca2954977378b0c8340690cbc"} Oct 09 14:30:01 crc kubenswrapper[4762]: I1009 14:30:01.253700 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29333670-6nhf7" event={"ID":"52bffb38-0b4e-4e17-a6b2-4be46c24d322","Type":"ContainerStarted","Data":"7a51b9a83d01af674cb78df466c17d93fd2edb20f59e595ad735d2ea65fca224"} Oct 09 14:30:01 crc kubenswrapper[4762]: I1009 14:30:01.275944 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29333670-6nhf7" podStartSLOduration=1.275924324 podStartE2EDuration="1.275924324s" podCreationTimestamp="2025-10-09 14:30:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 14:30:01.271874768 +0000 UTC m=+3877.045665807" watchObservedRunningTime="2025-10-09 14:30:01.275924324 +0000 UTC m=+3877.049715373" Oct 09 14:30:02 crc kubenswrapper[4762]: I1009 14:30:02.262097 4762 generic.go:334] "Generic (PLEG): container finished" podID="52bffb38-0b4e-4e17-a6b2-4be46c24d322" containerID="a9a90843032f9a4e0ff176622afafcaaed68b48ca2954977378b0c8340690cbc" exitCode=0 Oct 09 14:30:02 crc kubenswrapper[4762]: I1009 14:30:02.262139 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29333670-6nhf7" event={"ID":"52bffb38-0b4e-4e17-a6b2-4be46c24d322","Type":"ContainerDied","Data":"a9a90843032f9a4e0ff176622afafcaaed68b48ca2954977378b0c8340690cbc"} Oct 09 14:30:03 crc kubenswrapper[4762]: I1009 14:30:03.516519 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29333670-6nhf7" Oct 09 14:30:03 crc kubenswrapper[4762]: I1009 14:30:03.667849 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ptk59\" (UniqueName: \"kubernetes.io/projected/52bffb38-0b4e-4e17-a6b2-4be46c24d322-kube-api-access-ptk59\") pod \"52bffb38-0b4e-4e17-a6b2-4be46c24d322\" (UID: \"52bffb38-0b4e-4e17-a6b2-4be46c24d322\") " Oct 09 14:30:03 crc kubenswrapper[4762]: I1009 14:30:03.667918 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/52bffb38-0b4e-4e17-a6b2-4be46c24d322-config-volume\") pod \"52bffb38-0b4e-4e17-a6b2-4be46c24d322\" (UID: \"52bffb38-0b4e-4e17-a6b2-4be46c24d322\") " Oct 09 14:30:03 crc kubenswrapper[4762]: I1009 14:30:03.667979 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/52bffb38-0b4e-4e17-a6b2-4be46c24d322-secret-volume\") pod \"52bffb38-0b4e-4e17-a6b2-4be46c24d322\" (UID: \"52bffb38-0b4e-4e17-a6b2-4be46c24d322\") " Oct 09 14:30:03 crc kubenswrapper[4762]: I1009 14:30:03.668487 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/52bffb38-0b4e-4e17-a6b2-4be46c24d322-config-volume" (OuterVolumeSpecName: "config-volume") pod "52bffb38-0b4e-4e17-a6b2-4be46c24d322" (UID: "52bffb38-0b4e-4e17-a6b2-4be46c24d322"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 14:30:03 crc kubenswrapper[4762]: I1009 14:30:03.673535 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/52bffb38-0b4e-4e17-a6b2-4be46c24d322-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "52bffb38-0b4e-4e17-a6b2-4be46c24d322" (UID: "52bffb38-0b4e-4e17-a6b2-4be46c24d322"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 14:30:03 crc kubenswrapper[4762]: I1009 14:30:03.673697 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/52bffb38-0b4e-4e17-a6b2-4be46c24d322-kube-api-access-ptk59" (OuterVolumeSpecName: "kube-api-access-ptk59") pod "52bffb38-0b4e-4e17-a6b2-4be46c24d322" (UID: "52bffb38-0b4e-4e17-a6b2-4be46c24d322"). InnerVolumeSpecName "kube-api-access-ptk59". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 14:30:03 crc kubenswrapper[4762]: I1009 14:30:03.769284 4762 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/52bffb38-0b4e-4e17-a6b2-4be46c24d322-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 09 14:30:03 crc kubenswrapper[4762]: I1009 14:30:03.769342 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ptk59\" (UniqueName: \"kubernetes.io/projected/52bffb38-0b4e-4e17-a6b2-4be46c24d322-kube-api-access-ptk59\") on node \"crc\" DevicePath \"\"" Oct 09 14:30:03 crc kubenswrapper[4762]: I1009 14:30:03.769356 4762 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/52bffb38-0b4e-4e17-a6b2-4be46c24d322-config-volume\") on node \"crc\" DevicePath \"\"" Oct 09 14:30:04 crc kubenswrapper[4762]: I1009 14:30:04.276181 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29333670-6nhf7" event={"ID":"52bffb38-0b4e-4e17-a6b2-4be46c24d322","Type":"ContainerDied","Data":"7a51b9a83d01af674cb78df466c17d93fd2edb20f59e595ad735d2ea65fca224"} Oct 09 14:30:04 crc kubenswrapper[4762]: I1009 14:30:04.276231 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7a51b9a83d01af674cb78df466c17d93fd2edb20f59e595ad735d2ea65fca224" Oct 09 14:30:04 crc kubenswrapper[4762]: I1009 14:30:04.276244 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29333670-6nhf7" Oct 09 14:30:04 crc kubenswrapper[4762]: I1009 14:30:04.384276 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29333625-ggp7p"] Oct 09 14:30:04 crc kubenswrapper[4762]: I1009 14:30:04.393345 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29333625-ggp7p"] Oct 09 14:30:04 crc kubenswrapper[4762]: I1009 14:30:04.976051 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c95067a5-19b0-4c94-9331-b820ca60432f" path="/var/lib/kubelet/pods/c95067a5-19b0-4c94-9331-b820ca60432f/volumes" Oct 09 14:30:08 crc kubenswrapper[4762]: I1009 14:30:08.964730 4762 scope.go:117] "RemoveContainer" containerID="4d1df28214271a51e41c9293c0c5b00b7686991cb22ae7b5b11ff19723d63a96" Oct 09 14:30:08 crc kubenswrapper[4762]: E1009 14:30:08.965012 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 14:30:22 crc kubenswrapper[4762]: I1009 14:30:22.965323 4762 scope.go:117] "RemoveContainer" containerID="4d1df28214271a51e41c9293c0c5b00b7686991cb22ae7b5b11ff19723d63a96" Oct 09 14:30:22 crc kubenswrapper[4762]: E1009 14:30:22.966190 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 14:30:31 crc kubenswrapper[4762]: I1009 14:30:31.094076 4762 scope.go:117] "RemoveContainer" containerID="ef1b1c79ac5170da5cd3679764a9beb1b0772009b17ff33dca8c1980d5a4f560" Oct 09 14:30:35 crc kubenswrapper[4762]: I1009 14:30:35.964674 4762 scope.go:117] "RemoveContainer" containerID="4d1df28214271a51e41c9293c0c5b00b7686991cb22ae7b5b11ff19723d63a96" Oct 09 14:30:35 crc kubenswrapper[4762]: E1009 14:30:35.965513 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 14:30:47 crc kubenswrapper[4762]: I1009 14:30:47.965561 4762 scope.go:117] "RemoveContainer" containerID="4d1df28214271a51e41c9293c0c5b00b7686991cb22ae7b5b11ff19723d63a96" Oct 09 14:30:47 crc kubenswrapper[4762]: E1009 14:30:47.966421 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 14:30:59 crc kubenswrapper[4762]: I1009 14:30:59.965531 4762 scope.go:117] "RemoveContainer" containerID="4d1df28214271a51e41c9293c0c5b00b7686991cb22ae7b5b11ff19723d63a96" Oct 09 14:30:59 crc kubenswrapper[4762]: E1009 14:30:59.966349 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 14:31:14 crc kubenswrapper[4762]: I1009 14:31:14.969358 4762 scope.go:117] "RemoveContainer" containerID="4d1df28214271a51e41c9293c0c5b00b7686991cb22ae7b5b11ff19723d63a96" Oct 09 14:31:14 crc kubenswrapper[4762]: E1009 14:31:14.970385 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 14:31:27 crc kubenswrapper[4762]: I1009 14:31:27.965365 4762 scope.go:117] "RemoveContainer" containerID="4d1df28214271a51e41c9293c0c5b00b7686991cb22ae7b5b11ff19723d63a96" Oct 09 14:31:27 crc kubenswrapper[4762]: E1009 14:31:27.966858 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 14:31:41 crc kubenswrapper[4762]: I1009 14:31:41.965898 4762 scope.go:117] "RemoveContainer" containerID="4d1df28214271a51e41c9293c0c5b00b7686991cb22ae7b5b11ff19723d63a96" Oct 09 14:31:41 crc kubenswrapper[4762]: E1009 14:31:41.966614 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 14:31:55 crc kubenswrapper[4762]: I1009 14:31:55.965146 4762 scope.go:117] "RemoveContainer" containerID="4d1df28214271a51e41c9293c0c5b00b7686991cb22ae7b5b11ff19723d63a96" Oct 09 14:31:55 crc kubenswrapper[4762]: E1009 14:31:55.965833 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 14:32:07 crc kubenswrapper[4762]: I1009 14:32:07.965502 4762 scope.go:117] "RemoveContainer" containerID="4d1df28214271a51e41c9293c0c5b00b7686991cb22ae7b5b11ff19723d63a96" Oct 09 14:32:07 crc kubenswrapper[4762]: E1009 14:32:07.966302 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 14:32:18 crc kubenswrapper[4762]: I1009 14:32:18.966345 4762 scope.go:117] "RemoveContainer" containerID="4d1df28214271a51e41c9293c0c5b00b7686991cb22ae7b5b11ff19723d63a96" Oct 09 14:32:18 crc kubenswrapper[4762]: E1009 14:32:18.967690 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 14:32:31 crc kubenswrapper[4762]: I1009 14:32:31.965144 4762 scope.go:117] "RemoveContainer" containerID="4d1df28214271a51e41c9293c0c5b00b7686991cb22ae7b5b11ff19723d63a96" Oct 09 14:32:31 crc kubenswrapper[4762]: E1009 14:32:31.966295 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 14:32:46 crc kubenswrapper[4762]: I1009 14:32:46.965613 4762 
scope.go:117] "RemoveContainer" containerID="4d1df28214271a51e41c9293c0c5b00b7686991cb22ae7b5b11ff19723d63a96" Oct 09 14:32:46 crc kubenswrapper[4762]: E1009 14:32:46.966511 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 14:33:01 crc kubenswrapper[4762]: I1009 14:33:01.298261 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-vklw9"] Oct 09 14:33:01 crc kubenswrapper[4762]: E1009 14:33:01.299243 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="52bffb38-0b4e-4e17-a6b2-4be46c24d322" containerName="collect-profiles" Oct 09 14:33:01 crc kubenswrapper[4762]: I1009 14:33:01.299260 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="52bffb38-0b4e-4e17-a6b2-4be46c24d322" containerName="collect-profiles" Oct 09 14:33:01 crc kubenswrapper[4762]: I1009 14:33:01.299414 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="52bffb38-0b4e-4e17-a6b2-4be46c24d322" containerName="collect-profiles" Oct 09 14:33:01 crc kubenswrapper[4762]: I1009 14:33:01.301356 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vklw9" Oct 09 14:33:01 crc kubenswrapper[4762]: I1009 14:33:01.314476 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-vklw9"] Oct 09 14:33:01 crc kubenswrapper[4762]: I1009 14:33:01.462954 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0eb7df00-282f-4cf8-ab0e-d810a7bd685f-catalog-content\") pod \"redhat-marketplace-vklw9\" (UID: \"0eb7df00-282f-4cf8-ab0e-d810a7bd685f\") " pod="openshift-marketplace/redhat-marketplace-vklw9" Oct 09 14:33:01 crc kubenswrapper[4762]: I1009 14:33:01.463188 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0eb7df00-282f-4cf8-ab0e-d810a7bd685f-utilities\") pod \"redhat-marketplace-vklw9\" (UID: \"0eb7df00-282f-4cf8-ab0e-d810a7bd685f\") " pod="openshift-marketplace/redhat-marketplace-vklw9" Oct 09 14:33:01 crc kubenswrapper[4762]: I1009 14:33:01.463274 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2j7f6\" (UniqueName: \"kubernetes.io/projected/0eb7df00-282f-4cf8-ab0e-d810a7bd685f-kube-api-access-2j7f6\") pod \"redhat-marketplace-vklw9\" (UID: \"0eb7df00-282f-4cf8-ab0e-d810a7bd685f\") " pod="openshift-marketplace/redhat-marketplace-vklw9" Oct 09 14:33:01 crc kubenswrapper[4762]: I1009 14:33:01.564138 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0eb7df00-282f-4cf8-ab0e-d810a7bd685f-utilities\") pod \"redhat-marketplace-vklw9\" (UID: \"0eb7df00-282f-4cf8-ab0e-d810a7bd685f\") " pod="openshift-marketplace/redhat-marketplace-vklw9" Oct 09 14:33:01 crc kubenswrapper[4762]: I1009 14:33:01.564193 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2j7f6\" (UniqueName: 
\"kubernetes.io/projected/0eb7df00-282f-4cf8-ab0e-d810a7bd685f-kube-api-access-2j7f6\") pod \"redhat-marketplace-vklw9\" (UID: \"0eb7df00-282f-4cf8-ab0e-d810a7bd685f\") " pod="openshift-marketplace/redhat-marketplace-vklw9" Oct 09 14:33:01 crc kubenswrapper[4762]: I1009 14:33:01.564230 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0eb7df00-282f-4cf8-ab0e-d810a7bd685f-catalog-content\") pod \"redhat-marketplace-vklw9\" (UID: \"0eb7df00-282f-4cf8-ab0e-d810a7bd685f\") " pod="openshift-marketplace/redhat-marketplace-vklw9" Oct 09 14:33:01 crc kubenswrapper[4762]: I1009 14:33:01.564688 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0eb7df00-282f-4cf8-ab0e-d810a7bd685f-utilities\") pod \"redhat-marketplace-vklw9\" (UID: \"0eb7df00-282f-4cf8-ab0e-d810a7bd685f\") " pod="openshift-marketplace/redhat-marketplace-vklw9" Oct 09 14:33:01 crc kubenswrapper[4762]: I1009 14:33:01.564776 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0eb7df00-282f-4cf8-ab0e-d810a7bd685f-catalog-content\") pod \"redhat-marketplace-vklw9\" (UID: \"0eb7df00-282f-4cf8-ab0e-d810a7bd685f\") " pod="openshift-marketplace/redhat-marketplace-vklw9" Oct 09 14:33:01 crc kubenswrapper[4762]: I1009 14:33:01.590792 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2j7f6\" (UniqueName: \"kubernetes.io/projected/0eb7df00-282f-4cf8-ab0e-d810a7bd685f-kube-api-access-2j7f6\") pod \"redhat-marketplace-vklw9\" (UID: \"0eb7df00-282f-4cf8-ab0e-d810a7bd685f\") " pod="openshift-marketplace/redhat-marketplace-vklw9" Oct 09 14:33:01 crc kubenswrapper[4762]: I1009 14:33:01.621663 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vklw9" Oct 09 14:33:01 crc kubenswrapper[4762]: I1009 14:33:01.964861 4762 scope.go:117] "RemoveContainer" containerID="4d1df28214271a51e41c9293c0c5b00b7686991cb22ae7b5b11ff19723d63a96" Oct 09 14:33:01 crc kubenswrapper[4762]: E1009 14:33:01.965132 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 14:33:02 crc kubenswrapper[4762]: I1009 14:33:02.037298 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-vklw9"] Oct 09 14:33:02 crc kubenswrapper[4762]: I1009 14:33:02.575308 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vklw9" event={"ID":"0eb7df00-282f-4cf8-ab0e-d810a7bd685f","Type":"ContainerStarted","Data":"c02775ced2221879bcbf7b1d372acddac4644f5442cc4bc5368744b9d8b1456a"} Oct 09 14:33:03 crc kubenswrapper[4762]: I1009 14:33:03.585331 4762 generic.go:334] "Generic (PLEG): container finished" podID="0eb7df00-282f-4cf8-ab0e-d810a7bd685f" containerID="53dd572ee7401f28f551de8d7e49b51412f9b04b2c8bfd87d4e87b81db9a6729" exitCode=0 Oct 09 14:33:03 crc kubenswrapper[4762]: I1009 14:33:03.585395 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vklw9" event={"ID":"0eb7df00-282f-4cf8-ab0e-d810a7bd685f","Type":"ContainerDied","Data":"53dd572ee7401f28f551de8d7e49b51412f9b04b2c8bfd87d4e87b81db9a6729"} Oct 09 14:33:03 crc kubenswrapper[4762]: I1009 14:33:03.588270 4762 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 09 14:33:05 crc kubenswrapper[4762]: I1009 14:33:05.599256 4762 generic.go:334] "Generic (PLEG): container finished" podID="0eb7df00-282f-4cf8-ab0e-d810a7bd685f" containerID="e41c8ddadfaed6c37eb4f238a790b00277ded48c829728eba0b06603eba2dfc3" exitCode=0 Oct 09 14:33:05 crc kubenswrapper[4762]: I1009 14:33:05.599377 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vklw9" event={"ID":"0eb7df00-282f-4cf8-ab0e-d810a7bd685f","Type":"ContainerDied","Data":"e41c8ddadfaed6c37eb4f238a790b00277ded48c829728eba0b06603eba2dfc3"} Oct 09 14:33:06 crc kubenswrapper[4762]: I1009 14:33:06.608491 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vklw9" event={"ID":"0eb7df00-282f-4cf8-ab0e-d810a7bd685f","Type":"ContainerStarted","Data":"deba9602ba7cd745746896695b49104a750ca0a524d79dbb63e258a57aa8cd95"} Oct 09 14:33:06 crc kubenswrapper[4762]: I1009 14:33:06.633274 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-vklw9" podStartSLOduration=3.150072975 podStartE2EDuration="5.633252218s" podCreationTimestamp="2025-10-09 14:33:01 +0000 UTC" firstStartedPulling="2025-10-09 14:33:03.587951161 +0000 UTC m=+4059.361742200" lastFinishedPulling="2025-10-09 14:33:06.071130404 +0000 UTC m=+4061.844921443" observedRunningTime="2025-10-09 14:33:06.626972724 +0000 UTC m=+4062.400763763" watchObservedRunningTime="2025-10-09 14:33:06.633252218 +0000 UTC m=+4062.407043257" Oct 09 14:33:11 crc 
Oct 09 14:33:11 crc kubenswrapper[4762]: I1009 14:33:11.624161 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-vklw9"
Oct 09 14:33:11 crc kubenswrapper[4762]: I1009 14:33:11.937081 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-vklw9"
Oct 09 14:33:11 crc kubenswrapper[4762]: I1009 14:33:11.982677 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-vklw9"
Oct 09 14:33:12 crc kubenswrapper[4762]: I1009 14:33:12.169007 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-vklw9"]
Oct 09 14:33:13 crc kubenswrapper[4762]: I1009 14:33:13.657004 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-vklw9" podUID="0eb7df00-282f-4cf8-ab0e-d810a7bd685f" containerName="registry-server" containerID="cri-o://deba9602ba7cd745746896695b49104a750ca0a524d79dbb63e258a57aa8cd95" gracePeriod=2
Oct 09 14:33:14 crc kubenswrapper[4762]: I1009 14:33:14.557211 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vklw9"
Oct 09 14:33:14 crc kubenswrapper[4762]: I1009 14:33:14.642087 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0eb7df00-282f-4cf8-ab0e-d810a7bd685f-utilities\") pod \"0eb7df00-282f-4cf8-ab0e-d810a7bd685f\" (UID: \"0eb7df00-282f-4cf8-ab0e-d810a7bd685f\") "
Oct 09 14:33:14 crc kubenswrapper[4762]: I1009 14:33:14.642199 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0eb7df00-282f-4cf8-ab0e-d810a7bd685f-catalog-content\") pod \"0eb7df00-282f-4cf8-ab0e-d810a7bd685f\" (UID: \"0eb7df00-282f-4cf8-ab0e-d810a7bd685f\") "
Oct 09 14:33:14 crc kubenswrapper[4762]: I1009 14:33:14.642234 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2j7f6\" (UniqueName: \"kubernetes.io/projected/0eb7df00-282f-4cf8-ab0e-d810a7bd685f-kube-api-access-2j7f6\") pod \"0eb7df00-282f-4cf8-ab0e-d810a7bd685f\" (UID: \"0eb7df00-282f-4cf8-ab0e-d810a7bd685f\") "
Oct 09 14:33:14 crc kubenswrapper[4762]: I1009 14:33:14.643145 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0eb7df00-282f-4cf8-ab0e-d810a7bd685f-utilities" (OuterVolumeSpecName: "utilities") pod "0eb7df00-282f-4cf8-ab0e-d810a7bd685f" (UID: "0eb7df00-282f-4cf8-ab0e-d810a7bd685f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 09 14:33:14 crc kubenswrapper[4762]: I1009 14:33:14.655271 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0eb7df00-282f-4cf8-ab0e-d810a7bd685f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0eb7df00-282f-4cf8-ab0e-d810a7bd685f" (UID: "0eb7df00-282f-4cf8-ab0e-d810a7bd685f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 09 14:33:14 crc kubenswrapper[4762]: I1009 14:33:14.655789 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0eb7df00-282f-4cf8-ab0e-d810a7bd685f-kube-api-access-2j7f6" (OuterVolumeSpecName: "kube-api-access-2j7f6") pod "0eb7df00-282f-4cf8-ab0e-d810a7bd685f" (UID: "0eb7df00-282f-4cf8-ab0e-d810a7bd685f"). InnerVolumeSpecName "kube-api-access-2j7f6". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 09 14:33:14 crc kubenswrapper[4762]: I1009 14:33:14.664884 4762 generic.go:334] "Generic (PLEG): container finished" podID="0eb7df00-282f-4cf8-ab0e-d810a7bd685f" containerID="deba9602ba7cd745746896695b49104a750ca0a524d79dbb63e258a57aa8cd95" exitCode=0
Oct 09 14:33:14 crc kubenswrapper[4762]: I1009 14:33:14.664925 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vklw9" event={"ID":"0eb7df00-282f-4cf8-ab0e-d810a7bd685f","Type":"ContainerDied","Data":"deba9602ba7cd745746896695b49104a750ca0a524d79dbb63e258a57aa8cd95"}
Oct 09 14:33:14 crc kubenswrapper[4762]: I1009 14:33:14.664950 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vklw9" event={"ID":"0eb7df00-282f-4cf8-ab0e-d810a7bd685f","Type":"ContainerDied","Data":"c02775ced2221879bcbf7b1d372acddac4644f5442cc4bc5368744b9d8b1456a"}
Oct 09 14:33:14 crc kubenswrapper[4762]: I1009 14:33:14.664928 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vklw9"
Oct 09 14:33:14 crc kubenswrapper[4762]: I1009 14:33:14.664966 4762 scope.go:117] "RemoveContainer" containerID="deba9602ba7cd745746896695b49104a750ca0a524d79dbb63e258a57aa8cd95"
Oct 09 14:33:14 crc kubenswrapper[4762]: I1009 14:33:14.683297 4762 scope.go:117] "RemoveContainer" containerID="e41c8ddadfaed6c37eb4f238a790b00277ded48c829728eba0b06603eba2dfc3"
Oct 09 14:33:14 crc kubenswrapper[4762]: I1009 14:33:14.700010 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-vklw9"]
Oct 09 14:33:14 crc kubenswrapper[4762]: I1009 14:33:14.702288 4762 scope.go:117] "RemoveContainer" containerID="53dd572ee7401f28f551de8d7e49b51412f9b04b2c8bfd87d4e87b81db9a6729"
Oct 09 14:33:14 crc kubenswrapper[4762]: I1009 14:33:14.705864 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-vklw9"]
Oct 09 14:33:14 crc kubenswrapper[4762]: I1009 14:33:14.727691 4762 scope.go:117] "RemoveContainer" containerID="deba9602ba7cd745746896695b49104a750ca0a524d79dbb63e258a57aa8cd95"
Oct 09 14:33:14 crc kubenswrapper[4762]: E1009 14:33:14.728240 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"deba9602ba7cd745746896695b49104a750ca0a524d79dbb63e258a57aa8cd95\": container with ID starting with deba9602ba7cd745746896695b49104a750ca0a524d79dbb63e258a57aa8cd95 not found: ID does not exist" containerID="deba9602ba7cd745746896695b49104a750ca0a524d79dbb63e258a57aa8cd95"
Oct 09 14:33:14 crc kubenswrapper[4762]: I1009 14:33:14.728292 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"deba9602ba7cd745746896695b49104a750ca0a524d79dbb63e258a57aa8cd95"} err="failed to get container status \"deba9602ba7cd745746896695b49104a750ca0a524d79dbb63e258a57aa8cd95\": rpc error: code = NotFound desc = could not find container \"deba9602ba7cd745746896695b49104a750ca0a524d79dbb63e258a57aa8cd95\": container with ID starting with deba9602ba7cd745746896695b49104a750ca0a524d79dbb63e258a57aa8cd95 not found: ID does not exist"
Oct 09 14:33:14 crc kubenswrapper[4762]: I1009 14:33:14.728321 4762 scope.go:117] "RemoveContainer" containerID="e41c8ddadfaed6c37eb4f238a790b00277ded48c829728eba0b06603eba2dfc3"
Oct 09 14:33:14 crc kubenswrapper[4762]: E1009 14:33:14.728666 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e41c8ddadfaed6c37eb4f238a790b00277ded48c829728eba0b06603eba2dfc3\": container with ID starting with e41c8ddadfaed6c37eb4f238a790b00277ded48c829728eba0b06603eba2dfc3 not found: ID does not exist" containerID="e41c8ddadfaed6c37eb4f238a790b00277ded48c829728eba0b06603eba2dfc3"
Oct 09 14:33:14 crc kubenswrapper[4762]: I1009 14:33:14.728704 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e41c8ddadfaed6c37eb4f238a790b00277ded48c829728eba0b06603eba2dfc3"} err="failed to get container status \"e41c8ddadfaed6c37eb4f238a790b00277ded48c829728eba0b06603eba2dfc3\": rpc error: code = NotFound desc = could not find container \"e41c8ddadfaed6c37eb4f238a790b00277ded48c829728eba0b06603eba2dfc3\": container with ID starting with e41c8ddadfaed6c37eb4f238a790b00277ded48c829728eba0b06603eba2dfc3 not found: ID does not exist"
Oct 09 14:33:14 crc kubenswrapper[4762]: I1009 14:33:14.728726 4762 scope.go:117] "RemoveContainer" containerID="53dd572ee7401f28f551de8d7e49b51412f9b04b2c8bfd87d4e87b81db9a6729"
Oct 09 14:33:14 crc kubenswrapper[4762]: E1009 14:33:14.728960 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"53dd572ee7401f28f551de8d7e49b51412f9b04b2c8bfd87d4e87b81db9a6729\": container with ID starting with 53dd572ee7401f28f551de8d7e49b51412f9b04b2c8bfd87d4e87b81db9a6729 not found: ID does not exist" containerID="53dd572ee7401f28f551de8d7e49b51412f9b04b2c8bfd87d4e87b81db9a6729"
Oct 09 14:33:14 crc kubenswrapper[4762]: I1009 14:33:14.728994 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"53dd572ee7401f28f551de8d7e49b51412f9b04b2c8bfd87d4e87b81db9a6729"} err="failed to get container status \"53dd572ee7401f28f551de8d7e49b51412f9b04b2c8bfd87d4e87b81db9a6729\": rpc error: code = NotFound desc = could not find container \"53dd572ee7401f28f551de8d7e49b51412f9b04b2c8bfd87d4e87b81db9a6729\": container with ID starting with 53dd572ee7401f28f551de8d7e49b51412f9b04b2c8bfd87d4e87b81db9a6729 not found: ID does not exist"
Oct 09 14:33:14 crc kubenswrapper[4762]: I1009 14:33:14.744252 4762 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0eb7df00-282f-4cf8-ab0e-d810a7bd685f-utilities\") on node \"crc\" DevicePath \"\""
Oct 09 14:33:14 crc kubenswrapper[4762]: I1009 14:33:14.744314 4762 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0eb7df00-282f-4cf8-ab0e-d810a7bd685f-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 09 14:33:14 crc kubenswrapper[4762]: I1009 14:33:14.744330 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2j7f6\" (UniqueName: \"kubernetes.io/projected/0eb7df00-282f-4cf8-ab0e-d810a7bd685f-kube-api-access-2j7f6\") on node \"crc\" DevicePath \"\""
Oct 09 14:33:14 crc kubenswrapper[4762]: I1009 14:33:14.979189 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0eb7df00-282f-4cf8-ab0e-d810a7bd685f" path="/var/lib/kubelet/pods/0eb7df00-282f-4cf8-ab0e-d810a7bd685f/volumes"
Oct 09 14:33:15 crc kubenswrapper[4762]: I1009 14:33:15.965672 4762 scope.go:117] "RemoveContainer" containerID="4d1df28214271a51e41c9293c0c5b00b7686991cb22ae7b5b11ff19723d63a96"
Oct 09 14:33:15 crc kubenswrapper[4762]: E1009 14:33:15.965904 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14"
Oct 09 14:33:28 crc kubenswrapper[4762]: I1009 14:33:28.969460 4762 scope.go:117] "RemoveContainer" containerID="4d1df28214271a51e41c9293c0c5b00b7686991cb22ae7b5b11ff19723d63a96"
Oct 09 14:33:28 crc kubenswrapper[4762]: E1009 14:33:28.971272 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14"
Oct 09 14:33:39 crc kubenswrapper[4762]: I1009 14:33:39.965381 4762 scope.go:117] "RemoveContainer" containerID="4d1df28214271a51e41c9293c0c5b00b7686991cb22ae7b5b11ff19723d63a96"
Oct 09 14:33:39 crc kubenswrapper[4762]: E1009 14:33:39.966205 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14"
Oct 09 14:33:51 crc kubenswrapper[4762]: I1009 14:33:51.965715 4762 scope.go:117] "RemoveContainer" containerID="4d1df28214271a51e41c9293c0c5b00b7686991cb22ae7b5b11ff19723d63a96"
Oct 09 14:33:51 crc kubenswrapper[4762]: E1009 14:33:51.966344 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14"
Oct 09 14:34:06 crc kubenswrapper[4762]: I1009 14:34:06.965231 4762 scope.go:117] "RemoveContainer" containerID="4d1df28214271a51e41c9293c0c5b00b7686991cb22ae7b5b11ff19723d63a96"
Oct 09 14:34:06 crc kubenswrapper[4762]: E1009 14:34:06.966224 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14"
Oct 09 14:34:18 crc kubenswrapper[4762]: I1009 14:34:18.496040 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-gxgf9"]
Oct 09 14:34:18 crc kubenswrapper[4762]: E1009 14:34:18.496958 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0eb7df00-282f-4cf8-ab0e-d810a7bd685f" containerName="extract-content"
Oct 09 14:34:18 crc kubenswrapper[4762]: I1009 14:34:18.496972 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="0eb7df00-282f-4cf8-ab0e-d810a7bd685f" containerName="extract-content"
Oct 09 14:34:18 crc kubenswrapper[4762]: E1009 14:34:18.496997 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0eb7df00-282f-4cf8-ab0e-d810a7bd685f" containerName="extract-utilities"
Oct 09 14:34:18 crc kubenswrapper[4762]: I1009 14:34:18.497003 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="0eb7df00-282f-4cf8-ab0e-d810a7bd685f" containerName="extract-utilities"
Oct 09 14:34:18 crc kubenswrapper[4762]: E1009 14:34:18.497026 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0eb7df00-282f-4cf8-ab0e-d810a7bd685f" containerName="registry-server"
Oct 09 14:34:18 crc kubenswrapper[4762]: I1009 14:34:18.497032 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="0eb7df00-282f-4cf8-ab0e-d810a7bd685f" containerName="registry-server"
Oct 09 14:34:18 crc kubenswrapper[4762]: I1009 14:34:18.497168 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="0eb7df00-282f-4cf8-ab0e-d810a7bd685f" containerName="registry-server"
Oct 09 14:34:18 crc kubenswrapper[4762]: I1009 14:34:18.498304 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-gxgf9"
Oct 09 14:34:18 crc kubenswrapper[4762]: I1009 14:34:18.507246 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-gxgf9"]
Oct 09 14:34:18 crc kubenswrapper[4762]: I1009 14:34:18.689058 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f838ea2f-cc3a-4ac5-9bb4-2a71fc609dc3-catalog-content\") pod \"certified-operators-gxgf9\" (UID: \"f838ea2f-cc3a-4ac5-9bb4-2a71fc609dc3\") " pod="openshift-marketplace/certified-operators-gxgf9"
Oct 09 14:34:18 crc kubenswrapper[4762]: I1009 14:34:18.689119 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dr4s4\" (UniqueName: \"kubernetes.io/projected/f838ea2f-cc3a-4ac5-9bb4-2a71fc609dc3-kube-api-access-dr4s4\") pod \"certified-operators-gxgf9\" (UID: \"f838ea2f-cc3a-4ac5-9bb4-2a71fc609dc3\") " pod="openshift-marketplace/certified-operators-gxgf9"
Oct 09 14:34:18 crc kubenswrapper[4762]: I1009 14:34:18.689206 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f838ea2f-cc3a-4ac5-9bb4-2a71fc609dc3-utilities\") pod \"certified-operators-gxgf9\" (UID: \"f838ea2f-cc3a-4ac5-9bb4-2a71fc609dc3\") " pod="openshift-marketplace/certified-operators-gxgf9"
Oct 09 14:34:18 crc kubenswrapper[4762]: I1009 14:34:18.790386 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f838ea2f-cc3a-4ac5-9bb4-2a71fc609dc3-catalog-content\") pod \"certified-operators-gxgf9\" (UID: \"f838ea2f-cc3a-4ac5-9bb4-2a71fc609dc3\") " pod="openshift-marketplace/certified-operators-gxgf9"
Oct 09 14:34:18 crc kubenswrapper[4762]: I1009 14:34:18.790438 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dr4s4\" (UniqueName: \"kubernetes.io/projected/f838ea2f-cc3a-4ac5-9bb4-2a71fc609dc3-kube-api-access-dr4s4\") pod \"certified-operators-gxgf9\" (UID: \"f838ea2f-cc3a-4ac5-9bb4-2a71fc609dc3\") " pod="openshift-marketplace/certified-operators-gxgf9"
Oct 09 14:34:18 crc kubenswrapper[4762]: I1009 14:34:18.790486 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f838ea2f-cc3a-4ac5-9bb4-2a71fc609dc3-utilities\") pod \"certified-operators-gxgf9\" (UID: \"f838ea2f-cc3a-4ac5-9bb4-2a71fc609dc3\") " pod="openshift-marketplace/certified-operators-gxgf9"
Oct 09 14:34:18 crc kubenswrapper[4762]: I1009 14:34:18.790978 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f838ea2f-cc3a-4ac5-9bb4-2a71fc609dc3-catalog-content\") pod \"certified-operators-gxgf9\" (UID: \"f838ea2f-cc3a-4ac5-9bb4-2a71fc609dc3\") " pod="openshift-marketplace/certified-operators-gxgf9"
Oct 09 14:34:18 crc kubenswrapper[4762]: I1009 14:34:18.791006 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f838ea2f-cc3a-4ac5-9bb4-2a71fc609dc3-utilities\") pod \"certified-operators-gxgf9\" (UID: \"f838ea2f-cc3a-4ac5-9bb4-2a71fc609dc3\") " pod="openshift-marketplace/certified-operators-gxgf9"
Oct 09 14:34:18 crc kubenswrapper[4762]: I1009 14:34:18.816103 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dr4s4\" (UniqueName: \"kubernetes.io/projected/f838ea2f-cc3a-4ac5-9bb4-2a71fc609dc3-kube-api-access-dr4s4\") pod \"certified-operators-gxgf9\" (UID: \"f838ea2f-cc3a-4ac5-9bb4-2a71fc609dc3\") " pod="openshift-marketplace/certified-operators-gxgf9"
Oct 09 14:34:18 crc kubenswrapper[4762]: I1009 14:34:18.817380 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-gxgf9"
Oct 09 14:34:19 crc kubenswrapper[4762]: I1009 14:34:19.332989 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-gxgf9"]
Oct 09 14:34:20 crc kubenswrapper[4762]: I1009 14:34:20.150677 4762 generic.go:334] "Generic (PLEG): container finished" podID="f838ea2f-cc3a-4ac5-9bb4-2a71fc609dc3" containerID="1ca18c249f3be0517d6a5195f5cca5260722cbacffa97dc2000076a062b8fccb" exitCode=0
Oct 09 14:34:20 crc kubenswrapper[4762]: I1009 14:34:20.150790 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gxgf9" event={"ID":"f838ea2f-cc3a-4ac5-9bb4-2a71fc609dc3","Type":"ContainerDied","Data":"1ca18c249f3be0517d6a5195f5cca5260722cbacffa97dc2000076a062b8fccb"}
Oct 09 14:34:20 crc kubenswrapper[4762]: I1009 14:34:20.152520 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gxgf9" event={"ID":"f838ea2f-cc3a-4ac5-9bb4-2a71fc609dc3","Type":"ContainerStarted","Data":"36d0cae879140315d93a49b3dea22879c395e51707e8fb2a1e42a42ef4b7d991"}
Oct 09 14:34:21 crc kubenswrapper[4762]: I1009 14:34:21.160974 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gxgf9" event={"ID":"f838ea2f-cc3a-4ac5-9bb4-2a71fc609dc3","Type":"ContainerStarted","Data":"fa5a5e4a8a8dfb1f993072f31a2609aa924413c15b17d9c9f65d3039b032a588"}
Oct 09 14:34:21 crc kubenswrapper[4762]: I1009 14:34:21.966143 4762 scope.go:117] "RemoveContainer" containerID="4d1df28214271a51e41c9293c0c5b00b7686991cb22ae7b5b11ff19723d63a96"
Oct 09 14:34:21 crc kubenswrapper[4762]: E1009 14:34:21.966930 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14"
Oct 09 14:34:22 crc kubenswrapper[4762]: I1009 14:34:22.170458 4762 generic.go:334] "Generic (PLEG): container finished" podID="f838ea2f-cc3a-4ac5-9bb4-2a71fc609dc3" containerID="fa5a5e4a8a8dfb1f993072f31a2609aa924413c15b17d9c9f65d3039b032a588" exitCode=0
Oct 09 14:34:22 crc kubenswrapper[4762]: I1009 14:34:22.170505 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gxgf9" event={"ID":"f838ea2f-cc3a-4ac5-9bb4-2a71fc609dc3","Type":"ContainerDied","Data":"fa5a5e4a8a8dfb1f993072f31a2609aa924413c15b17d9c9f65d3039b032a588"}
Oct 09 14:34:23 crc kubenswrapper[4762]: I1009 14:34:23.179406 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gxgf9" event={"ID":"f838ea2f-cc3a-4ac5-9bb4-2a71fc609dc3","Type":"ContainerStarted","Data":"8a54a0c5f6f1500d674d323a20e38048de28216e61a6f777b47ee117e213b5ea"}
Oct 09 14:34:23 crc kubenswrapper[4762]: I1009 14:34:23.200151 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-gxgf9" podStartSLOduration=2.721537451 podStartE2EDuration="5.200132402s" podCreationTimestamp="2025-10-09 14:34:18 +0000 UTC" firstStartedPulling="2025-10-09 14:34:20.15227643 +0000 UTC m=+4135.926067479" lastFinishedPulling="2025-10-09 14:34:22.630871391 +0000 UTC m=+4138.404662430" observedRunningTime="2025-10-09
14:34:23.198241923 +0000 UTC m=+4138.972032962" watchObservedRunningTime="2025-10-09 14:34:23.200132402 +0000 UTC m=+4138.973923441" Oct 09 14:34:28 crc kubenswrapper[4762]: I1009 14:34:28.817893 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-gxgf9" Oct 09 14:34:28 crc kubenswrapper[4762]: I1009 14:34:28.818392 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-gxgf9" Oct 09 14:34:28 crc kubenswrapper[4762]: I1009 14:34:28.863779 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-gxgf9" Oct 09 14:34:29 crc kubenswrapper[4762]: I1009 14:34:29.255728 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-gxgf9" Oct 09 14:34:29 crc kubenswrapper[4762]: I1009 14:34:29.307447 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-gxgf9"] Oct 09 14:34:31 crc kubenswrapper[4762]: I1009 14:34:31.232269 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-gxgf9" podUID="f838ea2f-cc3a-4ac5-9bb4-2a71fc609dc3" containerName="registry-server" containerID="cri-o://8a54a0c5f6f1500d674d323a20e38048de28216e61a6f777b47ee117e213b5ea" gracePeriod=2 Oct 09 14:34:31 crc kubenswrapper[4762]: I1009 14:34:31.651146 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-gxgf9" Oct 09 14:34:31 crc kubenswrapper[4762]: I1009 14:34:31.775612 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f838ea2f-cc3a-4ac5-9bb4-2a71fc609dc3-catalog-content\") pod \"f838ea2f-cc3a-4ac5-9bb4-2a71fc609dc3\" (UID: \"f838ea2f-cc3a-4ac5-9bb4-2a71fc609dc3\") " Oct 09 14:34:31 crc kubenswrapper[4762]: I1009 14:34:31.775694 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f838ea2f-cc3a-4ac5-9bb4-2a71fc609dc3-utilities\") pod \"f838ea2f-cc3a-4ac5-9bb4-2a71fc609dc3\" (UID: \"f838ea2f-cc3a-4ac5-9bb4-2a71fc609dc3\") " Oct 09 14:34:31 crc kubenswrapper[4762]: I1009 14:34:31.775944 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dr4s4\" (UniqueName: \"kubernetes.io/projected/f838ea2f-cc3a-4ac5-9bb4-2a71fc609dc3-kube-api-access-dr4s4\") pod \"f838ea2f-cc3a-4ac5-9bb4-2a71fc609dc3\" (UID: \"f838ea2f-cc3a-4ac5-9bb4-2a71fc609dc3\") " Oct 09 14:34:31 crc kubenswrapper[4762]: I1009 14:34:31.776867 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f838ea2f-cc3a-4ac5-9bb4-2a71fc609dc3-utilities" (OuterVolumeSpecName: "utilities") pod "f838ea2f-cc3a-4ac5-9bb4-2a71fc609dc3" (UID: "f838ea2f-cc3a-4ac5-9bb4-2a71fc609dc3"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 14:34:31 crc kubenswrapper[4762]: I1009 14:34:31.783118 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f838ea2f-cc3a-4ac5-9bb4-2a71fc609dc3-kube-api-access-dr4s4" (OuterVolumeSpecName: "kube-api-access-dr4s4") pod "f838ea2f-cc3a-4ac5-9bb4-2a71fc609dc3" (UID: "f838ea2f-cc3a-4ac5-9bb4-2a71fc609dc3"). InnerVolumeSpecName "kube-api-access-dr4s4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 14:34:31 crc kubenswrapper[4762]: I1009 14:34:31.839231 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f838ea2f-cc3a-4ac5-9bb4-2a71fc609dc3-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f838ea2f-cc3a-4ac5-9bb4-2a71fc609dc3" (UID: "f838ea2f-cc3a-4ac5-9bb4-2a71fc609dc3"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 14:34:31 crc kubenswrapper[4762]: I1009 14:34:31.877967 4762 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f838ea2f-cc3a-4ac5-9bb4-2a71fc609dc3-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 14:34:31 crc kubenswrapper[4762]: I1009 14:34:31.878025 4762 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f838ea2f-cc3a-4ac5-9bb4-2a71fc609dc3-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 14:34:31 crc kubenswrapper[4762]: I1009 14:34:31.878038 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dr4s4\" (UniqueName: \"kubernetes.io/projected/f838ea2f-cc3a-4ac5-9bb4-2a71fc609dc3-kube-api-access-dr4s4\") on node \"crc\" DevicePath \"\"" Oct 09 14:34:32 crc kubenswrapper[4762]: I1009 14:34:32.242632 4762 generic.go:334] "Generic (PLEG): container finished" podID="f838ea2f-cc3a-4ac5-9bb4-2a71fc609dc3" containerID="8a54a0c5f6f1500d674d323a20e38048de28216e61a6f777b47ee117e213b5ea" exitCode=0 Oct 09 14:34:32 crc kubenswrapper[4762]: I1009 14:34:32.242724 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-gxgf9" Oct 09 14:34:32 crc kubenswrapper[4762]: I1009 14:34:32.242743 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gxgf9" event={"ID":"f838ea2f-cc3a-4ac5-9bb4-2a71fc609dc3","Type":"ContainerDied","Data":"8a54a0c5f6f1500d674d323a20e38048de28216e61a6f777b47ee117e213b5ea"} Oct 09 14:34:32 crc kubenswrapper[4762]: I1009 14:34:32.243135 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gxgf9" event={"ID":"f838ea2f-cc3a-4ac5-9bb4-2a71fc609dc3","Type":"ContainerDied","Data":"36d0cae879140315d93a49b3dea22879c395e51707e8fb2a1e42a42ef4b7d991"} Oct 09 14:34:32 crc kubenswrapper[4762]: I1009 14:34:32.243158 4762 scope.go:117] "RemoveContainer" containerID="8a54a0c5f6f1500d674d323a20e38048de28216e61a6f777b47ee117e213b5ea" Oct 09 14:34:32 crc kubenswrapper[4762]: I1009 14:34:32.261340 4762 scope.go:117] "RemoveContainer" containerID="fa5a5e4a8a8dfb1f993072f31a2609aa924413c15b17d9c9f65d3039b032a588" Oct 09 14:34:32 crc kubenswrapper[4762]: I1009 14:34:32.293227 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-gxgf9"] Oct 09 14:34:32 crc kubenswrapper[4762]: I1009 14:34:32.299940 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-gxgf9"] Oct 09 14:34:32 crc kubenswrapper[4762]: I1009 14:34:32.312971 4762 scope.go:117] "RemoveContainer" containerID="1ca18c249f3be0517d6a5195f5cca5260722cbacffa97dc2000076a062b8fccb" Oct 09 14:34:32 crc kubenswrapper[4762]: I1009 14:34:32.329208 4762 scope.go:117] "RemoveContainer" containerID="8a54a0c5f6f1500d674d323a20e38048de28216e61a6f777b47ee117e213b5ea" Oct 09 14:34:32 crc kubenswrapper[4762]: E1009 14:34:32.330644 4762 log.go:32] 
"ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8a54a0c5f6f1500d674d323a20e38048de28216e61a6f777b47ee117e213b5ea\": container with ID starting with 8a54a0c5f6f1500d674d323a20e38048de28216e61a6f777b47ee117e213b5ea not found: ID does not exist" containerID="8a54a0c5f6f1500d674d323a20e38048de28216e61a6f777b47ee117e213b5ea" Oct 09 14:34:32 crc kubenswrapper[4762]: I1009 14:34:32.330710 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8a54a0c5f6f1500d674d323a20e38048de28216e61a6f777b47ee117e213b5ea"} err="failed to get container status \"8a54a0c5f6f1500d674d323a20e38048de28216e61a6f777b47ee117e213b5ea\": rpc error: code = NotFound desc = could not find container \"8a54a0c5f6f1500d674d323a20e38048de28216e61a6f777b47ee117e213b5ea\": container with ID starting with 8a54a0c5f6f1500d674d323a20e38048de28216e61a6f777b47ee117e213b5ea not found: ID does not exist" Oct 09 14:34:32 crc kubenswrapper[4762]: I1009 14:34:32.330743 4762 scope.go:117] "RemoveContainer" containerID="fa5a5e4a8a8dfb1f993072f31a2609aa924413c15b17d9c9f65d3039b032a588" Oct 09 14:34:32 crc kubenswrapper[4762]: E1009 14:34:32.331090 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fa5a5e4a8a8dfb1f993072f31a2609aa924413c15b17d9c9f65d3039b032a588\": container with ID starting with fa5a5e4a8a8dfb1f993072f31a2609aa924413c15b17d9c9f65d3039b032a588 not found: ID does not exist" containerID="fa5a5e4a8a8dfb1f993072f31a2609aa924413c15b17d9c9f65d3039b032a588" Oct 09 14:34:32 crc kubenswrapper[4762]: I1009 14:34:32.331124 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fa5a5e4a8a8dfb1f993072f31a2609aa924413c15b17d9c9f65d3039b032a588"} err="failed to get container status \"fa5a5e4a8a8dfb1f993072f31a2609aa924413c15b17d9c9f65d3039b032a588\": rpc error: code = NotFound desc = could not find container \"fa5a5e4a8a8dfb1f993072f31a2609aa924413c15b17d9c9f65d3039b032a588\": container with ID starting with fa5a5e4a8a8dfb1f993072f31a2609aa924413c15b17d9c9f65d3039b032a588 not found: ID does not exist" Oct 09 14:34:32 crc kubenswrapper[4762]: I1009 14:34:32.331144 4762 scope.go:117] "RemoveContainer" containerID="1ca18c249f3be0517d6a5195f5cca5260722cbacffa97dc2000076a062b8fccb" Oct 09 14:34:32 crc kubenswrapper[4762]: E1009 14:34:32.332648 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1ca18c249f3be0517d6a5195f5cca5260722cbacffa97dc2000076a062b8fccb\": container with ID starting with 1ca18c249f3be0517d6a5195f5cca5260722cbacffa97dc2000076a062b8fccb not found: ID does not exist" containerID="1ca18c249f3be0517d6a5195f5cca5260722cbacffa97dc2000076a062b8fccb" Oct 09 14:34:32 crc kubenswrapper[4762]: I1009 14:34:32.332674 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1ca18c249f3be0517d6a5195f5cca5260722cbacffa97dc2000076a062b8fccb"} err="failed to get container status \"1ca18c249f3be0517d6a5195f5cca5260722cbacffa97dc2000076a062b8fccb\": rpc error: code = NotFound desc = could not find container \"1ca18c249f3be0517d6a5195f5cca5260722cbacffa97dc2000076a062b8fccb\": container with ID starting with 1ca18c249f3be0517d6a5195f5cca5260722cbacffa97dc2000076a062b8fccb not found: ID does not exist" Oct 09 14:34:32 crc kubenswrapper[4762]: I1009 14:34:32.976542 4762 kubelet_volumes.go:163] "Cleaned 
up orphaned pod volumes dir" podUID="f838ea2f-cc3a-4ac5-9bb4-2a71fc609dc3" path="/var/lib/kubelet/pods/f838ea2f-cc3a-4ac5-9bb4-2a71fc609dc3/volumes" Oct 09 14:34:36 crc kubenswrapper[4762]: I1009 14:34:36.964932 4762 scope.go:117] "RemoveContainer" containerID="4d1df28214271a51e41c9293c0c5b00b7686991cb22ae7b5b11ff19723d63a96" Oct 09 14:34:36 crc kubenswrapper[4762]: E1009 14:34:36.965485 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 14:34:47 crc kubenswrapper[4762]: I1009 14:34:47.965584 4762 scope.go:117] "RemoveContainer" containerID="4d1df28214271a51e41c9293c0c5b00b7686991cb22ae7b5b11ff19723d63a96" Oct 09 14:34:48 crc kubenswrapper[4762]: I1009 14:34:48.387457 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" event={"ID":"366049a3-acf6-488c-9f93-4557528d6d14","Type":"ContainerStarted","Data":"e11f4c8cf3d0070adc51c57c959d652c9fcff91f5501980ee4d1a803ecbfeabf"} Oct 09 14:35:08 crc kubenswrapper[4762]: I1009 14:35:08.926890 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-gbhsj"] Oct 09 14:35:08 crc kubenswrapper[4762]: E1009 14:35:08.927952 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f838ea2f-cc3a-4ac5-9bb4-2a71fc609dc3" containerName="registry-server" Oct 09 14:35:08 crc kubenswrapper[4762]: I1009 14:35:08.927972 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="f838ea2f-cc3a-4ac5-9bb4-2a71fc609dc3" containerName="registry-server" Oct 09 14:35:08 crc kubenswrapper[4762]: E1009 14:35:08.927996 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f838ea2f-cc3a-4ac5-9bb4-2a71fc609dc3" containerName="extract-content" Oct 09 14:35:08 crc kubenswrapper[4762]: I1009 14:35:08.928004 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="f838ea2f-cc3a-4ac5-9bb4-2a71fc609dc3" containerName="extract-content" Oct 09 14:35:08 crc kubenswrapper[4762]: E1009 14:35:08.928019 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f838ea2f-cc3a-4ac5-9bb4-2a71fc609dc3" containerName="extract-utilities" Oct 09 14:35:08 crc kubenswrapper[4762]: I1009 14:35:08.928026 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="f838ea2f-cc3a-4ac5-9bb4-2a71fc609dc3" containerName="extract-utilities" Oct 09 14:35:08 crc kubenswrapper[4762]: I1009 14:35:08.928209 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="f838ea2f-cc3a-4ac5-9bb4-2a71fc609dc3" containerName="registry-server" Oct 09 14:35:08 crc kubenswrapper[4762]: I1009 14:35:08.929508 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-gbhsj" Oct 09 14:35:08 crc kubenswrapper[4762]: I1009 14:35:08.939284 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-gbhsj"] Oct 09 14:35:09 crc kubenswrapper[4762]: I1009 14:35:09.011946 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r78wg\" (UniqueName: \"kubernetes.io/projected/df7de3cd-f079-453a-816e-b65a297d3e5b-kube-api-access-r78wg\") pod \"community-operators-gbhsj\" (UID: \"df7de3cd-f079-453a-816e-b65a297d3e5b\") " pod="openshift-marketplace/community-operators-gbhsj" Oct 09 14:35:09 crc kubenswrapper[4762]: I1009 14:35:09.012039 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/df7de3cd-f079-453a-816e-b65a297d3e5b-utilities\") pod \"community-operators-gbhsj\" (UID: \"df7de3cd-f079-453a-816e-b65a297d3e5b\") " pod="openshift-marketplace/community-operators-gbhsj" Oct 09 14:35:09 crc kubenswrapper[4762]: I1009 14:35:09.012112 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/df7de3cd-f079-453a-816e-b65a297d3e5b-catalog-content\") pod \"community-operators-gbhsj\" (UID: \"df7de3cd-f079-453a-816e-b65a297d3e5b\") " pod="openshift-marketplace/community-operators-gbhsj" Oct 09 14:35:09 crc kubenswrapper[4762]: I1009 14:35:09.113547 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/df7de3cd-f079-453a-816e-b65a297d3e5b-catalog-content\") pod \"community-operators-gbhsj\" (UID: \"df7de3cd-f079-453a-816e-b65a297d3e5b\") " pod="openshift-marketplace/community-operators-gbhsj" Oct 09 14:35:09 crc kubenswrapper[4762]: I1009 14:35:09.113609 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r78wg\" (UniqueName: \"kubernetes.io/projected/df7de3cd-f079-453a-816e-b65a297d3e5b-kube-api-access-r78wg\") pod \"community-operators-gbhsj\" (UID: \"df7de3cd-f079-453a-816e-b65a297d3e5b\") " pod="openshift-marketplace/community-operators-gbhsj" Oct 09 14:35:09 crc kubenswrapper[4762]: I1009 14:35:09.113672 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/df7de3cd-f079-453a-816e-b65a297d3e5b-utilities\") pod \"community-operators-gbhsj\" (UID: \"df7de3cd-f079-453a-816e-b65a297d3e5b\") " pod="openshift-marketplace/community-operators-gbhsj" Oct 09 14:35:09 crc kubenswrapper[4762]: I1009 14:35:09.114132 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/df7de3cd-f079-453a-816e-b65a297d3e5b-utilities\") pod \"community-operators-gbhsj\" (UID: \"df7de3cd-f079-453a-816e-b65a297d3e5b\") " pod="openshift-marketplace/community-operators-gbhsj" Oct 09 14:35:09 crc kubenswrapper[4762]: I1009 14:35:09.114135 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/df7de3cd-f079-453a-816e-b65a297d3e5b-catalog-content\") pod \"community-operators-gbhsj\" (UID: \"df7de3cd-f079-453a-816e-b65a297d3e5b\") " pod="openshift-marketplace/community-operators-gbhsj" Oct 09 14:35:09 crc kubenswrapper[4762]: I1009 14:35:09.132624 4762 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-r78wg\" (UniqueName: \"kubernetes.io/projected/df7de3cd-f079-453a-816e-b65a297d3e5b-kube-api-access-r78wg\") pod \"community-operators-gbhsj\" (UID: \"df7de3cd-f079-453a-816e-b65a297d3e5b\") " pod="openshift-marketplace/community-operators-gbhsj" Oct 09 14:35:09 crc kubenswrapper[4762]: I1009 14:35:09.263159 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-gbhsj" Oct 09 14:35:09 crc kubenswrapper[4762]: I1009 14:35:09.766203 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-gbhsj"] Oct 09 14:35:10 crc kubenswrapper[4762]: I1009 14:35:10.549562 4762 generic.go:334] "Generic (PLEG): container finished" podID="df7de3cd-f079-453a-816e-b65a297d3e5b" containerID="6d3078cf55510863927482c6a414c9fe55b02f3598b9c84d0372aaecba4205aa" exitCode=0 Oct 09 14:35:10 crc kubenswrapper[4762]: I1009 14:35:10.549629 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gbhsj" event={"ID":"df7de3cd-f079-453a-816e-b65a297d3e5b","Type":"ContainerDied","Data":"6d3078cf55510863927482c6a414c9fe55b02f3598b9c84d0372aaecba4205aa"} Oct 09 14:35:10 crc kubenswrapper[4762]: I1009 14:35:10.549899 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gbhsj" event={"ID":"df7de3cd-f079-453a-816e-b65a297d3e5b","Type":"ContainerStarted","Data":"9cbd52252a470e7367f385b2a3b4ff105ac205dbd9e14c1e7fa04da7a82e387e"} Oct 09 14:35:12 crc kubenswrapper[4762]: I1009 14:35:12.568154 4762 generic.go:334] "Generic (PLEG): container finished" podID="df7de3cd-f079-453a-816e-b65a297d3e5b" containerID="8627f95950f6c65980f6f02a1dc1e691a2ac51ed2ae390e8612d379c3ab365d9" exitCode=0 Oct 09 14:35:12 crc kubenswrapper[4762]: I1009 14:35:12.568238 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gbhsj" event={"ID":"df7de3cd-f079-453a-816e-b65a297d3e5b","Type":"ContainerDied","Data":"8627f95950f6c65980f6f02a1dc1e691a2ac51ed2ae390e8612d379c3ab365d9"} Oct 09 14:35:13 crc kubenswrapper[4762]: I1009 14:35:13.576980 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gbhsj" event={"ID":"df7de3cd-f079-453a-816e-b65a297d3e5b","Type":"ContainerStarted","Data":"0f5492a51fca3ecba386a2c56bd014f0c848b4c0edd93330d135dfd1c2d38fd3"} Oct 09 14:35:19 crc kubenswrapper[4762]: I1009 14:35:19.264271 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-gbhsj" Oct 09 14:35:19 crc kubenswrapper[4762]: I1009 14:35:19.265009 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-gbhsj" Oct 09 14:35:19 crc kubenswrapper[4762]: I1009 14:35:19.314729 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-gbhsj" Oct 09 14:35:19 crc kubenswrapper[4762]: I1009 14:35:19.337152 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-gbhsj" podStartSLOduration=8.850565201 podStartE2EDuration="11.337133886s" podCreationTimestamp="2025-10-09 14:35:08 +0000 UTC" firstStartedPulling="2025-10-09 14:35:10.551130918 +0000 UTC m=+4186.324921947" lastFinishedPulling="2025-10-09 14:35:13.037699593 +0000 UTC m=+4188.811490632" observedRunningTime="2025-10-09 
14:35:13.595100618 +0000 UTC m=+4189.368891687" watchObservedRunningTime="2025-10-09 14:35:19.337133886 +0000 UTC m=+4195.110924925" Oct 09 14:35:19 crc kubenswrapper[4762]: I1009 14:35:19.659583 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-gbhsj" Oct 09 14:35:19 crc kubenswrapper[4762]: I1009 14:35:19.708065 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-gbhsj"] Oct 09 14:35:21 crc kubenswrapper[4762]: I1009 14:35:21.634917 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-gbhsj" podUID="df7de3cd-f079-453a-816e-b65a297d3e5b" containerName="registry-server" containerID="cri-o://0f5492a51fca3ecba386a2c56bd014f0c848b4c0edd93330d135dfd1c2d38fd3" gracePeriod=2 Oct 09 14:35:22 crc kubenswrapper[4762]: I1009 14:35:22.028846 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-gbhsj" Oct 09 14:35:22 crc kubenswrapper[4762]: I1009 14:35:22.187764 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/df7de3cd-f079-453a-816e-b65a297d3e5b-utilities\") pod \"df7de3cd-f079-453a-816e-b65a297d3e5b\" (UID: \"df7de3cd-f079-453a-816e-b65a297d3e5b\") " Oct 09 14:35:22 crc kubenswrapper[4762]: I1009 14:35:22.187820 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/df7de3cd-f079-453a-816e-b65a297d3e5b-catalog-content\") pod \"df7de3cd-f079-453a-816e-b65a297d3e5b\" (UID: \"df7de3cd-f079-453a-816e-b65a297d3e5b\") " Oct 09 14:35:22 crc kubenswrapper[4762]: I1009 14:35:22.187944 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r78wg\" (UniqueName: \"kubernetes.io/projected/df7de3cd-f079-453a-816e-b65a297d3e5b-kube-api-access-r78wg\") pod \"df7de3cd-f079-453a-816e-b65a297d3e5b\" (UID: \"df7de3cd-f079-453a-816e-b65a297d3e5b\") " Oct 09 14:35:22 crc kubenswrapper[4762]: I1009 14:35:22.190405 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/df7de3cd-f079-453a-816e-b65a297d3e5b-utilities" (OuterVolumeSpecName: "utilities") pod "df7de3cd-f079-453a-816e-b65a297d3e5b" (UID: "df7de3cd-f079-453a-816e-b65a297d3e5b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 14:35:22 crc kubenswrapper[4762]: I1009 14:35:22.196803 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/df7de3cd-f079-453a-816e-b65a297d3e5b-kube-api-access-r78wg" (OuterVolumeSpecName: "kube-api-access-r78wg") pod "df7de3cd-f079-453a-816e-b65a297d3e5b" (UID: "df7de3cd-f079-453a-816e-b65a297d3e5b"). InnerVolumeSpecName "kube-api-access-r78wg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 14:35:22 crc kubenswrapper[4762]: I1009 14:35:22.290119 4762 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/df7de3cd-f079-453a-816e-b65a297d3e5b-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 14:35:22 crc kubenswrapper[4762]: I1009 14:35:22.290164 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r78wg\" (UniqueName: \"kubernetes.io/projected/df7de3cd-f079-453a-816e-b65a297d3e5b-kube-api-access-r78wg\") on node \"crc\" DevicePath \"\"" Oct 09 14:35:22 crc kubenswrapper[4762]: I1009 14:35:22.446065 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/df7de3cd-f079-453a-816e-b65a297d3e5b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "df7de3cd-f079-453a-816e-b65a297d3e5b" (UID: "df7de3cd-f079-453a-816e-b65a297d3e5b"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 14:35:22 crc kubenswrapper[4762]: I1009 14:35:22.581106 4762 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/df7de3cd-f079-453a-816e-b65a297d3e5b-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 14:35:22 crc kubenswrapper[4762]: I1009 14:35:22.647437 4762 generic.go:334] "Generic (PLEG): container finished" podID="df7de3cd-f079-453a-816e-b65a297d3e5b" containerID="0f5492a51fca3ecba386a2c56bd014f0c848b4c0edd93330d135dfd1c2d38fd3" exitCode=0 Oct 09 14:35:22 crc kubenswrapper[4762]: I1009 14:35:22.647477 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-gbhsj" Oct 09 14:35:22 crc kubenswrapper[4762]: I1009 14:35:22.647496 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gbhsj" event={"ID":"df7de3cd-f079-453a-816e-b65a297d3e5b","Type":"ContainerDied","Data":"0f5492a51fca3ecba386a2c56bd014f0c848b4c0edd93330d135dfd1c2d38fd3"} Oct 09 14:35:22 crc kubenswrapper[4762]: I1009 14:35:22.647532 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gbhsj" event={"ID":"df7de3cd-f079-453a-816e-b65a297d3e5b","Type":"ContainerDied","Data":"9cbd52252a470e7367f385b2a3b4ff105ac205dbd9e14c1e7fa04da7a82e387e"} Oct 09 14:35:22 crc kubenswrapper[4762]: I1009 14:35:22.647549 4762 scope.go:117] "RemoveContainer" containerID="0f5492a51fca3ecba386a2c56bd014f0c848b4c0edd93330d135dfd1c2d38fd3" Oct 09 14:35:22 crc kubenswrapper[4762]: I1009 14:35:22.664611 4762 scope.go:117] "RemoveContainer" containerID="8627f95950f6c65980f6f02a1dc1e691a2ac51ed2ae390e8612d379c3ab365d9" Oct 09 14:35:22 crc kubenswrapper[4762]: I1009 14:35:22.682666 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-gbhsj"] Oct 09 14:35:22 crc kubenswrapper[4762]: I1009 14:35:22.687595 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-gbhsj"] Oct 09 14:35:22 crc kubenswrapper[4762]: I1009 14:35:22.714827 4762 scope.go:117] "RemoveContainer" containerID="6d3078cf55510863927482c6a414c9fe55b02f3598b9c84d0372aaecba4205aa" Oct 09 14:35:22 crc kubenswrapper[4762]: I1009 14:35:22.729512 4762 scope.go:117] "RemoveContainer" containerID="0f5492a51fca3ecba386a2c56bd014f0c848b4c0edd93330d135dfd1c2d38fd3" Oct 09 14:35:22 crc kubenswrapper[4762]: E1009 14:35:22.730295 4762 log.go:32] 
"ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0f5492a51fca3ecba386a2c56bd014f0c848b4c0edd93330d135dfd1c2d38fd3\": container with ID starting with 0f5492a51fca3ecba386a2c56bd014f0c848b4c0edd93330d135dfd1c2d38fd3 not found: ID does not exist" containerID="0f5492a51fca3ecba386a2c56bd014f0c848b4c0edd93330d135dfd1c2d38fd3" Oct 09 14:35:22 crc kubenswrapper[4762]: I1009 14:35:22.730416 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0f5492a51fca3ecba386a2c56bd014f0c848b4c0edd93330d135dfd1c2d38fd3"} err="failed to get container status \"0f5492a51fca3ecba386a2c56bd014f0c848b4c0edd93330d135dfd1c2d38fd3\": rpc error: code = NotFound desc = could not find container \"0f5492a51fca3ecba386a2c56bd014f0c848b4c0edd93330d135dfd1c2d38fd3\": container with ID starting with 0f5492a51fca3ecba386a2c56bd014f0c848b4c0edd93330d135dfd1c2d38fd3 not found: ID does not exist" Oct 09 14:35:22 crc kubenswrapper[4762]: I1009 14:35:22.730506 4762 scope.go:117] "RemoveContainer" containerID="8627f95950f6c65980f6f02a1dc1e691a2ac51ed2ae390e8612d379c3ab365d9" Oct 09 14:35:22 crc kubenswrapper[4762]: E1009 14:35:22.730883 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8627f95950f6c65980f6f02a1dc1e691a2ac51ed2ae390e8612d379c3ab365d9\": container with ID starting with 8627f95950f6c65980f6f02a1dc1e691a2ac51ed2ae390e8612d379c3ab365d9 not found: ID does not exist" containerID="8627f95950f6c65980f6f02a1dc1e691a2ac51ed2ae390e8612d379c3ab365d9" Oct 09 14:35:22 crc kubenswrapper[4762]: I1009 14:35:22.730903 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8627f95950f6c65980f6f02a1dc1e691a2ac51ed2ae390e8612d379c3ab365d9"} err="failed to get container status \"8627f95950f6c65980f6f02a1dc1e691a2ac51ed2ae390e8612d379c3ab365d9\": rpc error: code = NotFound desc = could not find container \"8627f95950f6c65980f6f02a1dc1e691a2ac51ed2ae390e8612d379c3ab365d9\": container with ID starting with 8627f95950f6c65980f6f02a1dc1e691a2ac51ed2ae390e8612d379c3ab365d9 not found: ID does not exist" Oct 09 14:35:22 crc kubenswrapper[4762]: I1009 14:35:22.730917 4762 scope.go:117] "RemoveContainer" containerID="6d3078cf55510863927482c6a414c9fe55b02f3598b9c84d0372aaecba4205aa" Oct 09 14:35:22 crc kubenswrapper[4762]: E1009 14:35:22.731342 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6d3078cf55510863927482c6a414c9fe55b02f3598b9c84d0372aaecba4205aa\": container with ID starting with 6d3078cf55510863927482c6a414c9fe55b02f3598b9c84d0372aaecba4205aa not found: ID does not exist" containerID="6d3078cf55510863927482c6a414c9fe55b02f3598b9c84d0372aaecba4205aa" Oct 09 14:35:22 crc kubenswrapper[4762]: I1009 14:35:22.731392 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6d3078cf55510863927482c6a414c9fe55b02f3598b9c84d0372aaecba4205aa"} err="failed to get container status \"6d3078cf55510863927482c6a414c9fe55b02f3598b9c84d0372aaecba4205aa\": rpc error: code = NotFound desc = could not find container \"6d3078cf55510863927482c6a414c9fe55b02f3598b9c84d0372aaecba4205aa\": container with ID starting with 6d3078cf55510863927482c6a414c9fe55b02f3598b9c84d0372aaecba4205aa not found: ID does not exist" Oct 09 14:35:22 crc kubenswrapper[4762]: I1009 14:35:22.979349 4762 kubelet_volumes.go:163] "Cleaned 
up orphaned pod volumes dir" podUID="df7de3cd-f079-453a-816e-b65a297d3e5b" path="/var/lib/kubelet/pods/df7de3cd-f079-453a-816e-b65a297d3e5b/volumes" Oct 09 14:35:41 crc kubenswrapper[4762]: I1009 14:35:41.035365 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-565jn"] Oct 09 14:35:41 crc kubenswrapper[4762]: E1009 14:35:41.037074 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="df7de3cd-f079-453a-816e-b65a297d3e5b" containerName="extract-content" Oct 09 14:35:41 crc kubenswrapper[4762]: I1009 14:35:41.037095 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="df7de3cd-f079-453a-816e-b65a297d3e5b" containerName="extract-content" Oct 09 14:35:41 crc kubenswrapper[4762]: E1009 14:35:41.037110 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="df7de3cd-f079-453a-816e-b65a297d3e5b" containerName="registry-server" Oct 09 14:35:41 crc kubenswrapper[4762]: I1009 14:35:41.037118 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="df7de3cd-f079-453a-816e-b65a297d3e5b" containerName="registry-server" Oct 09 14:35:41 crc kubenswrapper[4762]: E1009 14:35:41.037128 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="df7de3cd-f079-453a-816e-b65a297d3e5b" containerName="extract-utilities" Oct 09 14:35:41 crc kubenswrapper[4762]: I1009 14:35:41.037137 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="df7de3cd-f079-453a-816e-b65a297d3e5b" containerName="extract-utilities" Oct 09 14:35:41 crc kubenswrapper[4762]: I1009 14:35:41.037339 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="df7de3cd-f079-453a-816e-b65a297d3e5b" containerName="registry-server" Oct 09 14:35:41 crc kubenswrapper[4762]: I1009 14:35:41.039299 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-565jn" Oct 09 14:35:41 crc kubenswrapper[4762]: I1009 14:35:41.049228 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-565jn"] Oct 09 14:35:41 crc kubenswrapper[4762]: I1009 14:35:41.141018 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-66b9p\" (UniqueName: \"kubernetes.io/projected/679133c9-2bb4-4b6d-bff4-0283339d311c-kube-api-access-66b9p\") pod \"redhat-operators-565jn\" (UID: \"679133c9-2bb4-4b6d-bff4-0283339d311c\") " pod="openshift-marketplace/redhat-operators-565jn" Oct 09 14:35:41 crc kubenswrapper[4762]: I1009 14:35:41.141126 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/679133c9-2bb4-4b6d-bff4-0283339d311c-utilities\") pod \"redhat-operators-565jn\" (UID: \"679133c9-2bb4-4b6d-bff4-0283339d311c\") " pod="openshift-marketplace/redhat-operators-565jn" Oct 09 14:35:41 crc kubenswrapper[4762]: I1009 14:35:41.141158 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/679133c9-2bb4-4b6d-bff4-0283339d311c-catalog-content\") pod \"redhat-operators-565jn\" (UID: \"679133c9-2bb4-4b6d-bff4-0283339d311c\") " pod="openshift-marketplace/redhat-operators-565jn" Oct 09 14:35:41 crc kubenswrapper[4762]: I1009 14:35:41.242592 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-66b9p\" (UniqueName: \"kubernetes.io/projected/679133c9-2bb4-4b6d-bff4-0283339d311c-kube-api-access-66b9p\") pod \"redhat-operators-565jn\" (UID: \"679133c9-2bb4-4b6d-bff4-0283339d311c\") " pod="openshift-marketplace/redhat-operators-565jn" Oct 09 14:35:41 crc kubenswrapper[4762]: I1009 14:35:41.242723 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/679133c9-2bb4-4b6d-bff4-0283339d311c-utilities\") pod \"redhat-operators-565jn\" (UID: \"679133c9-2bb4-4b6d-bff4-0283339d311c\") " pod="openshift-marketplace/redhat-operators-565jn" Oct 09 14:35:41 crc kubenswrapper[4762]: I1009 14:35:41.242757 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/679133c9-2bb4-4b6d-bff4-0283339d311c-catalog-content\") pod \"redhat-operators-565jn\" (UID: \"679133c9-2bb4-4b6d-bff4-0283339d311c\") " pod="openshift-marketplace/redhat-operators-565jn" Oct 09 14:35:41 crc kubenswrapper[4762]: I1009 14:35:41.243213 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/679133c9-2bb4-4b6d-bff4-0283339d311c-catalog-content\") pod \"redhat-operators-565jn\" (UID: \"679133c9-2bb4-4b6d-bff4-0283339d311c\") " pod="openshift-marketplace/redhat-operators-565jn" Oct 09 14:35:41 crc kubenswrapper[4762]: I1009 14:35:41.243222 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/679133c9-2bb4-4b6d-bff4-0283339d311c-utilities\") pod \"redhat-operators-565jn\" (UID: \"679133c9-2bb4-4b6d-bff4-0283339d311c\") " pod="openshift-marketplace/redhat-operators-565jn" Oct 09 14:35:41 crc kubenswrapper[4762]: I1009 14:35:41.269926 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-66b9p\" (UniqueName: \"kubernetes.io/projected/679133c9-2bb4-4b6d-bff4-0283339d311c-kube-api-access-66b9p\") pod \"redhat-operators-565jn\" (UID: \"679133c9-2bb4-4b6d-bff4-0283339d311c\") " pod="openshift-marketplace/redhat-operators-565jn" Oct 09 14:35:41 crc kubenswrapper[4762]: I1009 14:35:41.359413 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-565jn" Oct 09 14:35:41 crc kubenswrapper[4762]: I1009 14:35:41.582650 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-565jn"] Oct 09 14:35:41 crc kubenswrapper[4762]: I1009 14:35:41.786757 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-565jn" event={"ID":"679133c9-2bb4-4b6d-bff4-0283339d311c","Type":"ContainerStarted","Data":"43e53ed5d2a5e4e267c0d6bb5084f0f95d45123c5f78c2aee0baf3659cebf0ae"} Oct 09 14:35:41 crc kubenswrapper[4762]: I1009 14:35:41.786857 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-565jn" event={"ID":"679133c9-2bb4-4b6d-bff4-0283339d311c","Type":"ContainerStarted","Data":"58a9350653389b8487f6724161ab547413d386b15875d3938440c934be2f6176"} Oct 09 14:35:42 crc kubenswrapper[4762]: I1009 14:35:42.797493 4762 generic.go:334] "Generic (PLEG): container finished" podID="679133c9-2bb4-4b6d-bff4-0283339d311c" containerID="43e53ed5d2a5e4e267c0d6bb5084f0f95d45123c5f78c2aee0baf3659cebf0ae" exitCode=0 Oct 09 14:35:42 crc kubenswrapper[4762]: I1009 14:35:42.798817 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-565jn" event={"ID":"679133c9-2bb4-4b6d-bff4-0283339d311c","Type":"ContainerDied","Data":"43e53ed5d2a5e4e267c0d6bb5084f0f95d45123c5f78c2aee0baf3659cebf0ae"} Oct 09 14:35:44 crc kubenswrapper[4762]: I1009 14:35:44.815692 4762 generic.go:334] "Generic (PLEG): container finished" podID="679133c9-2bb4-4b6d-bff4-0283339d311c" containerID="a881aeecdd18e5b5760809cd1b0be08860ae03fe58ccd7f23f728e468c7c563d" exitCode=0 Oct 09 14:35:44 crc kubenswrapper[4762]: I1009 14:35:44.815863 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-565jn" event={"ID":"679133c9-2bb4-4b6d-bff4-0283339d311c","Type":"ContainerDied","Data":"a881aeecdd18e5b5760809cd1b0be08860ae03fe58ccd7f23f728e468c7c563d"} Oct 09 14:35:45 crc kubenswrapper[4762]: I1009 14:35:45.827738 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-565jn" event={"ID":"679133c9-2bb4-4b6d-bff4-0283339d311c","Type":"ContainerStarted","Data":"5389d8547c4e538ee90077799e154191100b3057b2f2ca8ab827762109982e5c"} Oct 09 14:35:45 crc kubenswrapper[4762]: I1009 14:35:45.847152 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-565jn" podStartSLOduration=2.346528705 podStartE2EDuration="4.847133609s" podCreationTimestamp="2025-10-09 14:35:41 +0000 UTC" firstStartedPulling="2025-10-09 14:35:42.80172128 +0000 UTC m=+4218.575512319" lastFinishedPulling="2025-10-09 14:35:45.302326174 +0000 UTC m=+4221.076117223" observedRunningTime="2025-10-09 14:35:45.845065164 +0000 UTC m=+4221.618856203" watchObservedRunningTime="2025-10-09 14:35:45.847133609 +0000 UTC m=+4221.620924678" Oct 09 14:35:51 crc kubenswrapper[4762]: I1009 14:35:51.360772 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-565jn" Oct 09 
14:35:51 crc kubenswrapper[4762]: I1009 14:35:51.361367 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-565jn" Oct 09 14:35:51 crc kubenswrapper[4762]: I1009 14:35:51.406500 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-565jn" Oct 09 14:35:51 crc kubenswrapper[4762]: I1009 14:35:51.916060 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-565jn" Oct 09 14:35:51 crc kubenswrapper[4762]: I1009 14:35:51.956164 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-565jn"] Oct 09 14:35:53 crc kubenswrapper[4762]: I1009 14:35:53.891532 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-565jn" podUID="679133c9-2bb4-4b6d-bff4-0283339d311c" containerName="registry-server" containerID="cri-o://5389d8547c4e538ee90077799e154191100b3057b2f2ca8ab827762109982e5c" gracePeriod=2 Oct 09 14:35:54 crc kubenswrapper[4762]: I1009 14:35:54.299309 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-565jn" Oct 09 14:35:54 crc kubenswrapper[4762]: I1009 14:35:54.426066 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-66b9p\" (UniqueName: \"kubernetes.io/projected/679133c9-2bb4-4b6d-bff4-0283339d311c-kube-api-access-66b9p\") pod \"679133c9-2bb4-4b6d-bff4-0283339d311c\" (UID: \"679133c9-2bb4-4b6d-bff4-0283339d311c\") " Oct 09 14:35:54 crc kubenswrapper[4762]: I1009 14:35:54.426139 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/679133c9-2bb4-4b6d-bff4-0283339d311c-utilities\") pod \"679133c9-2bb4-4b6d-bff4-0283339d311c\" (UID: \"679133c9-2bb4-4b6d-bff4-0283339d311c\") " Oct 09 14:35:54 crc kubenswrapper[4762]: I1009 14:35:54.426243 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/679133c9-2bb4-4b6d-bff4-0283339d311c-catalog-content\") pod \"679133c9-2bb4-4b6d-bff4-0283339d311c\" (UID: \"679133c9-2bb4-4b6d-bff4-0283339d311c\") " Oct 09 14:35:54 crc kubenswrapper[4762]: I1009 14:35:54.428014 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/679133c9-2bb4-4b6d-bff4-0283339d311c-utilities" (OuterVolumeSpecName: "utilities") pod "679133c9-2bb4-4b6d-bff4-0283339d311c" (UID: "679133c9-2bb4-4b6d-bff4-0283339d311c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 14:35:54 crc kubenswrapper[4762]: I1009 14:35:54.434038 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/679133c9-2bb4-4b6d-bff4-0283339d311c-kube-api-access-66b9p" (OuterVolumeSpecName: "kube-api-access-66b9p") pod "679133c9-2bb4-4b6d-bff4-0283339d311c" (UID: "679133c9-2bb4-4b6d-bff4-0283339d311c"). InnerVolumeSpecName "kube-api-access-66b9p". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 14:35:54 crc kubenswrapper[4762]: I1009 14:35:54.527913 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-66b9p\" (UniqueName: \"kubernetes.io/projected/679133c9-2bb4-4b6d-bff4-0283339d311c-kube-api-access-66b9p\") on node \"crc\" DevicePath \"\"" Oct 09 14:35:54 crc kubenswrapper[4762]: I1009 14:35:54.527952 4762 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/679133c9-2bb4-4b6d-bff4-0283339d311c-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 14:35:54 crc kubenswrapper[4762]: I1009 14:35:54.904327 4762 generic.go:334] "Generic (PLEG): container finished" podID="679133c9-2bb4-4b6d-bff4-0283339d311c" containerID="5389d8547c4e538ee90077799e154191100b3057b2f2ca8ab827762109982e5c" exitCode=0 Oct 09 14:35:54 crc kubenswrapper[4762]: I1009 14:35:54.904409 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-565jn" event={"ID":"679133c9-2bb4-4b6d-bff4-0283339d311c","Type":"ContainerDied","Data":"5389d8547c4e538ee90077799e154191100b3057b2f2ca8ab827762109982e5c"} Oct 09 14:35:54 crc kubenswrapper[4762]: I1009 14:35:54.904423 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-565jn" Oct 09 14:35:54 crc kubenswrapper[4762]: I1009 14:35:54.904468 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-565jn" event={"ID":"679133c9-2bb4-4b6d-bff4-0283339d311c","Type":"ContainerDied","Data":"58a9350653389b8487f6724161ab547413d386b15875d3938440c934be2f6176"} Oct 09 14:35:54 crc kubenswrapper[4762]: I1009 14:35:54.904494 4762 scope.go:117] "RemoveContainer" containerID="5389d8547c4e538ee90077799e154191100b3057b2f2ca8ab827762109982e5c" Oct 09 14:35:54 crc kubenswrapper[4762]: I1009 14:35:54.929608 4762 scope.go:117] "RemoveContainer" containerID="a881aeecdd18e5b5760809cd1b0be08860ae03fe58ccd7f23f728e468c7c563d" Oct 09 14:35:54 crc kubenswrapper[4762]: I1009 14:35:54.953012 4762 scope.go:117] "RemoveContainer" containerID="43e53ed5d2a5e4e267c0d6bb5084f0f95d45123c5f78c2aee0baf3659cebf0ae" Oct 09 14:35:54 crc kubenswrapper[4762]: I1009 14:35:54.979445 4762 scope.go:117] "RemoveContainer" containerID="5389d8547c4e538ee90077799e154191100b3057b2f2ca8ab827762109982e5c" Oct 09 14:35:54 crc kubenswrapper[4762]: E1009 14:35:54.979932 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5389d8547c4e538ee90077799e154191100b3057b2f2ca8ab827762109982e5c\": container with ID starting with 5389d8547c4e538ee90077799e154191100b3057b2f2ca8ab827762109982e5c not found: ID does not exist" containerID="5389d8547c4e538ee90077799e154191100b3057b2f2ca8ab827762109982e5c" Oct 09 14:35:54 crc kubenswrapper[4762]: I1009 14:35:54.979989 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5389d8547c4e538ee90077799e154191100b3057b2f2ca8ab827762109982e5c"} err="failed to get container status \"5389d8547c4e538ee90077799e154191100b3057b2f2ca8ab827762109982e5c\": rpc error: code = NotFound desc = could not find container \"5389d8547c4e538ee90077799e154191100b3057b2f2ca8ab827762109982e5c\": container with ID starting with 5389d8547c4e538ee90077799e154191100b3057b2f2ca8ab827762109982e5c not found: ID does not exist" Oct 09 14:35:54 crc kubenswrapper[4762]: I1009 14:35:54.980022 4762 scope.go:117] 
"RemoveContainer" containerID="a881aeecdd18e5b5760809cd1b0be08860ae03fe58ccd7f23f728e468c7c563d" Oct 09 14:35:54 crc kubenswrapper[4762]: E1009 14:35:54.980467 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a881aeecdd18e5b5760809cd1b0be08860ae03fe58ccd7f23f728e468c7c563d\": container with ID starting with a881aeecdd18e5b5760809cd1b0be08860ae03fe58ccd7f23f728e468c7c563d not found: ID does not exist" containerID="a881aeecdd18e5b5760809cd1b0be08860ae03fe58ccd7f23f728e468c7c563d" Oct 09 14:35:54 crc kubenswrapper[4762]: I1009 14:35:54.980496 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a881aeecdd18e5b5760809cd1b0be08860ae03fe58ccd7f23f728e468c7c563d"} err="failed to get container status \"a881aeecdd18e5b5760809cd1b0be08860ae03fe58ccd7f23f728e468c7c563d\": rpc error: code = NotFound desc = could not find container \"a881aeecdd18e5b5760809cd1b0be08860ae03fe58ccd7f23f728e468c7c563d\": container with ID starting with a881aeecdd18e5b5760809cd1b0be08860ae03fe58ccd7f23f728e468c7c563d not found: ID does not exist" Oct 09 14:35:54 crc kubenswrapper[4762]: I1009 14:35:54.980518 4762 scope.go:117] "RemoveContainer" containerID="43e53ed5d2a5e4e267c0d6bb5084f0f95d45123c5f78c2aee0baf3659cebf0ae" Oct 09 14:35:54 crc kubenswrapper[4762]: E1009 14:35:54.980953 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"43e53ed5d2a5e4e267c0d6bb5084f0f95d45123c5f78c2aee0baf3659cebf0ae\": container with ID starting with 43e53ed5d2a5e4e267c0d6bb5084f0f95d45123c5f78c2aee0baf3659cebf0ae not found: ID does not exist" containerID="43e53ed5d2a5e4e267c0d6bb5084f0f95d45123c5f78c2aee0baf3659cebf0ae" Oct 09 14:35:54 crc kubenswrapper[4762]: I1009 14:35:54.980980 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"43e53ed5d2a5e4e267c0d6bb5084f0f95d45123c5f78c2aee0baf3659cebf0ae"} err="failed to get container status \"43e53ed5d2a5e4e267c0d6bb5084f0f95d45123c5f78c2aee0baf3659cebf0ae\": rpc error: code = NotFound desc = could not find container \"43e53ed5d2a5e4e267c0d6bb5084f0f95d45123c5f78c2aee0baf3659cebf0ae\": container with ID starting with 43e53ed5d2a5e4e267c0d6bb5084f0f95d45123c5f78c2aee0baf3659cebf0ae not found: ID does not exist" Oct 09 14:35:55 crc kubenswrapper[4762]: I1009 14:35:55.636305 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/679133c9-2bb4-4b6d-bff4-0283339d311c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "679133c9-2bb4-4b6d-bff4-0283339d311c" (UID: "679133c9-2bb4-4b6d-bff4-0283339d311c"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 14:35:55 crc kubenswrapper[4762]: I1009 14:35:55.645112 4762 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/679133c9-2bb4-4b6d-bff4-0283339d311c-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 14:35:55 crc kubenswrapper[4762]: I1009 14:35:55.836828 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-565jn"] Oct 09 14:35:55 crc kubenswrapper[4762]: I1009 14:35:55.842906 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-565jn"] Oct 09 14:35:56 crc kubenswrapper[4762]: I1009 14:35:56.974854 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="679133c9-2bb4-4b6d-bff4-0283339d311c" path="/var/lib/kubelet/pods/679133c9-2bb4-4b6d-bff4-0283339d311c/volumes" Oct 09 14:37:11 crc kubenswrapper[4762]: I1009 14:37:11.969706 4762 patch_prober.go:28] interesting pod/machine-config-daemon-5v6hv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 14:37:11 crc kubenswrapper[4762]: I1009 14:37:11.970347 4762 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 14:37:41 crc kubenswrapper[4762]: I1009 14:37:41.969460 4762 patch_prober.go:28] interesting pod/machine-config-daemon-5v6hv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 14:37:41 crc kubenswrapper[4762]: I1009 14:37:41.970298 4762 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 14:38:11 crc kubenswrapper[4762]: I1009 14:38:11.969165 4762 patch_prober.go:28] interesting pod/machine-config-daemon-5v6hv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 14:38:11 crc kubenswrapper[4762]: I1009 14:38:11.969727 4762 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 14:38:11 crc kubenswrapper[4762]: I1009 14:38:11.969767 4762 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" Oct 09 14:38:11 crc kubenswrapper[4762]: I1009 14:38:11.970145 4762 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" 
containerStatusID={"Type":"cri-o","ID":"e11f4c8cf3d0070adc51c57c959d652c9fcff91f5501980ee4d1a803ecbfeabf"} pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 09 14:38:11 crc kubenswrapper[4762]: I1009 14:38:11.970212 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" containerID="cri-o://e11f4c8cf3d0070adc51c57c959d652c9fcff91f5501980ee4d1a803ecbfeabf" gracePeriod=600 Oct 09 14:38:12 crc kubenswrapper[4762]: I1009 14:38:12.943523 4762 generic.go:334] "Generic (PLEG): container finished" podID="366049a3-acf6-488c-9f93-4557528d6d14" containerID="e11f4c8cf3d0070adc51c57c959d652c9fcff91f5501980ee4d1a803ecbfeabf" exitCode=0 Oct 09 14:38:12 crc kubenswrapper[4762]: I1009 14:38:12.943602 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" event={"ID":"366049a3-acf6-488c-9f93-4557528d6d14","Type":"ContainerDied","Data":"e11f4c8cf3d0070adc51c57c959d652c9fcff91f5501980ee4d1a803ecbfeabf"} Oct 09 14:38:12 crc kubenswrapper[4762]: I1009 14:38:12.944055 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" event={"ID":"366049a3-acf6-488c-9f93-4557528d6d14","Type":"ContainerStarted","Data":"bd93ac77fa82ac7ac75985b30cf5b2de69fe1e0a93c6fcc548b0b2dd005aa70c"} Oct 09 14:38:12 crc kubenswrapper[4762]: I1009 14:38:12.944081 4762 scope.go:117] "RemoveContainer" containerID="4d1df28214271a51e41c9293c0c5b00b7686991cb22ae7b5b11ff19723d63a96" Oct 09 14:40:41 crc kubenswrapper[4762]: I1009 14:40:41.969595 4762 patch_prober.go:28] interesting pod/machine-config-daemon-5v6hv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 14:40:41 crc kubenswrapper[4762]: I1009 14:40:41.970791 4762 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 14:41:11 crc kubenswrapper[4762]: I1009 14:41:11.969295 4762 patch_prober.go:28] interesting pod/machine-config-daemon-5v6hv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 14:41:11 crc kubenswrapper[4762]: I1009 14:41:11.969906 4762 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 14:41:41 crc kubenswrapper[4762]: I1009 14:41:41.969260 4762 patch_prober.go:28] interesting pod/machine-config-daemon-5v6hv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial 
tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 14:41:41 crc kubenswrapper[4762]: I1009 14:41:41.969880 4762 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 14:41:41 crc kubenswrapper[4762]: I1009 14:41:41.969937 4762 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" Oct 09 14:41:41 crc kubenswrapper[4762]: I1009 14:41:41.970602 4762 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"bd93ac77fa82ac7ac75985b30cf5b2de69fe1e0a93c6fcc548b0b2dd005aa70c"} pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 09 14:41:41 crc kubenswrapper[4762]: I1009 14:41:41.970699 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" containerID="cri-o://bd93ac77fa82ac7ac75985b30cf5b2de69fe1e0a93c6fcc548b0b2dd005aa70c" gracePeriod=600 Oct 09 14:41:42 crc kubenswrapper[4762]: E1009 14:41:42.092340 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 14:41:42 crc kubenswrapper[4762]: I1009 14:41:42.429356 4762 generic.go:334] "Generic (PLEG): container finished" podID="366049a3-acf6-488c-9f93-4557528d6d14" containerID="bd93ac77fa82ac7ac75985b30cf5b2de69fe1e0a93c6fcc548b0b2dd005aa70c" exitCode=0 Oct 09 14:41:42 crc kubenswrapper[4762]: I1009 14:41:42.429409 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" event={"ID":"366049a3-acf6-488c-9f93-4557528d6d14","Type":"ContainerDied","Data":"bd93ac77fa82ac7ac75985b30cf5b2de69fe1e0a93c6fcc548b0b2dd005aa70c"} Oct 09 14:41:42 crc kubenswrapper[4762]: I1009 14:41:42.429474 4762 scope.go:117] "RemoveContainer" containerID="e11f4c8cf3d0070adc51c57c959d652c9fcff91f5501980ee4d1a803ecbfeabf" Oct 09 14:41:42 crc kubenswrapper[4762]: I1009 14:41:42.430011 4762 scope.go:117] "RemoveContainer" containerID="bd93ac77fa82ac7ac75985b30cf5b2de69fe1e0a93c6fcc548b0b2dd005aa70c" Oct 09 14:41:42 crc kubenswrapper[4762]: E1009 14:41:42.430281 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 14:41:48 crc kubenswrapper[4762]: I1009 14:41:48.857853 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["crc-storage/crc-storage-crc-dlp9b"] Oct 09 14:41:48 crc kubenswrapper[4762]: I1009 14:41:48.863929 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["crc-storage/crc-storage-crc-dlp9b"] Oct 09 14:41:48 crc kubenswrapper[4762]: I1009 14:41:48.974046 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2bdef39d-b324-48f1-a0f7-0f52130e848b" path="/var/lib/kubelet/pods/2bdef39d-b324-48f1-a0f7-0f52130e848b/volumes" Oct 09 14:41:49 crc kubenswrapper[4762]: I1009 14:41:49.003834 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-pbf8x"] Oct 09 14:41:49 crc kubenswrapper[4762]: E1009 14:41:49.004466 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="679133c9-2bb4-4b6d-bff4-0283339d311c" containerName="extract-utilities" Oct 09 14:41:49 crc kubenswrapper[4762]: I1009 14:41:49.004575 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="679133c9-2bb4-4b6d-bff4-0283339d311c" containerName="extract-utilities" Oct 09 14:41:49 crc kubenswrapper[4762]: E1009 14:41:49.004700 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="679133c9-2bb4-4b6d-bff4-0283339d311c" containerName="extract-content" Oct 09 14:41:49 crc kubenswrapper[4762]: I1009 14:41:49.004783 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="679133c9-2bb4-4b6d-bff4-0283339d311c" containerName="extract-content" Oct 09 14:41:49 crc kubenswrapper[4762]: E1009 14:41:49.004871 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="679133c9-2bb4-4b6d-bff4-0283339d311c" containerName="registry-server" Oct 09 14:41:49 crc kubenswrapper[4762]: I1009 14:41:49.004958 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="679133c9-2bb4-4b6d-bff4-0283339d311c" containerName="registry-server" Oct 09 14:41:49 crc kubenswrapper[4762]: I1009 14:41:49.005246 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="679133c9-2bb4-4b6d-bff4-0283339d311c" containerName="registry-server" Oct 09 14:41:49 crc kubenswrapper[4762]: I1009 14:41:49.005827 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-pbf8x" Oct 09 14:41:49 crc kubenswrapper[4762]: I1009 14:41:49.007845 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt" Oct 09 14:41:49 crc kubenswrapper[4762]: I1009 14:41:49.008730 4762 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-nhclq" Oct 09 14:41:49 crc kubenswrapper[4762]: I1009 14:41:49.009245 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage" Oct 09 14:41:49 crc kubenswrapper[4762]: I1009 14:41:49.013081 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-pbf8x"] Oct 09 14:41:49 crc kubenswrapper[4762]: I1009 14:41:49.013826 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt" Oct 09 14:41:49 crc kubenswrapper[4762]: I1009 14:41:49.156929 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/83f9d30e-80de-4d55-8c35-d9dc7542280e-crc-storage\") pod \"crc-storage-crc-pbf8x\" (UID: \"83f9d30e-80de-4d55-8c35-d9dc7542280e\") " pod="crc-storage/crc-storage-crc-pbf8x" Oct 09 14:41:49 crc kubenswrapper[4762]: I1009 14:41:49.156987 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/83f9d30e-80de-4d55-8c35-d9dc7542280e-node-mnt\") pod \"crc-storage-crc-pbf8x\" (UID: \"83f9d30e-80de-4d55-8c35-d9dc7542280e\") " pod="crc-storage/crc-storage-crc-pbf8x" Oct 09 14:41:49 crc kubenswrapper[4762]: I1009 14:41:49.157090 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w77gr\" (UniqueName: \"kubernetes.io/projected/83f9d30e-80de-4d55-8c35-d9dc7542280e-kube-api-access-w77gr\") pod \"crc-storage-crc-pbf8x\" (UID: \"83f9d30e-80de-4d55-8c35-d9dc7542280e\") " pod="crc-storage/crc-storage-crc-pbf8x" Oct 09 14:41:49 crc kubenswrapper[4762]: I1009 14:41:49.259371 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/83f9d30e-80de-4d55-8c35-d9dc7542280e-crc-storage\") pod \"crc-storage-crc-pbf8x\" (UID: \"83f9d30e-80de-4d55-8c35-d9dc7542280e\") " pod="crc-storage/crc-storage-crc-pbf8x" Oct 09 14:41:49 crc kubenswrapper[4762]: I1009 14:41:49.259448 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/83f9d30e-80de-4d55-8c35-d9dc7542280e-node-mnt\") pod \"crc-storage-crc-pbf8x\" (UID: \"83f9d30e-80de-4d55-8c35-d9dc7542280e\") " pod="crc-storage/crc-storage-crc-pbf8x" Oct 09 14:41:49 crc kubenswrapper[4762]: I1009 14:41:49.259515 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w77gr\" (UniqueName: \"kubernetes.io/projected/83f9d30e-80de-4d55-8c35-d9dc7542280e-kube-api-access-w77gr\") pod \"crc-storage-crc-pbf8x\" (UID: \"83f9d30e-80de-4d55-8c35-d9dc7542280e\") " pod="crc-storage/crc-storage-crc-pbf8x" Oct 09 14:41:49 crc kubenswrapper[4762]: I1009 14:41:49.260166 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/83f9d30e-80de-4d55-8c35-d9dc7542280e-node-mnt\") pod \"crc-storage-crc-pbf8x\" (UID: \"83f9d30e-80de-4d55-8c35-d9dc7542280e\") " 
pod="crc-storage/crc-storage-crc-pbf8x" Oct 09 14:41:49 crc kubenswrapper[4762]: I1009 14:41:49.262545 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/83f9d30e-80de-4d55-8c35-d9dc7542280e-crc-storage\") pod \"crc-storage-crc-pbf8x\" (UID: \"83f9d30e-80de-4d55-8c35-d9dc7542280e\") " pod="crc-storage/crc-storage-crc-pbf8x" Oct 09 14:41:49 crc kubenswrapper[4762]: I1009 14:41:49.282975 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w77gr\" (UniqueName: \"kubernetes.io/projected/83f9d30e-80de-4d55-8c35-d9dc7542280e-kube-api-access-w77gr\") pod \"crc-storage-crc-pbf8x\" (UID: \"83f9d30e-80de-4d55-8c35-d9dc7542280e\") " pod="crc-storage/crc-storage-crc-pbf8x" Oct 09 14:41:49 crc kubenswrapper[4762]: I1009 14:41:49.331287 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-pbf8x" Oct 09 14:41:49 crc kubenswrapper[4762]: I1009 14:41:49.753831 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-pbf8x"] Oct 09 14:41:49 crc kubenswrapper[4762]: I1009 14:41:49.763785 4762 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 09 14:41:50 crc kubenswrapper[4762]: I1009 14:41:50.494584 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-pbf8x" event={"ID":"83f9d30e-80de-4d55-8c35-d9dc7542280e","Type":"ContainerStarted","Data":"2ce697bf25f6d031e4a1392263ad3acfd8cfd49f1eacf499be64012801aa9e6a"} Oct 09 14:41:51 crc kubenswrapper[4762]: I1009 14:41:51.503957 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-pbf8x" event={"ID":"83f9d30e-80de-4d55-8c35-d9dc7542280e","Type":"ContainerStarted","Data":"a0cab21376028a3574a4ea5be7cc483bf0cb7aeaec3a6bb6a6f0fd4853e56ef5"} Oct 09 14:41:51 crc kubenswrapper[4762]: I1009 14:41:51.524494 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="crc-storage/crc-storage-crc-pbf8x" podStartSLOduration=2.31848871 podStartE2EDuration="3.524470913s" podCreationTimestamp="2025-10-09 14:41:48 +0000 UTC" firstStartedPulling="2025-10-09 14:41:49.763538859 +0000 UTC m=+4585.537329898" lastFinishedPulling="2025-10-09 14:41:50.969521062 +0000 UTC m=+4586.743312101" observedRunningTime="2025-10-09 14:41:51.517995234 +0000 UTC m=+4587.291786273" watchObservedRunningTime="2025-10-09 14:41:51.524470913 +0000 UTC m=+4587.298261952" Oct 09 14:41:52 crc kubenswrapper[4762]: I1009 14:41:52.511797 4762 generic.go:334] "Generic (PLEG): container finished" podID="83f9d30e-80de-4d55-8c35-d9dc7542280e" containerID="a0cab21376028a3574a4ea5be7cc483bf0cb7aeaec3a6bb6a6f0fd4853e56ef5" exitCode=0 Oct 09 14:41:52 crc kubenswrapper[4762]: I1009 14:41:52.511899 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-pbf8x" event={"ID":"83f9d30e-80de-4d55-8c35-d9dc7542280e","Type":"ContainerDied","Data":"a0cab21376028a3574a4ea5be7cc483bf0cb7aeaec3a6bb6a6f0fd4853e56ef5"} Oct 09 14:41:53 crc kubenswrapper[4762]: I1009 14:41:53.791283 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-pbf8x" Oct 09 14:41:53 crc kubenswrapper[4762]: I1009 14:41:53.966782 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w77gr\" (UniqueName: \"kubernetes.io/projected/83f9d30e-80de-4d55-8c35-d9dc7542280e-kube-api-access-w77gr\") pod \"83f9d30e-80de-4d55-8c35-d9dc7542280e\" (UID: \"83f9d30e-80de-4d55-8c35-d9dc7542280e\") " Oct 09 14:41:53 crc kubenswrapper[4762]: I1009 14:41:53.966847 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/83f9d30e-80de-4d55-8c35-d9dc7542280e-node-mnt\") pod \"83f9d30e-80de-4d55-8c35-d9dc7542280e\" (UID: \"83f9d30e-80de-4d55-8c35-d9dc7542280e\") " Oct 09 14:41:53 crc kubenswrapper[4762]: I1009 14:41:53.966890 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/83f9d30e-80de-4d55-8c35-d9dc7542280e-crc-storage\") pod \"83f9d30e-80de-4d55-8c35-d9dc7542280e\" (UID: \"83f9d30e-80de-4d55-8c35-d9dc7542280e\") " Oct 09 14:41:53 crc kubenswrapper[4762]: I1009 14:41:53.966994 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/83f9d30e-80de-4d55-8c35-d9dc7542280e-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "83f9d30e-80de-4d55-8c35-d9dc7542280e" (UID: "83f9d30e-80de-4d55-8c35-d9dc7542280e"). InnerVolumeSpecName "node-mnt". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 14:41:53 crc kubenswrapper[4762]: I1009 14:41:53.967252 4762 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/83f9d30e-80de-4d55-8c35-d9dc7542280e-node-mnt\") on node \"crc\" DevicePath \"\"" Oct 09 14:41:53 crc kubenswrapper[4762]: I1009 14:41:53.972084 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/83f9d30e-80de-4d55-8c35-d9dc7542280e-kube-api-access-w77gr" (OuterVolumeSpecName: "kube-api-access-w77gr") pod "83f9d30e-80de-4d55-8c35-d9dc7542280e" (UID: "83f9d30e-80de-4d55-8c35-d9dc7542280e"). InnerVolumeSpecName "kube-api-access-w77gr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 14:41:53 crc kubenswrapper[4762]: I1009 14:41:53.983815 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/83f9d30e-80de-4d55-8c35-d9dc7542280e-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "83f9d30e-80de-4d55-8c35-d9dc7542280e" (UID: "83f9d30e-80de-4d55-8c35-d9dc7542280e"). InnerVolumeSpecName "crc-storage". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 14:41:54 crc kubenswrapper[4762]: I1009 14:41:54.070151 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w77gr\" (UniqueName: \"kubernetes.io/projected/83f9d30e-80de-4d55-8c35-d9dc7542280e-kube-api-access-w77gr\") on node \"crc\" DevicePath \"\"" Oct 09 14:41:54 crc kubenswrapper[4762]: I1009 14:41:54.070524 4762 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/83f9d30e-80de-4d55-8c35-d9dc7542280e-crc-storage\") on node \"crc\" DevicePath \"\"" Oct 09 14:41:54 crc kubenswrapper[4762]: I1009 14:41:54.527766 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-pbf8x" event={"ID":"83f9d30e-80de-4d55-8c35-d9dc7542280e","Type":"ContainerDied","Data":"2ce697bf25f6d031e4a1392263ad3acfd8cfd49f1eacf499be64012801aa9e6a"} Oct 09 14:41:54 crc kubenswrapper[4762]: I1009 14:41:54.527807 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2ce697bf25f6d031e4a1392263ad3acfd8cfd49f1eacf499be64012801aa9e6a" Oct 09 14:41:54 crc kubenswrapper[4762]: I1009 14:41:54.527813 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-pbf8x" Oct 09 14:41:55 crc kubenswrapper[4762]: I1009 14:41:55.683314 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["crc-storage/crc-storage-crc-pbf8x"] Oct 09 14:41:55 crc kubenswrapper[4762]: I1009 14:41:55.687527 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["crc-storage/crc-storage-crc-pbf8x"] Oct 09 14:41:55 crc kubenswrapper[4762]: I1009 14:41:55.830140 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-flpth"] Oct 09 14:41:55 crc kubenswrapper[4762]: E1009 14:41:55.830559 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="83f9d30e-80de-4d55-8c35-d9dc7542280e" containerName="storage" Oct 09 14:41:55 crc kubenswrapper[4762]: I1009 14:41:55.830584 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="83f9d30e-80de-4d55-8c35-d9dc7542280e" containerName="storage" Oct 09 14:41:55 crc kubenswrapper[4762]: I1009 14:41:55.830766 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="83f9d30e-80de-4d55-8c35-d9dc7542280e" containerName="storage" Oct 09 14:41:55 crc kubenswrapper[4762]: I1009 14:41:55.831372 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-flpth" Oct 09 14:41:55 crc kubenswrapper[4762]: I1009 14:41:55.833442 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage" Oct 09 14:41:55 crc kubenswrapper[4762]: I1009 14:41:55.833474 4762 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-nhclq" Oct 09 14:41:55 crc kubenswrapper[4762]: I1009 14:41:55.833691 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt" Oct 09 14:41:55 crc kubenswrapper[4762]: I1009 14:41:55.833692 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt" Oct 09 14:41:55 crc kubenswrapper[4762]: I1009 14:41:55.839327 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-flpth"] Oct 09 14:41:55 crc kubenswrapper[4762]: I1009 14:41:55.965103 4762 scope.go:117] "RemoveContainer" containerID="bd93ac77fa82ac7ac75985b30cf5b2de69fe1e0a93c6fcc548b0b2dd005aa70c" Oct 09 14:41:55 crc kubenswrapper[4762]: E1009 14:41:55.965322 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 14:41:55 crc kubenswrapper[4762]: I1009 14:41:55.997917 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h7kmh\" (UniqueName: \"kubernetes.io/projected/30549e04-7d5e-49cc-b0e0-1de7d5484d4d-kube-api-access-h7kmh\") pod \"crc-storage-crc-flpth\" (UID: \"30549e04-7d5e-49cc-b0e0-1de7d5484d4d\") " pod="crc-storage/crc-storage-crc-flpth" Oct 09 14:41:55 crc kubenswrapper[4762]: I1009 14:41:55.998066 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/30549e04-7d5e-49cc-b0e0-1de7d5484d4d-node-mnt\") pod \"crc-storage-crc-flpth\" (UID: \"30549e04-7d5e-49cc-b0e0-1de7d5484d4d\") " pod="crc-storage/crc-storage-crc-flpth" Oct 09 14:41:55 crc kubenswrapper[4762]: I1009 14:41:55.998132 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/30549e04-7d5e-49cc-b0e0-1de7d5484d4d-crc-storage\") pod \"crc-storage-crc-flpth\" (UID: \"30549e04-7d5e-49cc-b0e0-1de7d5484d4d\") " pod="crc-storage/crc-storage-crc-flpth" Oct 09 14:41:56 crc kubenswrapper[4762]: I1009 14:41:56.100064 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/30549e04-7d5e-49cc-b0e0-1de7d5484d4d-node-mnt\") pod \"crc-storage-crc-flpth\" (UID: \"30549e04-7d5e-49cc-b0e0-1de7d5484d4d\") " pod="crc-storage/crc-storage-crc-flpth" Oct 09 14:41:56 crc kubenswrapper[4762]: I1009 14:41:56.100124 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/30549e04-7d5e-49cc-b0e0-1de7d5484d4d-crc-storage\") pod \"crc-storage-crc-flpth\" (UID: \"30549e04-7d5e-49cc-b0e0-1de7d5484d4d\") " pod="crc-storage/crc-storage-crc-flpth" Oct 09 14:41:56 crc kubenswrapper[4762]: I1009 14:41:56.100234 
4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h7kmh\" (UniqueName: \"kubernetes.io/projected/30549e04-7d5e-49cc-b0e0-1de7d5484d4d-kube-api-access-h7kmh\") pod \"crc-storage-crc-flpth\" (UID: \"30549e04-7d5e-49cc-b0e0-1de7d5484d4d\") " pod="crc-storage/crc-storage-crc-flpth" Oct 09 14:41:56 crc kubenswrapper[4762]: I1009 14:41:56.100396 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/30549e04-7d5e-49cc-b0e0-1de7d5484d4d-node-mnt\") pod \"crc-storage-crc-flpth\" (UID: \"30549e04-7d5e-49cc-b0e0-1de7d5484d4d\") " pod="crc-storage/crc-storage-crc-flpth" Oct 09 14:41:56 crc kubenswrapper[4762]: I1009 14:41:56.101433 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/30549e04-7d5e-49cc-b0e0-1de7d5484d4d-crc-storage\") pod \"crc-storage-crc-flpth\" (UID: \"30549e04-7d5e-49cc-b0e0-1de7d5484d4d\") " pod="crc-storage/crc-storage-crc-flpth" Oct 09 14:41:56 crc kubenswrapper[4762]: I1009 14:41:56.201721 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h7kmh\" (UniqueName: \"kubernetes.io/projected/30549e04-7d5e-49cc-b0e0-1de7d5484d4d-kube-api-access-h7kmh\") pod \"crc-storage-crc-flpth\" (UID: \"30549e04-7d5e-49cc-b0e0-1de7d5484d4d\") " pod="crc-storage/crc-storage-crc-flpth" Oct 09 14:41:56 crc kubenswrapper[4762]: I1009 14:41:56.452228 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-flpth" Oct 09 14:41:56 crc kubenswrapper[4762]: I1009 14:41:56.870078 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-flpth"] Oct 09 14:41:56 crc kubenswrapper[4762]: I1009 14:41:56.974236 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="83f9d30e-80de-4d55-8c35-d9dc7542280e" path="/var/lib/kubelet/pods/83f9d30e-80de-4d55-8c35-d9dc7542280e/volumes" Oct 09 14:41:57 crc kubenswrapper[4762]: I1009 14:41:57.552242 4762 generic.go:334] "Generic (PLEG): container finished" podID="30549e04-7d5e-49cc-b0e0-1de7d5484d4d" containerID="e298795d2e8a3c65c8c4f28a1019cbc620bb7381239dcadc81bc5e9015bcf781" exitCode=0 Oct 09 14:41:57 crc kubenswrapper[4762]: I1009 14:41:57.552343 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-flpth" event={"ID":"30549e04-7d5e-49cc-b0e0-1de7d5484d4d","Type":"ContainerDied","Data":"e298795d2e8a3c65c8c4f28a1019cbc620bb7381239dcadc81bc5e9015bcf781"} Oct 09 14:41:57 crc kubenswrapper[4762]: I1009 14:41:57.552561 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-flpth" event={"ID":"30549e04-7d5e-49cc-b0e0-1de7d5484d4d","Type":"ContainerStarted","Data":"8aa75801aac3311299efcae75a14c18f27e04c6ed73049fb10a87f34318ea139"} Oct 09 14:41:58 crc kubenswrapper[4762]: I1009 14:41:58.827071 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-flpth" Oct 09 14:41:58 crc kubenswrapper[4762]: I1009 14:41:58.937541 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/30549e04-7d5e-49cc-b0e0-1de7d5484d4d-crc-storage\") pod \"30549e04-7d5e-49cc-b0e0-1de7d5484d4d\" (UID: \"30549e04-7d5e-49cc-b0e0-1de7d5484d4d\") " Oct 09 14:41:58 crc kubenswrapper[4762]: I1009 14:41:58.937735 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/30549e04-7d5e-49cc-b0e0-1de7d5484d4d-node-mnt\") pod \"30549e04-7d5e-49cc-b0e0-1de7d5484d4d\" (UID: \"30549e04-7d5e-49cc-b0e0-1de7d5484d4d\") " Oct 09 14:41:58 crc kubenswrapper[4762]: I1009 14:41:58.937842 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h7kmh\" (UniqueName: \"kubernetes.io/projected/30549e04-7d5e-49cc-b0e0-1de7d5484d4d-kube-api-access-h7kmh\") pod \"30549e04-7d5e-49cc-b0e0-1de7d5484d4d\" (UID: \"30549e04-7d5e-49cc-b0e0-1de7d5484d4d\") " Oct 09 14:41:58 crc kubenswrapper[4762]: I1009 14:41:58.937928 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/30549e04-7d5e-49cc-b0e0-1de7d5484d4d-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "30549e04-7d5e-49cc-b0e0-1de7d5484d4d" (UID: "30549e04-7d5e-49cc-b0e0-1de7d5484d4d"). InnerVolumeSpecName "node-mnt". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 14:41:58 crc kubenswrapper[4762]: I1009 14:41:58.938142 4762 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/30549e04-7d5e-49cc-b0e0-1de7d5484d4d-node-mnt\") on node \"crc\" DevicePath \"\"" Oct 09 14:41:58 crc kubenswrapper[4762]: I1009 14:41:58.943254 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/30549e04-7d5e-49cc-b0e0-1de7d5484d4d-kube-api-access-h7kmh" (OuterVolumeSpecName: "kube-api-access-h7kmh") pod "30549e04-7d5e-49cc-b0e0-1de7d5484d4d" (UID: "30549e04-7d5e-49cc-b0e0-1de7d5484d4d"). InnerVolumeSpecName "kube-api-access-h7kmh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 14:41:58 crc kubenswrapper[4762]: I1009 14:41:58.954513 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/30549e04-7d5e-49cc-b0e0-1de7d5484d4d-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "30549e04-7d5e-49cc-b0e0-1de7d5484d4d" (UID: "30549e04-7d5e-49cc-b0e0-1de7d5484d4d"). InnerVolumeSpecName "crc-storage". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 14:41:59 crc kubenswrapper[4762]: I1009 14:41:59.039574 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h7kmh\" (UniqueName: \"kubernetes.io/projected/30549e04-7d5e-49cc-b0e0-1de7d5484d4d-kube-api-access-h7kmh\") on node \"crc\" DevicePath \"\"" Oct 09 14:41:59 crc kubenswrapper[4762]: I1009 14:41:59.039631 4762 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/30549e04-7d5e-49cc-b0e0-1de7d5484d4d-crc-storage\") on node \"crc\" DevicePath \"\"" Oct 09 14:41:59 crc kubenswrapper[4762]: I1009 14:41:59.567640 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-flpth" event={"ID":"30549e04-7d5e-49cc-b0e0-1de7d5484d4d","Type":"ContainerDied","Data":"8aa75801aac3311299efcae75a14c18f27e04c6ed73049fb10a87f34318ea139"} Oct 09 14:41:59 crc kubenswrapper[4762]: I1009 14:41:59.568173 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8aa75801aac3311299efcae75a14c18f27e04c6ed73049fb10a87f34318ea139" Oct 09 14:41:59 crc kubenswrapper[4762]: I1009 14:41:59.567719 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-flpth" Oct 09 14:42:08 crc kubenswrapper[4762]: I1009 14:42:08.965158 4762 scope.go:117] "RemoveContainer" containerID="bd93ac77fa82ac7ac75985b30cf5b2de69fe1e0a93c6fcc548b0b2dd005aa70c" Oct 09 14:42:08 crc kubenswrapper[4762]: E1009 14:42:08.965875 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 14:42:21 crc kubenswrapper[4762]: I1009 14:42:21.965537 4762 scope.go:117] "RemoveContainer" containerID="bd93ac77fa82ac7ac75985b30cf5b2de69fe1e0a93c6fcc548b0b2dd005aa70c" Oct 09 14:42:21 crc kubenswrapper[4762]: E1009 14:42:21.966568 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 14:42:31 crc kubenswrapper[4762]: I1009 14:42:31.330362 4762 scope.go:117] "RemoveContainer" containerID="f2340c19b01df8a1c6798b2c2daa291801c168b1de24bacf25f0e083c9792dcf" Oct 09 14:42:34 crc kubenswrapper[4762]: I1009 14:42:34.969342 4762 scope.go:117] "RemoveContainer" containerID="bd93ac77fa82ac7ac75985b30cf5b2de69fe1e0a93c6fcc548b0b2dd005aa70c" Oct 09 14:42:34 crc kubenswrapper[4762]: E1009 14:42:34.970038 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 14:42:47 crc kubenswrapper[4762]: I1009 
Oct 09 14:42:47 crc kubenswrapper[4762]: E1009 14:42:47.968146 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14"
Oct 09 14:42:59 crc kubenswrapper[4762]: I1009 14:42:59.964982 4762 scope.go:117] "RemoveContainer" containerID="bd93ac77fa82ac7ac75985b30cf5b2de69fe1e0a93c6fcc548b0b2dd005aa70c"
Oct 09 14:42:59 crc kubenswrapper[4762]: E1009 14:42:59.965818 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14"
Oct 09 14:43:12 crc kubenswrapper[4762]: I1009 14:43:12.965422 4762 scope.go:117] "RemoveContainer" containerID="bd93ac77fa82ac7ac75985b30cf5b2de69fe1e0a93c6fcc548b0b2dd005aa70c"
Oct 09 14:43:12 crc kubenswrapper[4762]: E1009 14:43:12.966156 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14"
Oct 09 14:43:24 crc kubenswrapper[4762]: I1009 14:43:24.969579 4762 scope.go:117] "RemoveContainer" containerID="bd93ac77fa82ac7ac75985b30cf5b2de69fe1e0a93c6fcc548b0b2dd005aa70c"
Oct 09 14:43:24 crc kubenswrapper[4762]: E1009 14:43:24.970433 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14"
Oct 09 14:43:38 crc kubenswrapper[4762]: I1009 14:43:38.965663 4762 scope.go:117] "RemoveContainer" containerID="bd93ac77fa82ac7ac75985b30cf5b2de69fe1e0a93c6fcc548b0b2dd005aa70c"
Oct 09 14:43:38 crc kubenswrapper[4762]: E1009 14:43:38.966429 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14"
Oct 09 14:43:52 crc kubenswrapper[4762]: I1009 14:43:52.965279 4762 scope.go:117] "RemoveContainer" containerID="bd93ac77fa82ac7ac75985b30cf5b2de69fe1e0a93c6fcc548b0b2dd005aa70c"
Oct 09 14:43:52 crc kubenswrapper[4762]: E1009 14:43:52.966163 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14"
Oct 09 14:44:06 crc kubenswrapper[4762]: I1009 14:44:06.965326 4762 scope.go:117] "RemoveContainer" containerID="bd93ac77fa82ac7ac75985b30cf5b2de69fe1e0a93c6fcc548b0b2dd005aa70c"
Oct 09 14:44:06 crc kubenswrapper[4762]: E1009 14:44:06.966001 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14"
Oct 09 14:44:18 crc kubenswrapper[4762]: I1009 14:44:18.964688 4762 scope.go:117] "RemoveContainer" containerID="bd93ac77fa82ac7ac75985b30cf5b2de69fe1e0a93c6fcc548b0b2dd005aa70c"
Oct 09 14:44:18 crc kubenswrapper[4762]: E1009 14:44:18.965398 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14"
Oct 09 14:44:33 crc kubenswrapper[4762]: I1009 14:44:33.965367 4762 scope.go:117] "RemoveContainer" containerID="bd93ac77fa82ac7ac75985b30cf5b2de69fe1e0a93c6fcc548b0b2dd005aa70c"
Oct 09 14:44:33 crc kubenswrapper[4762]: E1009 14:44:33.966103 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14"
Oct 09 14:44:47 crc kubenswrapper[4762]: I1009 14:44:47.965041 4762 scope.go:117] "RemoveContainer" containerID="bd93ac77fa82ac7ac75985b30cf5b2de69fe1e0a93c6fcc548b0b2dd005aa70c"
Oct 09 14:44:47 crc kubenswrapper[4762]: E1009 14:44:47.965935 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14"
Oct 09 14:45:00 crc kubenswrapper[4762]: I1009 14:45:00.140427 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29333685-flqvl"]
Oct 09 14:45:00 crc kubenswrapper[4762]: E1009 14:45:00.141342 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="30549e04-7d5e-49cc-b0e0-1de7d5484d4d" containerName="storage"
Oct 09 14:45:00 crc kubenswrapper[4762]: I1009 14:45:00.141357 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="30549e04-7d5e-49cc-b0e0-1de7d5484d4d" containerName="storage"
Oct 09 14:45:00 crc kubenswrapper[4762]: I1009 14:45:00.141512 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="30549e04-7d5e-49cc-b0e0-1de7d5484d4d" containerName="storage"
Oct 09 14:45:00 crc kubenswrapper[4762]: I1009 14:45:00.142128 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29333685-flqvl"
Oct 09 14:45:00 crc kubenswrapper[4762]: I1009 14:45:00.144988 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Oct 09 14:45:00 crc kubenswrapper[4762]: I1009 14:45:00.145341 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Oct 09 14:45:00 crc kubenswrapper[4762]: I1009 14:45:00.146566 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29333685-flqvl"]
Oct 09 14:45:00 crc kubenswrapper[4762]: I1009 14:45:00.298770 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d426004b-92b4-4193-bdf8-b40d9e48d018-secret-volume\") pod \"collect-profiles-29333685-flqvl\" (UID: \"d426004b-92b4-4193-bdf8-b40d9e48d018\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333685-flqvl"
Oct 09 14:45:00 crc kubenswrapper[4762]: I1009 14:45:00.298875 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d426004b-92b4-4193-bdf8-b40d9e48d018-config-volume\") pod \"collect-profiles-29333685-flqvl\" (UID: \"d426004b-92b4-4193-bdf8-b40d9e48d018\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333685-flqvl"
Oct 09 14:45:00 crc kubenswrapper[4762]: I1009 14:45:00.299105 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rkhh6\" (UniqueName: \"kubernetes.io/projected/d426004b-92b4-4193-bdf8-b40d9e48d018-kube-api-access-rkhh6\") pod \"collect-profiles-29333685-flqvl\" (UID: \"d426004b-92b4-4193-bdf8-b40d9e48d018\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333685-flqvl"
Oct 09 14:45:00 crc kubenswrapper[4762]: I1009 14:45:00.400877 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d426004b-92b4-4193-bdf8-b40d9e48d018-secret-volume\") pod \"collect-profiles-29333685-flqvl\" (UID: \"d426004b-92b4-4193-bdf8-b40d9e48d018\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333685-flqvl"
Oct 09 14:45:00 crc kubenswrapper[4762]: I1009 14:45:00.400981 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d426004b-92b4-4193-bdf8-b40d9e48d018-config-volume\") pod \"collect-profiles-29333685-flqvl\" (UID: \"d426004b-92b4-4193-bdf8-b40d9e48d018\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333685-flqvl"
Oct 09 14:45:00 crc kubenswrapper[4762]: I1009 14:45:00.401077 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rkhh6\" (UniqueName: \"kubernetes.io/projected/d426004b-92b4-4193-bdf8-b40d9e48d018-kube-api-access-rkhh6\") pod \"collect-profiles-29333685-flqvl\" (UID: \"d426004b-92b4-4193-bdf8-b40d9e48d018\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333685-flqvl"
Oct 09 14:45:00 crc kubenswrapper[4762]: I1009 14:45:00.402198 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d426004b-92b4-4193-bdf8-b40d9e48d018-config-volume\") pod \"collect-profiles-29333685-flqvl\" (UID: \"d426004b-92b4-4193-bdf8-b40d9e48d018\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333685-flqvl"
Oct 09 14:45:00 crc kubenswrapper[4762]: I1009 14:45:00.413843 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d426004b-92b4-4193-bdf8-b40d9e48d018-secret-volume\") pod \"collect-profiles-29333685-flqvl\" (UID: \"d426004b-92b4-4193-bdf8-b40d9e48d018\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333685-flqvl"
Oct 09 14:45:00 crc kubenswrapper[4762]: I1009 14:45:00.418478 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rkhh6\" (UniqueName: \"kubernetes.io/projected/d426004b-92b4-4193-bdf8-b40d9e48d018-kube-api-access-rkhh6\") pod \"collect-profiles-29333685-flqvl\" (UID: \"d426004b-92b4-4193-bdf8-b40d9e48d018\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333685-flqvl"
Oct 09 14:45:00 crc kubenswrapper[4762]: I1009 14:45:00.462117 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29333685-flqvl"
Oct 09 14:45:00 crc kubenswrapper[4762]: I1009 14:45:00.920357 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29333685-flqvl"]
Oct 09 14:45:01 crc kubenswrapper[4762]: I1009 14:45:01.814854 4762 generic.go:334] "Generic (PLEG): container finished" podID="d426004b-92b4-4193-bdf8-b40d9e48d018" containerID="c4c3f741550a42f3ac98a464227145e1d291dd39ee396e1157df5a809a7f4b36" exitCode=0
Oct 09 14:45:01 crc kubenswrapper[4762]: I1009 14:45:01.814924 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29333685-flqvl" event={"ID":"d426004b-92b4-4193-bdf8-b40d9e48d018","Type":"ContainerDied","Data":"c4c3f741550a42f3ac98a464227145e1d291dd39ee396e1157df5a809a7f4b36"}
Oct 09 14:45:01 crc kubenswrapper[4762]: I1009 14:45:01.815164 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29333685-flqvl" event={"ID":"d426004b-92b4-4193-bdf8-b40d9e48d018","Type":"ContainerStarted","Data":"7b5e228b407ab0324ad71cf5410854fb5d82aa3e3d2670fa126790f25a6422bf"}
Oct 09 14:45:01 crc kubenswrapper[4762]: I1009 14:45:01.965426 4762 scope.go:117] "RemoveContainer" containerID="bd93ac77fa82ac7ac75985b30cf5b2de69fe1e0a93c6fcc548b0b2dd005aa70c"
Oct 09 14:45:01 crc kubenswrapper[4762]: E1009 14:45:01.965710 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14"
Oct 09 14:45:03 crc kubenswrapper[4762]: I1009 14:45:03.097070 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29333685-flqvl"
Oct 09 14:45:03 crc kubenswrapper[4762]: I1009 14:45:03.239031 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d426004b-92b4-4193-bdf8-b40d9e48d018-config-volume\") pod \"d426004b-92b4-4193-bdf8-b40d9e48d018\" (UID: \"d426004b-92b4-4193-bdf8-b40d9e48d018\") "
Oct 09 14:45:03 crc kubenswrapper[4762]: I1009 14:45:03.239218 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d426004b-92b4-4193-bdf8-b40d9e48d018-secret-volume\") pod \"d426004b-92b4-4193-bdf8-b40d9e48d018\" (UID: \"d426004b-92b4-4193-bdf8-b40d9e48d018\") "
Oct 09 14:45:03 crc kubenswrapper[4762]: I1009 14:45:03.239257 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rkhh6\" (UniqueName: \"kubernetes.io/projected/d426004b-92b4-4193-bdf8-b40d9e48d018-kube-api-access-rkhh6\") pod \"d426004b-92b4-4193-bdf8-b40d9e48d018\" (UID: \"d426004b-92b4-4193-bdf8-b40d9e48d018\") "
Oct 09 14:45:03 crc kubenswrapper[4762]: I1009 14:45:03.239937 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d426004b-92b4-4193-bdf8-b40d9e48d018-config-volume" (OuterVolumeSpecName: "config-volume") pod "d426004b-92b4-4193-bdf8-b40d9e48d018" (UID: "d426004b-92b4-4193-bdf8-b40d9e48d018"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 09 14:45:03 crc kubenswrapper[4762]: I1009 14:45:03.244697 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d426004b-92b4-4193-bdf8-b40d9e48d018-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "d426004b-92b4-4193-bdf8-b40d9e48d018" (UID: "d426004b-92b4-4193-bdf8-b40d9e48d018"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 09 14:45:03 crc kubenswrapper[4762]: I1009 14:45:03.245092 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d426004b-92b4-4193-bdf8-b40d9e48d018-kube-api-access-rkhh6" (OuterVolumeSpecName: "kube-api-access-rkhh6") pod "d426004b-92b4-4193-bdf8-b40d9e48d018" (UID: "d426004b-92b4-4193-bdf8-b40d9e48d018"). InnerVolumeSpecName "kube-api-access-rkhh6". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 09 14:45:03 crc kubenswrapper[4762]: I1009 14:45:03.340919 4762 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d426004b-92b4-4193-bdf8-b40d9e48d018-secret-volume\") on node \"crc\" DevicePath \"\""
Oct 09 14:45:03 crc kubenswrapper[4762]: I1009 14:45:03.340981 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rkhh6\" (UniqueName: \"kubernetes.io/projected/d426004b-92b4-4193-bdf8-b40d9e48d018-kube-api-access-rkhh6\") on node \"crc\" DevicePath \"\""
Oct 09 14:45:03 crc kubenswrapper[4762]: I1009 14:45:03.340993 4762 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d426004b-92b4-4193-bdf8-b40d9e48d018-config-volume\") on node \"crc\" DevicePath \"\""
Oct 09 14:45:03 crc kubenswrapper[4762]: I1009 14:45:03.830945 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29333685-flqvl" event={"ID":"d426004b-92b4-4193-bdf8-b40d9e48d018","Type":"ContainerDied","Data":"7b5e228b407ab0324ad71cf5410854fb5d82aa3e3d2670fa126790f25a6422bf"}
Oct 09 14:45:03 crc kubenswrapper[4762]: I1009 14:45:03.831012 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7b5e228b407ab0324ad71cf5410854fb5d82aa3e3d2670fa126790f25a6422bf"
Oct 09 14:45:03 crc kubenswrapper[4762]: I1009 14:45:03.831055 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29333685-flqvl"
Oct 09 14:45:04 crc kubenswrapper[4762]: I1009 14:45:04.163471 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29333640-cg4gt"]
Oct 09 14:45:04 crc kubenswrapper[4762]: I1009 14:45:04.168689 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29333640-cg4gt"]
Oct 09 14:45:04 crc kubenswrapper[4762]: I1009 14:45:04.974356 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dfaa232b-5c62-4c60-ad6c-486aa735578e" path="/var/lib/kubelet/pods/dfaa232b-5c62-4c60-ad6c-486aa735578e/volumes"
Oct 09 14:45:10 crc kubenswrapper[4762]: I1009 14:45:10.126903 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5d7b5456f5-z5rnq"]
Oct 09 14:45:10 crc kubenswrapper[4762]: E1009 14:45:10.127582 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d426004b-92b4-4193-bdf8-b40d9e48d018" containerName="collect-profiles"
Oct 09 14:45:10 crc kubenswrapper[4762]: I1009 14:45:10.127598 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="d426004b-92b4-4193-bdf8-b40d9e48d018" containerName="collect-profiles"
Oct 09 14:45:10 crc kubenswrapper[4762]: I1009 14:45:10.127805 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="d426004b-92b4-4193-bdf8-b40d9e48d018" containerName="collect-profiles"
Oct 09 14:45:10 crc kubenswrapper[4762]: I1009 14:45:10.128684 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5d7b5456f5-z5rnq"
Need to start a new one" pod="openstack/dnsmasq-dns-5d7b5456f5-z5rnq" Oct 09 14:45:10 crc kubenswrapper[4762]: I1009 14:45:10.131360 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt" Oct 09 14:45:10 crc kubenswrapper[4762]: I1009 14:45:10.131379 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-ljnx5" Oct 09 14:45:10 crc kubenswrapper[4762]: I1009 14:45:10.131610 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns" Oct 09 14:45:10 crc kubenswrapper[4762]: I1009 14:45:10.131778 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt" Oct 09 14:45:10 crc kubenswrapper[4762]: I1009 14:45:10.132902 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc" Oct 09 14:45:10 crc kubenswrapper[4762]: I1009 14:45:10.142497 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5d7b5456f5-z5rnq"] Oct 09 14:45:10 crc kubenswrapper[4762]: I1009 14:45:10.236307 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a521771d-024e-42f3-9600-e0ec9eece88f-config\") pod \"dnsmasq-dns-5d7b5456f5-z5rnq\" (UID: \"a521771d-024e-42f3-9600-e0ec9eece88f\") " pod="openstack/dnsmasq-dns-5d7b5456f5-z5rnq" Oct 09 14:45:10 crc kubenswrapper[4762]: I1009 14:45:10.236430 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fwjr5\" (UniqueName: \"kubernetes.io/projected/a521771d-024e-42f3-9600-e0ec9eece88f-kube-api-access-fwjr5\") pod \"dnsmasq-dns-5d7b5456f5-z5rnq\" (UID: \"a521771d-024e-42f3-9600-e0ec9eece88f\") " pod="openstack/dnsmasq-dns-5d7b5456f5-z5rnq" Oct 09 14:45:10 crc kubenswrapper[4762]: I1009 14:45:10.236487 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a521771d-024e-42f3-9600-e0ec9eece88f-dns-svc\") pod \"dnsmasq-dns-5d7b5456f5-z5rnq\" (UID: \"a521771d-024e-42f3-9600-e0ec9eece88f\") " pod="openstack/dnsmasq-dns-5d7b5456f5-z5rnq" Oct 09 14:45:10 crc kubenswrapper[4762]: I1009 14:45:10.336284 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-98ddfc8f-vj8kn"] Oct 09 14:45:10 crc kubenswrapper[4762]: I1009 14:45:10.337553 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a521771d-024e-42f3-9600-e0ec9eece88f-dns-svc\") pod \"dnsmasq-dns-5d7b5456f5-z5rnq\" (UID: \"a521771d-024e-42f3-9600-e0ec9eece88f\") " pod="openstack/dnsmasq-dns-5d7b5456f5-z5rnq" Oct 09 14:45:10 crc kubenswrapper[4762]: I1009 14:45:10.337614 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a521771d-024e-42f3-9600-e0ec9eece88f-config\") pod \"dnsmasq-dns-5d7b5456f5-z5rnq\" (UID: \"a521771d-024e-42f3-9600-e0ec9eece88f\") " pod="openstack/dnsmasq-dns-5d7b5456f5-z5rnq" Oct 09 14:45:10 crc kubenswrapper[4762]: I1009 14:45:10.337657 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-98ddfc8f-vj8kn" Oct 09 14:45:10 crc kubenswrapper[4762]: I1009 14:45:10.337718 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fwjr5\" (UniqueName: \"kubernetes.io/projected/a521771d-024e-42f3-9600-e0ec9eece88f-kube-api-access-fwjr5\") pod \"dnsmasq-dns-5d7b5456f5-z5rnq\" (UID: \"a521771d-024e-42f3-9600-e0ec9eece88f\") " pod="openstack/dnsmasq-dns-5d7b5456f5-z5rnq" Oct 09 14:45:10 crc kubenswrapper[4762]: I1009 14:45:10.338938 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a521771d-024e-42f3-9600-e0ec9eece88f-dns-svc\") pod \"dnsmasq-dns-5d7b5456f5-z5rnq\" (UID: \"a521771d-024e-42f3-9600-e0ec9eece88f\") " pod="openstack/dnsmasq-dns-5d7b5456f5-z5rnq" Oct 09 14:45:10 crc kubenswrapper[4762]: I1009 14:45:10.338978 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a521771d-024e-42f3-9600-e0ec9eece88f-config\") pod \"dnsmasq-dns-5d7b5456f5-z5rnq\" (UID: \"a521771d-024e-42f3-9600-e0ec9eece88f\") " pod="openstack/dnsmasq-dns-5d7b5456f5-z5rnq" Oct 09 14:45:10 crc kubenswrapper[4762]: I1009 14:45:10.347193 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-98ddfc8f-vj8kn"] Oct 09 14:45:10 crc kubenswrapper[4762]: I1009 14:45:10.383158 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fwjr5\" (UniqueName: \"kubernetes.io/projected/a521771d-024e-42f3-9600-e0ec9eece88f-kube-api-access-fwjr5\") pod \"dnsmasq-dns-5d7b5456f5-z5rnq\" (UID: \"a521771d-024e-42f3-9600-e0ec9eece88f\") " pod="openstack/dnsmasq-dns-5d7b5456f5-z5rnq" Oct 09 14:45:10 crc kubenswrapper[4762]: I1009 14:45:10.439166 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8b21fbda-eb6e-4a0e-a4a9-12cf5ea7814b-config\") pod \"dnsmasq-dns-98ddfc8f-vj8kn\" (UID: \"8b21fbda-eb6e-4a0e-a4a9-12cf5ea7814b\") " pod="openstack/dnsmasq-dns-98ddfc8f-vj8kn" Oct 09 14:45:10 crc kubenswrapper[4762]: I1009 14:45:10.439219 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8b21fbda-eb6e-4a0e-a4a9-12cf5ea7814b-dns-svc\") pod \"dnsmasq-dns-98ddfc8f-vj8kn\" (UID: \"8b21fbda-eb6e-4a0e-a4a9-12cf5ea7814b\") " pod="openstack/dnsmasq-dns-98ddfc8f-vj8kn" Oct 09 14:45:10 crc kubenswrapper[4762]: I1009 14:45:10.439341 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5z9tj\" (UniqueName: \"kubernetes.io/projected/8b21fbda-eb6e-4a0e-a4a9-12cf5ea7814b-kube-api-access-5z9tj\") pod \"dnsmasq-dns-98ddfc8f-vj8kn\" (UID: \"8b21fbda-eb6e-4a0e-a4a9-12cf5ea7814b\") " pod="openstack/dnsmasq-dns-98ddfc8f-vj8kn" Oct 09 14:45:10 crc kubenswrapper[4762]: I1009 14:45:10.452833 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5d7b5456f5-z5rnq" Oct 09 14:45:10 crc kubenswrapper[4762]: I1009 14:45:10.540500 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5z9tj\" (UniqueName: \"kubernetes.io/projected/8b21fbda-eb6e-4a0e-a4a9-12cf5ea7814b-kube-api-access-5z9tj\") pod \"dnsmasq-dns-98ddfc8f-vj8kn\" (UID: \"8b21fbda-eb6e-4a0e-a4a9-12cf5ea7814b\") " pod="openstack/dnsmasq-dns-98ddfc8f-vj8kn" Oct 09 14:45:10 crc kubenswrapper[4762]: I1009 14:45:10.541493 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8b21fbda-eb6e-4a0e-a4a9-12cf5ea7814b-config\") pod \"dnsmasq-dns-98ddfc8f-vj8kn\" (UID: \"8b21fbda-eb6e-4a0e-a4a9-12cf5ea7814b\") " pod="openstack/dnsmasq-dns-98ddfc8f-vj8kn" Oct 09 14:45:10 crc kubenswrapper[4762]: I1009 14:45:10.542680 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8b21fbda-eb6e-4a0e-a4a9-12cf5ea7814b-dns-svc\") pod \"dnsmasq-dns-98ddfc8f-vj8kn\" (UID: \"8b21fbda-eb6e-4a0e-a4a9-12cf5ea7814b\") " pod="openstack/dnsmasq-dns-98ddfc8f-vj8kn" Oct 09 14:45:10 crc kubenswrapper[4762]: I1009 14:45:10.542617 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8b21fbda-eb6e-4a0e-a4a9-12cf5ea7814b-config\") pod \"dnsmasq-dns-98ddfc8f-vj8kn\" (UID: \"8b21fbda-eb6e-4a0e-a4a9-12cf5ea7814b\") " pod="openstack/dnsmasq-dns-98ddfc8f-vj8kn" Oct 09 14:45:10 crc kubenswrapper[4762]: I1009 14:45:10.543394 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8b21fbda-eb6e-4a0e-a4a9-12cf5ea7814b-dns-svc\") pod \"dnsmasq-dns-98ddfc8f-vj8kn\" (UID: \"8b21fbda-eb6e-4a0e-a4a9-12cf5ea7814b\") " pod="openstack/dnsmasq-dns-98ddfc8f-vj8kn" Oct 09 14:45:10 crc kubenswrapper[4762]: I1009 14:45:10.589795 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5z9tj\" (UniqueName: \"kubernetes.io/projected/8b21fbda-eb6e-4a0e-a4a9-12cf5ea7814b-kube-api-access-5z9tj\") pod \"dnsmasq-dns-98ddfc8f-vj8kn\" (UID: \"8b21fbda-eb6e-4a0e-a4a9-12cf5ea7814b\") " pod="openstack/dnsmasq-dns-98ddfc8f-vj8kn" Oct 09 14:45:10 crc kubenswrapper[4762]: I1009 14:45:10.654803 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-98ddfc8f-vj8kn" Oct 09 14:45:10 crc kubenswrapper[4762]: I1009 14:45:10.958801 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5d7b5456f5-z5rnq"] Oct 09 14:45:11 crc kubenswrapper[4762]: I1009 14:45:11.007919 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-98ddfc8f-vj8kn"] Oct 09 14:45:11 crc kubenswrapper[4762]: I1009 14:45:11.217628 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Oct 09 14:45:11 crc kubenswrapper[4762]: I1009 14:45:11.218829 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 09 14:45:11 crc kubenswrapper[4762]: I1009 14:45:11.221285 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Oct 09 14:45:11 crc kubenswrapper[4762]: I1009 14:45:11.221507 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Oct 09 14:45:11 crc kubenswrapper[4762]: I1009 14:45:11.221592 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Oct 09 14:45:11 crc kubenswrapper[4762]: I1009 14:45:11.224419 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-wg8dn" Oct 09 14:45:11 crc kubenswrapper[4762]: I1009 14:45:11.224427 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Oct 09 14:45:11 crc kubenswrapper[4762]: I1009 14:45:11.228392 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 09 14:45:11 crc kubenswrapper[4762]: I1009 14:45:11.358457 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/d4c7de04-1c4e-42b2-a965-4c046e51b272-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"d4c7de04-1c4e-42b2-a965-4c046e51b272\") " pod="openstack/rabbitmq-server-0" Oct 09 14:45:11 crc kubenswrapper[4762]: I1009 14:45:11.358509 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/d4c7de04-1c4e-42b2-a965-4c046e51b272-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"d4c7de04-1c4e-42b2-a965-4c046e51b272\") " pod="openstack/rabbitmq-server-0" Oct 09 14:45:11 crc kubenswrapper[4762]: I1009 14:45:11.358533 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-8045949f-ad0a-4376-97e1-09f153c59af7\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8045949f-ad0a-4376-97e1-09f153c59af7\") pod \"rabbitmq-server-0\" (UID: \"d4c7de04-1c4e-42b2-a965-4c046e51b272\") " pod="openstack/rabbitmq-server-0" Oct 09 14:45:11 crc kubenswrapper[4762]: I1009 14:45:11.358559 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/d4c7de04-1c4e-42b2-a965-4c046e51b272-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"d4c7de04-1c4e-42b2-a965-4c046e51b272\") " pod="openstack/rabbitmq-server-0" Oct 09 14:45:11 crc kubenswrapper[4762]: I1009 14:45:11.358582 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kwb67\" (UniqueName: \"kubernetes.io/projected/d4c7de04-1c4e-42b2-a965-4c046e51b272-kube-api-access-kwb67\") pod \"rabbitmq-server-0\" (UID: \"d4c7de04-1c4e-42b2-a965-4c046e51b272\") " pod="openstack/rabbitmq-server-0" Oct 09 14:45:11 crc kubenswrapper[4762]: I1009 14:45:11.358607 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/d4c7de04-1c4e-42b2-a965-4c046e51b272-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"d4c7de04-1c4e-42b2-a965-4c046e51b272\") " pod="openstack/rabbitmq-server-0" Oct 09 14:45:11 crc kubenswrapper[4762]: I1009 14:45:11.358623 4762 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/d4c7de04-1c4e-42b2-a965-4c046e51b272-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"d4c7de04-1c4e-42b2-a965-4c046e51b272\") " pod="openstack/rabbitmq-server-0" Oct 09 14:45:11 crc kubenswrapper[4762]: I1009 14:45:11.358663 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/d4c7de04-1c4e-42b2-a965-4c046e51b272-pod-info\") pod \"rabbitmq-server-0\" (UID: \"d4c7de04-1c4e-42b2-a965-4c046e51b272\") " pod="openstack/rabbitmq-server-0" Oct 09 14:45:11 crc kubenswrapper[4762]: I1009 14:45:11.358856 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/d4c7de04-1c4e-42b2-a965-4c046e51b272-server-conf\") pod \"rabbitmq-server-0\" (UID: \"d4c7de04-1c4e-42b2-a965-4c046e51b272\") " pod="openstack/rabbitmq-server-0" Oct 09 14:45:11 crc kubenswrapper[4762]: I1009 14:45:11.460330 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/d4c7de04-1c4e-42b2-a965-4c046e51b272-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"d4c7de04-1c4e-42b2-a965-4c046e51b272\") " pod="openstack/rabbitmq-server-0" Oct 09 14:45:11 crc kubenswrapper[4762]: I1009 14:45:11.460374 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-8045949f-ad0a-4376-97e1-09f153c59af7\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8045949f-ad0a-4376-97e1-09f153c59af7\") pod \"rabbitmq-server-0\" (UID: \"d4c7de04-1c4e-42b2-a965-4c046e51b272\") " pod="openstack/rabbitmq-server-0" Oct 09 14:45:11 crc kubenswrapper[4762]: I1009 14:45:11.460406 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/d4c7de04-1c4e-42b2-a965-4c046e51b272-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"d4c7de04-1c4e-42b2-a965-4c046e51b272\") " pod="openstack/rabbitmq-server-0" Oct 09 14:45:11 crc kubenswrapper[4762]: I1009 14:45:11.460435 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kwb67\" (UniqueName: \"kubernetes.io/projected/d4c7de04-1c4e-42b2-a965-4c046e51b272-kube-api-access-kwb67\") pod \"rabbitmq-server-0\" (UID: \"d4c7de04-1c4e-42b2-a965-4c046e51b272\") " pod="openstack/rabbitmq-server-0" Oct 09 14:45:11 crc kubenswrapper[4762]: I1009 14:45:11.460463 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/d4c7de04-1c4e-42b2-a965-4c046e51b272-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"d4c7de04-1c4e-42b2-a965-4c046e51b272\") " pod="openstack/rabbitmq-server-0" Oct 09 14:45:11 crc kubenswrapper[4762]: I1009 14:45:11.460813 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/d4c7de04-1c4e-42b2-a965-4c046e51b272-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"d4c7de04-1c4e-42b2-a965-4c046e51b272\") " pod="openstack/rabbitmq-server-0" Oct 09 14:45:11 crc kubenswrapper[4762]: I1009 14:45:11.461000 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: 
\"kubernetes.io/empty-dir/d4c7de04-1c4e-42b2-a965-4c046e51b272-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"d4c7de04-1c4e-42b2-a965-4c046e51b272\") " pod="openstack/rabbitmq-server-0" Oct 09 14:45:11 crc kubenswrapper[4762]: I1009 14:45:11.461034 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/d4c7de04-1c4e-42b2-a965-4c046e51b272-pod-info\") pod \"rabbitmq-server-0\" (UID: \"d4c7de04-1c4e-42b2-a965-4c046e51b272\") " pod="openstack/rabbitmq-server-0" Oct 09 14:45:11 crc kubenswrapper[4762]: I1009 14:45:11.461092 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/d4c7de04-1c4e-42b2-a965-4c046e51b272-server-conf\") pod \"rabbitmq-server-0\" (UID: \"d4c7de04-1c4e-42b2-a965-4c046e51b272\") " pod="openstack/rabbitmq-server-0" Oct 09 14:45:11 crc kubenswrapper[4762]: I1009 14:45:11.461261 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/d4c7de04-1c4e-42b2-a965-4c046e51b272-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"d4c7de04-1c4e-42b2-a965-4c046e51b272\") " pod="openstack/rabbitmq-server-0" Oct 09 14:45:11 crc kubenswrapper[4762]: I1009 14:45:11.461613 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/d4c7de04-1c4e-42b2-a965-4c046e51b272-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"d4c7de04-1c4e-42b2-a965-4c046e51b272\") " pod="openstack/rabbitmq-server-0" Oct 09 14:45:11 crc kubenswrapper[4762]: I1009 14:45:11.461722 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/d4c7de04-1c4e-42b2-a965-4c046e51b272-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"d4c7de04-1c4e-42b2-a965-4c046e51b272\") " pod="openstack/rabbitmq-server-0" Oct 09 14:45:11 crc kubenswrapper[4762]: I1009 14:45:11.462094 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/d4c7de04-1c4e-42b2-a965-4c046e51b272-server-conf\") pod \"rabbitmq-server-0\" (UID: \"d4c7de04-1c4e-42b2-a965-4c046e51b272\") " pod="openstack/rabbitmq-server-0" Oct 09 14:45:11 crc kubenswrapper[4762]: I1009 14:45:11.463123 4762 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Oct 09 14:45:11 crc kubenswrapper[4762]: I1009 14:45:11.463156 4762 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-8045949f-ad0a-4376-97e1-09f153c59af7\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8045949f-ad0a-4376-97e1-09f153c59af7\") pod \"rabbitmq-server-0\" (UID: \"d4c7de04-1c4e-42b2-a965-4c046e51b272\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/e0c286c9173eff4b55d4fbe412e6005eea55968be4a0454aba028e2b78d27ba4/globalmount\"" pod="openstack/rabbitmq-server-0" Oct 09 14:45:11 crc kubenswrapper[4762]: I1009 14:45:11.464945 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/d4c7de04-1c4e-42b2-a965-4c046e51b272-pod-info\") pod \"rabbitmq-server-0\" (UID: \"d4c7de04-1c4e-42b2-a965-4c046e51b272\") " pod="openstack/rabbitmq-server-0" Oct 09 14:45:11 crc kubenswrapper[4762]: I1009 14:45:11.465039 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/d4c7de04-1c4e-42b2-a965-4c046e51b272-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"d4c7de04-1c4e-42b2-a965-4c046e51b272\") " pod="openstack/rabbitmq-server-0" Oct 09 14:45:11 crc kubenswrapper[4762]: I1009 14:45:11.465286 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/d4c7de04-1c4e-42b2-a965-4c046e51b272-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"d4c7de04-1c4e-42b2-a965-4c046e51b272\") " pod="openstack/rabbitmq-server-0" Oct 09 14:45:11 crc kubenswrapper[4762]: I1009 14:45:11.481110 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kwb67\" (UniqueName: \"kubernetes.io/projected/d4c7de04-1c4e-42b2-a965-4c046e51b272-kube-api-access-kwb67\") pod \"rabbitmq-server-0\" (UID: \"d4c7de04-1c4e-42b2-a965-4c046e51b272\") " pod="openstack/rabbitmq-server-0" Oct 09 14:45:11 crc kubenswrapper[4762]: I1009 14:45:11.495075 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-8045949f-ad0a-4376-97e1-09f153c59af7\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8045949f-ad0a-4376-97e1-09f153c59af7\") pod \"rabbitmq-server-0\" (UID: \"d4c7de04-1c4e-42b2-a965-4c046e51b272\") " pod="openstack/rabbitmq-server-0" Oct 09 14:45:11 crc kubenswrapper[4762]: I1009 14:45:11.516505 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 09 14:45:11 crc kubenswrapper[4762]: I1009 14:45:11.517874 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Oct 09 14:45:11 crc kubenswrapper[4762]: I1009 14:45:11.520666 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Oct 09 14:45:11 crc kubenswrapper[4762]: I1009 14:45:11.520910 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Oct 09 14:45:11 crc kubenswrapper[4762]: I1009 14:45:11.521287 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-xkdg6" Oct 09 14:45:11 crc kubenswrapper[4762]: I1009 14:45:11.521445 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Oct 09 14:45:11 crc kubenswrapper[4762]: I1009 14:45:11.525060 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Oct 09 14:45:11 crc kubenswrapper[4762]: I1009 14:45:11.527575 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 09 14:45:11 crc kubenswrapper[4762]: I1009 14:45:11.577187 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 09 14:45:11 crc kubenswrapper[4762]: I1009 14:45:11.663715 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-77wbg\" (UniqueName: \"kubernetes.io/projected/b96c5d4d-5b68-434c-a891-aceb2ea69e00-kube-api-access-77wbg\") pod \"rabbitmq-cell1-server-0\" (UID: \"b96c5d4d-5b68-434c-a891-aceb2ea69e00\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 14:45:11 crc kubenswrapper[4762]: I1009 14:45:11.663772 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/b96c5d4d-5b68-434c-a891-aceb2ea69e00-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"b96c5d4d-5b68-434c-a891-aceb2ea69e00\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 14:45:11 crc kubenswrapper[4762]: I1009 14:45:11.663794 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/b96c5d4d-5b68-434c-a891-aceb2ea69e00-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"b96c5d4d-5b68-434c-a891-aceb2ea69e00\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 14:45:11 crc kubenswrapper[4762]: I1009 14:45:11.663849 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/b96c5d4d-5b68-434c-a891-aceb2ea69e00-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"b96c5d4d-5b68-434c-a891-aceb2ea69e00\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 14:45:11 crc kubenswrapper[4762]: I1009 14:45:11.663900 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/b96c5d4d-5b68-434c-a891-aceb2ea69e00-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"b96c5d4d-5b68-434c-a891-aceb2ea69e00\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 14:45:11 crc kubenswrapper[4762]: I1009 14:45:11.663935 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: 
\"kubernetes.io/configmap/b96c5d4d-5b68-434c-a891-aceb2ea69e00-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"b96c5d4d-5b68-434c-a891-aceb2ea69e00\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 14:45:11 crc kubenswrapper[4762]: I1009 14:45:11.663967 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-c397795a-92b5-4edc-a3d4-99af55c1ad7d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c397795a-92b5-4edc-a3d4-99af55c1ad7d\") pod \"rabbitmq-cell1-server-0\" (UID: \"b96c5d4d-5b68-434c-a891-aceb2ea69e00\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 14:45:11 crc kubenswrapper[4762]: I1009 14:45:11.663998 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/b96c5d4d-5b68-434c-a891-aceb2ea69e00-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"b96c5d4d-5b68-434c-a891-aceb2ea69e00\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 14:45:11 crc kubenswrapper[4762]: I1009 14:45:11.664027 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/b96c5d4d-5b68-434c-a891-aceb2ea69e00-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"b96c5d4d-5b68-434c-a891-aceb2ea69e00\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 14:45:11 crc kubenswrapper[4762]: I1009 14:45:11.765812 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/b96c5d4d-5b68-434c-a891-aceb2ea69e00-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"b96c5d4d-5b68-434c-a891-aceb2ea69e00\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 14:45:11 crc kubenswrapper[4762]: I1009 14:45:11.765882 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/b96c5d4d-5b68-434c-a891-aceb2ea69e00-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"b96c5d4d-5b68-434c-a891-aceb2ea69e00\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 14:45:11 crc kubenswrapper[4762]: I1009 14:45:11.766279 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-77wbg\" (UniqueName: \"kubernetes.io/projected/b96c5d4d-5b68-434c-a891-aceb2ea69e00-kube-api-access-77wbg\") pod \"rabbitmq-cell1-server-0\" (UID: \"b96c5d4d-5b68-434c-a891-aceb2ea69e00\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 14:45:11 crc kubenswrapper[4762]: I1009 14:45:11.766319 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/b96c5d4d-5b68-434c-a891-aceb2ea69e00-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"b96c5d4d-5b68-434c-a891-aceb2ea69e00\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 14:45:11 crc kubenswrapper[4762]: I1009 14:45:11.766343 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/b96c5d4d-5b68-434c-a891-aceb2ea69e00-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"b96c5d4d-5b68-434c-a891-aceb2ea69e00\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 14:45:11 crc kubenswrapper[4762]: I1009 14:45:11.766390 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: 
\"kubernetes.io/empty-dir/b96c5d4d-5b68-434c-a891-aceb2ea69e00-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"b96c5d4d-5b68-434c-a891-aceb2ea69e00\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 14:45:11 crc kubenswrapper[4762]: I1009 14:45:11.766433 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/b96c5d4d-5b68-434c-a891-aceb2ea69e00-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"b96c5d4d-5b68-434c-a891-aceb2ea69e00\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 14:45:11 crc kubenswrapper[4762]: I1009 14:45:11.766464 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/b96c5d4d-5b68-434c-a891-aceb2ea69e00-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"b96c5d4d-5b68-434c-a891-aceb2ea69e00\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 14:45:11 crc kubenswrapper[4762]: I1009 14:45:11.766490 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-c397795a-92b5-4edc-a3d4-99af55c1ad7d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c397795a-92b5-4edc-a3d4-99af55c1ad7d\") pod \"rabbitmq-cell1-server-0\" (UID: \"b96c5d4d-5b68-434c-a891-aceb2ea69e00\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 14:45:11 crc kubenswrapper[4762]: I1009 14:45:11.767410 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/b96c5d4d-5b68-434c-a891-aceb2ea69e00-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"b96c5d4d-5b68-434c-a891-aceb2ea69e00\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 14:45:11 crc kubenswrapper[4762]: I1009 14:45:11.767734 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/b96c5d4d-5b68-434c-a891-aceb2ea69e00-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"b96c5d4d-5b68-434c-a891-aceb2ea69e00\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 14:45:11 crc kubenswrapper[4762]: I1009 14:45:11.768218 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/b96c5d4d-5b68-434c-a891-aceb2ea69e00-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"b96c5d4d-5b68-434c-a891-aceb2ea69e00\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 14:45:11 crc kubenswrapper[4762]: I1009 14:45:11.769255 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/b96c5d4d-5b68-434c-a891-aceb2ea69e00-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"b96c5d4d-5b68-434c-a891-aceb2ea69e00\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 14:45:11 crc kubenswrapper[4762]: I1009 14:45:11.772884 4762 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Oct 09 14:45:11 crc kubenswrapper[4762]: I1009 14:45:11.772921 4762 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-c397795a-92b5-4edc-a3d4-99af55c1ad7d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c397795a-92b5-4edc-a3d4-99af55c1ad7d\") pod \"rabbitmq-cell1-server-0\" (UID: \"b96c5d4d-5b68-434c-a891-aceb2ea69e00\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/2dcf0aa2556825a990326d84417e0004092a6f2826f39ec7e81efe6c379be0e9/globalmount\"" pod="openstack/rabbitmq-cell1-server-0" Oct 09 14:45:11 crc kubenswrapper[4762]: I1009 14:45:11.773067 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/b96c5d4d-5b68-434c-a891-aceb2ea69e00-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"b96c5d4d-5b68-434c-a891-aceb2ea69e00\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 14:45:11 crc kubenswrapper[4762]: I1009 14:45:11.775481 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/b96c5d4d-5b68-434c-a891-aceb2ea69e00-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"b96c5d4d-5b68-434c-a891-aceb2ea69e00\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 14:45:11 crc kubenswrapper[4762]: I1009 14:45:11.778524 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/b96c5d4d-5b68-434c-a891-aceb2ea69e00-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"b96c5d4d-5b68-434c-a891-aceb2ea69e00\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 14:45:11 crc kubenswrapper[4762]: I1009 14:45:11.791197 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-77wbg\" (UniqueName: \"kubernetes.io/projected/b96c5d4d-5b68-434c-a891-aceb2ea69e00-kube-api-access-77wbg\") pod \"rabbitmq-cell1-server-0\" (UID: \"b96c5d4d-5b68-434c-a891-aceb2ea69e00\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 14:45:11 crc kubenswrapper[4762]: I1009 14:45:11.808989 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-c397795a-92b5-4edc-a3d4-99af55c1ad7d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c397795a-92b5-4edc-a3d4-99af55c1ad7d\") pod \"rabbitmq-cell1-server-0\" (UID: \"b96c5d4d-5b68-434c-a891-aceb2ea69e00\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 14:45:11 crc kubenswrapper[4762]: I1009 14:45:11.845141 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Oct 09 14:45:11 crc kubenswrapper[4762]: I1009 14:45:11.898357 4762 generic.go:334] "Generic (PLEG): container finished" podID="a521771d-024e-42f3-9600-e0ec9eece88f" containerID="2d77e6ee927aa7dba95083b1856cb8efd0a22e84328aa427e9a7b8599f40c55b" exitCode=0 Oct 09 14:45:11 crc kubenswrapper[4762]: I1009 14:45:11.898432 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d7b5456f5-z5rnq" event={"ID":"a521771d-024e-42f3-9600-e0ec9eece88f","Type":"ContainerDied","Data":"2d77e6ee927aa7dba95083b1856cb8efd0a22e84328aa427e9a7b8599f40c55b"} Oct 09 14:45:11 crc kubenswrapper[4762]: I1009 14:45:11.898464 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d7b5456f5-z5rnq" event={"ID":"a521771d-024e-42f3-9600-e0ec9eece88f","Type":"ContainerStarted","Data":"77a014effc749861b10d81c04ac6a901124c8cbb1e0248371d8bbff811fca340"} Oct 09 14:45:11 crc kubenswrapper[4762]: I1009 14:45:11.902370 4762 generic.go:334] "Generic (PLEG): container finished" podID="8b21fbda-eb6e-4a0e-a4a9-12cf5ea7814b" containerID="b7b50ce357513270b70bd4714e36ec2e98c27320853d7e3c6388953cb540fb53" exitCode=0 Oct 09 14:45:11 crc kubenswrapper[4762]: I1009 14:45:11.902410 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-98ddfc8f-vj8kn" event={"ID":"8b21fbda-eb6e-4a0e-a4a9-12cf5ea7814b","Type":"ContainerDied","Data":"b7b50ce357513270b70bd4714e36ec2e98c27320853d7e3c6388953cb540fb53"} Oct 09 14:45:11 crc kubenswrapper[4762]: I1009 14:45:11.902435 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-98ddfc8f-vj8kn" event={"ID":"8b21fbda-eb6e-4a0e-a4a9-12cf5ea7814b","Type":"ContainerStarted","Data":"35132b8ebdd0c5a78bf71029fdcdff0113771d64ce4132e6af57af2e745b34a1"} Oct 09 14:45:12 crc kubenswrapper[4762]: I1009 14:45:12.060941 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 09 14:45:12 crc kubenswrapper[4762]: W1009 14:45:12.079587 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd4c7de04_1c4e_42b2_a965_4c046e51b272.slice/crio-1b3602ca07b3f72d0e7d583ee9f32109c897d47d3359020c681feb25a4ddf8c4 WatchSource:0}: Error finding container 1b3602ca07b3f72d0e7d583ee9f32109c897d47d3359020c681feb25a4ddf8c4: Status 404 returned error can't find the container with id 1b3602ca07b3f72d0e7d583ee9f32109c897d47d3359020c681feb25a4ddf8c4 Oct 09 14:45:12 crc kubenswrapper[4762]: I1009 14:45:12.345037 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 09 14:45:12 crc kubenswrapper[4762]: W1009 14:45:12.347370 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb96c5d4d_5b68_434c_a891_aceb2ea69e00.slice/crio-0ef26664af556d1a57df8fefa518296ddc8fec9075c43e036fa267e15302fa8b WatchSource:0}: Error finding container 0ef26664af556d1a57df8fefa518296ddc8fec9075c43e036fa267e15302fa8b: Status 404 returned error can't find the container with id 0ef26664af556d1a57df8fefa518296ddc8fec9075c43e036fa267e15302fa8b Oct 09 14:45:12 crc kubenswrapper[4762]: I1009 14:45:12.913265 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d7b5456f5-z5rnq" event={"ID":"a521771d-024e-42f3-9600-e0ec9eece88f","Type":"ContainerStarted","Data":"3d008bf0f4185fcf3486e228d342e93ae43c3869945c92bf5af9b589b049d8bc"} Oct 09 14:45:12 crc 
kubenswrapper[4762]: I1009 14:45:12.913672 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5d7b5456f5-z5rnq" Oct 09 14:45:12 crc kubenswrapper[4762]: I1009 14:45:12.914728 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"d4c7de04-1c4e-42b2-a965-4c046e51b272","Type":"ContainerStarted","Data":"1b3602ca07b3f72d0e7d583ee9f32109c897d47d3359020c681feb25a4ddf8c4"} Oct 09 14:45:12 crc kubenswrapper[4762]: I1009 14:45:12.917078 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-98ddfc8f-vj8kn" event={"ID":"8b21fbda-eb6e-4a0e-a4a9-12cf5ea7814b","Type":"ContainerStarted","Data":"f693ee2e2facc5b2644543dc7aac044520f0c3895010e58af77c7f30efcad974"} Oct 09 14:45:12 crc kubenswrapper[4762]: I1009 14:45:12.917234 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-98ddfc8f-vj8kn" Oct 09 14:45:12 crc kubenswrapper[4762]: I1009 14:45:12.922517 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"b96c5d4d-5b68-434c-a891-aceb2ea69e00","Type":"ContainerStarted","Data":"0ef26664af556d1a57df8fefa518296ddc8fec9075c43e036fa267e15302fa8b"} Oct 09 14:45:12 crc kubenswrapper[4762]: I1009 14:45:12.937058 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5d7b5456f5-z5rnq" podStartSLOduration=2.937039117 podStartE2EDuration="2.937039117s" podCreationTimestamp="2025-10-09 14:45:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 14:45:12.934201364 +0000 UTC m=+4788.707992403" watchObservedRunningTime="2025-10-09 14:45:12.937039117 +0000 UTC m=+4788.710830156" Oct 09 14:45:12 crc kubenswrapper[4762]: I1009 14:45:12.956048 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-98ddfc8f-vj8kn" podStartSLOduration=2.956028019 podStartE2EDuration="2.956028019s" podCreationTimestamp="2025-10-09 14:45:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 14:45:12.951923933 +0000 UTC m=+4788.725714982" watchObservedRunningTime="2025-10-09 14:45:12.956028019 +0000 UTC m=+4788.729819058" Oct 09 14:45:13 crc kubenswrapper[4762]: I1009 14:45:13.157606 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Oct 09 14:45:13 crc kubenswrapper[4762]: I1009 14:45:13.158690 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/memcached-0" Oct 09 14:45:13 crc kubenswrapper[4762]: I1009 14:45:13.161512 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Oct 09 14:45:13 crc kubenswrapper[4762]: I1009 14:45:13.161925 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-lb9rq" Oct 09 14:45:13 crc kubenswrapper[4762]: I1009 14:45:13.171763 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Oct 09 14:45:13 crc kubenswrapper[4762]: I1009 14:45:13.286593 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1e89b8e6-2d52-403c-b7a9-b59ad3b199ba-config-data\") pod \"memcached-0\" (UID: \"1e89b8e6-2d52-403c-b7a9-b59ad3b199ba\") " pod="openstack/memcached-0" Oct 09 14:45:13 crc kubenswrapper[4762]: I1009 14:45:13.286667 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/1e89b8e6-2d52-403c-b7a9-b59ad3b199ba-kolla-config\") pod \"memcached-0\" (UID: \"1e89b8e6-2d52-403c-b7a9-b59ad3b199ba\") " pod="openstack/memcached-0" Oct 09 14:45:13 crc kubenswrapper[4762]: I1009 14:45:13.286759 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hx58z\" (UniqueName: \"kubernetes.io/projected/1e89b8e6-2d52-403c-b7a9-b59ad3b199ba-kube-api-access-hx58z\") pod \"memcached-0\" (UID: \"1e89b8e6-2d52-403c-b7a9-b59ad3b199ba\") " pod="openstack/memcached-0" Oct 09 14:45:13 crc kubenswrapper[4762]: I1009 14:45:13.388620 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hx58z\" (UniqueName: \"kubernetes.io/projected/1e89b8e6-2d52-403c-b7a9-b59ad3b199ba-kube-api-access-hx58z\") pod \"memcached-0\" (UID: \"1e89b8e6-2d52-403c-b7a9-b59ad3b199ba\") " pod="openstack/memcached-0" Oct 09 14:45:13 crc kubenswrapper[4762]: I1009 14:45:13.388770 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1e89b8e6-2d52-403c-b7a9-b59ad3b199ba-config-data\") pod \"memcached-0\" (UID: \"1e89b8e6-2d52-403c-b7a9-b59ad3b199ba\") " pod="openstack/memcached-0" Oct 09 14:45:13 crc kubenswrapper[4762]: I1009 14:45:13.388797 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/1e89b8e6-2d52-403c-b7a9-b59ad3b199ba-kolla-config\") pod \"memcached-0\" (UID: \"1e89b8e6-2d52-403c-b7a9-b59ad3b199ba\") " pod="openstack/memcached-0" Oct 09 14:45:13 crc kubenswrapper[4762]: I1009 14:45:13.389837 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/1e89b8e6-2d52-403c-b7a9-b59ad3b199ba-kolla-config\") pod \"memcached-0\" (UID: \"1e89b8e6-2d52-403c-b7a9-b59ad3b199ba\") " pod="openstack/memcached-0" Oct 09 14:45:13 crc kubenswrapper[4762]: I1009 14:45:13.389882 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1e89b8e6-2d52-403c-b7a9-b59ad3b199ba-config-data\") pod \"memcached-0\" (UID: \"1e89b8e6-2d52-403c-b7a9-b59ad3b199ba\") " pod="openstack/memcached-0" Oct 09 14:45:13 crc kubenswrapper[4762]: I1009 14:45:13.425988 4762 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"kube-api-access-hx58z\" (UniqueName: \"kubernetes.io/projected/1e89b8e6-2d52-403c-b7a9-b59ad3b199ba-kube-api-access-hx58z\") pod \"memcached-0\" (UID: \"1e89b8e6-2d52-403c-b7a9-b59ad3b199ba\") " pod="openstack/memcached-0" Oct 09 14:45:13 crc kubenswrapper[4762]: I1009 14:45:13.476459 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Oct 09 14:45:13 crc kubenswrapper[4762]: I1009 14:45:13.925976 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Oct 09 14:45:13 crc kubenswrapper[4762]: W1009 14:45:13.931822 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1e89b8e6_2d52_403c_b7a9_b59ad3b199ba.slice/crio-e2b3bcb95ebc37c59cf7296e27052905658539c29572029e312489b36d993463 WatchSource:0}: Error finding container e2b3bcb95ebc37c59cf7296e27052905658539c29572029e312489b36d993463: Status 404 returned error can't find the container with id e2b3bcb95ebc37c59cf7296e27052905658539c29572029e312489b36d993463 Oct 09 14:45:13 crc kubenswrapper[4762]: I1009 14:45:13.933126 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"d4c7de04-1c4e-42b2-a965-4c046e51b272","Type":"ContainerStarted","Data":"19823400e35afd562f963d678f8d72188ac64e8573e8c757ae26ccd13845b90d"} Oct 09 14:45:13 crc kubenswrapper[4762]: I1009 14:45:13.937178 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"b96c5d4d-5b68-434c-a891-aceb2ea69e00","Type":"ContainerStarted","Data":"90404abe09e95d47da5c85d592fc0dc360d18dfff674505bb6ad9579e5bb2992"} Oct 09 14:45:14 crc kubenswrapper[4762]: I1009 14:45:14.506770 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"] Oct 09 14:45:14 crc kubenswrapper[4762]: I1009 14:45:14.509046 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Oct 09 14:45:14 crc kubenswrapper[4762]: I1009 14:45:14.510860 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-bqvsd" Oct 09 14:45:14 crc kubenswrapper[4762]: I1009 14:45:14.512924 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Oct 09 14:45:14 crc kubenswrapper[4762]: I1009 14:45:14.513686 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc" Oct 09 14:45:14 crc kubenswrapper[4762]: I1009 14:45:14.513839 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts" Oct 09 14:45:14 crc kubenswrapper[4762]: I1009 14:45:14.513940 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data" Oct 09 14:45:14 crc kubenswrapper[4762]: I1009 14:45:14.520685 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle" Oct 09 14:45:14 crc kubenswrapper[4762]: I1009 14:45:14.534132 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Oct 09 14:45:14 crc kubenswrapper[4762]: I1009 14:45:14.559816 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Oct 09 14:45:14 crc kubenswrapper[4762]: I1009 14:45:14.563867 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-cell1-galera-0" Oct 09 14:45:14 crc kubenswrapper[4762]: I1009 14:45:14.565588 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Oct 09 14:45:14 crc kubenswrapper[4762]: I1009 14:45:14.566575 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-bnjp5" Oct 09 14:45:14 crc kubenswrapper[4762]: I1009 14:45:14.567697 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Oct 09 14:45:14 crc kubenswrapper[4762]: I1009 14:45:14.567713 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Oct 09 14:45:14 crc kubenswrapper[4762]: I1009 14:45:14.573333 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Oct 09 14:45:14 crc kubenswrapper[4762]: I1009 14:45:14.604298 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/c87d7be6-a929-4a4a-bd38-184fb1405635-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"c87d7be6-a929-4a4a-bd38-184fb1405635\") " pod="openstack/openstack-cell1-galera-0" Oct 09 14:45:14 crc kubenswrapper[4762]: I1009 14:45:14.604349 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2c4b6426-b32c-4839-b63d-75b2995ddc8c-operator-scripts\") pod \"openstack-galera-0\" (UID: \"2c4b6426-b32c-4839-b63d-75b2995ddc8c\") " pod="openstack/openstack-galera-0" Oct 09 14:45:14 crc kubenswrapper[4762]: I1009 14:45:14.604375 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/c87d7be6-a929-4a4a-bd38-184fb1405635-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"c87d7be6-a929-4a4a-bd38-184fb1405635\") " pod="openstack/openstack-cell1-galera-0" Oct 09 14:45:14 crc kubenswrapper[4762]: I1009 14:45:14.604405 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-b253eb76-72d0-45e4-a56e-ffc80409ba8a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b253eb76-72d0-45e4-a56e-ffc80409ba8a\") pod \"openstack-galera-0\" (UID: \"2c4b6426-b32c-4839-b63d-75b2995ddc8c\") " pod="openstack/openstack-galera-0" Oct 09 14:45:14 crc kubenswrapper[4762]: I1009 14:45:14.604431 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/c87d7be6-a929-4a4a-bd38-184fb1405635-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"c87d7be6-a929-4a4a-bd38-184fb1405635\") " pod="openstack/openstack-cell1-galera-0" Oct 09 14:45:14 crc kubenswrapper[4762]: I1009 14:45:14.604446 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/2c4b6426-b32c-4839-b63d-75b2995ddc8c-config-data-generated\") pod \"openstack-galera-0\" (UID: \"2c4b6426-b32c-4839-b63d-75b2995ddc8c\") " pod="openstack/openstack-galera-0" Oct 09 14:45:14 crc kubenswrapper[4762]: I1009 14:45:14.604477 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" 
(UniqueName: \"kubernetes.io/secret/2c4b6426-b32c-4839-b63d-75b2995ddc8c-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"2c4b6426-b32c-4839-b63d-75b2995ddc8c\") " pod="openstack/openstack-galera-0" Oct 09 14:45:14 crc kubenswrapper[4762]: I1009 14:45:14.604500 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cp7zw\" (UniqueName: \"kubernetes.io/projected/2c4b6426-b32c-4839-b63d-75b2995ddc8c-kube-api-access-cp7zw\") pod \"openstack-galera-0\" (UID: \"2c4b6426-b32c-4839-b63d-75b2995ddc8c\") " pod="openstack/openstack-galera-0" Oct 09 14:45:14 crc kubenswrapper[4762]: I1009 14:45:14.604520 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/2c4b6426-b32c-4839-b63d-75b2995ddc8c-secrets\") pod \"openstack-galera-0\" (UID: \"2c4b6426-b32c-4839-b63d-75b2995ddc8c\") " pod="openstack/openstack-galera-0" Oct 09 14:45:14 crc kubenswrapper[4762]: I1009 14:45:14.604537 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/c87d7be6-a929-4a4a-bd38-184fb1405635-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"c87d7be6-a929-4a4a-bd38-184fb1405635\") " pod="openstack/openstack-cell1-galera-0" Oct 09 14:45:14 crc kubenswrapper[4762]: I1009 14:45:14.604554 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/2c4b6426-b32c-4839-b63d-75b2995ddc8c-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"2c4b6426-b32c-4839-b63d-75b2995ddc8c\") " pod="openstack/openstack-galera-0" Oct 09 14:45:14 crc kubenswrapper[4762]: I1009 14:45:14.604579 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c87d7be6-a929-4a4a-bd38-184fb1405635-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"c87d7be6-a929-4a4a-bd38-184fb1405635\") " pod="openstack/openstack-cell1-galera-0" Oct 09 14:45:14 crc kubenswrapper[4762]: I1009 14:45:14.604618 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sfbtn\" (UniqueName: \"kubernetes.io/projected/c87d7be6-a929-4a4a-bd38-184fb1405635-kube-api-access-sfbtn\") pod \"openstack-cell1-galera-0\" (UID: \"c87d7be6-a929-4a4a-bd38-184fb1405635\") " pod="openstack/openstack-cell1-galera-0" Oct 09 14:45:14 crc kubenswrapper[4762]: I1009 14:45:14.604643 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/c87d7be6-a929-4a4a-bd38-184fb1405635-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"c87d7be6-a929-4a4a-bd38-184fb1405635\") " pod="openstack/openstack-cell1-galera-0" Oct 09 14:45:14 crc kubenswrapper[4762]: I1009 14:45:14.604686 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/2c4b6426-b32c-4839-b63d-75b2995ddc8c-kolla-config\") pod \"openstack-galera-0\" (UID: \"2c4b6426-b32c-4839-b63d-75b2995ddc8c\") " pod="openstack/openstack-galera-0" Oct 09 14:45:14 crc kubenswrapper[4762]: I1009 14:45:14.604717 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" 
(UniqueName: \"kubernetes.io/configmap/2c4b6426-b32c-4839-b63d-75b2995ddc8c-config-data-default\") pod \"openstack-galera-0\" (UID: \"2c4b6426-b32c-4839-b63d-75b2995ddc8c\") " pod="openstack/openstack-galera-0" Oct 09 14:45:14 crc kubenswrapper[4762]: I1009 14:45:14.604747 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-6dd5a975-8a4f-4985-abbe-232ab17fb414\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-6dd5a975-8a4f-4985-abbe-232ab17fb414\") pod \"openstack-cell1-galera-0\" (UID: \"c87d7be6-a929-4a4a-bd38-184fb1405635\") " pod="openstack/openstack-cell1-galera-0" Oct 09 14:45:14 crc kubenswrapper[4762]: I1009 14:45:14.604766 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c87d7be6-a929-4a4a-bd38-184fb1405635-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"c87d7be6-a929-4a4a-bd38-184fb1405635\") " pod="openstack/openstack-cell1-galera-0" Oct 09 14:45:14 crc kubenswrapper[4762]: I1009 14:45:14.705725 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/c87d7be6-a929-4a4a-bd38-184fb1405635-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"c87d7be6-a929-4a4a-bd38-184fb1405635\") " pod="openstack/openstack-cell1-galera-0" Oct 09 14:45:14 crc kubenswrapper[4762]: I1009 14:45:14.705772 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/2c4b6426-b32c-4839-b63d-75b2995ddc8c-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"2c4b6426-b32c-4839-b63d-75b2995ddc8c\") " pod="openstack/openstack-galera-0" Oct 09 14:45:14 crc kubenswrapper[4762]: I1009 14:45:14.705794 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c87d7be6-a929-4a4a-bd38-184fb1405635-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"c87d7be6-a929-4a4a-bd38-184fb1405635\") " pod="openstack/openstack-cell1-galera-0" Oct 09 14:45:14 crc kubenswrapper[4762]: I1009 14:45:14.705830 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sfbtn\" (UniqueName: \"kubernetes.io/projected/c87d7be6-a929-4a4a-bd38-184fb1405635-kube-api-access-sfbtn\") pod \"openstack-cell1-galera-0\" (UID: \"c87d7be6-a929-4a4a-bd38-184fb1405635\") " pod="openstack/openstack-cell1-galera-0" Oct 09 14:45:14 crc kubenswrapper[4762]: I1009 14:45:14.705854 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/c87d7be6-a929-4a4a-bd38-184fb1405635-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"c87d7be6-a929-4a4a-bd38-184fb1405635\") " pod="openstack/openstack-cell1-galera-0" Oct 09 14:45:14 crc kubenswrapper[4762]: I1009 14:45:14.705878 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/2c4b6426-b32c-4839-b63d-75b2995ddc8c-kolla-config\") pod \"openstack-galera-0\" (UID: \"2c4b6426-b32c-4839-b63d-75b2995ddc8c\") " pod="openstack/openstack-galera-0" Oct 09 14:45:14 crc kubenswrapper[4762]: I1009 14:45:14.705902 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: 
\"kubernetes.io/configmap/2c4b6426-b32c-4839-b63d-75b2995ddc8c-config-data-default\") pod \"openstack-galera-0\" (UID: \"2c4b6426-b32c-4839-b63d-75b2995ddc8c\") " pod="openstack/openstack-galera-0" Oct 09 14:45:14 crc kubenswrapper[4762]: I1009 14:45:14.705921 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-6dd5a975-8a4f-4985-abbe-232ab17fb414\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-6dd5a975-8a4f-4985-abbe-232ab17fb414\") pod \"openstack-cell1-galera-0\" (UID: \"c87d7be6-a929-4a4a-bd38-184fb1405635\") " pod="openstack/openstack-cell1-galera-0" Oct 09 14:45:14 crc kubenswrapper[4762]: I1009 14:45:14.705951 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c87d7be6-a929-4a4a-bd38-184fb1405635-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"c87d7be6-a929-4a4a-bd38-184fb1405635\") " pod="openstack/openstack-cell1-galera-0" Oct 09 14:45:14 crc kubenswrapper[4762]: I1009 14:45:14.705991 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/c87d7be6-a929-4a4a-bd38-184fb1405635-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"c87d7be6-a929-4a4a-bd38-184fb1405635\") " pod="openstack/openstack-cell1-galera-0" Oct 09 14:45:14 crc kubenswrapper[4762]: I1009 14:45:14.706018 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2c4b6426-b32c-4839-b63d-75b2995ddc8c-operator-scripts\") pod \"openstack-galera-0\" (UID: \"2c4b6426-b32c-4839-b63d-75b2995ddc8c\") " pod="openstack/openstack-galera-0" Oct 09 14:45:14 crc kubenswrapper[4762]: I1009 14:45:14.706043 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/c87d7be6-a929-4a4a-bd38-184fb1405635-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"c87d7be6-a929-4a4a-bd38-184fb1405635\") " pod="openstack/openstack-cell1-galera-0" Oct 09 14:45:14 crc kubenswrapper[4762]: I1009 14:45:14.706067 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-b253eb76-72d0-45e4-a56e-ffc80409ba8a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b253eb76-72d0-45e4-a56e-ffc80409ba8a\") pod \"openstack-galera-0\" (UID: \"2c4b6426-b32c-4839-b63d-75b2995ddc8c\") " pod="openstack/openstack-galera-0" Oct 09 14:45:14 crc kubenswrapper[4762]: I1009 14:45:14.706094 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/2c4b6426-b32c-4839-b63d-75b2995ddc8c-config-data-generated\") pod \"openstack-galera-0\" (UID: \"2c4b6426-b32c-4839-b63d-75b2995ddc8c\") " pod="openstack/openstack-galera-0" Oct 09 14:45:14 crc kubenswrapper[4762]: I1009 14:45:14.706111 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/c87d7be6-a929-4a4a-bd38-184fb1405635-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"c87d7be6-a929-4a4a-bd38-184fb1405635\") " pod="openstack/openstack-cell1-galera-0" Oct 09 14:45:14 crc kubenswrapper[4762]: I1009 14:45:14.706132 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/2c4b6426-b32c-4839-b63d-75b2995ddc8c-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"2c4b6426-b32c-4839-b63d-75b2995ddc8c\") " pod="openstack/openstack-galera-0" Oct 09 14:45:14 crc kubenswrapper[4762]: I1009 14:45:14.706151 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cp7zw\" (UniqueName: \"kubernetes.io/projected/2c4b6426-b32c-4839-b63d-75b2995ddc8c-kube-api-access-cp7zw\") pod \"openstack-galera-0\" (UID: \"2c4b6426-b32c-4839-b63d-75b2995ddc8c\") " pod="openstack/openstack-galera-0" Oct 09 14:45:14 crc kubenswrapper[4762]: I1009 14:45:14.706167 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/2c4b6426-b32c-4839-b63d-75b2995ddc8c-secrets\") pod \"openstack-galera-0\" (UID: \"2c4b6426-b32c-4839-b63d-75b2995ddc8c\") " pod="openstack/openstack-galera-0" Oct 09 14:45:14 crc kubenswrapper[4762]: I1009 14:45:14.707332 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/2c4b6426-b32c-4839-b63d-75b2995ddc8c-config-data-generated\") pod \"openstack-galera-0\" (UID: \"2c4b6426-b32c-4839-b63d-75b2995ddc8c\") " pod="openstack/openstack-galera-0" Oct 09 14:45:14 crc kubenswrapper[4762]: I1009 14:45:14.707525 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/2c4b6426-b32c-4839-b63d-75b2995ddc8c-kolla-config\") pod \"openstack-galera-0\" (UID: \"2c4b6426-b32c-4839-b63d-75b2995ddc8c\") " pod="openstack/openstack-galera-0" Oct 09 14:45:14 crc kubenswrapper[4762]: I1009 14:45:14.708114 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/2c4b6426-b32c-4839-b63d-75b2995ddc8c-config-data-default\") pod \"openstack-galera-0\" (UID: \"2c4b6426-b32c-4839-b63d-75b2995ddc8c\") " pod="openstack/openstack-galera-0" Oct 09 14:45:14 crc kubenswrapper[4762]: I1009 14:45:14.708257 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2c4b6426-b32c-4839-b63d-75b2995ddc8c-operator-scripts\") pod \"openstack-galera-0\" (UID: \"2c4b6426-b32c-4839-b63d-75b2995ddc8c\") " pod="openstack/openstack-galera-0" Oct 09 14:45:14 crc kubenswrapper[4762]: I1009 14:45:14.709129 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/c87d7be6-a929-4a4a-bd38-184fb1405635-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"c87d7be6-a929-4a4a-bd38-184fb1405635\") " pod="openstack/openstack-cell1-galera-0" Oct 09 14:45:14 crc kubenswrapper[4762]: I1009 14:45:14.709403 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/c87d7be6-a929-4a4a-bd38-184fb1405635-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"c87d7be6-a929-4a4a-bd38-184fb1405635\") " pod="openstack/openstack-cell1-galera-0" Oct 09 14:45:14 crc kubenswrapper[4762]: I1009 14:45:14.709529 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/c87d7be6-a929-4a4a-bd38-184fb1405635-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"c87d7be6-a929-4a4a-bd38-184fb1405635\") " pod="openstack/openstack-cell1-galera-0" Oct 09 14:45:14 
crc kubenswrapper[4762]: I1009 14:45:14.709649 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c87d7be6-a929-4a4a-bd38-184fb1405635-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"c87d7be6-a929-4a4a-bd38-184fb1405635\") " pod="openstack/openstack-cell1-galera-0" Oct 09 14:45:14 crc kubenswrapper[4762]: I1009 14:45:14.711302 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/2c4b6426-b32c-4839-b63d-75b2995ddc8c-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"2c4b6426-b32c-4839-b63d-75b2995ddc8c\") " pod="openstack/openstack-galera-0" Oct 09 14:45:14 crc kubenswrapper[4762]: I1009 14:45:14.711315 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/c87d7be6-a929-4a4a-bd38-184fb1405635-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"c87d7be6-a929-4a4a-bd38-184fb1405635\") " pod="openstack/openstack-cell1-galera-0" Oct 09 14:45:14 crc kubenswrapper[4762]: I1009 14:45:14.711972 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/c87d7be6-a929-4a4a-bd38-184fb1405635-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"c87d7be6-a929-4a4a-bd38-184fb1405635\") " pod="openstack/openstack-cell1-galera-0" Oct 09 14:45:14 crc kubenswrapper[4762]: I1009 14:45:14.712088 4762 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Oct 09 14:45:14 crc kubenswrapper[4762]: I1009 14:45:14.712089 4762 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Oct 09 14:45:14 crc kubenswrapper[4762]: I1009 14:45:14.712110 4762 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-b253eb76-72d0-45e4-a56e-ffc80409ba8a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b253eb76-72d0-45e4-a56e-ffc80409ba8a\") pod \"openstack-galera-0\" (UID: \"2c4b6426-b32c-4839-b63d-75b2995ddc8c\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/b01b74af731aaf7166dc9bcd0eb0e65861115b19233410fed9259d5b070270de/globalmount\"" pod="openstack/openstack-galera-0" Oct 09 14:45:14 crc kubenswrapper[4762]: I1009 14:45:14.712118 4762 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-6dd5a975-8a4f-4985-abbe-232ab17fb414\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-6dd5a975-8a4f-4985-abbe-232ab17fb414\") pod \"openstack-cell1-galera-0\" (UID: \"c87d7be6-a929-4a4a-bd38-184fb1405635\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/7e5d80cc3a5e4589482c49814455681274c28f089f438f0bd3891ec0a2a40759/globalmount\"" pod="openstack/openstack-cell1-galera-0" Oct 09 14:45:14 crc kubenswrapper[4762]: I1009 14:45:14.712681 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/2c4b6426-b32c-4839-b63d-75b2995ddc8c-secrets\") pod \"openstack-galera-0\" (UID: \"2c4b6426-b32c-4839-b63d-75b2995ddc8c\") " pod="openstack/openstack-galera-0" Oct 09 14:45:14 crc kubenswrapper[4762]: I1009 14:45:14.719581 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c87d7be6-a929-4a4a-bd38-184fb1405635-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"c87d7be6-a929-4a4a-bd38-184fb1405635\") " pod="openstack/openstack-cell1-galera-0" Oct 09 14:45:14 crc kubenswrapper[4762]: I1009 14:45:14.720444 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2c4b6426-b32c-4839-b63d-75b2995ddc8c-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"2c4b6426-b32c-4839-b63d-75b2995ddc8c\") " pod="openstack/openstack-galera-0" Oct 09 14:45:14 crc kubenswrapper[4762]: I1009 14:45:14.725670 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sfbtn\" (UniqueName: \"kubernetes.io/projected/c87d7be6-a929-4a4a-bd38-184fb1405635-kube-api-access-sfbtn\") pod \"openstack-cell1-galera-0\" (UID: \"c87d7be6-a929-4a4a-bd38-184fb1405635\") " pod="openstack/openstack-cell1-galera-0" Oct 09 14:45:14 crc kubenswrapper[4762]: I1009 14:45:14.729756 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cp7zw\" (UniqueName: \"kubernetes.io/projected/2c4b6426-b32c-4839-b63d-75b2995ddc8c-kube-api-access-cp7zw\") pod \"openstack-galera-0\" (UID: \"2c4b6426-b32c-4839-b63d-75b2995ddc8c\") " pod="openstack/openstack-galera-0" Oct 09 14:45:14 crc kubenswrapper[4762]: I1009 14:45:14.758719 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-6dd5a975-8a4f-4985-abbe-232ab17fb414\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-6dd5a975-8a4f-4985-abbe-232ab17fb414\") pod \"openstack-cell1-galera-0\" (UID: \"c87d7be6-a929-4a4a-bd38-184fb1405635\") " pod="openstack/openstack-cell1-galera-0" Oct 09 14:45:14 crc kubenswrapper[4762]: I1009 14:45:14.765315 4762 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"pvc-b253eb76-72d0-45e4-a56e-ffc80409ba8a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b253eb76-72d0-45e4-a56e-ffc80409ba8a\") pod \"openstack-galera-0\" (UID: \"2c4b6426-b32c-4839-b63d-75b2995ddc8c\") " pod="openstack/openstack-galera-0" Oct 09 14:45:14 crc kubenswrapper[4762]: I1009 14:45:14.833218 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Oct 09 14:45:14 crc kubenswrapper[4762]: I1009 14:45:14.877721 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Oct 09 14:45:14 crc kubenswrapper[4762]: I1009 14:45:14.948634 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"1e89b8e6-2d52-403c-b7a9-b59ad3b199ba","Type":"ContainerStarted","Data":"9f55d83d9086b8f7732a1e1af849a15c6483ea861c7eb0ac71f6b3fc00ebc7a1"} Oct 09 14:45:14 crc kubenswrapper[4762]: I1009 14:45:14.951526 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Oct 09 14:45:14 crc kubenswrapper[4762]: I1009 14:45:14.951551 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"1e89b8e6-2d52-403c-b7a9-b59ad3b199ba","Type":"ContainerStarted","Data":"e2b3bcb95ebc37c59cf7296e27052905658539c29572029e312489b36d993463"} Oct 09 14:45:14 crc kubenswrapper[4762]: I1009 14:45:14.974183 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=1.974166181 podStartE2EDuration="1.974166181s" podCreationTimestamp="2025-10-09 14:45:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 14:45:14.966890011 +0000 UTC m=+4790.740681050" watchObservedRunningTime="2025-10-09 14:45:14.974166181 +0000 UTC m=+4790.747957220" Oct 09 14:45:15 crc kubenswrapper[4762]: I1009 14:45:15.265077 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Oct 09 14:45:15 crc kubenswrapper[4762]: W1009 14:45:15.265625 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2c4b6426_b32c_4839_b63d_75b2995ddc8c.slice/crio-dc4b17147d7c6bba11611b09d12d5e2c90a8e0cf5d10be3ecb1bb6f4aef4c51c WatchSource:0}: Error finding container dc4b17147d7c6bba11611b09d12d5e2c90a8e0cf5d10be3ecb1bb6f4aef4c51c: Status 404 returned error can't find the container with id dc4b17147d7c6bba11611b09d12d5e2c90a8e0cf5d10be3ecb1bb6f4aef4c51c Oct 09 14:45:15 crc kubenswrapper[4762]: I1009 14:45:15.332191 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Oct 09 14:45:15 crc kubenswrapper[4762]: W1009 14:45:15.341208 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc87d7be6_a929_4a4a_bd38_184fb1405635.slice/crio-d560d77b4324f8e7e3dcfc47b537e8a4a08023d1fd46bec29306401d5594cab0 WatchSource:0}: Error finding container d560d77b4324f8e7e3dcfc47b537e8a4a08023d1fd46bec29306401d5594cab0: Status 404 returned error can't find the container with id d560d77b4324f8e7e3dcfc47b537e8a4a08023d1fd46bec29306401d5594cab0 Oct 09 14:45:15 crc kubenswrapper[4762]: I1009 14:45:15.955315 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" 
event={"ID":"c87d7be6-a929-4a4a-bd38-184fb1405635","Type":"ContainerStarted","Data":"81ba1e9b916e3368e8f30e34a7b21fadb046a03248b4d97f9df98d26bd9ea96d"} Oct 09 14:45:15 crc kubenswrapper[4762]: I1009 14:45:15.955715 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"c87d7be6-a929-4a4a-bd38-184fb1405635","Type":"ContainerStarted","Data":"d560d77b4324f8e7e3dcfc47b537e8a4a08023d1fd46bec29306401d5594cab0"} Oct 09 14:45:15 crc kubenswrapper[4762]: I1009 14:45:15.957497 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"2c4b6426-b32c-4839-b63d-75b2995ddc8c","Type":"ContainerStarted","Data":"e4d7537afa2f860b6532ba5e997af8934af84312a862c1315f50b6eba8055cb2"} Oct 09 14:45:15 crc kubenswrapper[4762]: I1009 14:45:15.957531 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"2c4b6426-b32c-4839-b63d-75b2995ddc8c","Type":"ContainerStarted","Data":"dc4b17147d7c6bba11611b09d12d5e2c90a8e0cf5d10be3ecb1bb6f4aef4c51c"} Oct 09 14:45:16 crc kubenswrapper[4762]: I1009 14:45:16.965123 4762 scope.go:117] "RemoveContainer" containerID="bd93ac77fa82ac7ac75985b30cf5b2de69fe1e0a93c6fcc548b0b2dd005aa70c" Oct 09 14:45:16 crc kubenswrapper[4762]: E1009 14:45:16.965361 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 14:45:18 crc kubenswrapper[4762]: I1009 14:45:18.978266 4762 generic.go:334] "Generic (PLEG): container finished" podID="c87d7be6-a929-4a4a-bd38-184fb1405635" containerID="81ba1e9b916e3368e8f30e34a7b21fadb046a03248b4d97f9df98d26bd9ea96d" exitCode=0 Oct 09 14:45:18 crc kubenswrapper[4762]: I1009 14:45:18.978343 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"c87d7be6-a929-4a4a-bd38-184fb1405635","Type":"ContainerDied","Data":"81ba1e9b916e3368e8f30e34a7b21fadb046a03248b4d97f9df98d26bd9ea96d"} Oct 09 14:45:18 crc kubenswrapper[4762]: I1009 14:45:18.979863 4762 generic.go:334] "Generic (PLEG): container finished" podID="2c4b6426-b32c-4839-b63d-75b2995ddc8c" containerID="e4d7537afa2f860b6532ba5e997af8934af84312a862c1315f50b6eba8055cb2" exitCode=0 Oct 09 14:45:18 crc kubenswrapper[4762]: I1009 14:45:18.979898 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"2c4b6426-b32c-4839-b63d-75b2995ddc8c","Type":"ContainerDied","Data":"e4d7537afa2f860b6532ba5e997af8934af84312a862c1315f50b6eba8055cb2"} Oct 09 14:45:19 crc kubenswrapper[4762]: I1009 14:45:19.988168 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"c87d7be6-a929-4a4a-bd38-184fb1405635","Type":"ContainerStarted","Data":"4a9eedfca350ec2391fff0e2a614e14f9532738dfa6b2ae1cdf2c154184fef32"} Oct 09 14:45:19 crc kubenswrapper[4762]: I1009 14:45:19.990243 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"2c4b6426-b32c-4839-b63d-75b2995ddc8c","Type":"ContainerStarted","Data":"25605f7e7c1f18f1f5928c514aba2fef74a59c8aeb8f8dffa5040df356ee1f89"} Oct 09 14:45:20 crc kubenswrapper[4762]: I1009 14:45:20.007415 4762 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=7.007388884 podStartE2EDuration="7.007388884s" podCreationTimestamp="2025-10-09 14:45:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 14:45:20.005233998 +0000 UTC m=+4795.779025047" watchObservedRunningTime="2025-10-09 14:45:20.007388884 +0000 UTC m=+4795.781179923" Oct 09 14:45:20 crc kubenswrapper[4762]: I1009 14:45:20.030785 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=7.03075439 podStartE2EDuration="7.03075439s" podCreationTimestamp="2025-10-09 14:45:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 14:45:20.023704307 +0000 UTC m=+4795.797495356" watchObservedRunningTime="2025-10-09 14:45:20.03075439 +0000 UTC m=+4795.804545429" Oct 09 14:45:20 crc kubenswrapper[4762]: I1009 14:45:20.454859 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5d7b5456f5-z5rnq" Oct 09 14:45:20 crc kubenswrapper[4762]: I1009 14:45:20.656826 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-98ddfc8f-vj8kn" Oct 09 14:45:20 crc kubenswrapper[4762]: I1009 14:45:20.702087 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5d7b5456f5-z5rnq"] Oct 09 14:45:20 crc kubenswrapper[4762]: I1009 14:45:20.996510 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5d7b5456f5-z5rnq" podUID="a521771d-024e-42f3-9600-e0ec9eece88f" containerName="dnsmasq-dns" containerID="cri-o://3d008bf0f4185fcf3486e228d342e93ae43c3869945c92bf5af9b589b049d8bc" gracePeriod=10 Oct 09 14:45:21 crc kubenswrapper[4762]: I1009 14:45:21.422922 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5d7b5456f5-z5rnq" Oct 09 14:45:21 crc kubenswrapper[4762]: I1009 14:45:21.513227 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fwjr5\" (UniqueName: \"kubernetes.io/projected/a521771d-024e-42f3-9600-e0ec9eece88f-kube-api-access-fwjr5\") pod \"a521771d-024e-42f3-9600-e0ec9eece88f\" (UID: \"a521771d-024e-42f3-9600-e0ec9eece88f\") " Oct 09 14:45:21 crc kubenswrapper[4762]: I1009 14:45:21.513383 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a521771d-024e-42f3-9600-e0ec9eece88f-dns-svc\") pod \"a521771d-024e-42f3-9600-e0ec9eece88f\" (UID: \"a521771d-024e-42f3-9600-e0ec9eece88f\") " Oct 09 14:45:21 crc kubenswrapper[4762]: I1009 14:45:21.513452 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a521771d-024e-42f3-9600-e0ec9eece88f-config\") pod \"a521771d-024e-42f3-9600-e0ec9eece88f\" (UID: \"a521771d-024e-42f3-9600-e0ec9eece88f\") " Oct 09 14:45:21 crc kubenswrapper[4762]: I1009 14:45:21.518936 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a521771d-024e-42f3-9600-e0ec9eece88f-kube-api-access-fwjr5" (OuterVolumeSpecName: "kube-api-access-fwjr5") pod "a521771d-024e-42f3-9600-e0ec9eece88f" (UID: "a521771d-024e-42f3-9600-e0ec9eece88f"). 
InnerVolumeSpecName "kube-api-access-fwjr5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 14:45:21 crc kubenswrapper[4762]: I1009 14:45:21.553764 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a521771d-024e-42f3-9600-e0ec9eece88f-config" (OuterVolumeSpecName: "config") pod "a521771d-024e-42f3-9600-e0ec9eece88f" (UID: "a521771d-024e-42f3-9600-e0ec9eece88f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 14:45:21 crc kubenswrapper[4762]: I1009 14:45:21.562608 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a521771d-024e-42f3-9600-e0ec9eece88f-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "a521771d-024e-42f3-9600-e0ec9eece88f" (UID: "a521771d-024e-42f3-9600-e0ec9eece88f"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 14:45:21 crc kubenswrapper[4762]: I1009 14:45:21.615378 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fwjr5\" (UniqueName: \"kubernetes.io/projected/a521771d-024e-42f3-9600-e0ec9eece88f-kube-api-access-fwjr5\") on node \"crc\" DevicePath \"\"" Oct 09 14:45:21 crc kubenswrapper[4762]: I1009 14:45:21.615422 4762 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a521771d-024e-42f3-9600-e0ec9eece88f-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 09 14:45:21 crc kubenswrapper[4762]: I1009 14:45:21.615437 4762 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a521771d-024e-42f3-9600-e0ec9eece88f-config\") on node \"crc\" DevicePath \"\"" Oct 09 14:45:22 crc kubenswrapper[4762]: I1009 14:45:22.005150 4762 generic.go:334] "Generic (PLEG): container finished" podID="a521771d-024e-42f3-9600-e0ec9eece88f" containerID="3d008bf0f4185fcf3486e228d342e93ae43c3869945c92bf5af9b589b049d8bc" exitCode=0 Oct 09 14:45:22 crc kubenswrapper[4762]: I1009 14:45:22.005206 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d7b5456f5-z5rnq" event={"ID":"a521771d-024e-42f3-9600-e0ec9eece88f","Type":"ContainerDied","Data":"3d008bf0f4185fcf3486e228d342e93ae43c3869945c92bf5af9b589b049d8bc"} Oct 09 14:45:22 crc kubenswrapper[4762]: I1009 14:45:22.005229 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5d7b5456f5-z5rnq" Oct 09 14:45:22 crc kubenswrapper[4762]: I1009 14:45:22.005284 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d7b5456f5-z5rnq" event={"ID":"a521771d-024e-42f3-9600-e0ec9eece88f","Type":"ContainerDied","Data":"77a014effc749861b10d81c04ac6a901124c8cbb1e0248371d8bbff811fca340"} Oct 09 14:45:22 crc kubenswrapper[4762]: I1009 14:45:22.005310 4762 scope.go:117] "RemoveContainer" containerID="3d008bf0f4185fcf3486e228d342e93ae43c3869945c92bf5af9b589b049d8bc" Oct 09 14:45:22 crc kubenswrapper[4762]: I1009 14:45:22.028839 4762 scope.go:117] "RemoveContainer" containerID="2d77e6ee927aa7dba95083b1856cb8efd0a22e84328aa427e9a7b8599f40c55b" Oct 09 14:45:22 crc kubenswrapper[4762]: I1009 14:45:22.041475 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5d7b5456f5-z5rnq"] Oct 09 14:45:22 crc kubenswrapper[4762]: I1009 14:45:22.047262 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5d7b5456f5-z5rnq"] Oct 09 14:45:22 crc kubenswrapper[4762]: I1009 14:45:22.050915 4762 scope.go:117] "RemoveContainer" containerID="3d008bf0f4185fcf3486e228d342e93ae43c3869945c92bf5af9b589b049d8bc" Oct 09 14:45:22 crc kubenswrapper[4762]: E1009 14:45:22.051439 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3d008bf0f4185fcf3486e228d342e93ae43c3869945c92bf5af9b589b049d8bc\": container with ID starting with 3d008bf0f4185fcf3486e228d342e93ae43c3869945c92bf5af9b589b049d8bc not found: ID does not exist" containerID="3d008bf0f4185fcf3486e228d342e93ae43c3869945c92bf5af9b589b049d8bc" Oct 09 14:45:22 crc kubenswrapper[4762]: I1009 14:45:22.051583 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3d008bf0f4185fcf3486e228d342e93ae43c3869945c92bf5af9b589b049d8bc"} err="failed to get container status \"3d008bf0f4185fcf3486e228d342e93ae43c3869945c92bf5af9b589b049d8bc\": rpc error: code = NotFound desc = could not find container \"3d008bf0f4185fcf3486e228d342e93ae43c3869945c92bf5af9b589b049d8bc\": container with ID starting with 3d008bf0f4185fcf3486e228d342e93ae43c3869945c92bf5af9b589b049d8bc not found: ID does not exist" Oct 09 14:45:22 crc kubenswrapper[4762]: I1009 14:45:22.051729 4762 scope.go:117] "RemoveContainer" containerID="2d77e6ee927aa7dba95083b1856cb8efd0a22e84328aa427e9a7b8599f40c55b" Oct 09 14:45:22 crc kubenswrapper[4762]: E1009 14:45:22.052147 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2d77e6ee927aa7dba95083b1856cb8efd0a22e84328aa427e9a7b8599f40c55b\": container with ID starting with 2d77e6ee927aa7dba95083b1856cb8efd0a22e84328aa427e9a7b8599f40c55b not found: ID does not exist" containerID="2d77e6ee927aa7dba95083b1856cb8efd0a22e84328aa427e9a7b8599f40c55b" Oct 09 14:45:22 crc kubenswrapper[4762]: I1009 14:45:22.052178 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2d77e6ee927aa7dba95083b1856cb8efd0a22e84328aa427e9a7b8599f40c55b"} err="failed to get container status \"2d77e6ee927aa7dba95083b1856cb8efd0a22e84328aa427e9a7b8599f40c55b\": rpc error: code = NotFound desc = could not find container \"2d77e6ee927aa7dba95083b1856cb8efd0a22e84328aa427e9a7b8599f40c55b\": container with ID starting with 2d77e6ee927aa7dba95083b1856cb8efd0a22e84328aa427e9a7b8599f40c55b not found: ID does not exist" Oct 09 
14:45:22 crc kubenswrapper[4762]: I1009 14:45:22.974768 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a521771d-024e-42f3-9600-e0ec9eece88f" path="/var/lib/kubelet/pods/a521771d-024e-42f3-9600-e0ec9eece88f/volumes" Oct 09 14:45:23 crc kubenswrapper[4762]: I1009 14:45:23.478120 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Oct 09 14:45:24 crc kubenswrapper[4762]: I1009 14:45:24.834192 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0" Oct 09 14:45:24 crc kubenswrapper[4762]: I1009 14:45:24.834249 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0" Oct 09 14:45:24 crc kubenswrapper[4762]: I1009 14:45:24.878463 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0" Oct 09 14:45:24 crc kubenswrapper[4762]: I1009 14:45:24.878513 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0" Oct 09 14:45:26 crc kubenswrapper[4762]: I1009 14:45:26.890552 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0" Oct 09 14:45:26 crc kubenswrapper[4762]: I1009 14:45:26.933573 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0" Oct 09 14:45:26 crc kubenswrapper[4762]: I1009 14:45:26.944346 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0" Oct 09 14:45:26 crc kubenswrapper[4762]: I1009 14:45:26.999526 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0" Oct 09 14:45:31 crc kubenswrapper[4762]: I1009 14:45:31.403879 4762 scope.go:117] "RemoveContainer" containerID="e9a77030f4a5d7eb7299e1a5560aef53355916c12293e8c7c5208bdb41c12b34" Oct 09 14:45:31 crc kubenswrapper[4762]: I1009 14:45:31.965549 4762 scope.go:117] "RemoveContainer" containerID="bd93ac77fa82ac7ac75985b30cf5b2de69fe1e0a93c6fcc548b0b2dd005aa70c" Oct 09 14:45:31 crc kubenswrapper[4762]: E1009 14:45:31.965886 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 14:45:44 crc kubenswrapper[4762]: I1009 14:45:44.971974 4762 scope.go:117] "RemoveContainer" containerID="bd93ac77fa82ac7ac75985b30cf5b2de69fe1e0a93c6fcc548b0b2dd005aa70c" Oct 09 14:45:44 crc kubenswrapper[4762]: E1009 14:45:44.973489 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 14:45:45 crc kubenswrapper[4762]: I1009 14:45:45.177359 4762 generic.go:334] "Generic (PLEG): container finished" podID="d4c7de04-1c4e-42b2-a965-4c046e51b272" 
containerID="19823400e35afd562f963d678f8d72188ac64e8573e8c757ae26ccd13845b90d" exitCode=0 Oct 09 14:45:45 crc kubenswrapper[4762]: I1009 14:45:45.177445 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"d4c7de04-1c4e-42b2-a965-4c046e51b272","Type":"ContainerDied","Data":"19823400e35afd562f963d678f8d72188ac64e8573e8c757ae26ccd13845b90d"} Oct 09 14:45:46 crc kubenswrapper[4762]: I1009 14:45:46.185280 4762 generic.go:334] "Generic (PLEG): container finished" podID="b96c5d4d-5b68-434c-a891-aceb2ea69e00" containerID="90404abe09e95d47da5c85d592fc0dc360d18dfff674505bb6ad9579e5bb2992" exitCode=0 Oct 09 14:45:46 crc kubenswrapper[4762]: I1009 14:45:46.185317 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"b96c5d4d-5b68-434c-a891-aceb2ea69e00","Type":"ContainerDied","Data":"90404abe09e95d47da5c85d592fc0dc360d18dfff674505bb6ad9579e5bb2992"} Oct 09 14:45:46 crc kubenswrapper[4762]: I1009 14:45:46.187543 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"d4c7de04-1c4e-42b2-a965-4c046e51b272","Type":"ContainerStarted","Data":"3dd4260d863005a60dafc1237e59b8f58c54701546f2da1a87fb3637ba04cc81"} Oct 09 14:45:46 crc kubenswrapper[4762]: I1009 14:45:46.188050 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Oct 09 14:45:46 crc kubenswrapper[4762]: I1009 14:45:46.258081 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=36.258063893 podStartE2EDuration="36.258063893s" podCreationTimestamp="2025-10-09 14:45:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 14:45:46.250742724 +0000 UTC m=+4822.024533763" watchObservedRunningTime="2025-10-09 14:45:46.258063893 +0000 UTC m=+4822.031854922" Oct 09 14:45:47 crc kubenswrapper[4762]: I1009 14:45:47.196931 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"b96c5d4d-5b68-434c-a891-aceb2ea69e00","Type":"ContainerStarted","Data":"b8c5dc9c5be44be2693f86d2e22449274a3f1de39a68cec1c3a9962385d3eead"} Oct 09 14:45:47 crc kubenswrapper[4762]: I1009 14:45:47.197496 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Oct 09 14:45:47 crc kubenswrapper[4762]: I1009 14:45:47.224427 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=37.224405271 podStartE2EDuration="37.224405271s" podCreationTimestamp="2025-10-09 14:45:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 14:45:47.220424578 +0000 UTC m=+4822.994215617" watchObservedRunningTime="2025-10-09 14:45:47.224405271 +0000 UTC m=+4822.998196310" Oct 09 14:45:58 crc kubenswrapper[4762]: I1009 14:45:58.965742 4762 scope.go:117] "RemoveContainer" containerID="bd93ac77fa82ac7ac75985b30cf5b2de69fe1e0a93c6fcc548b0b2dd005aa70c" Oct 09 14:45:58 crc kubenswrapper[4762]: E1009 14:45:58.966454 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 14:46:01 crc kubenswrapper[4762]: I1009 14:46:01.580880 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Oct 09 14:46:01 crc kubenswrapper[4762]: I1009 14:46:01.848530 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Oct 09 14:46:06 crc kubenswrapper[4762]: I1009 14:46:06.977702 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5b7946d7b9-gxm4j"] Oct 09 14:46:06 crc kubenswrapper[4762]: E1009 14:46:06.979682 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a521771d-024e-42f3-9600-e0ec9eece88f" containerName="dnsmasq-dns" Oct 09 14:46:06 crc kubenswrapper[4762]: I1009 14:46:06.979803 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="a521771d-024e-42f3-9600-e0ec9eece88f" containerName="dnsmasq-dns" Oct 09 14:46:06 crc kubenswrapper[4762]: E1009 14:46:06.979899 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a521771d-024e-42f3-9600-e0ec9eece88f" containerName="init" Oct 09 14:46:06 crc kubenswrapper[4762]: I1009 14:46:06.979977 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="a521771d-024e-42f3-9600-e0ec9eece88f" containerName="init" Oct 09 14:46:06 crc kubenswrapper[4762]: I1009 14:46:06.980246 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="a521771d-024e-42f3-9600-e0ec9eece88f" containerName="dnsmasq-dns" Oct 09 14:46:06 crc kubenswrapper[4762]: I1009 14:46:06.981318 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5b7946d7b9-gxm4j" Oct 09 14:46:06 crc kubenswrapper[4762]: I1009 14:46:06.982869 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5b7946d7b9-gxm4j"] Oct 09 14:46:06 crc kubenswrapper[4762]: I1009 14:46:06.991471 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a2a2847d-785a-4b3d-a7b6-852724bb6e22-dns-svc\") pod \"dnsmasq-dns-5b7946d7b9-gxm4j\" (UID: \"a2a2847d-785a-4b3d-a7b6-852724bb6e22\") " pod="openstack/dnsmasq-dns-5b7946d7b9-gxm4j" Oct 09 14:46:06 crc kubenswrapper[4762]: I1009 14:46:06.991543 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a2a2847d-785a-4b3d-a7b6-852724bb6e22-config\") pod \"dnsmasq-dns-5b7946d7b9-gxm4j\" (UID: \"a2a2847d-785a-4b3d-a7b6-852724bb6e22\") " pod="openstack/dnsmasq-dns-5b7946d7b9-gxm4j" Oct 09 14:46:06 crc kubenswrapper[4762]: I1009 14:46:06.991570 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zjl4r\" (UniqueName: \"kubernetes.io/projected/a2a2847d-785a-4b3d-a7b6-852724bb6e22-kube-api-access-zjl4r\") pod \"dnsmasq-dns-5b7946d7b9-gxm4j\" (UID: \"a2a2847d-785a-4b3d-a7b6-852724bb6e22\") " pod="openstack/dnsmasq-dns-5b7946d7b9-gxm4j" Oct 09 14:46:07 crc kubenswrapper[4762]: I1009 14:46:07.093152 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a2a2847d-785a-4b3d-a7b6-852724bb6e22-dns-svc\") pod \"dnsmasq-dns-5b7946d7b9-gxm4j\" (UID: \"a2a2847d-785a-4b3d-a7b6-852724bb6e22\") " pod="openstack/dnsmasq-dns-5b7946d7b9-gxm4j" Oct 09 14:46:07 crc kubenswrapper[4762]: I1009 14:46:07.093239 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a2a2847d-785a-4b3d-a7b6-852724bb6e22-config\") pod \"dnsmasq-dns-5b7946d7b9-gxm4j\" (UID: \"a2a2847d-785a-4b3d-a7b6-852724bb6e22\") " pod="openstack/dnsmasq-dns-5b7946d7b9-gxm4j" Oct 09 14:46:07 crc kubenswrapper[4762]: I1009 14:46:07.093270 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zjl4r\" (UniqueName: \"kubernetes.io/projected/a2a2847d-785a-4b3d-a7b6-852724bb6e22-kube-api-access-zjl4r\") pod \"dnsmasq-dns-5b7946d7b9-gxm4j\" (UID: \"a2a2847d-785a-4b3d-a7b6-852724bb6e22\") " pod="openstack/dnsmasq-dns-5b7946d7b9-gxm4j" Oct 09 14:46:07 crc kubenswrapper[4762]: I1009 14:46:07.094389 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a2a2847d-785a-4b3d-a7b6-852724bb6e22-dns-svc\") pod \"dnsmasq-dns-5b7946d7b9-gxm4j\" (UID: \"a2a2847d-785a-4b3d-a7b6-852724bb6e22\") " pod="openstack/dnsmasq-dns-5b7946d7b9-gxm4j" Oct 09 14:46:07 crc kubenswrapper[4762]: I1009 14:46:07.094477 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a2a2847d-785a-4b3d-a7b6-852724bb6e22-config\") pod \"dnsmasq-dns-5b7946d7b9-gxm4j\" (UID: \"a2a2847d-785a-4b3d-a7b6-852724bb6e22\") " pod="openstack/dnsmasq-dns-5b7946d7b9-gxm4j" Oct 09 14:46:07 crc kubenswrapper[4762]: I1009 14:46:07.113885 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zjl4r\" (UniqueName: 
\"kubernetes.io/projected/a2a2847d-785a-4b3d-a7b6-852724bb6e22-kube-api-access-zjl4r\") pod \"dnsmasq-dns-5b7946d7b9-gxm4j\" (UID: \"a2a2847d-785a-4b3d-a7b6-852724bb6e22\") " pod="openstack/dnsmasq-dns-5b7946d7b9-gxm4j" Oct 09 14:46:07 crc kubenswrapper[4762]: I1009 14:46:07.303107 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5b7946d7b9-gxm4j" Oct 09 14:46:07 crc kubenswrapper[4762]: I1009 14:46:07.561838 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 09 14:46:07 crc kubenswrapper[4762]: I1009 14:46:07.740384 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5b7946d7b9-gxm4j"] Oct 09 14:46:07 crc kubenswrapper[4762]: W1009 14:46:07.750075 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda2a2847d_785a_4b3d_a7b6_852724bb6e22.slice/crio-3bff25c12c79fb0c7e632e4f9ee187dc3924c6b5fc5bd941da725fd2b164ae5f WatchSource:0}: Error finding container 3bff25c12c79fb0c7e632e4f9ee187dc3924c6b5fc5bd941da725fd2b164ae5f: Status 404 returned error can't find the container with id 3bff25c12c79fb0c7e632e4f9ee187dc3924c6b5fc5bd941da725fd2b164ae5f Oct 09 14:46:08 crc kubenswrapper[4762]: I1009 14:46:08.078475 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 09 14:46:08 crc kubenswrapper[4762]: I1009 14:46:08.374085 4762 generic.go:334] "Generic (PLEG): container finished" podID="a2a2847d-785a-4b3d-a7b6-852724bb6e22" containerID="3fc9e30a7cf53b8e75a03c60923de9486d2ed2ce51b5f5253736028b327ed40c" exitCode=0 Oct 09 14:46:08 crc kubenswrapper[4762]: I1009 14:46:08.374142 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b7946d7b9-gxm4j" event={"ID":"a2a2847d-785a-4b3d-a7b6-852724bb6e22","Type":"ContainerDied","Data":"3fc9e30a7cf53b8e75a03c60923de9486d2ed2ce51b5f5253736028b327ed40c"} Oct 09 14:46:08 crc kubenswrapper[4762]: I1009 14:46:08.374184 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b7946d7b9-gxm4j" event={"ID":"a2a2847d-785a-4b3d-a7b6-852724bb6e22","Type":"ContainerStarted","Data":"3bff25c12c79fb0c7e632e4f9ee187dc3924c6b5fc5bd941da725fd2b164ae5f"} Oct 09 14:46:09 crc kubenswrapper[4762]: I1009 14:46:09.347769 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="d4c7de04-1c4e-42b2-a965-4c046e51b272" containerName="rabbitmq" containerID="cri-o://3dd4260d863005a60dafc1237e59b8f58c54701546f2da1a87fb3637ba04cc81" gracePeriod=604799 Oct 09 14:46:09 crc kubenswrapper[4762]: I1009 14:46:09.385008 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b7946d7b9-gxm4j" event={"ID":"a2a2847d-785a-4b3d-a7b6-852724bb6e22","Type":"ContainerStarted","Data":"a32732444341e17901a04f2b273af76d2111680797c492c5663f3ed076d93da6"} Oct 09 14:46:09 crc kubenswrapper[4762]: I1009 14:46:09.385132 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5b7946d7b9-gxm4j" Oct 09 14:46:09 crc kubenswrapper[4762]: I1009 14:46:09.410595 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5b7946d7b9-gxm4j" podStartSLOduration=3.410573827 podStartE2EDuration="3.410573827s" podCreationTimestamp="2025-10-09 14:46:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2025-10-09 14:46:09.405266339 +0000 UTC m=+4845.179057388" watchObservedRunningTime="2025-10-09 14:46:09.410573827 +0000 UTC m=+4845.184364876" Oct 09 14:46:09 crc kubenswrapper[4762]: I1009 14:46:09.939864 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="b96c5d4d-5b68-434c-a891-aceb2ea69e00" containerName="rabbitmq" containerID="cri-o://b8c5dc9c5be44be2693f86d2e22449274a3f1de39a68cec1c3a9962385d3eead" gracePeriod=604799 Oct 09 14:46:09 crc kubenswrapper[4762]: I1009 14:46:09.965385 4762 scope.go:117] "RemoveContainer" containerID="bd93ac77fa82ac7ac75985b30cf5b2de69fe1e0a93c6fcc548b0b2dd005aa70c" Oct 09 14:46:09 crc kubenswrapper[4762]: E1009 14:46:09.965622 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 14:46:10 crc kubenswrapper[4762]: I1009 14:46:10.469988 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-sjgxs"] Oct 09 14:46:10 crc kubenswrapper[4762]: I1009 14:46:10.472193 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-sjgxs" Oct 09 14:46:10 crc kubenswrapper[4762]: I1009 14:46:10.484355 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-sjgxs"] Oct 09 14:46:10 crc kubenswrapper[4762]: I1009 14:46:10.643239 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/08640fe4-bd93-498c-a622-c1642ac60ff9-catalog-content\") pod \"redhat-operators-sjgxs\" (UID: \"08640fe4-bd93-498c-a622-c1642ac60ff9\") " pod="openshift-marketplace/redhat-operators-sjgxs" Oct 09 14:46:10 crc kubenswrapper[4762]: I1009 14:46:10.644492 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/08640fe4-bd93-498c-a622-c1642ac60ff9-utilities\") pod \"redhat-operators-sjgxs\" (UID: \"08640fe4-bd93-498c-a622-c1642ac60ff9\") " pod="openshift-marketplace/redhat-operators-sjgxs" Oct 09 14:46:10 crc kubenswrapper[4762]: I1009 14:46:10.644562 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xln6p\" (UniqueName: \"kubernetes.io/projected/08640fe4-bd93-498c-a622-c1642ac60ff9-kube-api-access-xln6p\") pod \"redhat-operators-sjgxs\" (UID: \"08640fe4-bd93-498c-a622-c1642ac60ff9\") " pod="openshift-marketplace/redhat-operators-sjgxs" Oct 09 14:46:10 crc kubenswrapper[4762]: I1009 14:46:10.745966 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/08640fe4-bd93-498c-a622-c1642ac60ff9-catalog-content\") pod \"redhat-operators-sjgxs\" (UID: \"08640fe4-bd93-498c-a622-c1642ac60ff9\") " pod="openshift-marketplace/redhat-operators-sjgxs" Oct 09 14:46:10 crc kubenswrapper[4762]: I1009 14:46:10.746064 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/08640fe4-bd93-498c-a622-c1642ac60ff9-utilities\") pod \"redhat-operators-sjgxs\" (UID: \"08640fe4-bd93-498c-a622-c1642ac60ff9\") " pod="openshift-marketplace/redhat-operators-sjgxs" Oct 09 14:46:10 crc kubenswrapper[4762]: I1009 14:46:10.746132 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xln6p\" (UniqueName: \"kubernetes.io/projected/08640fe4-bd93-498c-a622-c1642ac60ff9-kube-api-access-xln6p\") pod \"redhat-operators-sjgxs\" (UID: \"08640fe4-bd93-498c-a622-c1642ac60ff9\") " pod="openshift-marketplace/redhat-operators-sjgxs" Oct 09 14:46:10 crc kubenswrapper[4762]: I1009 14:46:10.746511 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/08640fe4-bd93-498c-a622-c1642ac60ff9-catalog-content\") pod \"redhat-operators-sjgxs\" (UID: \"08640fe4-bd93-498c-a622-c1642ac60ff9\") " pod="openshift-marketplace/redhat-operators-sjgxs" Oct 09 14:46:10 crc kubenswrapper[4762]: I1009 14:46:10.746618 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/08640fe4-bd93-498c-a622-c1642ac60ff9-utilities\") pod \"redhat-operators-sjgxs\" (UID: \"08640fe4-bd93-498c-a622-c1642ac60ff9\") " pod="openshift-marketplace/redhat-operators-sjgxs" Oct 09 14:46:10 crc kubenswrapper[4762]: I1009 14:46:10.771612 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xln6p\" (UniqueName: \"kubernetes.io/projected/08640fe4-bd93-498c-a622-c1642ac60ff9-kube-api-access-xln6p\") pod \"redhat-operators-sjgxs\" (UID: \"08640fe4-bd93-498c-a622-c1642ac60ff9\") " pod="openshift-marketplace/redhat-operators-sjgxs" Oct 09 14:46:10 crc kubenswrapper[4762]: I1009 14:46:10.803479 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-sjgxs" Oct 09 14:46:11 crc kubenswrapper[4762]: I1009 14:46:11.286001 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-sjgxs"] Oct 09 14:46:11 crc kubenswrapper[4762]: I1009 14:46:11.445727 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sjgxs" event={"ID":"08640fe4-bd93-498c-a622-c1642ac60ff9","Type":"ContainerStarted","Data":"582ff5a4ac12b56fa48fba2dc622e5acd3bf6e55a249963ce68234c8ea76fe05"} Oct 09 14:46:11 crc kubenswrapper[4762]: I1009 14:46:11.578739 4762 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="d4c7de04-1c4e-42b2-a965-4c046e51b272" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.239:5672: connect: connection refused" Oct 09 14:46:11 crc kubenswrapper[4762]: I1009 14:46:11.846129 4762 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="b96c5d4d-5b68-434c-a891-aceb2ea69e00" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.240:5672: connect: connection refused" Oct 09 14:46:12 crc kubenswrapper[4762]: I1009 14:46:12.457088 4762 generic.go:334] "Generic (PLEG): container finished" podID="08640fe4-bd93-498c-a622-c1642ac60ff9" containerID="ea3d50619cbbefb454c1a78479024f61a611d396fb7a40900b139736ea538151" exitCode=0 Oct 09 14:46:12 crc kubenswrapper[4762]: I1009 14:46:12.457171 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sjgxs" event={"ID":"08640fe4-bd93-498c-a622-c1642ac60ff9","Type":"ContainerDied","Data":"ea3d50619cbbefb454c1a78479024f61a611d396fb7a40900b139736ea538151"} Oct 09 14:46:15 crc kubenswrapper[4762]: I1009 14:46:15.479961 4762 generic.go:334] "Generic (PLEG): container finished" podID="08640fe4-bd93-498c-a622-c1642ac60ff9" containerID="7a15a4c0b85e3293ccdcd0e085317a70c131a2ce4252a356cb7d5a74becf249a" exitCode=0 Oct 09 14:46:15 crc kubenswrapper[4762]: I1009 14:46:15.480024 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sjgxs" event={"ID":"08640fe4-bd93-498c-a622-c1642ac60ff9","Type":"ContainerDied","Data":"7a15a4c0b85e3293ccdcd0e085317a70c131a2ce4252a356cb7d5a74becf249a"} Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.102830 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.221889 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/d4c7de04-1c4e-42b2-a965-4c046e51b272-rabbitmq-plugins\") pod \"d4c7de04-1c4e-42b2-a965-4c046e51b272\" (UID: \"d4c7de04-1c4e-42b2-a965-4c046e51b272\") " Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.221938 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/d4c7de04-1c4e-42b2-a965-4c046e51b272-plugins-conf\") pod \"d4c7de04-1c4e-42b2-a965-4c046e51b272\" (UID: \"d4c7de04-1c4e-42b2-a965-4c046e51b272\") " Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.222053 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8045949f-ad0a-4376-97e1-09f153c59af7\") pod \"d4c7de04-1c4e-42b2-a965-4c046e51b272\" (UID: \"d4c7de04-1c4e-42b2-a965-4c046e51b272\") " Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.222117 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/d4c7de04-1c4e-42b2-a965-4c046e51b272-server-conf\") pod \"d4c7de04-1c4e-42b2-a965-4c046e51b272\" (UID: \"d4c7de04-1c4e-42b2-a965-4c046e51b272\") " Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.222155 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kwb67\" (UniqueName: \"kubernetes.io/projected/d4c7de04-1c4e-42b2-a965-4c046e51b272-kube-api-access-kwb67\") pod \"d4c7de04-1c4e-42b2-a965-4c046e51b272\" (UID: \"d4c7de04-1c4e-42b2-a965-4c046e51b272\") " Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.222170 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/d4c7de04-1c4e-42b2-a965-4c046e51b272-rabbitmq-erlang-cookie\") pod \"d4c7de04-1c4e-42b2-a965-4c046e51b272\" (UID: \"d4c7de04-1c4e-42b2-a965-4c046e51b272\") " Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.222209 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/d4c7de04-1c4e-42b2-a965-4c046e51b272-erlang-cookie-secret\") pod \"d4c7de04-1c4e-42b2-a965-4c046e51b272\" (UID: \"d4c7de04-1c4e-42b2-a965-4c046e51b272\") " Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.222262 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/d4c7de04-1c4e-42b2-a965-4c046e51b272-pod-info\") pod \"d4c7de04-1c4e-42b2-a965-4c046e51b272\" (UID: \"d4c7de04-1c4e-42b2-a965-4c046e51b272\") " Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.222280 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/d4c7de04-1c4e-42b2-a965-4c046e51b272-rabbitmq-confd\") pod \"d4c7de04-1c4e-42b2-a965-4c046e51b272\" (UID: \"d4c7de04-1c4e-42b2-a965-4c046e51b272\") " Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.222504 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d4c7de04-1c4e-42b2-a965-4c046e51b272-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod 
"d4c7de04-1c4e-42b2-a965-4c046e51b272" (UID: "d4c7de04-1c4e-42b2-a965-4c046e51b272"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.222614 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d4c7de04-1c4e-42b2-a965-4c046e51b272-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "d4c7de04-1c4e-42b2-a965-4c046e51b272" (UID: "d4c7de04-1c4e-42b2-a965-4c046e51b272"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.222711 4762 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/d4c7de04-1c4e-42b2-a965-4c046e51b272-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.222883 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d4c7de04-1c4e-42b2-a965-4c046e51b272-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "d4c7de04-1c4e-42b2-a965-4c046e51b272" (UID: "d4c7de04-1c4e-42b2-a965-4c046e51b272"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.227244 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/d4c7de04-1c4e-42b2-a965-4c046e51b272-pod-info" (OuterVolumeSpecName: "pod-info") pod "d4c7de04-1c4e-42b2-a965-4c046e51b272" (UID: "d4c7de04-1c4e-42b2-a965-4c046e51b272"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.227406 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d4c7de04-1c4e-42b2-a965-4c046e51b272-kube-api-access-kwb67" (OuterVolumeSpecName: "kube-api-access-kwb67") pod "d4c7de04-1c4e-42b2-a965-4c046e51b272" (UID: "d4c7de04-1c4e-42b2-a965-4c046e51b272"). InnerVolumeSpecName "kube-api-access-kwb67". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.227571 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d4c7de04-1c4e-42b2-a965-4c046e51b272-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "d4c7de04-1c4e-42b2-a965-4c046e51b272" (UID: "d4c7de04-1c4e-42b2-a965-4c046e51b272"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.247381 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d4c7de04-1c4e-42b2-a965-4c046e51b272-server-conf" (OuterVolumeSpecName: "server-conf") pod "d4c7de04-1c4e-42b2-a965-4c046e51b272" (UID: "d4c7de04-1c4e-42b2-a965-4c046e51b272"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.261429 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8045949f-ad0a-4376-97e1-09f153c59af7" (OuterVolumeSpecName: "persistence") pod "d4c7de04-1c4e-42b2-a965-4c046e51b272" (UID: "d4c7de04-1c4e-42b2-a965-4c046e51b272"). 
InnerVolumeSpecName "pvc-8045949f-ad0a-4376-97e1-09f153c59af7". PluginName "kubernetes.io/csi", VolumeGidValue "" Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.323396 4762 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/d4c7de04-1c4e-42b2-a965-4c046e51b272-plugins-conf\") on node \"crc\" DevicePath \"\"" Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.323466 4762 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-8045949f-ad0a-4376-97e1-09f153c59af7\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8045949f-ad0a-4376-97e1-09f153c59af7\") on node \"crc\" " Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.323482 4762 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/d4c7de04-1c4e-42b2-a965-4c046e51b272-server-conf\") on node \"crc\" DevicePath \"\"" Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.323494 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kwb67\" (UniqueName: \"kubernetes.io/projected/d4c7de04-1c4e-42b2-a965-4c046e51b272-kube-api-access-kwb67\") on node \"crc\" DevicePath \"\"" Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.323506 4762 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/d4c7de04-1c4e-42b2-a965-4c046e51b272-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.323515 4762 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/d4c7de04-1c4e-42b2-a965-4c046e51b272-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.323526 4762 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/d4c7de04-1c4e-42b2-a965-4c046e51b272-pod-info\") on node \"crc\" DevicePath \"\"" Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.327396 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d4c7de04-1c4e-42b2-a965-4c046e51b272-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "d4c7de04-1c4e-42b2-a965-4c046e51b272" (UID: "d4c7de04-1c4e-42b2-a965-4c046e51b272"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.340061 4762 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... 
Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.340212 4762 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-8045949f-ad0a-4376-97e1-09f153c59af7" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8045949f-ad0a-4376-97e1-09f153c59af7") on node "crc"
Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.424551 4762 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/d4c7de04-1c4e-42b2-a965-4c046e51b272-rabbitmq-confd\") on node \"crc\" DevicePath \"\""
Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.424889 4762 reconciler_common.go:293] "Volume detached for volume \"pvc-8045949f-ad0a-4376-97e1-09f153c59af7\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8045949f-ad0a-4376-97e1-09f153c59af7\") on node \"crc\" DevicePath \"\""
Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.490429 4762 generic.go:334] "Generic (PLEG): container finished" podID="d4c7de04-1c4e-42b2-a965-4c046e51b272" containerID="3dd4260d863005a60dafc1237e59b8f58c54701546f2da1a87fb3637ba04cc81" exitCode=0
Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.490485 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"d4c7de04-1c4e-42b2-a965-4c046e51b272","Type":"ContainerDied","Data":"3dd4260d863005a60dafc1237e59b8f58c54701546f2da1a87fb3637ba04cc81"}
Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.490532 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"d4c7de04-1c4e-42b2-a965-4c046e51b272","Type":"ContainerDied","Data":"1b3602ca07b3f72d0e7d583ee9f32109c897d47d3359020c681feb25a4ddf8c4"}
Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.490529 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.490555 4762 scope.go:117] "RemoveContainer" containerID="3dd4260d863005a60dafc1237e59b8f58c54701546f2da1a87fb3637ba04cc81"
Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.493949 4762 generic.go:334] "Generic (PLEG): container finished" podID="b96c5d4d-5b68-434c-a891-aceb2ea69e00" containerID="b8c5dc9c5be44be2693f86d2e22449274a3f1de39a68cec1c3a9962385d3eead" exitCode=0
Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.494002 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"b96c5d4d-5b68-434c-a891-aceb2ea69e00","Type":"ContainerDied","Data":"b8c5dc9c5be44be2693f86d2e22449274a3f1de39a68cec1c3a9962385d3eead"}
Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.527368 4762 scope.go:117] "RemoveContainer" containerID="19823400e35afd562f963d678f8d72188ac64e8573e8c757ae26ccd13845b90d"
Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.552131 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"]
Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.556229 4762 scope.go:117] "RemoveContainer" containerID="3dd4260d863005a60dafc1237e59b8f58c54701546f2da1a87fb3637ba04cc81"
Oct 09 14:46:16 crc kubenswrapper[4762]: E1009 14:46:16.556752 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3dd4260d863005a60dafc1237e59b8f58c54701546f2da1a87fb3637ba04cc81\": container with ID starting with 3dd4260d863005a60dafc1237e59b8f58c54701546f2da1a87fb3637ba04cc81 not found: ID does not exist" containerID="3dd4260d863005a60dafc1237e59b8f58c54701546f2da1a87fb3637ba04cc81"
Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.556785 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3dd4260d863005a60dafc1237e59b8f58c54701546f2da1a87fb3637ba04cc81"} err="failed to get container status \"3dd4260d863005a60dafc1237e59b8f58c54701546f2da1a87fb3637ba04cc81\": rpc error: code = NotFound desc = could not find container \"3dd4260d863005a60dafc1237e59b8f58c54701546f2da1a87fb3637ba04cc81\": container with ID starting with 3dd4260d863005a60dafc1237e59b8f58c54701546f2da1a87fb3637ba04cc81 not found: ID does not exist"
Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.556809 4762 scope.go:117] "RemoveContainer" containerID="19823400e35afd562f963d678f8d72188ac64e8573e8c757ae26ccd13845b90d"
Oct 09 14:46:16 crc kubenswrapper[4762]: E1009 14:46:16.557263 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"19823400e35afd562f963d678f8d72188ac64e8573e8c757ae26ccd13845b90d\": container with ID starting with 19823400e35afd562f963d678f8d72188ac64e8573e8c757ae26ccd13845b90d not found: ID does not exist" containerID="19823400e35afd562f963d678f8d72188ac64e8573e8c757ae26ccd13845b90d"
Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.557306 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"19823400e35afd562f963d678f8d72188ac64e8573e8c757ae26ccd13845b90d"} err="failed to get container status \"19823400e35afd562f963d678f8d72188ac64e8573e8c757ae26ccd13845b90d\": rpc error: code = NotFound desc = could not find container \"19823400e35afd562f963d678f8d72188ac64e8573e8c757ae26ccd13845b90d\": container with ID starting with 19823400e35afd562f963d678f8d72188ac64e8573e8c757ae26ccd13845b90d not found: ID does not exist"
Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.568829 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"]
Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.590223 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"]
Oct 09 14:46:16 crc kubenswrapper[4762]: E1009 14:46:16.590581 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4c7de04-1c4e-42b2-a965-4c046e51b272" containerName="rabbitmq"
Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.590600 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4c7de04-1c4e-42b2-a965-4c046e51b272" containerName="rabbitmq"
Oct 09 14:46:16 crc kubenswrapper[4762]: E1009 14:46:16.590649 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4c7de04-1c4e-42b2-a965-4c046e51b272" containerName="setup-container"
Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.590665 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4c7de04-1c4e-42b2-a965-4c046e51b272" containerName="setup-container"
Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.590803 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="d4c7de04-1c4e-42b2-a965-4c046e51b272" containerName="rabbitmq"
Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.591661 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.593400 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user"
Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.595524 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie"
Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.595618 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf"
Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.595712 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-wg8dn"
Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.595842 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf"
Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.598600 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"]
Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.728186 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-8045949f-ad0a-4376-97e1-09f153c59af7\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8045949f-ad0a-4376-97e1-09f153c59af7\") pod \"rabbitmq-server-0\" (UID: \"c5f79dc6-e3fe-4d27-889e-5ea96d334d8a\") " pod="openstack/rabbitmq-server-0"
Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.728233 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4zhlk\" (UniqueName: \"kubernetes.io/projected/c5f79dc6-e3fe-4d27-889e-5ea96d334d8a-kube-api-access-4zhlk\") pod \"rabbitmq-server-0\" (UID: \"c5f79dc6-e3fe-4d27-889e-5ea96d334d8a\") " pod="openstack/rabbitmq-server-0"
Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.728279 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c5f79dc6-e3fe-4d27-889e-5ea96d334d8a-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"c5f79dc6-e3fe-4d27-889e-5ea96d334d8a\") " pod="openstack/rabbitmq-server-0"
Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.728303 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c5f79dc6-e3fe-4d27-889e-5ea96d334d8a-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"c5f79dc6-e3fe-4d27-889e-5ea96d334d8a\") " pod="openstack/rabbitmq-server-0"
Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.728330 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c5f79dc6-e3fe-4d27-889e-5ea96d334d8a-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"c5f79dc6-e3fe-4d27-889e-5ea96d334d8a\") " pod="openstack/rabbitmq-server-0"
Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.728396 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c5f79dc6-e3fe-4d27-889e-5ea96d334d8a-pod-info\") pod \"rabbitmq-server-0\" (UID: \"c5f79dc6-e3fe-4d27-889e-5ea96d334d8a\") " pod="openstack/rabbitmq-server-0"
Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.728419 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c5f79dc6-e3fe-4d27-889e-5ea96d334d8a-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"c5f79dc6-e3fe-4d27-889e-5ea96d334d8a\") " pod="openstack/rabbitmq-server-0"
Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.728440 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c5f79dc6-e3fe-4d27-889e-5ea96d334d8a-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"c5f79dc6-e3fe-4d27-889e-5ea96d334d8a\") " pod="openstack/rabbitmq-server-0"
Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.728456 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c5f79dc6-e3fe-4d27-889e-5ea96d334d8a-server-conf\") pod \"rabbitmq-server-0\" (UID: \"c5f79dc6-e3fe-4d27-889e-5ea96d334d8a\") " pod="openstack/rabbitmq-server-0"
Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.827630 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.829750 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c5f79dc6-e3fe-4d27-889e-5ea96d334d8a-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"c5f79dc6-e3fe-4d27-889e-5ea96d334d8a\") " pod="openstack/rabbitmq-server-0"
Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.829815 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c5f79dc6-e3fe-4d27-889e-5ea96d334d8a-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"c5f79dc6-e3fe-4d27-889e-5ea96d334d8a\") " pod="openstack/rabbitmq-server-0"
Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.829900 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c5f79dc6-e3fe-4d27-889e-5ea96d334d8a-pod-info\") pod \"rabbitmq-server-0\" (UID: \"c5f79dc6-e3fe-4d27-889e-5ea96d334d8a\") " pod="openstack/rabbitmq-server-0"
Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.829949 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c5f79dc6-e3fe-4d27-889e-5ea96d334d8a-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"c5f79dc6-e3fe-4d27-889e-5ea96d334d8a\") " pod="openstack/rabbitmq-server-0"
Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.829999 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c5f79dc6-e3fe-4d27-889e-5ea96d334d8a-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"c5f79dc6-e3fe-4d27-889e-5ea96d334d8a\") " pod="openstack/rabbitmq-server-0"
Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.830031 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c5f79dc6-e3fe-4d27-889e-5ea96d334d8a-server-conf\") pod \"rabbitmq-server-0\" (UID: \"c5f79dc6-e3fe-4d27-889e-5ea96d334d8a\") " pod="openstack/rabbitmq-server-0"
Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.830110 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-8045949f-ad0a-4376-97e1-09f153c59af7\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8045949f-ad0a-4376-97e1-09f153c59af7\") pod \"rabbitmq-server-0\" (UID: \"c5f79dc6-e3fe-4d27-889e-5ea96d334d8a\") " pod="openstack/rabbitmq-server-0"
Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.830150 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4zhlk\" (UniqueName: \"kubernetes.io/projected/c5f79dc6-e3fe-4d27-889e-5ea96d334d8a-kube-api-access-4zhlk\") pod \"rabbitmq-server-0\" (UID: \"c5f79dc6-e3fe-4d27-889e-5ea96d334d8a\") " pod="openstack/rabbitmq-server-0"
Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.830216 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c5f79dc6-e3fe-4d27-889e-5ea96d334d8a-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"c5f79dc6-e3fe-4d27-889e-5ea96d334d8a\") " pod="openstack/rabbitmq-server-0"
Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.830543 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c5f79dc6-e3fe-4d27-889e-5ea96d334d8a-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"c5f79dc6-e3fe-4d27-889e-5ea96d334d8a\") " pod="openstack/rabbitmq-server-0"
Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.830560 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c5f79dc6-e3fe-4d27-889e-5ea96d334d8a-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"c5f79dc6-e3fe-4d27-889e-5ea96d334d8a\") " pod="openstack/rabbitmq-server-0"
Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.830926 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c5f79dc6-e3fe-4d27-889e-5ea96d334d8a-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"c5f79dc6-e3fe-4d27-889e-5ea96d334d8a\") " pod="openstack/rabbitmq-server-0"
Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.831609 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c5f79dc6-e3fe-4d27-889e-5ea96d334d8a-server-conf\") pod \"rabbitmq-server-0\" (UID: \"c5f79dc6-e3fe-4d27-889e-5ea96d334d8a\") " pod="openstack/rabbitmq-server-0"
Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.840586 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c5f79dc6-e3fe-4d27-889e-5ea96d334d8a-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"c5f79dc6-e3fe-4d27-889e-5ea96d334d8a\") " pod="openstack/rabbitmq-server-0"
Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.841245 4762 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.841288 4762 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-8045949f-ad0a-4376-97e1-09f153c59af7\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8045949f-ad0a-4376-97e1-09f153c59af7\") pod \"rabbitmq-server-0\" (UID: \"c5f79dc6-e3fe-4d27-889e-5ea96d334d8a\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/e0c286c9173eff4b55d4fbe412e6005eea55968be4a0454aba028e2b78d27ba4/globalmount\"" pod="openstack/rabbitmq-server-0"
Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.842145 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c5f79dc6-e3fe-4d27-889e-5ea96d334d8a-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"c5f79dc6-e3fe-4d27-889e-5ea96d334d8a\") " pod="openstack/rabbitmq-server-0"
Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.842609 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c5f79dc6-e3fe-4d27-889e-5ea96d334d8a-pod-info\") pod \"rabbitmq-server-0\" (UID: \"c5f79dc6-e3fe-4d27-889e-5ea96d334d8a\") " pod="openstack/rabbitmq-server-0"
Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.856783 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4zhlk\" (UniqueName: \"kubernetes.io/projected/c5f79dc6-e3fe-4d27-889e-5ea96d334d8a-kube-api-access-4zhlk\") pod \"rabbitmq-server-0\" (UID: \"c5f79dc6-e3fe-4d27-889e-5ea96d334d8a\") " pod="openstack/rabbitmq-server-0"
Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.881179 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-8045949f-ad0a-4376-97e1-09f153c59af7\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8045949f-ad0a-4376-97e1-09f153c59af7\") pod \"rabbitmq-server-0\" (UID: \"c5f79dc6-e3fe-4d27-889e-5ea96d334d8a\") " pod="openstack/rabbitmq-server-0"
Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.912048 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.931028 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/b96c5d4d-5b68-434c-a891-aceb2ea69e00-erlang-cookie-secret\") pod \"b96c5d4d-5b68-434c-a891-aceb2ea69e00\" (UID: \"b96c5d4d-5b68-434c-a891-aceb2ea69e00\") "
Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.931102 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-77wbg\" (UniqueName: \"kubernetes.io/projected/b96c5d4d-5b68-434c-a891-aceb2ea69e00-kube-api-access-77wbg\") pod \"b96c5d4d-5b68-434c-a891-aceb2ea69e00\" (UID: \"b96c5d4d-5b68-434c-a891-aceb2ea69e00\") "
Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.931141 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/b96c5d4d-5b68-434c-a891-aceb2ea69e00-pod-info\") pod \"b96c5d4d-5b68-434c-a891-aceb2ea69e00\" (UID: \"b96c5d4d-5b68-434c-a891-aceb2ea69e00\") "
Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.931225 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/b96c5d4d-5b68-434c-a891-aceb2ea69e00-rabbitmq-plugins\") pod \"b96c5d4d-5b68-434c-a891-aceb2ea69e00\" (UID: \"b96c5d4d-5b68-434c-a891-aceb2ea69e00\") "
Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.931304 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/b96c5d4d-5b68-434c-a891-aceb2ea69e00-rabbitmq-erlang-cookie\") pod \"b96c5d4d-5b68-434c-a891-aceb2ea69e00\" (UID: \"b96c5d4d-5b68-434c-a891-aceb2ea69e00\") "
Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.931378 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/b96c5d4d-5b68-434c-a891-aceb2ea69e00-server-conf\") pod \"b96c5d4d-5b68-434c-a891-aceb2ea69e00\" (UID: \"b96c5d4d-5b68-434c-a891-aceb2ea69e00\") "
Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.931579 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c397795a-92b5-4edc-a3d4-99af55c1ad7d\") pod \"b96c5d4d-5b68-434c-a891-aceb2ea69e00\" (UID: \"b96c5d4d-5b68-434c-a891-aceb2ea69e00\") "
Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.931622 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/b96c5d4d-5b68-434c-a891-aceb2ea69e00-rabbitmq-confd\") pod \"b96c5d4d-5b68-434c-a891-aceb2ea69e00\" (UID: \"b96c5d4d-5b68-434c-a891-aceb2ea69e00\") "
Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.931731 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/b96c5d4d-5b68-434c-a891-aceb2ea69e00-plugins-conf\") pod \"b96c5d4d-5b68-434c-a891-aceb2ea69e00\" (UID: \"b96c5d4d-5b68-434c-a891-aceb2ea69e00\") "
Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.931833 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b96c5d4d-5b68-434c-a891-aceb2ea69e00-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "b96c5d4d-5b68-434c-a891-aceb2ea69e00" (UID: "b96c5d4d-5b68-434c-a891-aceb2ea69e00"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.932286 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b96c5d4d-5b68-434c-a891-aceb2ea69e00-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "b96c5d4d-5b68-434c-a891-aceb2ea69e00" (UID: "b96c5d4d-5b68-434c-a891-aceb2ea69e00"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.932318 4762 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/b96c5d4d-5b68-434c-a891-aceb2ea69e00-rabbitmq-plugins\") on node \"crc\" DevicePath \"\""
Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.932962 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b96c5d4d-5b68-434c-a891-aceb2ea69e00-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "b96c5d4d-5b68-434c-a891-aceb2ea69e00" (UID: "b96c5d4d-5b68-434c-a891-aceb2ea69e00"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.934066 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/b96c5d4d-5b68-434c-a891-aceb2ea69e00-pod-info" (OuterVolumeSpecName: "pod-info") pod "b96c5d4d-5b68-434c-a891-aceb2ea69e00" (UID: "b96c5d4d-5b68-434c-a891-aceb2ea69e00"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue ""
Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.934322 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b96c5d4d-5b68-434c-a891-aceb2ea69e00-kube-api-access-77wbg" (OuterVolumeSpecName: "kube-api-access-77wbg") pod "b96c5d4d-5b68-434c-a891-aceb2ea69e00" (UID: "b96c5d4d-5b68-434c-a891-aceb2ea69e00"). InnerVolumeSpecName "kube-api-access-77wbg". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.935039 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b96c5d4d-5b68-434c-a891-aceb2ea69e00-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "b96c5d4d-5b68-434c-a891-aceb2ea69e00" (UID: "b96c5d4d-5b68-434c-a891-aceb2ea69e00"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.947511 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c397795a-92b5-4edc-a3d4-99af55c1ad7d" (OuterVolumeSpecName: "persistence") pod "b96c5d4d-5b68-434c-a891-aceb2ea69e00" (UID: "b96c5d4d-5b68-434c-a891-aceb2ea69e00"). InnerVolumeSpecName "pvc-c397795a-92b5-4edc-a3d4-99af55c1ad7d". PluginName "kubernetes.io/csi", VolumeGidValue ""
Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.953892 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b96c5d4d-5b68-434c-a891-aceb2ea69e00-server-conf" (OuterVolumeSpecName: "server-conf") pod "b96c5d4d-5b68-434c-a891-aceb2ea69e00" (UID: "b96c5d4d-5b68-434c-a891-aceb2ea69e00"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 09 14:46:16 crc kubenswrapper[4762]: I1009 14:46:16.974521 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d4c7de04-1c4e-42b2-a965-4c046e51b272" path="/var/lib/kubelet/pods/d4c7de04-1c4e-42b2-a965-4c046e51b272/volumes"
Oct 09 14:46:17 crc kubenswrapper[4762]: I1009 14:46:17.003449 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b96c5d4d-5b68-434c-a891-aceb2ea69e00-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "b96c5d4d-5b68-434c-a891-aceb2ea69e00" (UID: "b96c5d4d-5b68-434c-a891-aceb2ea69e00"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 09 14:46:17 crc kubenswrapper[4762]: I1009 14:46:17.033899 4762 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/b96c5d4d-5b68-434c-a891-aceb2ea69e00-server-conf\") on node \"crc\" DevicePath \"\""
Oct 09 14:46:17 crc kubenswrapper[4762]: I1009 14:46:17.033954 4762 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-c397795a-92b5-4edc-a3d4-99af55c1ad7d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c397795a-92b5-4edc-a3d4-99af55c1ad7d\") on node \"crc\" "
Oct 09 14:46:17 crc kubenswrapper[4762]: I1009 14:46:17.033965 4762 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/b96c5d4d-5b68-434c-a891-aceb2ea69e00-rabbitmq-confd\") on node \"crc\" DevicePath \"\""
Oct 09 14:46:17 crc kubenswrapper[4762]: I1009 14:46:17.033973 4762 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/b96c5d4d-5b68-434c-a891-aceb2ea69e00-plugins-conf\") on node \"crc\" DevicePath \"\""
Oct 09 14:46:17 crc kubenswrapper[4762]: I1009 14:46:17.033983 4762 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/b96c5d4d-5b68-434c-a891-aceb2ea69e00-erlang-cookie-secret\") on node \"crc\" DevicePath \"\""
Oct 09 14:46:17 crc kubenswrapper[4762]: I1009 14:46:17.033992 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-77wbg\" (UniqueName: \"kubernetes.io/projected/b96c5d4d-5b68-434c-a891-aceb2ea69e00-kube-api-access-77wbg\") on node \"crc\" DevicePath \"\""
Oct 09 14:46:17 crc kubenswrapper[4762]: I1009 14:46:17.033999 4762 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/b96c5d4d-5b68-434c-a891-aceb2ea69e00-pod-info\") on node \"crc\" DevicePath \"\""
Oct 09 14:46:17 crc kubenswrapper[4762]: I1009 14:46:17.034011 4762 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/b96c5d4d-5b68-434c-a891-aceb2ea69e00-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\""
Oct 09 14:46:17 crc kubenswrapper[4762]: I1009 14:46:17.053697 4762 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice...
Oct 09 14:46:17 crc kubenswrapper[4762]: I1009 14:46:17.053871 4762 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-c397795a-92b5-4edc-a3d4-99af55c1ad7d" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c397795a-92b5-4edc-a3d4-99af55c1ad7d") on node "crc"
Oct 09 14:46:17 crc kubenswrapper[4762]: I1009 14:46:17.135345 4762 reconciler_common.go:293] "Volume detached for volume \"pvc-c397795a-92b5-4edc-a3d4-99af55c1ad7d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c397795a-92b5-4edc-a3d4-99af55c1ad7d\") on node \"crc\" DevicePath \"\""
Oct 09 14:46:17 crc kubenswrapper[4762]: I1009 14:46:17.304842 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5b7946d7b9-gxm4j"
Oct 09 14:46:17 crc kubenswrapper[4762]: I1009 14:46:17.347028 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"]
Oct 09 14:46:17 crc kubenswrapper[4762]: I1009 14:46:17.356518 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-98ddfc8f-vj8kn"]
Oct 09 14:46:17 crc kubenswrapper[4762]: I1009 14:46:17.356797 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-98ddfc8f-vj8kn" podUID="8b21fbda-eb6e-4a0e-a4a9-12cf5ea7814b" containerName="dnsmasq-dns" containerID="cri-o://f693ee2e2facc5b2644543dc7aac044520f0c3895010e58af77c7f30efcad974" gracePeriod=10
Oct 09 14:46:17 crc kubenswrapper[4762]: I1009 14:46:17.506887 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"b96c5d4d-5b68-434c-a891-aceb2ea69e00","Type":"ContainerDied","Data":"0ef26664af556d1a57df8fefa518296ddc8fec9075c43e036fa267e15302fa8b"}
Oct 09 14:46:17 crc kubenswrapper[4762]: I1009 14:46:17.506943 4762 scope.go:117] "RemoveContainer" containerID="b8c5dc9c5be44be2693f86d2e22449274a3f1de39a68cec1c3a9962385d3eead"
Oct 09 14:46:17 crc kubenswrapper[4762]: I1009 14:46:17.507042 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Oct 09 14:46:17 crc kubenswrapper[4762]: I1009 14:46:17.511450 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"c5f79dc6-e3fe-4d27-889e-5ea96d334d8a","Type":"ContainerStarted","Data":"6df9624b157fad695d5de3f8660f2698810ce2d7b03d7848d512923a8caa959d"}
Oct 09 14:46:17 crc kubenswrapper[4762]: I1009 14:46:17.596253 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Oct 09 14:46:17 crc kubenswrapper[4762]: I1009 14:46:17.602232 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Oct 09 14:46:17 crc kubenswrapper[4762]: I1009 14:46:17.621959 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Oct 09 14:46:17 crc kubenswrapper[4762]: E1009 14:46:17.622337 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b96c5d4d-5b68-434c-a891-aceb2ea69e00" containerName="setup-container"
Oct 09 14:46:17 crc kubenswrapper[4762]: I1009 14:46:17.622359 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="b96c5d4d-5b68-434c-a891-aceb2ea69e00" containerName="setup-container"
Oct 09 14:46:17 crc kubenswrapper[4762]: E1009 14:46:17.622408 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b96c5d4d-5b68-434c-a891-aceb2ea69e00" containerName="rabbitmq"
Oct 09 14:46:17 crc kubenswrapper[4762]: I1009 14:46:17.622418 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="b96c5d4d-5b68-434c-a891-aceb2ea69e00" containerName="rabbitmq"
Oct 09 14:46:17 crc kubenswrapper[4762]: I1009 14:46:17.622598 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="b96c5d4d-5b68-434c-a891-aceb2ea69e00" containerName="rabbitmq"
Oct 09 14:46:17 crc kubenswrapper[4762]: I1009 14:46:17.623609 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Oct 09 14:46:17 crc kubenswrapper[4762]: I1009 14:46:17.627016 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf"
Oct 09 14:46:17 crc kubenswrapper[4762]: I1009 14:46:17.627038 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie"
Oct 09 14:46:17 crc kubenswrapper[4762]: I1009 14:46:17.627013 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-xkdg6"
Oct 09 14:46:17 crc kubenswrapper[4762]: I1009 14:46:17.627395 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user"
Oct 09 14:46:17 crc kubenswrapper[4762]: I1009 14:46:17.627505 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf"
Oct 09 14:46:17 crc kubenswrapper[4762]: I1009 14:46:17.638783 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Oct 09 14:46:17 crc kubenswrapper[4762]: I1009 14:46:17.752442 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/96ee0476-093c-4e1e-ba2c-b2890600c1f3-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"96ee0476-093c-4e1e-ba2c-b2890600c1f3\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 09 14:46:17 crc kubenswrapper[4762]: I1009 14:46:17.752705 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/96ee0476-093c-4e1e-ba2c-b2890600c1f3-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"96ee0476-093c-4e1e-ba2c-b2890600c1f3\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 09 14:46:17 crc kubenswrapper[4762]: I1009 14:46:17.752880 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/96ee0476-093c-4e1e-ba2c-b2890600c1f3-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"96ee0476-093c-4e1e-ba2c-b2890600c1f3\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 09 14:46:17 crc kubenswrapper[4762]: I1009 14:46:17.753040 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/96ee0476-093c-4e1e-ba2c-b2890600c1f3-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"96ee0476-093c-4e1e-ba2c-b2890600c1f3\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 09 14:46:17 crc kubenswrapper[4762]: I1009 14:46:17.753207 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/96ee0476-093c-4e1e-ba2c-b2890600c1f3-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"96ee0476-093c-4e1e-ba2c-b2890600c1f3\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 09 14:46:17 crc kubenswrapper[4762]: I1009 14:46:17.753351 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-c397795a-92b5-4edc-a3d4-99af55c1ad7d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c397795a-92b5-4edc-a3d4-99af55c1ad7d\") pod \"rabbitmq-cell1-server-0\" (UID: \"96ee0476-093c-4e1e-ba2c-b2890600c1f3\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 09 14:46:17 crc kubenswrapper[4762]: I1009 14:46:17.753468 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/96ee0476-093c-4e1e-ba2c-b2890600c1f3-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"96ee0476-093c-4e1e-ba2c-b2890600c1f3\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 09 14:46:17 crc kubenswrapper[4762]: I1009 14:46:17.753600 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/96ee0476-093c-4e1e-ba2c-b2890600c1f3-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"96ee0476-093c-4e1e-ba2c-b2890600c1f3\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 09 14:46:17 crc kubenswrapper[4762]: I1009 14:46:17.753833 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cj4rn\" (UniqueName: \"kubernetes.io/projected/96ee0476-093c-4e1e-ba2c-b2890600c1f3-kube-api-access-cj4rn\") pod \"rabbitmq-cell1-server-0\" (UID: \"96ee0476-093c-4e1e-ba2c-b2890600c1f3\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 09 14:46:17 crc kubenswrapper[4762]: I1009 14:46:17.854990 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/96ee0476-093c-4e1e-ba2c-b2890600c1f3-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"96ee0476-093c-4e1e-ba2c-b2890600c1f3\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 09 14:46:17 crc kubenswrapper[4762]: I1009 14:46:17.855299 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/96ee0476-093c-4e1e-ba2c-b2890600c1f3-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"96ee0476-093c-4e1e-ba2c-b2890600c1f3\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 09 14:46:17 crc kubenswrapper[4762]: I1009 14:46:17.855434 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/96ee0476-093c-4e1e-ba2c-b2890600c1f3-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"96ee0476-093c-4e1e-ba2c-b2890600c1f3\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 09 14:46:17 crc kubenswrapper[4762]: I1009 14:46:17.855878 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-c397795a-92b5-4edc-a3d4-99af55c1ad7d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c397795a-92b5-4edc-a3d4-99af55c1ad7d\") pod \"rabbitmq-cell1-server-0\" (UID: \"96ee0476-093c-4e1e-ba2c-b2890600c1f3\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 09 14:46:17 crc kubenswrapper[4762]: I1009 14:46:17.855987 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/96ee0476-093c-4e1e-ba2c-b2890600c1f3-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"96ee0476-093c-4e1e-ba2c-b2890600c1f3\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 09 14:46:17 crc kubenswrapper[4762]: I1009 14:46:17.856095 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/96ee0476-093c-4e1e-ba2c-b2890600c1f3-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"96ee0476-093c-4e1e-ba2c-b2890600c1f3\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 09 14:46:17 crc kubenswrapper[4762]: I1009 14:46:17.856612 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cj4rn\" (UniqueName: \"kubernetes.io/projected/96ee0476-093c-4e1e-ba2c-b2890600c1f3-kube-api-access-cj4rn\") pod \"rabbitmq-cell1-server-0\" (UID: \"96ee0476-093c-4e1e-ba2c-b2890600c1f3\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 09 14:46:17 crc kubenswrapper[4762]: I1009 14:46:17.856755 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/96ee0476-093c-4e1e-ba2c-b2890600c1f3-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"96ee0476-093c-4e1e-ba2c-b2890600c1f3\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 09 14:46:17 crc kubenswrapper[4762]: I1009 14:46:17.856863 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/96ee0476-093c-4e1e-ba2c-b2890600c1f3-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"96ee0476-093c-4e1e-ba2c-b2890600c1f3\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 09 14:46:17 crc kubenswrapper[4762]: I1009 14:46:17.856039 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/96ee0476-093c-4e1e-ba2c-b2890600c1f3-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"96ee0476-093c-4e1e-ba2c-b2890600c1f3\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 09 14:46:17 crc kubenswrapper[4762]: I1009 14:46:17.855834 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/96ee0476-093c-4e1e-ba2c-b2890600c1f3-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"96ee0476-093c-4e1e-ba2c-b2890600c1f3\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 09 14:46:17 crc kubenswrapper[4762]: I1009 14:46:17.855909 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/96ee0476-093c-4e1e-ba2c-b2890600c1f3-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"96ee0476-093c-4e1e-ba2c-b2890600c1f3\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 09 14:46:17 crc kubenswrapper[4762]: I1009 14:46:17.857778 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/96ee0476-093c-4e1e-ba2c-b2890600c1f3-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"96ee0476-093c-4e1e-ba2c-b2890600c1f3\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 09 14:46:17 crc kubenswrapper[4762]: I1009 14:46:17.858294 4762 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Oct 09 14:46:17 crc kubenswrapper[4762]: I1009 14:46:17.858435 4762 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-c397795a-92b5-4edc-a3d4-99af55c1ad7d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c397795a-92b5-4edc-a3d4-99af55c1ad7d\") pod \"rabbitmq-cell1-server-0\" (UID: \"96ee0476-093c-4e1e-ba2c-b2890600c1f3\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/2dcf0aa2556825a990326d84417e0004092a6f2826f39ec7e81efe6c379be0e9/globalmount\"" pod="openstack/rabbitmq-cell1-server-0"
Oct 09 14:46:17 crc kubenswrapper[4762]: I1009 14:46:17.860421 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/96ee0476-093c-4e1e-ba2c-b2890600c1f3-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"96ee0476-093c-4e1e-ba2c-b2890600c1f3\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 09 14:46:17 crc kubenswrapper[4762]: I1009 14:46:17.860889 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/96ee0476-093c-4e1e-ba2c-b2890600c1f3-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"96ee0476-093c-4e1e-ba2c-b2890600c1f3\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 09 14:46:17 crc kubenswrapper[4762]: I1009 14:46:17.868410 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/96ee0476-093c-4e1e-ba2c-b2890600c1f3-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"96ee0476-093c-4e1e-ba2c-b2890600c1f3\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 09 14:46:17 crc kubenswrapper[4762]: I1009 14:46:17.886404 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cj4rn\" (UniqueName: \"kubernetes.io/projected/96ee0476-093c-4e1e-ba2c-b2890600c1f3-kube-api-access-cj4rn\") pod \"rabbitmq-cell1-server-0\" (UID: \"96ee0476-093c-4e1e-ba2c-b2890600c1f3\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 09 14:46:17 crc kubenswrapper[4762]: I1009 14:46:17.888574 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-c397795a-92b5-4edc-a3d4-99af55c1ad7d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c397795a-92b5-4edc-a3d4-99af55c1ad7d\") pod \"rabbitmq-cell1-server-0\" (UID: \"96ee0476-093c-4e1e-ba2c-b2890600c1f3\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 09 14:46:17 crc kubenswrapper[4762]: I1009 14:46:17.940263 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Oct 09 14:46:18 crc kubenswrapper[4762]: I1009 14:46:18.521536 4762 generic.go:334] "Generic (PLEG): container finished" podID="8b21fbda-eb6e-4a0e-a4a9-12cf5ea7814b" containerID="f693ee2e2facc5b2644543dc7aac044520f0c3895010e58af77c7f30efcad974" exitCode=0
Oct 09 14:46:18 crc kubenswrapper[4762]: I1009 14:46:18.521592 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-98ddfc8f-vj8kn" event={"ID":"8b21fbda-eb6e-4a0e-a4a9-12cf5ea7814b","Type":"ContainerDied","Data":"f693ee2e2facc5b2644543dc7aac044520f0c3895010e58af77c7f30efcad974"}
Oct 09 14:46:18 crc kubenswrapper[4762]: I1009 14:46:18.800108 4762 scope.go:117] "RemoveContainer" containerID="90404abe09e95d47da5c85d592fc0dc360d18dfff674505bb6ad9579e5bb2992"
Oct 09 14:46:18 crc kubenswrapper[4762]: I1009 14:46:18.977897 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b96c5d4d-5b68-434c-a891-aceb2ea69e00" path="/var/lib/kubelet/pods/b96c5d4d-5b68-434c-a891-aceb2ea69e00/volumes"
Oct 09 14:46:19 crc kubenswrapper[4762]: I1009 14:46:19.217303 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-98ddfc8f-vj8kn"
Oct 09 14:46:19 crc kubenswrapper[4762]: I1009 14:46:19.387904 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8b21fbda-eb6e-4a0e-a4a9-12cf5ea7814b-dns-svc\") pod \"8b21fbda-eb6e-4a0e-a4a9-12cf5ea7814b\" (UID: \"8b21fbda-eb6e-4a0e-a4a9-12cf5ea7814b\") "
Oct 09 14:46:19 crc kubenswrapper[4762]: I1009 14:46:19.388910 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5z9tj\" (UniqueName: \"kubernetes.io/projected/8b21fbda-eb6e-4a0e-a4a9-12cf5ea7814b-kube-api-access-5z9tj\") pod \"8b21fbda-eb6e-4a0e-a4a9-12cf5ea7814b\" (UID: \"8b21fbda-eb6e-4a0e-a4a9-12cf5ea7814b\") "
Oct 09 14:46:19 crc kubenswrapper[4762]: I1009 14:46:19.389004 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8b21fbda-eb6e-4a0e-a4a9-12cf5ea7814b-config\") pod \"8b21fbda-eb6e-4a0e-a4a9-12cf5ea7814b\" (UID: \"8b21fbda-eb6e-4a0e-a4a9-12cf5ea7814b\") "
Oct 09 14:46:19 crc kubenswrapper[4762]: I1009 14:46:19.398085 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8b21fbda-eb6e-4a0e-a4a9-12cf5ea7814b-kube-api-access-5z9tj" (OuterVolumeSpecName: "kube-api-access-5z9tj") pod "8b21fbda-eb6e-4a0e-a4a9-12cf5ea7814b" (UID: "8b21fbda-eb6e-4a0e-a4a9-12cf5ea7814b"). InnerVolumeSpecName "kube-api-access-5z9tj". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 09 14:46:19 crc kubenswrapper[4762]: I1009 14:46:19.442601 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8b21fbda-eb6e-4a0e-a4a9-12cf5ea7814b-config" (OuterVolumeSpecName: "config") pod "8b21fbda-eb6e-4a0e-a4a9-12cf5ea7814b" (UID: "8b21fbda-eb6e-4a0e-a4a9-12cf5ea7814b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 09 14:46:19 crc kubenswrapper[4762]: I1009 14:46:19.443215 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8b21fbda-eb6e-4a0e-a4a9-12cf5ea7814b-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "8b21fbda-eb6e-4a0e-a4a9-12cf5ea7814b" (UID: "8b21fbda-eb6e-4a0e-a4a9-12cf5ea7814b"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 09 14:46:19 crc kubenswrapper[4762]: I1009 14:46:19.491240 4762 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8b21fbda-eb6e-4a0e-a4a9-12cf5ea7814b-dns-svc\") on node \"crc\" DevicePath \"\""
Oct 09 14:46:19 crc kubenswrapper[4762]: I1009 14:46:19.491290 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5z9tj\" (UniqueName: \"kubernetes.io/projected/8b21fbda-eb6e-4a0e-a4a9-12cf5ea7814b-kube-api-access-5z9tj\") on node \"crc\" DevicePath \"\""
Oct 09 14:46:19 crc kubenswrapper[4762]: I1009 14:46:19.491308 4762 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8b21fbda-eb6e-4a0e-a4a9-12cf5ea7814b-config\") on node \"crc\" DevicePath \"\""
Oct 09 14:46:19 crc kubenswrapper[4762]: I1009 14:46:19.532163 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-98ddfc8f-vj8kn" event={"ID":"8b21fbda-eb6e-4a0e-a4a9-12cf5ea7814b","Type":"ContainerDied","Data":"35132b8ebdd0c5a78bf71029fdcdff0113771d64ce4132e6af57af2e745b34a1"}
Oct 09 14:46:19 crc kubenswrapper[4762]: I1009 14:46:19.532248 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-98ddfc8f-vj8kn"
Oct 09 14:46:19 crc kubenswrapper[4762]: I1009 14:46:19.532273 4762 scope.go:117] "RemoveContainer" containerID="f693ee2e2facc5b2644543dc7aac044520f0c3895010e58af77c7f30efcad974"
Oct 09 14:46:19 crc kubenswrapper[4762]: I1009 14:46:19.553755 4762 scope.go:117] "RemoveContainer" containerID="b7b50ce357513270b70bd4714e36ec2e98c27320853d7e3c6388953cb540fb53"
Oct 09 14:46:19 crc kubenswrapper[4762]: I1009 14:46:19.566717 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-98ddfc8f-vj8kn"]
Oct 09 14:46:19 crc kubenswrapper[4762]: I1009 14:46:19.573736 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-98ddfc8f-vj8kn"]
Oct 09 14:46:19 crc kubenswrapper[4762]: I1009 14:46:19.713560 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Oct 09 14:46:20 crc kubenswrapper[4762]: I1009 14:46:20.540660 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"96ee0476-093c-4e1e-ba2c-b2890600c1f3","Type":"ContainerStarted","Data":"e4ec4b883c56771834bc2013998336924bf24a1392530caf0c22a659e13a6e4a"}
Oct 09 14:46:20 crc kubenswrapper[4762]: I1009 14:46:20.544274 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sjgxs" event={"ID":"08640fe4-bd93-498c-a622-c1642ac60ff9","Type":"ContainerStarted","Data":"d6cc5dc232cfec2f5ca783e8560e78812cafba1646ef9484c1521ea85de39993"}
Oct 09 14:46:20 crc kubenswrapper[4762]: I1009 14:46:20.981745 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8b21fbda-eb6e-4a0e-a4a9-12cf5ea7814b" path="/var/lib/kubelet/pods/8b21fbda-eb6e-4a0e-a4a9-12cf5ea7814b/volumes"
Oct 09 14:46:21 crc kubenswrapper[4762]: I1009 14:46:21.553913 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"c5f79dc6-e3fe-4d27-889e-5ea96d334d8a","Type":"ContainerStarted","Data":"9d10dd8943fd3edbcbb72487aaab83f1c44e6659a515c1fda548b705eda35b1b"}
Oct 09 14:46:21 crc kubenswrapper[4762]: I1009 14:46:21.575176 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-sjgxs" podStartSLOduration=4.7648087839999995 podStartE2EDuration="11.575155469s" podCreationTimestamp="2025-10-09 14:46:10 +0000 UTC" firstStartedPulling="2025-10-09 14:46:12.459854166 +0000 UTC m=+4848.233645205" lastFinishedPulling="2025-10-09 14:46:19.270200851 +0000 UTC m=+4855.043991890" observedRunningTime="2025-10-09 14:46:21.570691444 +0000 UTC m=+4857.344482503" watchObservedRunningTime="2025-10-09 14:46:21.575155469 +0000 UTC m=+4857.348946508"
Oct 09 14:46:22 crc kubenswrapper[4762]: I1009 14:46:22.561592 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"96ee0476-093c-4e1e-ba2c-b2890600c1f3","Type":"ContainerStarted","Data":"7225f0cc690f2e1ed4dfbb20028aacdad1be1741f1b07707a41af1ec89fe1225"}
Oct 09 14:46:22 crc kubenswrapper[4762]: I1009 14:46:22.965278 4762 scope.go:117] "RemoveContainer" containerID="bd93ac77fa82ac7ac75985b30cf5b2de69fe1e0a93c6fcc548b0b2dd005aa70c"
Oct 09 14:46:22 crc kubenswrapper[4762]: E1009 14:46:22.965557 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14"
Oct 09 14:46:30 crc kubenswrapper[4762]: I1009 14:46:30.804672 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-sjgxs"
Oct 09 14:46:30 crc kubenswrapper[4762]: I1009 14:46:30.805368 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-sjgxs"
Oct 09 14:46:30 crc kubenswrapper[4762]: I1009 14:46:30.849320 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-sjgxs"
Oct 09 14:46:31 crc kubenswrapper[4762]: I1009 14:46:31.687183 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-sjgxs"
Oct 09 14:46:31 crc kubenswrapper[4762]: I1009 14:46:31.730148 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-sjgxs"]
Oct 09 14:46:33 crc kubenswrapper[4762]: I1009 14:46:33.662110 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-sjgxs" podUID="08640fe4-bd93-498c-a622-c1642ac60ff9" containerName="registry-server" containerID="cri-o://d6cc5dc232cfec2f5ca783e8560e78812cafba1646ef9484c1521ea85de39993" gracePeriod=2
Oct 09 14:46:34 crc kubenswrapper[4762]: I1009 14:46:34.650244 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-sjgxs"
Oct 09 14:46:34 crc kubenswrapper[4762]: I1009 14:46:34.677089 4762 generic.go:334] "Generic (PLEG): container finished" podID="08640fe4-bd93-498c-a622-c1642ac60ff9" containerID="d6cc5dc232cfec2f5ca783e8560e78812cafba1646ef9484c1521ea85de39993" exitCode=0
Oct 09 14:46:34 crc kubenswrapper[4762]: I1009 14:46:34.677135 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sjgxs" event={"ID":"08640fe4-bd93-498c-a622-c1642ac60ff9","Type":"ContainerDied","Data":"d6cc5dc232cfec2f5ca783e8560e78812cafba1646ef9484c1521ea85de39993"}
Oct 09 14:46:34 crc kubenswrapper[4762]: I1009 14:46:34.677167 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sjgxs" event={"ID":"08640fe4-bd93-498c-a622-c1642ac60ff9","Type":"ContainerDied","Data":"582ff5a4ac12b56fa48fba2dc622e5acd3bf6e55a249963ce68234c8ea76fe05"}
Oct 09 14:46:34 crc kubenswrapper[4762]: I1009 14:46:34.677171 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-sjgxs"
Oct 09 14:46:34 crc kubenswrapper[4762]: I1009 14:46:34.677187 4762 scope.go:117] "RemoveContainer" containerID="d6cc5dc232cfec2f5ca783e8560e78812cafba1646ef9484c1521ea85de39993"
Oct 09 14:46:34 crc kubenswrapper[4762]: I1009 14:46:34.704606 4762 scope.go:117] "RemoveContainer" containerID="7a15a4c0b85e3293ccdcd0e085317a70c131a2ce4252a356cb7d5a74becf249a"
Oct 09 14:46:34 crc kubenswrapper[4762]: I1009 14:46:34.725434 4762 scope.go:117] "RemoveContainer" containerID="ea3d50619cbbefb454c1a78479024f61a611d396fb7a40900b139736ea538151"
Oct 09 14:46:34 crc kubenswrapper[4762]: I1009 14:46:34.727652 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/08640fe4-bd93-498c-a622-c1642ac60ff9-utilities\") pod \"08640fe4-bd93-498c-a622-c1642ac60ff9\" (UID: \"08640fe4-bd93-498c-a622-c1642ac60ff9\") "
Oct 09 14:46:34 crc kubenswrapper[4762]: I1009 14:46:34.728051 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xln6p\" (UniqueName: \"kubernetes.io/projected/08640fe4-bd93-498c-a622-c1642ac60ff9-kube-api-access-xln6p\") pod \"08640fe4-bd93-498c-a622-c1642ac60ff9\" (UID: \"08640fe4-bd93-498c-a622-c1642ac60ff9\") "
Oct 09 14:46:34 crc kubenswrapper[4762]: I1009 14:46:34.728679 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/08640fe4-bd93-498c-a622-c1642ac60ff9-utilities" (OuterVolumeSpecName: "utilities") pod "08640fe4-bd93-498c-a622-c1642ac60ff9" (UID: "08640fe4-bd93-498c-a622-c1642ac60ff9"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 09 14:46:34 crc kubenswrapper[4762]: I1009 14:46:34.729129 4762 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/08640fe4-bd93-498c-a622-c1642ac60ff9-utilities\") on node \"crc\" DevicePath \"\""
Oct 09 14:46:34 crc kubenswrapper[4762]: I1009 14:46:34.734208 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/08640fe4-bd93-498c-a622-c1642ac60ff9-kube-api-access-xln6p" (OuterVolumeSpecName: "kube-api-access-xln6p") pod "08640fe4-bd93-498c-a622-c1642ac60ff9" (UID: "08640fe4-bd93-498c-a622-c1642ac60ff9"). InnerVolumeSpecName "kube-api-access-xln6p". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 09 14:46:34 crc kubenswrapper[4762]: I1009 14:46:34.772694 4762 scope.go:117] "RemoveContainer" containerID="d6cc5dc232cfec2f5ca783e8560e78812cafba1646ef9484c1521ea85de39993"
Oct 09 14:46:34 crc kubenswrapper[4762]: E1009 14:46:34.773108 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d6cc5dc232cfec2f5ca783e8560e78812cafba1646ef9484c1521ea85de39993\": container with ID starting with d6cc5dc232cfec2f5ca783e8560e78812cafba1646ef9484c1521ea85de39993 not found: ID does not exist" containerID="d6cc5dc232cfec2f5ca783e8560e78812cafba1646ef9484c1521ea85de39993"
Oct 09 14:46:34 crc kubenswrapper[4762]: I1009 14:46:34.773144 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d6cc5dc232cfec2f5ca783e8560e78812cafba1646ef9484c1521ea85de39993"} err="failed to get container status \"d6cc5dc232cfec2f5ca783e8560e78812cafba1646ef9484c1521ea85de39993\": rpc error: code = NotFound desc = could not find container \"d6cc5dc232cfec2f5ca783e8560e78812cafba1646ef9484c1521ea85de39993\": container with ID starting with d6cc5dc232cfec2f5ca783e8560e78812cafba1646ef9484c1521ea85de39993 not found: ID does not exist"
Oct 09 14:46:34 crc kubenswrapper[4762]: I1009 14:46:34.773168 4762 scope.go:117] "RemoveContainer" containerID="7a15a4c0b85e3293ccdcd0e085317a70c131a2ce4252a356cb7d5a74becf249a"
Oct 09 14:46:34 crc kubenswrapper[4762]: E1009 14:46:34.773430 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7a15a4c0b85e3293ccdcd0e085317a70c131a2ce4252a356cb7d5a74becf249a\": container with ID starting with 7a15a4c0b85e3293ccdcd0e085317a70c131a2ce4252a356cb7d5a74becf249a not found: ID does not exist" containerID="7a15a4c0b85e3293ccdcd0e085317a70c131a2ce4252a356cb7d5a74becf249a"
Oct 09 14:46:34 crc kubenswrapper[4762]: I1009 14:46:34.773464 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7a15a4c0b85e3293ccdcd0e085317a70c131a2ce4252a356cb7d5a74becf249a"} err="failed to get container status \"7a15a4c0b85e3293ccdcd0e085317a70c131a2ce4252a356cb7d5a74becf249a\": rpc error: code = NotFound desc = could not find container \"7a15a4c0b85e3293ccdcd0e085317a70c131a2ce4252a356cb7d5a74becf249a\": container with ID starting with 7a15a4c0b85e3293ccdcd0e085317a70c131a2ce4252a356cb7d5a74becf249a not found: ID does not exist"
Oct 09 14:46:34 crc kubenswrapper[4762]: I1009 14:46:34.773485 4762 scope.go:117] "RemoveContainer" containerID="ea3d50619cbbefb454c1a78479024f61a611d396fb7a40900b139736ea538151"
Oct 09 14:46:34 crc kubenswrapper[4762]: E1009 14:46:34.773789 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ea3d50619cbbefb454c1a78479024f61a611d396fb7a40900b139736ea538151\": container with ID starting with ea3d50619cbbefb454c1a78479024f61a611d396fb7a40900b139736ea538151 not found: ID does not exist" containerID="ea3d50619cbbefb454c1a78479024f61a611d396fb7a40900b139736ea538151"
Oct 09 14:46:34 crc kubenswrapper[4762]: I1009 14:46:34.773819 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ea3d50619cbbefb454c1a78479024f61a611d396fb7a40900b139736ea538151"} err="failed to get container status \"ea3d50619cbbefb454c1a78479024f61a611d396fb7a40900b139736ea538151\": rpc error: code = NotFound desc = could not
find container \"ea3d50619cbbefb454c1a78479024f61a611d396fb7a40900b139736ea538151\": container with ID starting with ea3d50619cbbefb454c1a78479024f61a611d396fb7a40900b139736ea538151 not found: ID does not exist" Oct 09 14:46:34 crc kubenswrapper[4762]: I1009 14:46:34.829995 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/08640fe4-bd93-498c-a622-c1642ac60ff9-catalog-content\") pod \"08640fe4-bd93-498c-a622-c1642ac60ff9\" (UID: \"08640fe4-bd93-498c-a622-c1642ac60ff9\") " Oct 09 14:46:34 crc kubenswrapper[4762]: I1009 14:46:34.830465 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xln6p\" (UniqueName: \"kubernetes.io/projected/08640fe4-bd93-498c-a622-c1642ac60ff9-kube-api-access-xln6p\") on node \"crc\" DevicePath \"\"" Oct 09 14:46:34 crc kubenswrapper[4762]: I1009 14:46:34.918021 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/08640fe4-bd93-498c-a622-c1642ac60ff9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "08640fe4-bd93-498c-a622-c1642ac60ff9" (UID: "08640fe4-bd93-498c-a622-c1642ac60ff9"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 14:46:34 crc kubenswrapper[4762]: I1009 14:46:34.931433 4762 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/08640fe4-bd93-498c-a622-c1642ac60ff9-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 14:46:35 crc kubenswrapper[4762]: I1009 14:46:35.008038 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-sjgxs"] Oct 09 14:46:35 crc kubenswrapper[4762]: I1009 14:46:35.012853 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-sjgxs"] Oct 09 14:46:36 crc kubenswrapper[4762]: I1009 14:46:36.965374 4762 scope.go:117] "RemoveContainer" containerID="bd93ac77fa82ac7ac75985b30cf5b2de69fe1e0a93c6fcc548b0b2dd005aa70c" Oct 09 14:46:36 crc kubenswrapper[4762]: E1009 14:46:36.965669 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 14:46:36 crc kubenswrapper[4762]: I1009 14:46:36.974182 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="08640fe4-bd93-498c-a622-c1642ac60ff9" path="/var/lib/kubelet/pods/08640fe4-bd93-498c-a622-c1642ac60ff9/volumes" Oct 09 14:46:47 crc kubenswrapper[4762]: I1009 14:46:47.965031 4762 scope.go:117] "RemoveContainer" containerID="bd93ac77fa82ac7ac75985b30cf5b2de69fe1e0a93c6fcc548b0b2dd005aa70c" Oct 09 14:46:48 crc kubenswrapper[4762]: I1009 14:46:48.836996 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" event={"ID":"366049a3-acf6-488c-9f93-4557528d6d14","Type":"ContainerStarted","Data":"3000d51a4aadf3d2620258ddf3cdf3f8532d23cabadc7484a369784c4a6006b5"} Oct 09 14:46:53 crc kubenswrapper[4762]: I1009 14:46:53.880598 4762 generic.go:334] "Generic (PLEG): container finished" podID="c5f79dc6-e3fe-4d27-889e-5ea96d334d8a" 
containerID="9d10dd8943fd3edbcbb72487aaab83f1c44e6659a515c1fda548b705eda35b1b" exitCode=0 Oct 09 14:46:53 crc kubenswrapper[4762]: I1009 14:46:53.880708 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"c5f79dc6-e3fe-4d27-889e-5ea96d334d8a","Type":"ContainerDied","Data":"9d10dd8943fd3edbcbb72487aaab83f1c44e6659a515c1fda548b705eda35b1b"} Oct 09 14:46:54 crc kubenswrapper[4762]: I1009 14:46:54.891367 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"c5f79dc6-e3fe-4d27-889e-5ea96d334d8a","Type":"ContainerStarted","Data":"c033693b52e1a673e38550ee417070432a8ccc4aa4735258f8fa99681831a598"} Oct 09 14:46:54 crc kubenswrapper[4762]: I1009 14:46:54.892183 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Oct 09 14:46:54 crc kubenswrapper[4762]: I1009 14:46:54.893531 4762 generic.go:334] "Generic (PLEG): container finished" podID="96ee0476-093c-4e1e-ba2c-b2890600c1f3" containerID="7225f0cc690f2e1ed4dfbb20028aacdad1be1741f1b07707a41af1ec89fe1225" exitCode=0 Oct 09 14:46:54 crc kubenswrapper[4762]: I1009 14:46:54.893559 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"96ee0476-093c-4e1e-ba2c-b2890600c1f3","Type":"ContainerDied","Data":"7225f0cc690f2e1ed4dfbb20028aacdad1be1741f1b07707a41af1ec89fe1225"} Oct 09 14:46:54 crc kubenswrapper[4762]: I1009 14:46:54.955128 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=38.955109425 podStartE2EDuration="38.955109425s" podCreationTimestamp="2025-10-09 14:46:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 14:46:54.919229015 +0000 UTC m=+4890.693020064" watchObservedRunningTime="2025-10-09 14:46:54.955109425 +0000 UTC m=+4890.728900464" Oct 09 14:46:55 crc kubenswrapper[4762]: I1009 14:46:55.904059 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"96ee0476-093c-4e1e-ba2c-b2890600c1f3","Type":"ContainerStarted","Data":"e862e78fa62280e7e77c39a14f13f74a89562d8c3b148e067aa784d60bcaef6b"} Oct 09 14:46:55 crc kubenswrapper[4762]: I1009 14:46:55.904598 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Oct 09 14:46:55 crc kubenswrapper[4762]: I1009 14:46:55.932082 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=38.932062178 podStartE2EDuration="38.932062178s" podCreationTimestamp="2025-10-09 14:46:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 14:46:55.928293049 +0000 UTC m=+4891.702084108" watchObservedRunningTime="2025-10-09 14:46:55.932062178 +0000 UTC m=+4891.705853217" Oct 09 14:47:06 crc kubenswrapper[4762]: I1009 14:47:06.917500 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Oct 09 14:47:07 crc kubenswrapper[4762]: I1009 14:47:07.942828 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Oct 09 14:47:14 crc kubenswrapper[4762]: I1009 14:47:14.213508 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-1-default"] Oct 09 14:47:14 
crc kubenswrapper[4762]: E1009 14:47:14.214030 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08640fe4-bd93-498c-a622-c1642ac60ff9" containerName="registry-server" Oct 09 14:47:14 crc kubenswrapper[4762]: I1009 14:47:14.214044 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="08640fe4-bd93-498c-a622-c1642ac60ff9" containerName="registry-server" Oct 09 14:47:14 crc kubenswrapper[4762]: E1009 14:47:14.214054 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08640fe4-bd93-498c-a622-c1642ac60ff9" containerName="extract-utilities" Oct 09 14:47:14 crc kubenswrapper[4762]: I1009 14:47:14.214060 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="08640fe4-bd93-498c-a622-c1642ac60ff9" containerName="extract-utilities" Oct 09 14:47:14 crc kubenswrapper[4762]: E1009 14:47:14.214069 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08640fe4-bd93-498c-a622-c1642ac60ff9" containerName="extract-content" Oct 09 14:47:14 crc kubenswrapper[4762]: I1009 14:47:14.214076 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="08640fe4-bd93-498c-a622-c1642ac60ff9" containerName="extract-content" Oct 09 14:47:14 crc kubenswrapper[4762]: E1009 14:47:14.214087 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8b21fbda-eb6e-4a0e-a4a9-12cf5ea7814b" containerName="init" Oct 09 14:47:14 crc kubenswrapper[4762]: I1009 14:47:14.214092 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="8b21fbda-eb6e-4a0e-a4a9-12cf5ea7814b" containerName="init" Oct 09 14:47:14 crc kubenswrapper[4762]: E1009 14:47:14.214103 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8b21fbda-eb6e-4a0e-a4a9-12cf5ea7814b" containerName="dnsmasq-dns" Oct 09 14:47:14 crc kubenswrapper[4762]: I1009 14:47:14.214109 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="8b21fbda-eb6e-4a0e-a4a9-12cf5ea7814b" containerName="dnsmasq-dns" Oct 09 14:47:14 crc kubenswrapper[4762]: I1009 14:47:14.214248 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="8b21fbda-eb6e-4a0e-a4a9-12cf5ea7814b" containerName="dnsmasq-dns" Oct 09 14:47:14 crc kubenswrapper[4762]: I1009 14:47:14.214257 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="08640fe4-bd93-498c-a622-c1642ac60ff9" containerName="registry-server" Oct 09 14:47:14 crc kubenswrapper[4762]: I1009 14:47:14.214855 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-1-default" Oct 09 14:47:14 crc kubenswrapper[4762]: I1009 14:47:14.216937 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-gjg68" Oct 09 14:47:14 crc kubenswrapper[4762]: I1009 14:47:14.231901 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-1-default"] Oct 09 14:47:14 crc kubenswrapper[4762]: I1009 14:47:14.288593 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bgn66\" (UniqueName: \"kubernetes.io/projected/71c062eb-4231-497e-84b1-d526c66f9a5a-kube-api-access-bgn66\") pod \"mariadb-client-1-default\" (UID: \"71c062eb-4231-497e-84b1-d526c66f9a5a\") " pod="openstack/mariadb-client-1-default" Oct 09 14:47:14 crc kubenswrapper[4762]: I1009 14:47:14.390209 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bgn66\" (UniqueName: \"kubernetes.io/projected/71c062eb-4231-497e-84b1-d526c66f9a5a-kube-api-access-bgn66\") pod \"mariadb-client-1-default\" (UID: \"71c062eb-4231-497e-84b1-d526c66f9a5a\") " pod="openstack/mariadb-client-1-default" Oct 09 14:47:14 crc kubenswrapper[4762]: I1009 14:47:14.412594 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bgn66\" (UniqueName: \"kubernetes.io/projected/71c062eb-4231-497e-84b1-d526c66f9a5a-kube-api-access-bgn66\") pod \"mariadb-client-1-default\" (UID: \"71c062eb-4231-497e-84b1-d526c66f9a5a\") " pod="openstack/mariadb-client-1-default" Oct 09 14:47:14 crc kubenswrapper[4762]: I1009 14:47:14.542572 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-1-default" Oct 09 14:47:14 crc kubenswrapper[4762]: I1009 14:47:14.862275 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-1-default"] Oct 09 14:47:14 crc kubenswrapper[4762]: W1009 14:47:14.875265 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod71c062eb_4231_497e_84b1_d526c66f9a5a.slice/crio-3b5bb60f1973db3736fe5915190ca970d618bf8bf4388b164a37e3bf0e92e328 WatchSource:0}: Error finding container 3b5bb60f1973db3736fe5915190ca970d618bf8bf4388b164a37e3bf0e92e328: Status 404 returned error can't find the container with id 3b5bb60f1973db3736fe5915190ca970d618bf8bf4388b164a37e3bf0e92e328 Oct 09 14:47:15 crc kubenswrapper[4762]: I1009 14:47:15.067590 4762 generic.go:334] "Generic (PLEG): container finished" podID="71c062eb-4231-497e-84b1-d526c66f9a5a" containerID="ea6fcb3700bf1600e05d7a08c3777603366a1cd0878e8f31371b3d799ea57cf6" exitCode=0 Oct 09 14:47:15 crc kubenswrapper[4762]: I1009 14:47:15.067694 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-1-default" event={"ID":"71c062eb-4231-497e-84b1-d526c66f9a5a","Type":"ContainerDied","Data":"ea6fcb3700bf1600e05d7a08c3777603366a1cd0878e8f31371b3d799ea57cf6"} Oct 09 14:47:15 crc kubenswrapper[4762]: I1009 14:47:15.067728 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-1-default" event={"ID":"71c062eb-4231-497e-84b1-d526c66f9a5a","Type":"ContainerStarted","Data":"3b5bb60f1973db3736fe5915190ca970d618bf8bf4388b164a37e3bf0e92e328"} Oct 09 14:47:16 crc kubenswrapper[4762]: I1009 14:47:16.457106 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-1-default" Oct 09 14:47:16 crc kubenswrapper[4762]: I1009 14:47:16.486434 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-1-default_71c062eb-4231-497e-84b1-d526c66f9a5a/mariadb-client-1-default/0.log" Oct 09 14:47:16 crc kubenswrapper[4762]: I1009 14:47:16.513560 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bgn66\" (UniqueName: \"kubernetes.io/projected/71c062eb-4231-497e-84b1-d526c66f9a5a-kube-api-access-bgn66\") pod \"71c062eb-4231-497e-84b1-d526c66f9a5a\" (UID: \"71c062eb-4231-497e-84b1-d526c66f9a5a\") " Oct 09 14:47:16 crc kubenswrapper[4762]: I1009 14:47:16.515290 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-1-default"] Oct 09 14:47:16 crc kubenswrapper[4762]: I1009 14:47:16.520064 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/71c062eb-4231-497e-84b1-d526c66f9a5a-kube-api-access-bgn66" (OuterVolumeSpecName: "kube-api-access-bgn66") pod "71c062eb-4231-497e-84b1-d526c66f9a5a" (UID: "71c062eb-4231-497e-84b1-d526c66f9a5a"). InnerVolumeSpecName "kube-api-access-bgn66". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 14:47:16 crc kubenswrapper[4762]: I1009 14:47:16.524874 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-1-default"] Oct 09 14:47:16 crc kubenswrapper[4762]: I1009 14:47:16.615596 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bgn66\" (UniqueName: \"kubernetes.io/projected/71c062eb-4231-497e-84b1-d526c66f9a5a-kube-api-access-bgn66\") on node \"crc\" DevicePath \"\"" Oct 09 14:47:16 crc kubenswrapper[4762]: I1009 14:47:16.979205 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="71c062eb-4231-497e-84b1-d526c66f9a5a" path="/var/lib/kubelet/pods/71c062eb-4231-497e-84b1-d526c66f9a5a/volumes" Oct 09 14:47:17 crc kubenswrapper[4762]: I1009 14:47:17.005000 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-2-default"] Oct 09 14:47:17 crc kubenswrapper[4762]: E1009 14:47:17.005329 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="71c062eb-4231-497e-84b1-d526c66f9a5a" containerName="mariadb-client-1-default" Oct 09 14:47:17 crc kubenswrapper[4762]: I1009 14:47:17.005345 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="71c062eb-4231-497e-84b1-d526c66f9a5a" containerName="mariadb-client-1-default" Oct 09 14:47:17 crc kubenswrapper[4762]: I1009 14:47:17.005489 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="71c062eb-4231-497e-84b1-d526c66f9a5a" containerName="mariadb-client-1-default" Oct 09 14:47:17 crc kubenswrapper[4762]: I1009 14:47:17.006038 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-2-default" Oct 09 14:47:17 crc kubenswrapper[4762]: I1009 14:47:17.014929 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-2-default"] Oct 09 14:47:17 crc kubenswrapper[4762]: I1009 14:47:17.022055 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xjc7p\" (UniqueName: \"kubernetes.io/projected/5393a79f-8d2d-4c93-bd22-501de1c9a493-kube-api-access-xjc7p\") pod \"mariadb-client-2-default\" (UID: \"5393a79f-8d2d-4c93-bd22-501de1c9a493\") " pod="openstack/mariadb-client-2-default" Oct 09 14:47:17 crc kubenswrapper[4762]: I1009 14:47:17.085779 4762 scope.go:117] "RemoveContainer" containerID="ea6fcb3700bf1600e05d7a08c3777603366a1cd0878e8f31371b3d799ea57cf6" Oct 09 14:47:17 crc kubenswrapper[4762]: I1009 14:47:17.085843 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-1-default" Oct 09 14:47:17 crc kubenswrapper[4762]: I1009 14:47:17.123770 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xjc7p\" (UniqueName: \"kubernetes.io/projected/5393a79f-8d2d-4c93-bd22-501de1c9a493-kube-api-access-xjc7p\") pod \"mariadb-client-2-default\" (UID: \"5393a79f-8d2d-4c93-bd22-501de1c9a493\") " pod="openstack/mariadb-client-2-default" Oct 09 14:47:17 crc kubenswrapper[4762]: I1009 14:47:17.140529 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xjc7p\" (UniqueName: \"kubernetes.io/projected/5393a79f-8d2d-4c93-bd22-501de1c9a493-kube-api-access-xjc7p\") pod \"mariadb-client-2-default\" (UID: \"5393a79f-8d2d-4c93-bd22-501de1c9a493\") " pod="openstack/mariadb-client-2-default" Oct 09 14:47:17 crc kubenswrapper[4762]: I1009 14:47:17.332512 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-2-default" Oct 09 14:47:18 crc kubenswrapper[4762]: I1009 14:47:18.586155 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-2-default"] Oct 09 14:47:18 crc kubenswrapper[4762]: W1009 14:47:18.590750 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5393a79f_8d2d_4c93_bd22_501de1c9a493.slice/crio-ff063434a01d181ed25867367bd3f7c11061c9769cedf7b8e82805711ac0c022 WatchSource:0}: Error finding container ff063434a01d181ed25867367bd3f7c11061c9769cedf7b8e82805711ac0c022: Status 404 returned error can't find the container with id ff063434a01d181ed25867367bd3f7c11061c9769cedf7b8e82805711ac0c022 Oct 09 14:47:19 crc kubenswrapper[4762]: I1009 14:47:19.107810 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-2-default" event={"ID":"5393a79f-8d2d-4c93-bd22-501de1c9a493","Type":"ContainerStarted","Data":"dc4f313ab5d5a69920d1b58c713b06e5ed0a9e035ded3f48d92455732392f7f0"} Oct 09 14:47:19 crc kubenswrapper[4762]: I1009 14:47:19.107896 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-2-default" event={"ID":"5393a79f-8d2d-4c93-bd22-501de1c9a493","Type":"ContainerStarted","Data":"ff063434a01d181ed25867367bd3f7c11061c9769cedf7b8e82805711ac0c022"} Oct 09 14:47:19 crc kubenswrapper[4762]: I1009 14:47:19.129394 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/mariadb-client-2-default" podStartSLOduration=3.129373841 podStartE2EDuration="3.129373841s" podCreationTimestamp="2025-10-09 14:47:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 14:47:19.126303012 +0000 UTC m=+4914.900094151" watchObservedRunningTime="2025-10-09 14:47:19.129373841 +0000 UTC m=+4914.903164890" Oct 09 14:47:20 crc kubenswrapper[4762]: I1009 14:47:20.119891 4762 generic.go:334] "Generic (PLEG): container finished" podID="5393a79f-8d2d-4c93-bd22-501de1c9a493" containerID="dc4f313ab5d5a69920d1b58c713b06e5ed0a9e035ded3f48d92455732392f7f0" exitCode=0 Oct 09 14:47:20 crc kubenswrapper[4762]: I1009 14:47:20.119990 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-2-default" event={"ID":"5393a79f-8d2d-4c93-bd22-501de1c9a493","Type":"ContainerDied","Data":"dc4f313ab5d5a69920d1b58c713b06e5ed0a9e035ded3f48d92455732392f7f0"} Oct 09 14:47:21 crc kubenswrapper[4762]: I1009 14:47:21.620267 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-2-default" Oct 09 14:47:21 crc kubenswrapper[4762]: I1009 14:47:21.663168 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-2-default"] Oct 09 14:47:21 crc kubenswrapper[4762]: I1009 14:47:21.670448 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-2-default"] Oct 09 14:47:21 crc kubenswrapper[4762]: I1009 14:47:21.704215 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xjc7p\" (UniqueName: \"kubernetes.io/projected/5393a79f-8d2d-4c93-bd22-501de1c9a493-kube-api-access-xjc7p\") pod \"5393a79f-8d2d-4c93-bd22-501de1c9a493\" (UID: \"5393a79f-8d2d-4c93-bd22-501de1c9a493\") " Oct 09 14:47:21 crc kubenswrapper[4762]: I1009 14:47:21.710838 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5393a79f-8d2d-4c93-bd22-501de1c9a493-kube-api-access-xjc7p" (OuterVolumeSpecName: "kube-api-access-xjc7p") pod "5393a79f-8d2d-4c93-bd22-501de1c9a493" (UID: "5393a79f-8d2d-4c93-bd22-501de1c9a493"). InnerVolumeSpecName "kube-api-access-xjc7p". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 14:47:21 crc kubenswrapper[4762]: I1009 14:47:21.805576 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xjc7p\" (UniqueName: \"kubernetes.io/projected/5393a79f-8d2d-4c93-bd22-501de1c9a493-kube-api-access-xjc7p\") on node \"crc\" DevicePath \"\"" Oct 09 14:47:22 crc kubenswrapper[4762]: I1009 14:47:22.118333 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-1"] Oct 09 14:47:22 crc kubenswrapper[4762]: E1009 14:47:22.118774 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5393a79f-8d2d-4c93-bd22-501de1c9a493" containerName="mariadb-client-2-default" Oct 09 14:47:22 crc kubenswrapper[4762]: I1009 14:47:22.118800 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="5393a79f-8d2d-4c93-bd22-501de1c9a493" containerName="mariadb-client-2-default" Oct 09 14:47:22 crc kubenswrapper[4762]: I1009 14:47:22.119032 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="5393a79f-8d2d-4c93-bd22-501de1c9a493" containerName="mariadb-client-2-default" Oct 09 14:47:22 crc kubenswrapper[4762]: I1009 14:47:22.120298 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-1" Oct 09 14:47:22 crc kubenswrapper[4762]: I1009 14:47:22.129073 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-1"] Oct 09 14:47:22 crc kubenswrapper[4762]: I1009 14:47:22.151914 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ff063434a01d181ed25867367bd3f7c11061c9769cedf7b8e82805711ac0c022" Oct 09 14:47:22 crc kubenswrapper[4762]: I1009 14:47:22.151980 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-2-default" Oct 09 14:47:22 crc kubenswrapper[4762]: I1009 14:47:22.214660 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kgx88\" (UniqueName: \"kubernetes.io/projected/4a3dd6e7-16d3-4da5-8b35-de51e7d0c651-kube-api-access-kgx88\") pod \"mariadb-client-1\" (UID: \"4a3dd6e7-16d3-4da5-8b35-de51e7d0c651\") " pod="openstack/mariadb-client-1" Oct 09 14:47:22 crc kubenswrapper[4762]: I1009 14:47:22.317077 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kgx88\" (UniqueName: \"kubernetes.io/projected/4a3dd6e7-16d3-4da5-8b35-de51e7d0c651-kube-api-access-kgx88\") pod \"mariadb-client-1\" (UID: \"4a3dd6e7-16d3-4da5-8b35-de51e7d0c651\") " pod="openstack/mariadb-client-1" Oct 09 14:47:22 crc kubenswrapper[4762]: I1009 14:47:22.340061 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kgx88\" (UniqueName: \"kubernetes.io/projected/4a3dd6e7-16d3-4da5-8b35-de51e7d0c651-kube-api-access-kgx88\") pod \"mariadb-client-1\" (UID: \"4a3dd6e7-16d3-4da5-8b35-de51e7d0c651\") " pod="openstack/mariadb-client-1" Oct 09 14:47:22 crc kubenswrapper[4762]: I1009 14:47:22.457397 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-1" Oct 09 14:47:22 crc kubenswrapper[4762]: I1009 14:47:22.983948 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5393a79f-8d2d-4c93-bd22-501de1c9a493" path="/var/lib/kubelet/pods/5393a79f-8d2d-4c93-bd22-501de1c9a493/volumes" Oct 09 14:47:23 crc kubenswrapper[4762]: I1009 14:47:23.069984 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-1"] Oct 09 14:47:23 crc kubenswrapper[4762]: I1009 14:47:23.167469 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-1" event={"ID":"4a3dd6e7-16d3-4da5-8b35-de51e7d0c651","Type":"ContainerStarted","Data":"7a953af6d4ca4fa76012efac6bb1b15688690f11367ad2b8d24a4d656c65571c"} Oct 09 14:47:24 crc kubenswrapper[4762]: I1009 14:47:24.179481 4762 generic.go:334] "Generic (PLEG): container finished" podID="4a3dd6e7-16d3-4da5-8b35-de51e7d0c651" containerID="6328c29e48f14036c15fc449ecfe34f22a9585dd28b2d2e9b9b85569ea9f4da8" exitCode=0 Oct 09 14:47:24 crc kubenswrapper[4762]: I1009 14:47:24.179575 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-1" event={"ID":"4a3dd6e7-16d3-4da5-8b35-de51e7d0c651","Type":"ContainerDied","Data":"6328c29e48f14036c15fc449ecfe34f22a9585dd28b2d2e9b9b85569ea9f4da8"} Oct 09 14:47:25 crc kubenswrapper[4762]: I1009 14:47:25.529991 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-1" Oct 09 14:47:25 crc kubenswrapper[4762]: I1009 14:47:25.547464 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-1_4a3dd6e7-16d3-4da5-8b35-de51e7d0c651/mariadb-client-1/0.log" Oct 09 14:47:25 crc kubenswrapper[4762]: I1009 14:47:25.566548 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kgx88\" (UniqueName: \"kubernetes.io/projected/4a3dd6e7-16d3-4da5-8b35-de51e7d0c651-kube-api-access-kgx88\") pod \"4a3dd6e7-16d3-4da5-8b35-de51e7d0c651\" (UID: \"4a3dd6e7-16d3-4da5-8b35-de51e7d0c651\") " Oct 09 14:47:25 crc kubenswrapper[4762]: I1009 14:47:25.571364 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-1"] Oct 09 14:47:25 crc kubenswrapper[4762]: I1009 14:47:25.572734 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4a3dd6e7-16d3-4da5-8b35-de51e7d0c651-kube-api-access-kgx88" (OuterVolumeSpecName: "kube-api-access-kgx88") pod "4a3dd6e7-16d3-4da5-8b35-de51e7d0c651" (UID: "4a3dd6e7-16d3-4da5-8b35-de51e7d0c651"). InnerVolumeSpecName "kube-api-access-kgx88". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 14:47:25 crc kubenswrapper[4762]: I1009 14:47:25.577050 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-1"] Oct 09 14:47:25 crc kubenswrapper[4762]: I1009 14:47:25.668211 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kgx88\" (UniqueName: \"kubernetes.io/projected/4a3dd6e7-16d3-4da5-8b35-de51e7d0c651-kube-api-access-kgx88\") on node \"crc\" DevicePath \"\"" Oct 09 14:47:26 crc kubenswrapper[4762]: I1009 14:47:26.060044 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-4-default"] Oct 09 14:47:26 crc kubenswrapper[4762]: E1009 14:47:26.060503 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a3dd6e7-16d3-4da5-8b35-de51e7d0c651" containerName="mariadb-client-1" Oct 09 14:47:26 crc kubenswrapper[4762]: I1009 14:47:26.060554 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a3dd6e7-16d3-4da5-8b35-de51e7d0c651" containerName="mariadb-client-1" Oct 09 14:47:26 crc kubenswrapper[4762]: I1009 14:47:26.060864 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="4a3dd6e7-16d3-4da5-8b35-de51e7d0c651" containerName="mariadb-client-1" Oct 09 14:47:26 crc kubenswrapper[4762]: I1009 14:47:26.061597 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-4-default" Oct 09 14:47:26 crc kubenswrapper[4762]: I1009 14:47:26.070304 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-4-default"] Oct 09 14:47:26 crc kubenswrapper[4762]: I1009 14:47:26.074436 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kpvwz\" (UniqueName: \"kubernetes.io/projected/c6db95ab-8147-4e98-a86a-1500342388d5-kube-api-access-kpvwz\") pod \"mariadb-client-4-default\" (UID: \"c6db95ab-8147-4e98-a86a-1500342388d5\") " pod="openstack/mariadb-client-4-default" Oct 09 14:47:26 crc kubenswrapper[4762]: I1009 14:47:26.176395 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kpvwz\" (UniqueName: \"kubernetes.io/projected/c6db95ab-8147-4e98-a86a-1500342388d5-kube-api-access-kpvwz\") pod \"mariadb-client-4-default\" (UID: \"c6db95ab-8147-4e98-a86a-1500342388d5\") " pod="openstack/mariadb-client-4-default" Oct 09 14:47:26 crc kubenswrapper[4762]: I1009 14:47:26.197792 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7a953af6d4ca4fa76012efac6bb1b15688690f11367ad2b8d24a4d656c65571c" Oct 09 14:47:26 crc kubenswrapper[4762]: I1009 14:47:26.197881 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-1" Oct 09 14:47:26 crc kubenswrapper[4762]: I1009 14:47:26.197898 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kpvwz\" (UniqueName: \"kubernetes.io/projected/c6db95ab-8147-4e98-a86a-1500342388d5-kube-api-access-kpvwz\") pod \"mariadb-client-4-default\" (UID: \"c6db95ab-8147-4e98-a86a-1500342388d5\") " pod="openstack/mariadb-client-4-default" Oct 09 14:47:26 crc kubenswrapper[4762]: I1009 14:47:26.394698 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-4-default" Oct 09 14:47:26 crc kubenswrapper[4762]: I1009 14:47:26.711648 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-4-default"] Oct 09 14:47:26 crc kubenswrapper[4762]: I1009 14:47:26.974562 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4a3dd6e7-16d3-4da5-8b35-de51e7d0c651" path="/var/lib/kubelet/pods/4a3dd6e7-16d3-4da5-8b35-de51e7d0c651/volumes" Oct 09 14:47:27 crc kubenswrapper[4762]: I1009 14:47:27.227363 4762 generic.go:334] "Generic (PLEG): container finished" podID="c6db95ab-8147-4e98-a86a-1500342388d5" containerID="a4a9a1b87ca82d38d7d6b6a0bcf5ecbf8ba51e981c5b338e7fa7ff820d4ae40b" exitCode=0 Oct 09 14:47:27 crc kubenswrapper[4762]: I1009 14:47:27.227442 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-4-default" event={"ID":"c6db95ab-8147-4e98-a86a-1500342388d5","Type":"ContainerDied","Data":"a4a9a1b87ca82d38d7d6b6a0bcf5ecbf8ba51e981c5b338e7fa7ff820d4ae40b"} Oct 09 14:47:27 crc kubenswrapper[4762]: I1009 14:47:27.227483 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-4-default" event={"ID":"c6db95ab-8147-4e98-a86a-1500342388d5","Type":"ContainerStarted","Data":"2ffa72520908372eac6f2a45a054c339e0f845d59d230225469bb41850e07b11"} Oct 09 14:47:28 crc kubenswrapper[4762]: I1009 14:47:28.585398 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-4-default" Oct 09 14:47:28 crc kubenswrapper[4762]: I1009 14:47:28.610283 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kpvwz\" (UniqueName: \"kubernetes.io/projected/c6db95ab-8147-4e98-a86a-1500342388d5-kube-api-access-kpvwz\") pod \"c6db95ab-8147-4e98-a86a-1500342388d5\" (UID: \"c6db95ab-8147-4e98-a86a-1500342388d5\") " Oct 09 14:47:28 crc kubenswrapper[4762]: I1009 14:47:28.612936 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-4-default_c6db95ab-8147-4e98-a86a-1500342388d5/mariadb-client-4-default/0.log" Oct 09 14:47:28 crc kubenswrapper[4762]: I1009 14:47:28.619288 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c6db95ab-8147-4e98-a86a-1500342388d5-kube-api-access-kpvwz" (OuterVolumeSpecName: "kube-api-access-kpvwz") pod "c6db95ab-8147-4e98-a86a-1500342388d5" (UID: "c6db95ab-8147-4e98-a86a-1500342388d5"). InnerVolumeSpecName "kube-api-access-kpvwz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 14:47:28 crc kubenswrapper[4762]: I1009 14:47:28.640485 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-4-default"] Oct 09 14:47:28 crc kubenswrapper[4762]: I1009 14:47:28.645069 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-4-default"] Oct 09 14:47:28 crc kubenswrapper[4762]: I1009 14:47:28.712652 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kpvwz\" (UniqueName: \"kubernetes.io/projected/c6db95ab-8147-4e98-a86a-1500342388d5-kube-api-access-kpvwz\") on node \"crc\" DevicePath \"\"" Oct 09 14:47:28 crc kubenswrapper[4762]: I1009 14:47:28.989048 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c6db95ab-8147-4e98-a86a-1500342388d5" path="/var/lib/kubelet/pods/c6db95ab-8147-4e98-a86a-1500342388d5/volumes" Oct 09 14:47:29 crc kubenswrapper[4762]: I1009 14:47:29.243096 4762 scope.go:117] "RemoveContainer" containerID="a4a9a1b87ca82d38d7d6b6a0bcf5ecbf8ba51e981c5b338e7fa7ff820d4ae40b" Oct 09 14:47:29 crc kubenswrapper[4762]: I1009 14:47:29.243152 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-4-default" Oct 09 14:47:33 crc kubenswrapper[4762]: I1009 14:47:33.179900 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-5-default"] Oct 09 14:47:33 crc kubenswrapper[4762]: E1009 14:47:33.180627 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c6db95ab-8147-4e98-a86a-1500342388d5" containerName="mariadb-client-4-default" Oct 09 14:47:33 crc kubenswrapper[4762]: I1009 14:47:33.180667 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="c6db95ab-8147-4e98-a86a-1500342388d5" containerName="mariadb-client-4-default" Oct 09 14:47:33 crc kubenswrapper[4762]: I1009 14:47:33.180859 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="c6db95ab-8147-4e98-a86a-1500342388d5" containerName="mariadb-client-4-default" Oct 09 14:47:33 crc kubenswrapper[4762]: I1009 14:47:33.181712 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-5-default" Oct 09 14:47:33 crc kubenswrapper[4762]: I1009 14:47:33.183733 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-gjg68" Oct 09 14:47:33 crc kubenswrapper[4762]: I1009 14:47:33.189727 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-5-default"] Oct 09 14:47:33 crc kubenswrapper[4762]: I1009 14:47:33.288599 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4vxw4\" (UniqueName: \"kubernetes.io/projected/f271eb2e-f6cc-4deb-a4b1-41ef2c6bafb3-kube-api-access-4vxw4\") pod \"mariadb-client-5-default\" (UID: \"f271eb2e-f6cc-4deb-a4b1-41ef2c6bafb3\") " pod="openstack/mariadb-client-5-default" Oct 09 14:47:33 crc kubenswrapper[4762]: I1009 14:47:33.390124 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4vxw4\" (UniqueName: \"kubernetes.io/projected/f271eb2e-f6cc-4deb-a4b1-41ef2c6bafb3-kube-api-access-4vxw4\") pod \"mariadb-client-5-default\" (UID: \"f271eb2e-f6cc-4deb-a4b1-41ef2c6bafb3\") " pod="openstack/mariadb-client-5-default" Oct 09 14:47:33 crc kubenswrapper[4762]: I1009 14:47:33.414404 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4vxw4\" (UniqueName: \"kubernetes.io/projected/f271eb2e-f6cc-4deb-a4b1-41ef2c6bafb3-kube-api-access-4vxw4\") pod \"mariadb-client-5-default\" (UID: \"f271eb2e-f6cc-4deb-a4b1-41ef2c6bafb3\") " pod="openstack/mariadb-client-5-default" Oct 09 14:47:33 crc kubenswrapper[4762]: I1009 14:47:33.508383 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-5-default" Oct 09 14:47:33 crc kubenswrapper[4762]: I1009 14:47:33.989163 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-5-default"] Oct 09 14:47:34 crc kubenswrapper[4762]: I1009 14:47:33.999703 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-6mwws"] Oct 09 14:47:34 crc kubenswrapper[4762]: I1009 14:47:34.001280 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-6mwws" Oct 09 14:47:34 crc kubenswrapper[4762]: I1009 14:47:34.019231 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-6mwws"] Oct 09 14:47:34 crc kubenswrapper[4762]: I1009 14:47:34.099816 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3b17ffb5-31c0-4737-a8fa-9c8138af2c25-catalog-content\") pod \"certified-operators-6mwws\" (UID: \"3b17ffb5-31c0-4737-a8fa-9c8138af2c25\") " pod="openshift-marketplace/certified-operators-6mwws" Oct 09 14:47:34 crc kubenswrapper[4762]: I1009 14:47:34.100009 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-47fp5\" (UniqueName: \"kubernetes.io/projected/3b17ffb5-31c0-4737-a8fa-9c8138af2c25-kube-api-access-47fp5\") pod \"certified-operators-6mwws\" (UID: \"3b17ffb5-31c0-4737-a8fa-9c8138af2c25\") " pod="openshift-marketplace/certified-operators-6mwws" Oct 09 14:47:34 crc kubenswrapper[4762]: I1009 14:47:34.100045 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3b17ffb5-31c0-4737-a8fa-9c8138af2c25-utilities\") pod \"certified-operators-6mwws\" (UID: \"3b17ffb5-31c0-4737-a8fa-9c8138af2c25\") " pod="openshift-marketplace/certified-operators-6mwws" Oct 09 14:47:34 crc kubenswrapper[4762]: I1009 14:47:34.191083 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-989sk"] Oct 09 14:47:34 crc kubenswrapper[4762]: I1009 14:47:34.192491 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-989sk" Oct 09 14:47:34 crc kubenswrapper[4762]: I1009 14:47:34.201156 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3b17ffb5-31c0-4737-a8fa-9c8138af2c25-utilities\") pod \"certified-operators-6mwws\" (UID: \"3b17ffb5-31c0-4737-a8fa-9c8138af2c25\") " pod="openshift-marketplace/certified-operators-6mwws" Oct 09 14:47:34 crc kubenswrapper[4762]: I1009 14:47:34.201236 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3b17ffb5-31c0-4737-a8fa-9c8138af2c25-catalog-content\") pod \"certified-operators-6mwws\" (UID: \"3b17ffb5-31c0-4737-a8fa-9c8138af2c25\") " pod="openshift-marketplace/certified-operators-6mwws" Oct 09 14:47:34 crc kubenswrapper[4762]: I1009 14:47:34.201302 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-47fp5\" (UniqueName: \"kubernetes.io/projected/3b17ffb5-31c0-4737-a8fa-9c8138af2c25-kube-api-access-47fp5\") pod \"certified-operators-6mwws\" (UID: \"3b17ffb5-31c0-4737-a8fa-9c8138af2c25\") " pod="openshift-marketplace/certified-operators-6mwws" Oct 09 14:47:34 crc kubenswrapper[4762]: I1009 14:47:34.202200 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3b17ffb5-31c0-4737-a8fa-9c8138af2c25-catalog-content\") pod \"certified-operators-6mwws\" (UID: \"3b17ffb5-31c0-4737-a8fa-9c8138af2c25\") " pod="openshift-marketplace/certified-operators-6mwws" Oct 09 14:47:34 crc kubenswrapper[4762]: I1009 14:47:34.202423 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3b17ffb5-31c0-4737-a8fa-9c8138af2c25-utilities\") pod \"certified-operators-6mwws\" (UID: \"3b17ffb5-31c0-4737-a8fa-9c8138af2c25\") " pod="openshift-marketplace/certified-operators-6mwws" Oct 09 14:47:34 crc kubenswrapper[4762]: I1009 14:47:34.208123 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-989sk"] Oct 09 14:47:34 crc kubenswrapper[4762]: I1009 14:47:34.235717 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-47fp5\" (UniqueName: \"kubernetes.io/projected/3b17ffb5-31c0-4737-a8fa-9c8138af2c25-kube-api-access-47fp5\") pod \"certified-operators-6mwws\" (UID: \"3b17ffb5-31c0-4737-a8fa-9c8138af2c25\") " pod="openshift-marketplace/certified-operators-6mwws" Oct 09 14:47:34 crc kubenswrapper[4762]: I1009 14:47:34.292147 4762 generic.go:334] "Generic (PLEG): container finished" podID="f271eb2e-f6cc-4deb-a4b1-41ef2c6bafb3" containerID="6a88fae016ca2e7c1536581d6d36c2458cdeea0d6afb1390f73f7bcaaded3dcd" exitCode=0 Oct 09 14:47:34 crc kubenswrapper[4762]: I1009 14:47:34.292201 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-5-default" event={"ID":"f271eb2e-f6cc-4deb-a4b1-41ef2c6bafb3","Type":"ContainerDied","Data":"6a88fae016ca2e7c1536581d6d36c2458cdeea0d6afb1390f73f7bcaaded3dcd"} Oct 09 14:47:34 crc kubenswrapper[4762]: I1009 14:47:34.292232 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-5-default" event={"ID":"f271eb2e-f6cc-4deb-a4b1-41ef2c6bafb3","Type":"ContainerStarted","Data":"ebccc3a18e03a323e57fce479d954def67380119873b5f83a52f1462211aa81f"} Oct 09 14:47:34 crc kubenswrapper[4762]: I1009 14:47:34.303923 4762 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-876cs\" (UniqueName: \"kubernetes.io/projected/f136553c-e186-4706-9b54-ded4838711b4-kube-api-access-876cs\") pod \"community-operators-989sk\" (UID: \"f136553c-e186-4706-9b54-ded4838711b4\") " pod="openshift-marketplace/community-operators-989sk" Oct 09 14:47:34 crc kubenswrapper[4762]: I1009 14:47:34.304006 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f136553c-e186-4706-9b54-ded4838711b4-catalog-content\") pod \"community-operators-989sk\" (UID: \"f136553c-e186-4706-9b54-ded4838711b4\") " pod="openshift-marketplace/community-operators-989sk" Oct 09 14:47:34 crc kubenswrapper[4762]: I1009 14:47:34.304110 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f136553c-e186-4706-9b54-ded4838711b4-utilities\") pod \"community-operators-989sk\" (UID: \"f136553c-e186-4706-9b54-ded4838711b4\") " pod="openshift-marketplace/community-operators-989sk" Oct 09 14:47:34 crc kubenswrapper[4762]: I1009 14:47:34.345624 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-6mwws" Oct 09 14:47:34 crc kubenswrapper[4762]: I1009 14:47:34.405482 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f136553c-e186-4706-9b54-ded4838711b4-catalog-content\") pod \"community-operators-989sk\" (UID: \"f136553c-e186-4706-9b54-ded4838711b4\") " pod="openshift-marketplace/community-operators-989sk" Oct 09 14:47:34 crc kubenswrapper[4762]: I1009 14:47:34.405597 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f136553c-e186-4706-9b54-ded4838711b4-utilities\") pod \"community-operators-989sk\" (UID: \"f136553c-e186-4706-9b54-ded4838711b4\") " pod="openshift-marketplace/community-operators-989sk" Oct 09 14:47:34 crc kubenswrapper[4762]: I1009 14:47:34.405704 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-876cs\" (UniqueName: \"kubernetes.io/projected/f136553c-e186-4706-9b54-ded4838711b4-kube-api-access-876cs\") pod \"community-operators-989sk\" (UID: \"f136553c-e186-4706-9b54-ded4838711b4\") " pod="openshift-marketplace/community-operators-989sk" Oct 09 14:47:34 crc kubenswrapper[4762]: I1009 14:47:34.405976 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f136553c-e186-4706-9b54-ded4838711b4-catalog-content\") pod \"community-operators-989sk\" (UID: \"f136553c-e186-4706-9b54-ded4838711b4\") " pod="openshift-marketplace/community-operators-989sk" Oct 09 14:47:34 crc kubenswrapper[4762]: I1009 14:47:34.406024 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f136553c-e186-4706-9b54-ded4838711b4-utilities\") pod \"community-operators-989sk\" (UID: \"f136553c-e186-4706-9b54-ded4838711b4\") " pod="openshift-marketplace/community-operators-989sk" Oct 09 14:47:34 crc kubenswrapper[4762]: I1009 14:47:34.426353 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-876cs\" (UniqueName: 
\"kubernetes.io/projected/f136553c-e186-4706-9b54-ded4838711b4-kube-api-access-876cs\") pod \"community-operators-989sk\" (UID: \"f136553c-e186-4706-9b54-ded4838711b4\") " pod="openshift-marketplace/community-operators-989sk" Oct 09 14:47:34 crc kubenswrapper[4762]: I1009 14:47:34.512062 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-989sk" Oct 09 14:47:34 crc kubenswrapper[4762]: I1009 14:47:34.671287 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-6mwws"] Oct 09 14:47:35 crc kubenswrapper[4762]: I1009 14:47:35.091318 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-989sk"] Oct 09 14:47:35 crc kubenswrapper[4762]: W1009 14:47:35.097116 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf136553c_e186_4706_9b54_ded4838711b4.slice/crio-190763152a01ad603185424ae2ba997daeda1fb6bb0f243c1d234b19eedd92fc WatchSource:0}: Error finding container 190763152a01ad603185424ae2ba997daeda1fb6bb0f243c1d234b19eedd92fc: Status 404 returned error can't find the container with id 190763152a01ad603185424ae2ba997daeda1fb6bb0f243c1d234b19eedd92fc Oct 09 14:47:35 crc kubenswrapper[4762]: I1009 14:47:35.299624 4762 generic.go:334] "Generic (PLEG): container finished" podID="3b17ffb5-31c0-4737-a8fa-9c8138af2c25" containerID="abfa408c7e5a153bc7d0c90b12d2afa20186913a512627ae7a2a3a18559099db" exitCode=0 Oct 09 14:47:35 crc kubenswrapper[4762]: I1009 14:47:35.299701 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6mwws" event={"ID":"3b17ffb5-31c0-4737-a8fa-9c8138af2c25","Type":"ContainerDied","Data":"abfa408c7e5a153bc7d0c90b12d2afa20186913a512627ae7a2a3a18559099db"} Oct 09 14:47:35 crc kubenswrapper[4762]: I1009 14:47:35.299728 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6mwws" event={"ID":"3b17ffb5-31c0-4737-a8fa-9c8138af2c25","Type":"ContainerStarted","Data":"b3b7d440d1f725468a5ba7af41405f09a32c2d1af8ddbdafe6f9c213cdbf703e"} Oct 09 14:47:35 crc kubenswrapper[4762]: I1009 14:47:35.301486 4762 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 09 14:47:35 crc kubenswrapper[4762]: I1009 14:47:35.302457 4762 generic.go:334] "Generic (PLEG): container finished" podID="f136553c-e186-4706-9b54-ded4838711b4" containerID="c8eef6c6cb4dbb4d313ec98b085e24536d44e10bb84980c5932ba780c47e09d5" exitCode=0 Oct 09 14:47:35 crc kubenswrapper[4762]: I1009 14:47:35.303082 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-989sk" event={"ID":"f136553c-e186-4706-9b54-ded4838711b4","Type":"ContainerDied","Data":"c8eef6c6cb4dbb4d313ec98b085e24536d44e10bb84980c5932ba780c47e09d5"} Oct 09 14:47:35 crc kubenswrapper[4762]: I1009 14:47:35.303382 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-989sk" event={"ID":"f136553c-e186-4706-9b54-ded4838711b4","Type":"ContainerStarted","Data":"190763152a01ad603185424ae2ba997daeda1fb6bb0f243c1d234b19eedd92fc"} Oct 09 14:47:35 crc kubenswrapper[4762]: I1009 14:47:35.650687 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-5-default" Oct 09 14:47:35 crc kubenswrapper[4762]: I1009 14:47:35.667243 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-5-default_f271eb2e-f6cc-4deb-a4b1-41ef2c6bafb3/mariadb-client-5-default/0.log" Oct 09 14:47:35 crc kubenswrapper[4762]: I1009 14:47:35.708807 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-5-default"] Oct 09 14:47:35 crc kubenswrapper[4762]: I1009 14:47:35.718196 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-5-default"] Oct 09 14:47:35 crc kubenswrapper[4762]: I1009 14:47:35.739410 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4vxw4\" (UniqueName: \"kubernetes.io/projected/f271eb2e-f6cc-4deb-a4b1-41ef2c6bafb3-kube-api-access-4vxw4\") pod \"f271eb2e-f6cc-4deb-a4b1-41ef2c6bafb3\" (UID: \"f271eb2e-f6cc-4deb-a4b1-41ef2c6bafb3\") " Oct 09 14:47:35 crc kubenswrapper[4762]: I1009 14:47:35.744657 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f271eb2e-f6cc-4deb-a4b1-41ef2c6bafb3-kube-api-access-4vxw4" (OuterVolumeSpecName: "kube-api-access-4vxw4") pod "f271eb2e-f6cc-4deb-a4b1-41ef2c6bafb3" (UID: "f271eb2e-f6cc-4deb-a4b1-41ef2c6bafb3"). InnerVolumeSpecName "kube-api-access-4vxw4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 14:47:35 crc kubenswrapper[4762]: I1009 14:47:35.836046 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-6-default"] Oct 09 14:47:35 crc kubenswrapper[4762]: E1009 14:47:35.836412 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f271eb2e-f6cc-4deb-a4b1-41ef2c6bafb3" containerName="mariadb-client-5-default" Oct 09 14:47:35 crc kubenswrapper[4762]: I1009 14:47:35.836436 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="f271eb2e-f6cc-4deb-a4b1-41ef2c6bafb3" containerName="mariadb-client-5-default" Oct 09 14:47:35 crc kubenswrapper[4762]: I1009 14:47:35.836656 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="f271eb2e-f6cc-4deb-a4b1-41ef2c6bafb3" containerName="mariadb-client-5-default" Oct 09 14:47:35 crc kubenswrapper[4762]: I1009 14:47:35.837148 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-6-default"] Oct 09 14:47:35 crc kubenswrapper[4762]: I1009 14:47:35.837228 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-6-default" Oct 09 14:47:35 crc kubenswrapper[4762]: I1009 14:47:35.858108 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4vxw4\" (UniqueName: \"kubernetes.io/projected/f271eb2e-f6cc-4deb-a4b1-41ef2c6bafb3-kube-api-access-4vxw4\") on node \"crc\" DevicePath \"\"" Oct 09 14:47:35 crc kubenswrapper[4762]: I1009 14:47:35.959707 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-42dtw\" (UniqueName: \"kubernetes.io/projected/06856e79-195d-4ed4-b724-f3372ab3130e-kube-api-access-42dtw\") pod \"mariadb-client-6-default\" (UID: \"06856e79-195d-4ed4-b724-f3372ab3130e\") " pod="openstack/mariadb-client-6-default" Oct 09 14:47:36 crc kubenswrapper[4762]: I1009 14:47:36.061402 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-42dtw\" (UniqueName: \"kubernetes.io/projected/06856e79-195d-4ed4-b724-f3372ab3130e-kube-api-access-42dtw\") pod \"mariadb-client-6-default\" (UID: \"06856e79-195d-4ed4-b724-f3372ab3130e\") " pod="openstack/mariadb-client-6-default" Oct 09 14:47:36 crc kubenswrapper[4762]: I1009 14:47:36.078564 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-42dtw\" (UniqueName: \"kubernetes.io/projected/06856e79-195d-4ed4-b724-f3372ab3130e-kube-api-access-42dtw\") pod \"mariadb-client-6-default\" (UID: \"06856e79-195d-4ed4-b724-f3372ab3130e\") " pod="openstack/mariadb-client-6-default" Oct 09 14:47:36 crc kubenswrapper[4762]: I1009 14:47:36.201076 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-6-default" Oct 09 14:47:36 crc kubenswrapper[4762]: I1009 14:47:36.322260 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ebccc3a18e03a323e57fce479d954def67380119873b5f83a52f1462211aa81f" Oct 09 14:47:36 crc kubenswrapper[4762]: I1009 14:47:36.322346 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-5-default" Oct 09 14:47:36 crc kubenswrapper[4762]: I1009 14:47:36.333510 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6mwws" event={"ID":"3b17ffb5-31c0-4737-a8fa-9c8138af2c25","Type":"ContainerStarted","Data":"8894d47ace53da084a0ce0aea595f7d861ecf676570b9b69ee9146e2a8f65ef0"} Oct 09 14:47:36 crc kubenswrapper[4762]: I1009 14:47:36.402812 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-kgg7f"] Oct 09 14:47:36 crc kubenswrapper[4762]: I1009 14:47:36.408005 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-kgg7f"] Oct 09 14:47:36 crc kubenswrapper[4762]: I1009 14:47:36.408150 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kgg7f" Oct 09 14:47:36 crc kubenswrapper[4762]: I1009 14:47:36.467399 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7c0606a9-0001-4f47-997f-194faab9d68a-catalog-content\") pod \"redhat-marketplace-kgg7f\" (UID: \"7c0606a9-0001-4f47-997f-194faab9d68a\") " pod="openshift-marketplace/redhat-marketplace-kgg7f" Oct 09 14:47:36 crc kubenswrapper[4762]: I1009 14:47:36.467651 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7c0606a9-0001-4f47-997f-194faab9d68a-utilities\") pod \"redhat-marketplace-kgg7f\" (UID: \"7c0606a9-0001-4f47-997f-194faab9d68a\") " pod="openshift-marketplace/redhat-marketplace-kgg7f" Oct 09 14:47:36 crc kubenswrapper[4762]: I1009 14:47:36.467801 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5lszj\" (UniqueName: \"kubernetes.io/projected/7c0606a9-0001-4f47-997f-194faab9d68a-kube-api-access-5lszj\") pod \"redhat-marketplace-kgg7f\" (UID: \"7c0606a9-0001-4f47-997f-194faab9d68a\") " pod="openshift-marketplace/redhat-marketplace-kgg7f" Oct 09 14:47:36 crc kubenswrapper[4762]: I1009 14:47:36.569138 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7c0606a9-0001-4f47-997f-194faab9d68a-catalog-content\") pod \"redhat-marketplace-kgg7f\" (UID: \"7c0606a9-0001-4f47-997f-194faab9d68a\") " pod="openshift-marketplace/redhat-marketplace-kgg7f" Oct 09 14:47:36 crc kubenswrapper[4762]: I1009 14:47:36.569306 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7c0606a9-0001-4f47-997f-194faab9d68a-utilities\") pod \"redhat-marketplace-kgg7f\" (UID: \"7c0606a9-0001-4f47-997f-194faab9d68a\") " pod="openshift-marketplace/redhat-marketplace-kgg7f" Oct 09 14:47:36 crc kubenswrapper[4762]: I1009 14:47:36.569365 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5lszj\" (UniqueName: \"kubernetes.io/projected/7c0606a9-0001-4f47-997f-194faab9d68a-kube-api-access-5lszj\") pod \"redhat-marketplace-kgg7f\" (UID: \"7c0606a9-0001-4f47-997f-194faab9d68a\") " pod="openshift-marketplace/redhat-marketplace-kgg7f" Oct 09 14:47:36 crc kubenswrapper[4762]: I1009 14:47:36.570034 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7c0606a9-0001-4f47-997f-194faab9d68a-utilities\") pod \"redhat-marketplace-kgg7f\" (UID: \"7c0606a9-0001-4f47-997f-194faab9d68a\") " pod="openshift-marketplace/redhat-marketplace-kgg7f" Oct 09 14:47:36 crc kubenswrapper[4762]: I1009 14:47:36.570269 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7c0606a9-0001-4f47-997f-194faab9d68a-catalog-content\") pod \"redhat-marketplace-kgg7f\" (UID: \"7c0606a9-0001-4f47-997f-194faab9d68a\") " pod="openshift-marketplace/redhat-marketplace-kgg7f" Oct 09 14:47:36 crc kubenswrapper[4762]: I1009 14:47:36.592110 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5lszj\" (UniqueName: \"kubernetes.io/projected/7c0606a9-0001-4f47-997f-194faab9d68a-kube-api-access-5lszj\") pod 
\"redhat-marketplace-kgg7f\" (UID: \"7c0606a9-0001-4f47-997f-194faab9d68a\") " pod="openshift-marketplace/redhat-marketplace-kgg7f" Oct 09 14:47:36 crc kubenswrapper[4762]: I1009 14:47:36.747545 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-6-default"] Oct 09 14:47:36 crc kubenswrapper[4762]: W1009 14:47:36.749458 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod06856e79_195d_4ed4_b724_f3372ab3130e.slice/crio-54b69be0235d82461767259034b1f72a2ce9710ea800f56cc2c237482618ee9d WatchSource:0}: Error finding container 54b69be0235d82461767259034b1f72a2ce9710ea800f56cc2c237482618ee9d: Status 404 returned error can't find the container with id 54b69be0235d82461767259034b1f72a2ce9710ea800f56cc2c237482618ee9d Oct 09 14:47:36 crc kubenswrapper[4762]: I1009 14:47:36.765016 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kgg7f" Oct 09 14:47:36 crc kubenswrapper[4762]: I1009 14:47:36.980529 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f271eb2e-f6cc-4deb-a4b1-41ef2c6bafb3" path="/var/lib/kubelet/pods/f271eb2e-f6cc-4deb-a4b1-41ef2c6bafb3/volumes" Oct 09 14:47:37 crc kubenswrapper[4762]: I1009 14:47:37.182256 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-kgg7f"] Oct 09 14:47:37 crc kubenswrapper[4762]: W1009 14:47:37.201275 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7c0606a9_0001_4f47_997f_194faab9d68a.slice/crio-62fd2510c64345184d777f0b4c2ed2af552f66e9e8f4c69ea03fe6028071b13c WatchSource:0}: Error finding container 62fd2510c64345184d777f0b4c2ed2af552f66e9e8f4c69ea03fe6028071b13c: Status 404 returned error can't find the container with id 62fd2510c64345184d777f0b4c2ed2af552f66e9e8f4c69ea03fe6028071b13c Oct 09 14:47:37 crc kubenswrapper[4762]: I1009 14:47:37.343436 4762 generic.go:334] "Generic (PLEG): container finished" podID="f136553c-e186-4706-9b54-ded4838711b4" containerID="4e7f57f56270abdaf50216e3e827779a02435c4e43182040d14c815fc5101515" exitCode=0 Oct 09 14:47:37 crc kubenswrapper[4762]: I1009 14:47:37.343528 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-989sk" event={"ID":"f136553c-e186-4706-9b54-ded4838711b4","Type":"ContainerDied","Data":"4e7f57f56270abdaf50216e3e827779a02435c4e43182040d14c815fc5101515"} Oct 09 14:47:37 crc kubenswrapper[4762]: I1009 14:47:37.345714 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kgg7f" event={"ID":"7c0606a9-0001-4f47-997f-194faab9d68a","Type":"ContainerStarted","Data":"62fd2510c64345184d777f0b4c2ed2af552f66e9e8f4c69ea03fe6028071b13c"} Oct 09 14:47:37 crc kubenswrapper[4762]: I1009 14:47:37.347624 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-6-default" event={"ID":"06856e79-195d-4ed4-b724-f3372ab3130e","Type":"ContainerStarted","Data":"c6e7f1be2eab9b2ec2521169e4430dfbc9c38012054b456a66d40cf929577bf1"} Oct 09 14:47:37 crc kubenswrapper[4762]: I1009 14:47:37.347710 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-6-default" event={"ID":"06856e79-195d-4ed4-b724-f3372ab3130e","Type":"ContainerStarted","Data":"54b69be0235d82461767259034b1f72a2ce9710ea800f56cc2c237482618ee9d"} Oct 09 14:47:37 crc kubenswrapper[4762]: I1009 
14:47:37.350476 4762 generic.go:334] "Generic (PLEG): container finished" podID="3b17ffb5-31c0-4737-a8fa-9c8138af2c25" containerID="8894d47ace53da084a0ce0aea595f7d861ecf676570b9b69ee9146e2a8f65ef0" exitCode=0 Oct 09 14:47:37 crc kubenswrapper[4762]: I1009 14:47:37.350520 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6mwws" event={"ID":"3b17ffb5-31c0-4737-a8fa-9c8138af2c25","Type":"ContainerDied","Data":"8894d47ace53da084a0ce0aea595f7d861ecf676570b9b69ee9146e2a8f65ef0"} Oct 09 14:47:37 crc kubenswrapper[4762]: I1009 14:47:37.350542 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6mwws" event={"ID":"3b17ffb5-31c0-4737-a8fa-9c8138af2c25","Type":"ContainerStarted","Data":"993898e97bf561dd676f2e6ef309df57a1f92d644a4d30ff0a8fba6a75a32323"} Oct 09 14:47:37 crc kubenswrapper[4762]: I1009 14:47:37.381558 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-6mwws" podStartSLOduration=2.721686337 podStartE2EDuration="4.381538278s" podCreationTimestamp="2025-10-09 14:47:33 +0000 UTC" firstStartedPulling="2025-10-09 14:47:35.301303907 +0000 UTC m=+4931.075094936" lastFinishedPulling="2025-10-09 14:47:36.961155828 +0000 UTC m=+4932.734946877" observedRunningTime="2025-10-09 14:47:37.37622018 +0000 UTC m=+4933.150011249" watchObservedRunningTime="2025-10-09 14:47:37.381538278 +0000 UTC m=+4933.155329317" Oct 09 14:47:37 crc kubenswrapper[4762]: I1009 14:47:37.398709 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/mariadb-client-6-default" podStartSLOduration=2.398689113 podStartE2EDuration="2.398689113s" podCreationTimestamp="2025-10-09 14:47:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 14:47:37.388354785 +0000 UTC m=+4933.162145834" watchObservedRunningTime="2025-10-09 14:47:37.398689113 +0000 UTC m=+4933.172480152" Oct 09 14:47:37 crc kubenswrapper[4762]: I1009 14:47:37.966920 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-6-default_06856e79-195d-4ed4-b724-f3372ab3130e/mariadb-client-6-default/0.log" Oct 09 14:47:38 crc kubenswrapper[4762]: I1009 14:47:38.359481 4762 generic.go:334] "Generic (PLEG): container finished" podID="7c0606a9-0001-4f47-997f-194faab9d68a" containerID="1e7df90b4f933a763aae3bddde0c36723e0829a80f7537362f6dbbf19616e0e5" exitCode=0 Oct 09 14:47:38 crc kubenswrapper[4762]: I1009 14:47:38.359541 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kgg7f" event={"ID":"7c0606a9-0001-4f47-997f-194faab9d68a","Type":"ContainerDied","Data":"1e7df90b4f933a763aae3bddde0c36723e0829a80f7537362f6dbbf19616e0e5"} Oct 09 14:47:38 crc kubenswrapper[4762]: I1009 14:47:38.361372 4762 generic.go:334] "Generic (PLEG): container finished" podID="06856e79-195d-4ed4-b724-f3372ab3130e" containerID="c6e7f1be2eab9b2ec2521169e4430dfbc9c38012054b456a66d40cf929577bf1" exitCode=0 Oct 09 14:47:38 crc kubenswrapper[4762]: I1009 14:47:38.361445 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-6-default" event={"ID":"06856e79-195d-4ed4-b724-f3372ab3130e","Type":"ContainerDied","Data":"c6e7f1be2eab9b2ec2521169e4430dfbc9c38012054b456a66d40cf929577bf1"} Oct 09 14:47:38 crc kubenswrapper[4762]: I1009 14:47:38.370086 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/community-operators-989sk" event={"ID":"f136553c-e186-4706-9b54-ded4838711b4","Type":"ContainerStarted","Data":"ca1137e7045a34a793d4b22b598cb9ce537b28acbfee24ac9f62bcf4785d59ed"} Oct 09 14:47:38 crc kubenswrapper[4762]: I1009 14:47:38.425088 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-989sk" podStartSLOduration=1.9301337219999999 podStartE2EDuration="4.425066467s" podCreationTimestamp="2025-10-09 14:47:34 +0000 UTC" firstStartedPulling="2025-10-09 14:47:35.304191681 +0000 UTC m=+4931.077982720" lastFinishedPulling="2025-10-09 14:47:37.799124416 +0000 UTC m=+4933.572915465" observedRunningTime="2025-10-09 14:47:38.418676502 +0000 UTC m=+4934.192467561" watchObservedRunningTime="2025-10-09 14:47:38.425066467 +0000 UTC m=+4934.198857516" Oct 09 14:47:39 crc kubenswrapper[4762]: I1009 14:47:39.788223 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-6-default" Oct 09 14:47:39 crc kubenswrapper[4762]: I1009 14:47:39.824570 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-6-default"] Oct 09 14:47:39 crc kubenswrapper[4762]: I1009 14:47:39.832311 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-6-default"] Oct 09 14:47:39 crc kubenswrapper[4762]: I1009 14:47:39.919199 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-42dtw\" (UniqueName: \"kubernetes.io/projected/06856e79-195d-4ed4-b724-f3372ab3130e-kube-api-access-42dtw\") pod \"06856e79-195d-4ed4-b724-f3372ab3130e\" (UID: \"06856e79-195d-4ed4-b724-f3372ab3130e\") " Oct 09 14:47:39 crc kubenswrapper[4762]: I1009 14:47:39.925941 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/06856e79-195d-4ed4-b724-f3372ab3130e-kube-api-access-42dtw" (OuterVolumeSpecName: "kube-api-access-42dtw") pod "06856e79-195d-4ed4-b724-f3372ab3130e" (UID: "06856e79-195d-4ed4-b724-f3372ab3130e"). InnerVolumeSpecName "kube-api-access-42dtw". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 14:47:39 crc kubenswrapper[4762]: I1009 14:47:39.964668 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-7-default"] Oct 09 14:47:39 crc kubenswrapper[4762]: E1009 14:47:39.966745 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06856e79-195d-4ed4-b724-f3372ab3130e" containerName="mariadb-client-6-default" Oct 09 14:47:39 crc kubenswrapper[4762]: I1009 14:47:39.966767 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="06856e79-195d-4ed4-b724-f3372ab3130e" containerName="mariadb-client-6-default" Oct 09 14:47:39 crc kubenswrapper[4762]: I1009 14:47:39.966933 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="06856e79-195d-4ed4-b724-f3372ab3130e" containerName="mariadb-client-6-default" Oct 09 14:47:39 crc kubenswrapper[4762]: I1009 14:47:39.968353 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-7-default" Oct 09 14:47:39 crc kubenswrapper[4762]: I1009 14:47:39.976326 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-7-default"] Oct 09 14:47:40 crc kubenswrapper[4762]: I1009 14:47:40.021243 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7kph4\" (UniqueName: \"kubernetes.io/projected/c231d529-952d-4e30-9817-2e80b6a87122-kube-api-access-7kph4\") pod \"mariadb-client-7-default\" (UID: \"c231d529-952d-4e30-9817-2e80b6a87122\") " pod="openstack/mariadb-client-7-default" Oct 09 14:47:40 crc kubenswrapper[4762]: I1009 14:47:40.021672 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-42dtw\" (UniqueName: \"kubernetes.io/projected/06856e79-195d-4ed4-b724-f3372ab3130e-kube-api-access-42dtw\") on node \"crc\" DevicePath \"\"" Oct 09 14:47:40 crc kubenswrapper[4762]: I1009 14:47:40.122514 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7kph4\" (UniqueName: \"kubernetes.io/projected/c231d529-952d-4e30-9817-2e80b6a87122-kube-api-access-7kph4\") pod \"mariadb-client-7-default\" (UID: \"c231d529-952d-4e30-9817-2e80b6a87122\") " pod="openstack/mariadb-client-7-default" Oct 09 14:47:40 crc kubenswrapper[4762]: I1009 14:47:40.139574 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7kph4\" (UniqueName: \"kubernetes.io/projected/c231d529-952d-4e30-9817-2e80b6a87122-kube-api-access-7kph4\") pod \"mariadb-client-7-default\" (UID: \"c231d529-952d-4e30-9817-2e80b6a87122\") " pod="openstack/mariadb-client-7-default" Oct 09 14:47:40 crc kubenswrapper[4762]: I1009 14:47:40.289139 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-7-default" Oct 09 14:47:40 crc kubenswrapper[4762]: I1009 14:47:40.391434 4762 generic.go:334] "Generic (PLEG): container finished" podID="7c0606a9-0001-4f47-997f-194faab9d68a" containerID="cae61ee609d60d1b57a620f218ad1fe21271e4302f35a4d5dfb77f210091d79d" exitCode=0 Oct 09 14:47:40 crc kubenswrapper[4762]: I1009 14:47:40.391840 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kgg7f" event={"ID":"7c0606a9-0001-4f47-997f-194faab9d68a","Type":"ContainerDied","Data":"cae61ee609d60d1b57a620f218ad1fe21271e4302f35a4d5dfb77f210091d79d"} Oct 09 14:47:40 crc kubenswrapper[4762]: I1009 14:47:40.396099 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="54b69be0235d82461767259034b1f72a2ce9710ea800f56cc2c237482618ee9d" Oct 09 14:47:40 crc kubenswrapper[4762]: I1009 14:47:40.396175 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-6-default" Oct 09 14:47:40 crc kubenswrapper[4762]: I1009 14:47:40.865564 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-7-default"] Oct 09 14:47:40 crc kubenswrapper[4762]: W1009 14:47:40.872353 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc231d529_952d_4e30_9817_2e80b6a87122.slice/crio-549e5cc32cc428a811a3650533d6e66fcb6a3c9ca09facdb3be68bcbeee5eec6 WatchSource:0}: Error finding container 549e5cc32cc428a811a3650533d6e66fcb6a3c9ca09facdb3be68bcbeee5eec6: Status 404 returned error can't find the container with id 549e5cc32cc428a811a3650533d6e66fcb6a3c9ca09facdb3be68bcbeee5eec6 Oct 09 14:47:40 crc kubenswrapper[4762]: I1009 14:47:40.975723 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="06856e79-195d-4ed4-b724-f3372ab3130e" path="/var/lib/kubelet/pods/06856e79-195d-4ed4-b724-f3372ab3130e/volumes" Oct 09 14:47:41 crc kubenswrapper[4762]: I1009 14:47:41.410856 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kgg7f" event={"ID":"7c0606a9-0001-4f47-997f-194faab9d68a","Type":"ContainerStarted","Data":"132a7a6455666fb2d13c4ec026d19f106fd4cc5d181aafeb54541c2f5ce7cf7a"} Oct 09 14:47:41 crc kubenswrapper[4762]: I1009 14:47:41.413309 4762 generic.go:334] "Generic (PLEG): container finished" podID="c231d529-952d-4e30-9817-2e80b6a87122" containerID="7e7a96f947cf1dd28eaa0931da4a1b1556c156d60eeb82b43aca465eaea9374a" exitCode=0 Oct 09 14:47:41 crc kubenswrapper[4762]: I1009 14:47:41.413349 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-7-default" event={"ID":"c231d529-952d-4e30-9817-2e80b6a87122","Type":"ContainerDied","Data":"7e7a96f947cf1dd28eaa0931da4a1b1556c156d60eeb82b43aca465eaea9374a"} Oct 09 14:47:41 crc kubenswrapper[4762]: I1009 14:47:41.413371 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-7-default" event={"ID":"c231d529-952d-4e30-9817-2e80b6a87122","Type":"ContainerStarted","Data":"549e5cc32cc428a811a3650533d6e66fcb6a3c9ca09facdb3be68bcbeee5eec6"} Oct 09 14:47:41 crc kubenswrapper[4762]: I1009 14:47:41.435821 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-kgg7f" podStartSLOduration=2.961298862 podStartE2EDuration="5.435799677s" podCreationTimestamp="2025-10-09 14:47:36 +0000 UTC" firstStartedPulling="2025-10-09 14:47:38.36270138 +0000 UTC m=+4934.136492409" lastFinishedPulling="2025-10-09 14:47:40.837202185 +0000 UTC m=+4936.610993224" observedRunningTime="2025-10-09 14:47:41.4343853 +0000 UTC m=+4937.208176349" watchObservedRunningTime="2025-10-09 14:47:41.435799677 +0000 UTC m=+4937.209590716" Oct 09 14:47:42 crc kubenswrapper[4762]: I1009 14:47:42.803436 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-7-default" Oct 09 14:47:42 crc kubenswrapper[4762]: I1009 14:47:42.822520 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-7-default_c231d529-952d-4e30-9817-2e80b6a87122/mariadb-client-7-default/0.log" Oct 09 14:47:42 crc kubenswrapper[4762]: I1009 14:47:42.852355 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-7-default"] Oct 09 14:47:42 crc kubenswrapper[4762]: I1009 14:47:42.859922 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-7-default"] Oct 09 14:47:42 crc kubenswrapper[4762]: I1009 14:47:42.867478 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7kph4\" (UniqueName: \"kubernetes.io/projected/c231d529-952d-4e30-9817-2e80b6a87122-kube-api-access-7kph4\") pod \"c231d529-952d-4e30-9817-2e80b6a87122\" (UID: \"c231d529-952d-4e30-9817-2e80b6a87122\") " Oct 09 14:47:42 crc kubenswrapper[4762]: I1009 14:47:42.872833 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c231d529-952d-4e30-9817-2e80b6a87122-kube-api-access-7kph4" (OuterVolumeSpecName: "kube-api-access-7kph4") pod "c231d529-952d-4e30-9817-2e80b6a87122" (UID: "c231d529-952d-4e30-9817-2e80b6a87122"). InnerVolumeSpecName "kube-api-access-7kph4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 14:47:42 crc kubenswrapper[4762]: I1009 14:47:42.970523 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7kph4\" (UniqueName: \"kubernetes.io/projected/c231d529-952d-4e30-9817-2e80b6a87122-kube-api-access-7kph4\") on node \"crc\" DevicePath \"\"" Oct 09 14:47:42 crc kubenswrapper[4762]: I1009 14:47:42.985438 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c231d529-952d-4e30-9817-2e80b6a87122" path="/var/lib/kubelet/pods/c231d529-952d-4e30-9817-2e80b6a87122/volumes" Oct 09 14:47:43 crc kubenswrapper[4762]: I1009 14:47:43.004555 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-2"] Oct 09 14:47:43 crc kubenswrapper[4762]: E1009 14:47:43.004969 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c231d529-952d-4e30-9817-2e80b6a87122" containerName="mariadb-client-7-default" Oct 09 14:47:43 crc kubenswrapper[4762]: I1009 14:47:43.004987 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="c231d529-952d-4e30-9817-2e80b6a87122" containerName="mariadb-client-7-default" Oct 09 14:47:43 crc kubenswrapper[4762]: I1009 14:47:43.005191 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="c231d529-952d-4e30-9817-2e80b6a87122" containerName="mariadb-client-7-default" Oct 09 14:47:43 crc kubenswrapper[4762]: I1009 14:47:43.005753 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-2" Oct 09 14:47:43 crc kubenswrapper[4762]: I1009 14:47:43.013373 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-2"] Oct 09 14:47:43 crc kubenswrapper[4762]: I1009 14:47:43.071679 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-drrjv\" (UniqueName: \"kubernetes.io/projected/24274578-a57d-47d9-bf81-49d83c11452d-kube-api-access-drrjv\") pod \"mariadb-client-2\" (UID: \"24274578-a57d-47d9-bf81-49d83c11452d\") " pod="openstack/mariadb-client-2" Oct 09 14:47:43 crc kubenswrapper[4762]: I1009 14:47:43.173180 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-drrjv\" (UniqueName: \"kubernetes.io/projected/24274578-a57d-47d9-bf81-49d83c11452d-kube-api-access-drrjv\") pod \"mariadb-client-2\" (UID: \"24274578-a57d-47d9-bf81-49d83c11452d\") " pod="openstack/mariadb-client-2" Oct 09 14:47:43 crc kubenswrapper[4762]: I1009 14:47:43.195740 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-drrjv\" (UniqueName: \"kubernetes.io/projected/24274578-a57d-47d9-bf81-49d83c11452d-kube-api-access-drrjv\") pod \"mariadb-client-2\" (UID: \"24274578-a57d-47d9-bf81-49d83c11452d\") " pod="openstack/mariadb-client-2" Oct 09 14:47:43 crc kubenswrapper[4762]: I1009 14:47:43.329941 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-2" Oct 09 14:47:43 crc kubenswrapper[4762]: I1009 14:47:43.500733 4762 scope.go:117] "RemoveContainer" containerID="7e7a96f947cf1dd28eaa0931da4a1b1556c156d60eeb82b43aca465eaea9374a" Oct 09 14:47:43 crc kubenswrapper[4762]: I1009 14:47:43.500861 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-7-default" Oct 09 14:47:43 crc kubenswrapper[4762]: I1009 14:47:43.708545 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-2"] Oct 09 14:47:43 crc kubenswrapper[4762]: W1009 14:47:43.713254 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod24274578_a57d_47d9_bf81_49d83c11452d.slice/crio-616110cbbc7b9987f08c9ce7116bea317055736162db1f9ba536e6008b567e88 WatchSource:0}: Error finding container 616110cbbc7b9987f08c9ce7116bea317055736162db1f9ba536e6008b567e88: Status 404 returned error can't find the container with id 616110cbbc7b9987f08c9ce7116bea317055736162db1f9ba536e6008b567e88 Oct 09 14:47:44 crc kubenswrapper[4762]: I1009 14:47:44.346767 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-6mwws" Oct 09 14:47:44 crc kubenswrapper[4762]: I1009 14:47:44.346856 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-6mwws" Oct 09 14:47:44 crc kubenswrapper[4762]: I1009 14:47:44.414874 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-6mwws" Oct 09 14:47:44 crc kubenswrapper[4762]: I1009 14:47:44.512894 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-989sk" Oct 09 14:47:44 crc kubenswrapper[4762]: I1009 14:47:44.512991 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-989sk" Oct 09 14:47:44 crc kubenswrapper[4762]: I1009 14:47:44.516627 4762 generic.go:334] "Generic (PLEG): container finished" podID="24274578-a57d-47d9-bf81-49d83c11452d" containerID="ee89763b25466905e8dd8baf2edb3e80825ee66aa884f9eb56d914103cb24cea" exitCode=0 Oct 09 14:47:44 crc kubenswrapper[4762]: I1009 14:47:44.516785 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-2" event={"ID":"24274578-a57d-47d9-bf81-49d83c11452d","Type":"ContainerDied","Data":"ee89763b25466905e8dd8baf2edb3e80825ee66aa884f9eb56d914103cb24cea"} Oct 09 14:47:44 crc kubenswrapper[4762]: I1009 14:47:44.516833 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-2" event={"ID":"24274578-a57d-47d9-bf81-49d83c11452d","Type":"ContainerStarted","Data":"616110cbbc7b9987f08c9ce7116bea317055736162db1f9ba536e6008b567e88"} Oct 09 14:47:44 crc kubenswrapper[4762]: I1009 14:47:44.583319 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-6mwws" Oct 09 14:47:44 crc kubenswrapper[4762]: I1009 14:47:44.588213 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-989sk" Oct 09 14:47:45 crc kubenswrapper[4762]: I1009 14:47:45.189723 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-6mwws"] Oct 09 14:47:45 crc kubenswrapper[4762]: I1009 14:47:45.595119 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-989sk" Oct 09 14:47:45 crc kubenswrapper[4762]: I1009 14:47:45.913625 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-2" Oct 09 14:47:45 crc kubenswrapper[4762]: I1009 14:47:45.931482 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-2_24274578-a57d-47d9-bf81-49d83c11452d/mariadb-client-2/0.log" Oct 09 14:47:45 crc kubenswrapper[4762]: I1009 14:47:45.959785 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-2"] Oct 09 14:47:45 crc kubenswrapper[4762]: I1009 14:47:45.965552 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-2"] Oct 09 14:47:46 crc kubenswrapper[4762]: I1009 14:47:46.055434 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-drrjv\" (UniqueName: \"kubernetes.io/projected/24274578-a57d-47d9-bf81-49d83c11452d-kube-api-access-drrjv\") pod \"24274578-a57d-47d9-bf81-49d83c11452d\" (UID: \"24274578-a57d-47d9-bf81-49d83c11452d\") " Oct 09 14:47:46 crc kubenswrapper[4762]: I1009 14:47:46.062120 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/24274578-a57d-47d9-bf81-49d83c11452d-kube-api-access-drrjv" (OuterVolumeSpecName: "kube-api-access-drrjv") pod "24274578-a57d-47d9-bf81-49d83c11452d" (UID: "24274578-a57d-47d9-bf81-49d83c11452d"). InnerVolumeSpecName "kube-api-access-drrjv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 14:47:46 crc kubenswrapper[4762]: I1009 14:47:46.157420 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-drrjv\" (UniqueName: \"kubernetes.io/projected/24274578-a57d-47d9-bf81-49d83c11452d-kube-api-access-drrjv\") on node \"crc\" DevicePath \"\"" Oct 09 14:47:46 crc kubenswrapper[4762]: I1009 14:47:46.534875 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="616110cbbc7b9987f08c9ce7116bea317055736162db1f9ba536e6008b567e88" Oct 09 14:47:46 crc kubenswrapper[4762]: I1009 14:47:46.535025 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-6mwws" podUID="3b17ffb5-31c0-4737-a8fa-9c8138af2c25" containerName="registry-server" containerID="cri-o://993898e97bf561dd676f2e6ef309df57a1f92d644a4d30ff0a8fba6a75a32323" gracePeriod=2 Oct 09 14:47:46 crc kubenswrapper[4762]: I1009 14:47:46.535389 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-2" Oct 09 14:47:46 crc kubenswrapper[4762]: I1009 14:47:46.766369 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-kgg7f" Oct 09 14:47:46 crc kubenswrapper[4762]: I1009 14:47:46.766457 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-kgg7f" Oct 09 14:47:46 crc kubenswrapper[4762]: I1009 14:47:46.834083 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-kgg7f" Oct 09 14:47:46 crc kubenswrapper[4762]: I1009 14:47:46.987264 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="24274578-a57d-47d9-bf81-49d83c11452d" path="/var/lib/kubelet/pods/24274578-a57d-47d9-bf81-49d83c11452d/volumes" Oct 09 14:47:46 crc kubenswrapper[4762]: I1009 14:47:46.988355 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-989sk"] Oct 09 14:47:47 crc kubenswrapper[4762]: I1009 14:47:47.047612 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-6mwws" Oct 09 14:47:47 crc kubenswrapper[4762]: I1009 14:47:47.176396 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-47fp5\" (UniqueName: \"kubernetes.io/projected/3b17ffb5-31c0-4737-a8fa-9c8138af2c25-kube-api-access-47fp5\") pod \"3b17ffb5-31c0-4737-a8fa-9c8138af2c25\" (UID: \"3b17ffb5-31c0-4737-a8fa-9c8138af2c25\") " Oct 09 14:47:47 crc kubenswrapper[4762]: I1009 14:47:47.176803 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3b17ffb5-31c0-4737-a8fa-9c8138af2c25-catalog-content\") pod \"3b17ffb5-31c0-4737-a8fa-9c8138af2c25\" (UID: \"3b17ffb5-31c0-4737-a8fa-9c8138af2c25\") " Oct 09 14:47:47 crc kubenswrapper[4762]: I1009 14:47:47.176871 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3b17ffb5-31c0-4737-a8fa-9c8138af2c25-utilities\") pod \"3b17ffb5-31c0-4737-a8fa-9c8138af2c25\" (UID: \"3b17ffb5-31c0-4737-a8fa-9c8138af2c25\") " Oct 09 14:47:47 crc kubenswrapper[4762]: I1009 14:47:47.178712 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3b17ffb5-31c0-4737-a8fa-9c8138af2c25-utilities" (OuterVolumeSpecName: "utilities") pod "3b17ffb5-31c0-4737-a8fa-9c8138af2c25" (UID: "3b17ffb5-31c0-4737-a8fa-9c8138af2c25"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 14:47:47 crc kubenswrapper[4762]: I1009 14:47:47.184677 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3b17ffb5-31c0-4737-a8fa-9c8138af2c25-kube-api-access-47fp5" (OuterVolumeSpecName: "kube-api-access-47fp5") pod "3b17ffb5-31c0-4737-a8fa-9c8138af2c25" (UID: "3b17ffb5-31c0-4737-a8fa-9c8138af2c25"). InnerVolumeSpecName "kube-api-access-47fp5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 14:47:47 crc kubenswrapper[4762]: I1009 14:47:47.219447 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3b17ffb5-31c0-4737-a8fa-9c8138af2c25-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3b17ffb5-31c0-4737-a8fa-9c8138af2c25" (UID: "3b17ffb5-31c0-4737-a8fa-9c8138af2c25"). 
InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 14:47:47 crc kubenswrapper[4762]: I1009 14:47:47.278216 4762 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3b17ffb5-31c0-4737-a8fa-9c8138af2c25-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 14:47:47 crc kubenswrapper[4762]: I1009 14:47:47.278251 4762 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3b17ffb5-31c0-4737-a8fa-9c8138af2c25-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 14:47:47 crc kubenswrapper[4762]: I1009 14:47:47.278267 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-47fp5\" (UniqueName: \"kubernetes.io/projected/3b17ffb5-31c0-4737-a8fa-9c8138af2c25-kube-api-access-47fp5\") on node \"crc\" DevicePath \"\"" Oct 09 14:47:47 crc kubenswrapper[4762]: I1009 14:47:47.548712 4762 generic.go:334] "Generic (PLEG): container finished" podID="3b17ffb5-31c0-4737-a8fa-9c8138af2c25" containerID="993898e97bf561dd676f2e6ef309df57a1f92d644a4d30ff0a8fba6a75a32323" exitCode=0 Oct 09 14:47:47 crc kubenswrapper[4762]: I1009 14:47:47.549424 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6mwws" event={"ID":"3b17ffb5-31c0-4737-a8fa-9c8138af2c25","Type":"ContainerDied","Data":"993898e97bf561dd676f2e6ef309df57a1f92d644a4d30ff0a8fba6a75a32323"} Oct 09 14:47:47 crc kubenswrapper[4762]: I1009 14:47:47.549481 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6mwws" event={"ID":"3b17ffb5-31c0-4737-a8fa-9c8138af2c25","Type":"ContainerDied","Data":"b3b7d440d1f725468a5ba7af41405f09a32c2d1af8ddbdafe6f9c213cdbf703e"} Oct 09 14:47:47 crc kubenswrapper[4762]: I1009 14:47:47.549523 4762 scope.go:117] "RemoveContainer" containerID="993898e97bf561dd676f2e6ef309df57a1f92d644a4d30ff0a8fba6a75a32323" Oct 09 14:47:47 crc kubenswrapper[4762]: I1009 14:47:47.549587 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-6mwws" Oct 09 14:47:47 crc kubenswrapper[4762]: I1009 14:47:47.588878 4762 scope.go:117] "RemoveContainer" containerID="8894d47ace53da084a0ce0aea595f7d861ecf676570b9b69ee9146e2a8f65ef0" Oct 09 14:47:47 crc kubenswrapper[4762]: I1009 14:47:47.592617 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-6mwws"] Oct 09 14:47:47 crc kubenswrapper[4762]: I1009 14:47:47.597700 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-6mwws"] Oct 09 14:47:47 crc kubenswrapper[4762]: I1009 14:47:47.613843 4762 scope.go:117] "RemoveContainer" containerID="abfa408c7e5a153bc7d0c90b12d2afa20186913a512627ae7a2a3a18559099db" Oct 09 14:47:47 crc kubenswrapper[4762]: I1009 14:47:47.614198 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-kgg7f" Oct 09 14:47:47 crc kubenswrapper[4762]: I1009 14:47:47.645035 4762 scope.go:117] "RemoveContainer" containerID="993898e97bf561dd676f2e6ef309df57a1f92d644a4d30ff0a8fba6a75a32323" Oct 09 14:47:47 crc kubenswrapper[4762]: E1009 14:47:47.645518 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"993898e97bf561dd676f2e6ef309df57a1f92d644a4d30ff0a8fba6a75a32323\": container with ID starting with 993898e97bf561dd676f2e6ef309df57a1f92d644a4d30ff0a8fba6a75a32323 not found: ID does not exist" containerID="993898e97bf561dd676f2e6ef309df57a1f92d644a4d30ff0a8fba6a75a32323" Oct 09 14:47:47 crc kubenswrapper[4762]: I1009 14:47:47.645566 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"993898e97bf561dd676f2e6ef309df57a1f92d644a4d30ff0a8fba6a75a32323"} err="failed to get container status \"993898e97bf561dd676f2e6ef309df57a1f92d644a4d30ff0a8fba6a75a32323\": rpc error: code = NotFound desc = could not find container \"993898e97bf561dd676f2e6ef309df57a1f92d644a4d30ff0a8fba6a75a32323\": container with ID starting with 993898e97bf561dd676f2e6ef309df57a1f92d644a4d30ff0a8fba6a75a32323 not found: ID does not exist" Oct 09 14:47:47 crc kubenswrapper[4762]: I1009 14:47:47.645593 4762 scope.go:117] "RemoveContainer" containerID="8894d47ace53da084a0ce0aea595f7d861ecf676570b9b69ee9146e2a8f65ef0" Oct 09 14:47:47 crc kubenswrapper[4762]: E1009 14:47:47.646151 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8894d47ace53da084a0ce0aea595f7d861ecf676570b9b69ee9146e2a8f65ef0\": container with ID starting with 8894d47ace53da084a0ce0aea595f7d861ecf676570b9b69ee9146e2a8f65ef0 not found: ID does not exist" containerID="8894d47ace53da084a0ce0aea595f7d861ecf676570b9b69ee9146e2a8f65ef0" Oct 09 14:47:47 crc kubenswrapper[4762]: I1009 14:47:47.646183 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8894d47ace53da084a0ce0aea595f7d861ecf676570b9b69ee9146e2a8f65ef0"} err="failed to get container status \"8894d47ace53da084a0ce0aea595f7d861ecf676570b9b69ee9146e2a8f65ef0\": rpc error: code = NotFound desc = could not find container \"8894d47ace53da084a0ce0aea595f7d861ecf676570b9b69ee9146e2a8f65ef0\": container with ID starting with 8894d47ace53da084a0ce0aea595f7d861ecf676570b9b69ee9146e2a8f65ef0 not found: ID does not exist" Oct 09 14:47:47 crc kubenswrapper[4762]: I1009 14:47:47.646201 4762 scope.go:117] "RemoveContainer" 
containerID="abfa408c7e5a153bc7d0c90b12d2afa20186913a512627ae7a2a3a18559099db" Oct 09 14:47:47 crc kubenswrapper[4762]: E1009 14:47:47.646590 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"abfa408c7e5a153bc7d0c90b12d2afa20186913a512627ae7a2a3a18559099db\": container with ID starting with abfa408c7e5a153bc7d0c90b12d2afa20186913a512627ae7a2a3a18559099db not found: ID does not exist" containerID="abfa408c7e5a153bc7d0c90b12d2afa20186913a512627ae7a2a3a18559099db" Oct 09 14:47:47 crc kubenswrapper[4762]: I1009 14:47:47.646619 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"abfa408c7e5a153bc7d0c90b12d2afa20186913a512627ae7a2a3a18559099db"} err="failed to get container status \"abfa408c7e5a153bc7d0c90b12d2afa20186913a512627ae7a2a3a18559099db\": rpc error: code = NotFound desc = could not find container \"abfa408c7e5a153bc7d0c90b12d2afa20186913a512627ae7a2a3a18559099db\": container with ID starting with abfa408c7e5a153bc7d0c90b12d2afa20186913a512627ae7a2a3a18559099db not found: ID does not exist" Oct 09 14:47:48 crc kubenswrapper[4762]: I1009 14:47:48.559548 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-989sk" podUID="f136553c-e186-4706-9b54-ded4838711b4" containerName="registry-server" containerID="cri-o://ca1137e7045a34a793d4b22b598cb9ce537b28acbfee24ac9f62bcf4785d59ed" gracePeriod=2 Oct 09 14:47:48 crc kubenswrapper[4762]: I1009 14:47:48.927805 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-989sk" Oct 09 14:47:48 crc kubenswrapper[4762]: I1009 14:47:48.975046 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3b17ffb5-31c0-4737-a8fa-9c8138af2c25" path="/var/lib/kubelet/pods/3b17ffb5-31c0-4737-a8fa-9c8138af2c25/volumes" Oct 09 14:47:49 crc kubenswrapper[4762]: I1009 14:47:49.008355 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f136553c-e186-4706-9b54-ded4838711b4-utilities\") pod \"f136553c-e186-4706-9b54-ded4838711b4\" (UID: \"f136553c-e186-4706-9b54-ded4838711b4\") " Oct 09 14:47:49 crc kubenswrapper[4762]: I1009 14:47:49.008419 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f136553c-e186-4706-9b54-ded4838711b4-catalog-content\") pod \"f136553c-e186-4706-9b54-ded4838711b4\" (UID: \"f136553c-e186-4706-9b54-ded4838711b4\") " Oct 09 14:47:49 crc kubenswrapper[4762]: I1009 14:47:49.008443 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-876cs\" (UniqueName: \"kubernetes.io/projected/f136553c-e186-4706-9b54-ded4838711b4-kube-api-access-876cs\") pod \"f136553c-e186-4706-9b54-ded4838711b4\" (UID: \"f136553c-e186-4706-9b54-ded4838711b4\") " Oct 09 14:47:49 crc kubenswrapper[4762]: I1009 14:47:49.010539 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f136553c-e186-4706-9b54-ded4838711b4-utilities" (OuterVolumeSpecName: "utilities") pod "f136553c-e186-4706-9b54-ded4838711b4" (UID: "f136553c-e186-4706-9b54-ded4838711b4"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 14:47:49 crc kubenswrapper[4762]: I1009 14:47:49.013279 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f136553c-e186-4706-9b54-ded4838711b4-kube-api-access-876cs" (OuterVolumeSpecName: "kube-api-access-876cs") pod "f136553c-e186-4706-9b54-ded4838711b4" (UID: "f136553c-e186-4706-9b54-ded4838711b4"). InnerVolumeSpecName "kube-api-access-876cs". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 14:47:49 crc kubenswrapper[4762]: I1009 14:47:49.055413 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f136553c-e186-4706-9b54-ded4838711b4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f136553c-e186-4706-9b54-ded4838711b4" (UID: "f136553c-e186-4706-9b54-ded4838711b4"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 14:47:49 crc kubenswrapper[4762]: I1009 14:47:49.110676 4762 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f136553c-e186-4706-9b54-ded4838711b4-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 14:47:49 crc kubenswrapper[4762]: I1009 14:47:49.110724 4762 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f136553c-e186-4706-9b54-ded4838711b4-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 14:47:49 crc kubenswrapper[4762]: I1009 14:47:49.110750 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-876cs\" (UniqueName: \"kubernetes.io/projected/f136553c-e186-4706-9b54-ded4838711b4-kube-api-access-876cs\") on node \"crc\" DevicePath \"\"" Oct 09 14:47:49 crc kubenswrapper[4762]: I1009 14:47:49.386496 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-kgg7f"] Oct 09 14:47:49 crc kubenswrapper[4762]: I1009 14:47:49.568468 4762 generic.go:334] "Generic (PLEG): container finished" podID="f136553c-e186-4706-9b54-ded4838711b4" containerID="ca1137e7045a34a793d4b22b598cb9ce537b28acbfee24ac9f62bcf4785d59ed" exitCode=0 Oct 09 14:47:49 crc kubenswrapper[4762]: I1009 14:47:49.569457 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-kgg7f" podUID="7c0606a9-0001-4f47-997f-194faab9d68a" containerName="registry-server" containerID="cri-o://132a7a6455666fb2d13c4ec026d19f106fd4cc5d181aafeb54541c2f5ce7cf7a" gracePeriod=2 Oct 09 14:47:49 crc kubenswrapper[4762]: I1009 14:47:49.569913 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-989sk" Oct 09 14:47:49 crc kubenswrapper[4762]: I1009 14:47:49.569932 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-989sk" event={"ID":"f136553c-e186-4706-9b54-ded4838711b4","Type":"ContainerDied","Data":"ca1137e7045a34a793d4b22b598cb9ce537b28acbfee24ac9f62bcf4785d59ed"} Oct 09 14:47:49 crc kubenswrapper[4762]: I1009 14:47:49.572307 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-989sk" event={"ID":"f136553c-e186-4706-9b54-ded4838711b4","Type":"ContainerDied","Data":"190763152a01ad603185424ae2ba997daeda1fb6bb0f243c1d234b19eedd92fc"} Oct 09 14:47:49 crc kubenswrapper[4762]: I1009 14:47:49.572340 4762 scope.go:117] "RemoveContainer" containerID="ca1137e7045a34a793d4b22b598cb9ce537b28acbfee24ac9f62bcf4785d59ed" Oct 09 14:47:49 crc kubenswrapper[4762]: I1009 14:47:49.610358 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-989sk"] Oct 09 14:47:49 crc kubenswrapper[4762]: I1009 14:47:49.615944 4762 scope.go:117] "RemoveContainer" containerID="4e7f57f56270abdaf50216e3e827779a02435c4e43182040d14c815fc5101515" Oct 09 14:47:49 crc kubenswrapper[4762]: I1009 14:47:49.617107 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-989sk"] Oct 09 14:47:49 crc kubenswrapper[4762]: I1009 14:47:49.665933 4762 scope.go:117] "RemoveContainer" containerID="c8eef6c6cb4dbb4d313ec98b085e24536d44e10bb84980c5932ba780c47e09d5" Oct 09 14:47:49 crc kubenswrapper[4762]: I1009 14:47:49.742203 4762 scope.go:117] "RemoveContainer" containerID="ca1137e7045a34a793d4b22b598cb9ce537b28acbfee24ac9f62bcf4785d59ed" Oct 09 14:47:49 crc kubenswrapper[4762]: E1009 14:47:49.742586 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ca1137e7045a34a793d4b22b598cb9ce537b28acbfee24ac9f62bcf4785d59ed\": container with ID starting with ca1137e7045a34a793d4b22b598cb9ce537b28acbfee24ac9f62bcf4785d59ed not found: ID does not exist" containerID="ca1137e7045a34a793d4b22b598cb9ce537b28acbfee24ac9f62bcf4785d59ed" Oct 09 14:47:49 crc kubenswrapper[4762]: I1009 14:47:49.742614 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ca1137e7045a34a793d4b22b598cb9ce537b28acbfee24ac9f62bcf4785d59ed"} err="failed to get container status \"ca1137e7045a34a793d4b22b598cb9ce537b28acbfee24ac9f62bcf4785d59ed\": rpc error: code = NotFound desc = could not find container \"ca1137e7045a34a793d4b22b598cb9ce537b28acbfee24ac9f62bcf4785d59ed\": container with ID starting with ca1137e7045a34a793d4b22b598cb9ce537b28acbfee24ac9f62bcf4785d59ed not found: ID does not exist" Oct 09 14:47:49 crc kubenswrapper[4762]: I1009 14:47:49.742654 4762 scope.go:117] "RemoveContainer" containerID="4e7f57f56270abdaf50216e3e827779a02435c4e43182040d14c815fc5101515" Oct 09 14:47:49 crc kubenswrapper[4762]: E1009 14:47:49.743367 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4e7f57f56270abdaf50216e3e827779a02435c4e43182040d14c815fc5101515\": container with ID starting with 4e7f57f56270abdaf50216e3e827779a02435c4e43182040d14c815fc5101515 not found: ID does not exist" containerID="4e7f57f56270abdaf50216e3e827779a02435c4e43182040d14c815fc5101515" Oct 09 14:47:49 crc kubenswrapper[4762]: I1009 14:47:49.743393 4762 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4e7f57f56270abdaf50216e3e827779a02435c4e43182040d14c815fc5101515"} err="failed to get container status \"4e7f57f56270abdaf50216e3e827779a02435c4e43182040d14c815fc5101515\": rpc error: code = NotFound desc = could not find container \"4e7f57f56270abdaf50216e3e827779a02435c4e43182040d14c815fc5101515\": container with ID starting with 4e7f57f56270abdaf50216e3e827779a02435c4e43182040d14c815fc5101515 not found: ID does not exist" Oct 09 14:47:49 crc kubenswrapper[4762]: I1009 14:47:49.743408 4762 scope.go:117] "RemoveContainer" containerID="c8eef6c6cb4dbb4d313ec98b085e24536d44e10bb84980c5932ba780c47e09d5" Oct 09 14:47:49 crc kubenswrapper[4762]: E1009 14:47:49.743773 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c8eef6c6cb4dbb4d313ec98b085e24536d44e10bb84980c5932ba780c47e09d5\": container with ID starting with c8eef6c6cb4dbb4d313ec98b085e24536d44e10bb84980c5932ba780c47e09d5 not found: ID does not exist" containerID="c8eef6c6cb4dbb4d313ec98b085e24536d44e10bb84980c5932ba780c47e09d5" Oct 09 14:47:49 crc kubenswrapper[4762]: I1009 14:47:49.743793 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c8eef6c6cb4dbb4d313ec98b085e24536d44e10bb84980c5932ba780c47e09d5"} err="failed to get container status \"c8eef6c6cb4dbb4d313ec98b085e24536d44e10bb84980c5932ba780c47e09d5\": rpc error: code = NotFound desc = could not find container \"c8eef6c6cb4dbb4d313ec98b085e24536d44e10bb84980c5932ba780c47e09d5\": container with ID starting with c8eef6c6cb4dbb4d313ec98b085e24536d44e10bb84980c5932ba780c47e09d5 not found: ID does not exist" Oct 09 14:47:50 crc kubenswrapper[4762]: I1009 14:47:50.027477 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kgg7f" Oct 09 14:47:50 crc kubenswrapper[4762]: I1009 14:47:50.125740 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5lszj\" (UniqueName: \"kubernetes.io/projected/7c0606a9-0001-4f47-997f-194faab9d68a-kube-api-access-5lszj\") pod \"7c0606a9-0001-4f47-997f-194faab9d68a\" (UID: \"7c0606a9-0001-4f47-997f-194faab9d68a\") " Oct 09 14:47:50 crc kubenswrapper[4762]: I1009 14:47:50.125819 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7c0606a9-0001-4f47-997f-194faab9d68a-utilities\") pod \"7c0606a9-0001-4f47-997f-194faab9d68a\" (UID: \"7c0606a9-0001-4f47-997f-194faab9d68a\") " Oct 09 14:47:50 crc kubenswrapper[4762]: I1009 14:47:50.125842 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7c0606a9-0001-4f47-997f-194faab9d68a-catalog-content\") pod \"7c0606a9-0001-4f47-997f-194faab9d68a\" (UID: \"7c0606a9-0001-4f47-997f-194faab9d68a\") " Oct 09 14:47:50 crc kubenswrapper[4762]: I1009 14:47:50.127005 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7c0606a9-0001-4f47-997f-194faab9d68a-utilities" (OuterVolumeSpecName: "utilities") pod "7c0606a9-0001-4f47-997f-194faab9d68a" (UID: "7c0606a9-0001-4f47-997f-194faab9d68a"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 14:47:50 crc kubenswrapper[4762]: I1009 14:47:50.130129 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7c0606a9-0001-4f47-997f-194faab9d68a-kube-api-access-5lszj" (OuterVolumeSpecName: "kube-api-access-5lszj") pod "7c0606a9-0001-4f47-997f-194faab9d68a" (UID: "7c0606a9-0001-4f47-997f-194faab9d68a"). InnerVolumeSpecName "kube-api-access-5lszj". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 14:47:50 crc kubenswrapper[4762]: I1009 14:47:50.143090 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7c0606a9-0001-4f47-997f-194faab9d68a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7c0606a9-0001-4f47-997f-194faab9d68a" (UID: "7c0606a9-0001-4f47-997f-194faab9d68a"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 14:47:50 crc kubenswrapper[4762]: I1009 14:47:50.227518 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5lszj\" (UniqueName: \"kubernetes.io/projected/7c0606a9-0001-4f47-997f-194faab9d68a-kube-api-access-5lszj\") on node \"crc\" DevicePath \"\"" Oct 09 14:47:50 crc kubenswrapper[4762]: I1009 14:47:50.227564 4762 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7c0606a9-0001-4f47-997f-194faab9d68a-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 14:47:50 crc kubenswrapper[4762]: I1009 14:47:50.227575 4762 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7c0606a9-0001-4f47-997f-194faab9d68a-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 14:47:50 crc kubenswrapper[4762]: I1009 14:47:50.591415 4762 generic.go:334] "Generic (PLEG): container finished" podID="7c0606a9-0001-4f47-997f-194faab9d68a" containerID="132a7a6455666fb2d13c4ec026d19f106fd4cc5d181aafeb54541c2f5ce7cf7a" exitCode=0 Oct 09 14:47:50 crc kubenswrapper[4762]: I1009 14:47:50.591520 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kgg7f" event={"ID":"7c0606a9-0001-4f47-997f-194faab9d68a","Type":"ContainerDied","Data":"132a7a6455666fb2d13c4ec026d19f106fd4cc5d181aafeb54541c2f5ce7cf7a"} Oct 09 14:47:50 crc kubenswrapper[4762]: I1009 14:47:50.591605 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kgg7f" Oct 09 14:47:50 crc kubenswrapper[4762]: I1009 14:47:50.592433 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kgg7f" event={"ID":"7c0606a9-0001-4f47-997f-194faab9d68a","Type":"ContainerDied","Data":"62fd2510c64345184d777f0b4c2ed2af552f66e9e8f4c69ea03fe6028071b13c"} Oct 09 14:47:50 crc kubenswrapper[4762]: I1009 14:47:50.592542 4762 scope.go:117] "RemoveContainer" containerID="132a7a6455666fb2d13c4ec026d19f106fd4cc5d181aafeb54541c2f5ce7cf7a" Oct 09 14:47:50 crc kubenswrapper[4762]: I1009 14:47:50.613577 4762 scope.go:117] "RemoveContainer" containerID="cae61ee609d60d1b57a620f218ad1fe21271e4302f35a4d5dfb77f210091d79d" Oct 09 14:47:50 crc kubenswrapper[4762]: I1009 14:47:50.633565 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-kgg7f"] Oct 09 14:47:50 crc kubenswrapper[4762]: I1009 14:47:50.638254 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-kgg7f"] Oct 09 14:47:50 crc kubenswrapper[4762]: I1009 14:47:50.658856 4762 scope.go:117] "RemoveContainer" containerID="1e7df90b4f933a763aae3bddde0c36723e0829a80f7537362f6dbbf19616e0e5" Oct 09 14:47:50 crc kubenswrapper[4762]: I1009 14:47:50.676090 4762 scope.go:117] "RemoveContainer" containerID="132a7a6455666fb2d13c4ec026d19f106fd4cc5d181aafeb54541c2f5ce7cf7a" Oct 09 14:47:50 crc kubenswrapper[4762]: E1009 14:47:50.676580 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"132a7a6455666fb2d13c4ec026d19f106fd4cc5d181aafeb54541c2f5ce7cf7a\": container with ID starting with 132a7a6455666fb2d13c4ec026d19f106fd4cc5d181aafeb54541c2f5ce7cf7a not found: ID does not exist" containerID="132a7a6455666fb2d13c4ec026d19f106fd4cc5d181aafeb54541c2f5ce7cf7a" Oct 09 14:47:50 crc kubenswrapper[4762]: I1009 14:47:50.676619 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"132a7a6455666fb2d13c4ec026d19f106fd4cc5d181aafeb54541c2f5ce7cf7a"} err="failed to get container status \"132a7a6455666fb2d13c4ec026d19f106fd4cc5d181aafeb54541c2f5ce7cf7a\": rpc error: code = NotFound desc = could not find container \"132a7a6455666fb2d13c4ec026d19f106fd4cc5d181aafeb54541c2f5ce7cf7a\": container with ID starting with 132a7a6455666fb2d13c4ec026d19f106fd4cc5d181aafeb54541c2f5ce7cf7a not found: ID does not exist" Oct 09 14:47:50 crc kubenswrapper[4762]: I1009 14:47:50.676653 4762 scope.go:117] "RemoveContainer" containerID="cae61ee609d60d1b57a620f218ad1fe21271e4302f35a4d5dfb77f210091d79d" Oct 09 14:47:50 crc kubenswrapper[4762]: E1009 14:47:50.676927 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cae61ee609d60d1b57a620f218ad1fe21271e4302f35a4d5dfb77f210091d79d\": container with ID starting with cae61ee609d60d1b57a620f218ad1fe21271e4302f35a4d5dfb77f210091d79d not found: ID does not exist" containerID="cae61ee609d60d1b57a620f218ad1fe21271e4302f35a4d5dfb77f210091d79d" Oct 09 14:47:50 crc kubenswrapper[4762]: I1009 14:47:50.676973 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cae61ee609d60d1b57a620f218ad1fe21271e4302f35a4d5dfb77f210091d79d"} err="failed to get container status \"cae61ee609d60d1b57a620f218ad1fe21271e4302f35a4d5dfb77f210091d79d\": rpc error: code = NotFound desc = could not find 
container \"cae61ee609d60d1b57a620f218ad1fe21271e4302f35a4d5dfb77f210091d79d\": container with ID starting with cae61ee609d60d1b57a620f218ad1fe21271e4302f35a4d5dfb77f210091d79d not found: ID does not exist" Oct 09 14:47:50 crc kubenswrapper[4762]: I1009 14:47:50.677007 4762 scope.go:117] "RemoveContainer" containerID="1e7df90b4f933a763aae3bddde0c36723e0829a80f7537362f6dbbf19616e0e5" Oct 09 14:47:50 crc kubenswrapper[4762]: E1009 14:47:50.677412 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1e7df90b4f933a763aae3bddde0c36723e0829a80f7537362f6dbbf19616e0e5\": container with ID starting with 1e7df90b4f933a763aae3bddde0c36723e0829a80f7537362f6dbbf19616e0e5 not found: ID does not exist" containerID="1e7df90b4f933a763aae3bddde0c36723e0829a80f7537362f6dbbf19616e0e5" Oct 09 14:47:50 crc kubenswrapper[4762]: I1009 14:47:50.677440 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1e7df90b4f933a763aae3bddde0c36723e0829a80f7537362f6dbbf19616e0e5"} err="failed to get container status \"1e7df90b4f933a763aae3bddde0c36723e0829a80f7537362f6dbbf19616e0e5\": rpc error: code = NotFound desc = could not find container \"1e7df90b4f933a763aae3bddde0c36723e0829a80f7537362f6dbbf19616e0e5\": container with ID starting with 1e7df90b4f933a763aae3bddde0c36723e0829a80f7537362f6dbbf19616e0e5 not found: ID does not exist" Oct 09 14:47:50 crc kubenswrapper[4762]: I1009 14:47:50.973868 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7c0606a9-0001-4f47-997f-194faab9d68a" path="/var/lib/kubelet/pods/7c0606a9-0001-4f47-997f-194faab9d68a/volumes" Oct 09 14:47:50 crc kubenswrapper[4762]: I1009 14:47:50.974953 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f136553c-e186-4706-9b54-ded4838711b4" path="/var/lib/kubelet/pods/f136553c-e186-4706-9b54-ded4838711b4/volumes" Oct 09 14:48:31 crc kubenswrapper[4762]: I1009 14:48:31.604715 4762 scope.go:117] "RemoveContainer" containerID="a0cab21376028a3574a4ea5be7cc483bf0cb7aeaec3a6bb6a6f0fd4853e56ef5" Oct 09 14:49:11 crc kubenswrapper[4762]: I1009 14:49:11.969267 4762 patch_prober.go:28] interesting pod/machine-config-daemon-5v6hv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 14:49:11 crc kubenswrapper[4762]: I1009 14:49:11.969872 4762 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 14:49:41 crc kubenswrapper[4762]: I1009 14:49:41.969394 4762 patch_prober.go:28] interesting pod/machine-config-daemon-5v6hv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 14:49:41 crc kubenswrapper[4762]: I1009 14:49:41.969921 4762 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" probeResult="failure" output="Get 
\"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 14:50:11 crc kubenswrapper[4762]: I1009 14:50:11.969973 4762 patch_prober.go:28] interesting pod/machine-config-daemon-5v6hv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 14:50:11 crc kubenswrapper[4762]: I1009 14:50:11.970574 4762 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 14:50:11 crc kubenswrapper[4762]: I1009 14:50:11.970655 4762 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" Oct 09 14:50:11 crc kubenswrapper[4762]: I1009 14:50:11.971248 4762 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"3000d51a4aadf3d2620258ddf3cdf3f8532d23cabadc7484a369784c4a6006b5"} pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 09 14:50:11 crc kubenswrapper[4762]: I1009 14:50:11.971328 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" containerID="cri-o://3000d51a4aadf3d2620258ddf3cdf3f8532d23cabadc7484a369784c4a6006b5" gracePeriod=600 Oct 09 14:50:12 crc kubenswrapper[4762]: I1009 14:50:12.726802 4762 generic.go:334] "Generic (PLEG): container finished" podID="366049a3-acf6-488c-9f93-4557528d6d14" containerID="3000d51a4aadf3d2620258ddf3cdf3f8532d23cabadc7484a369784c4a6006b5" exitCode=0 Oct 09 14:50:12 crc kubenswrapper[4762]: I1009 14:50:12.726873 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" event={"ID":"366049a3-acf6-488c-9f93-4557528d6d14","Type":"ContainerDied","Data":"3000d51a4aadf3d2620258ddf3cdf3f8532d23cabadc7484a369784c4a6006b5"} Oct 09 14:50:12 crc kubenswrapper[4762]: I1009 14:50:12.726970 4762 scope.go:117] "RemoveContainer" containerID="bd93ac77fa82ac7ac75985b30cf5b2de69fe1e0a93c6fcc548b0b2dd005aa70c" Oct 09 14:50:13 crc kubenswrapper[4762]: I1009 14:50:13.739283 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" event={"ID":"366049a3-acf6-488c-9f93-4557528d6d14","Type":"ContainerStarted","Data":"c75ef9853f5fe1f6bce3930681cb05899e816bea18fa88b6c554eafd348de735"} Oct 09 14:51:50 crc kubenswrapper[4762]: I1009 14:51:50.204104 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-copy-data"] Oct 09 14:51:50 crc kubenswrapper[4762]: E1009 14:51:50.205164 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c0606a9-0001-4f47-997f-194faab9d68a" containerName="extract-utilities" Oct 09 14:51:50 crc kubenswrapper[4762]: I1009 14:51:50.205180 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c0606a9-0001-4f47-997f-194faab9d68a" containerName="extract-utilities" Oct 09 14:51:50 crc 
kubenswrapper[4762]: E1009 14:51:50.205201 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3b17ffb5-31c0-4737-a8fa-9c8138af2c25" containerName="extract-content" Oct 09 14:51:50 crc kubenswrapper[4762]: I1009 14:51:50.205208 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="3b17ffb5-31c0-4737-a8fa-9c8138af2c25" containerName="extract-content" Oct 09 14:51:50 crc kubenswrapper[4762]: E1009 14:51:50.205270 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f136553c-e186-4706-9b54-ded4838711b4" containerName="extract-content" Oct 09 14:51:50 crc kubenswrapper[4762]: I1009 14:51:50.205280 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="f136553c-e186-4706-9b54-ded4838711b4" containerName="extract-content" Oct 09 14:51:50 crc kubenswrapper[4762]: E1009 14:51:50.205295 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3b17ffb5-31c0-4737-a8fa-9c8138af2c25" containerName="extract-utilities" Oct 09 14:51:50 crc kubenswrapper[4762]: I1009 14:51:50.205302 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="3b17ffb5-31c0-4737-a8fa-9c8138af2c25" containerName="extract-utilities" Oct 09 14:51:50 crc kubenswrapper[4762]: E1009 14:51:50.205313 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="24274578-a57d-47d9-bf81-49d83c11452d" containerName="mariadb-client-2" Oct 09 14:51:50 crc kubenswrapper[4762]: I1009 14:51:50.205320 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="24274578-a57d-47d9-bf81-49d83c11452d" containerName="mariadb-client-2" Oct 09 14:51:50 crc kubenswrapper[4762]: E1009 14:51:50.205337 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f136553c-e186-4706-9b54-ded4838711b4" containerName="extract-utilities" Oct 09 14:51:50 crc kubenswrapper[4762]: I1009 14:51:50.205345 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="f136553c-e186-4706-9b54-ded4838711b4" containerName="extract-utilities" Oct 09 14:51:50 crc kubenswrapper[4762]: E1009 14:51:50.205356 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f136553c-e186-4706-9b54-ded4838711b4" containerName="registry-server" Oct 09 14:51:50 crc kubenswrapper[4762]: I1009 14:51:50.205363 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="f136553c-e186-4706-9b54-ded4838711b4" containerName="registry-server" Oct 09 14:51:50 crc kubenswrapper[4762]: E1009 14:51:50.205377 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3b17ffb5-31c0-4737-a8fa-9c8138af2c25" containerName="registry-server" Oct 09 14:51:50 crc kubenswrapper[4762]: I1009 14:51:50.205385 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="3b17ffb5-31c0-4737-a8fa-9c8138af2c25" containerName="registry-server" Oct 09 14:51:50 crc kubenswrapper[4762]: E1009 14:51:50.205400 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c0606a9-0001-4f47-997f-194faab9d68a" containerName="registry-server" Oct 09 14:51:50 crc kubenswrapper[4762]: I1009 14:51:50.205408 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c0606a9-0001-4f47-997f-194faab9d68a" containerName="registry-server" Oct 09 14:51:50 crc kubenswrapper[4762]: E1009 14:51:50.205432 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c0606a9-0001-4f47-997f-194faab9d68a" containerName="extract-content" Oct 09 14:51:50 crc kubenswrapper[4762]: I1009 14:51:50.205439 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c0606a9-0001-4f47-997f-194faab9d68a" 
containerName="extract-content" Oct 09 14:51:50 crc kubenswrapper[4762]: I1009 14:51:50.205659 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="7c0606a9-0001-4f47-997f-194faab9d68a" containerName="registry-server" Oct 09 14:51:50 crc kubenswrapper[4762]: I1009 14:51:50.205672 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="f136553c-e186-4706-9b54-ded4838711b4" containerName="registry-server" Oct 09 14:51:50 crc kubenswrapper[4762]: I1009 14:51:50.205688 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="3b17ffb5-31c0-4737-a8fa-9c8138af2c25" containerName="registry-server" Oct 09 14:51:50 crc kubenswrapper[4762]: I1009 14:51:50.205702 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="24274578-a57d-47d9-bf81-49d83c11452d" containerName="mariadb-client-2" Oct 09 14:51:50 crc kubenswrapper[4762]: I1009 14:51:50.206367 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-copy-data" Oct 09 14:51:50 crc kubenswrapper[4762]: I1009 14:51:50.209235 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-gjg68" Oct 09 14:51:50 crc kubenswrapper[4762]: I1009 14:51:50.233362 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-copy-data"] Oct 09 14:51:50 crc kubenswrapper[4762]: I1009 14:51:50.272822 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-1858a751-5be1-4eb5-a226-0034f33f9342\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-1858a751-5be1-4eb5-a226-0034f33f9342\") pod \"mariadb-copy-data\" (UID: \"84d69cfe-07e8-45e6-95b4-05bf716a658c\") " pod="openstack/mariadb-copy-data" Oct 09 14:51:50 crc kubenswrapper[4762]: I1009 14:51:50.273256 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cwdqn\" (UniqueName: \"kubernetes.io/projected/84d69cfe-07e8-45e6-95b4-05bf716a658c-kube-api-access-cwdqn\") pod \"mariadb-copy-data\" (UID: \"84d69cfe-07e8-45e6-95b4-05bf716a658c\") " pod="openstack/mariadb-copy-data" Oct 09 14:51:50 crc kubenswrapper[4762]: I1009 14:51:50.374118 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-1858a751-5be1-4eb5-a226-0034f33f9342\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-1858a751-5be1-4eb5-a226-0034f33f9342\") pod \"mariadb-copy-data\" (UID: \"84d69cfe-07e8-45e6-95b4-05bf716a658c\") " pod="openstack/mariadb-copy-data" Oct 09 14:51:50 crc kubenswrapper[4762]: I1009 14:51:50.374171 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cwdqn\" (UniqueName: \"kubernetes.io/projected/84d69cfe-07e8-45e6-95b4-05bf716a658c-kube-api-access-cwdqn\") pod \"mariadb-copy-data\" (UID: \"84d69cfe-07e8-45e6-95b4-05bf716a658c\") " pod="openstack/mariadb-copy-data" Oct 09 14:51:50 crc kubenswrapper[4762]: I1009 14:51:50.377520 4762 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Oct 09 14:51:50 crc kubenswrapper[4762]: I1009 14:51:50.377567 4762 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-1858a751-5be1-4eb5-a226-0034f33f9342\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-1858a751-5be1-4eb5-a226-0034f33f9342\") pod \"mariadb-copy-data\" (UID: \"84d69cfe-07e8-45e6-95b4-05bf716a658c\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/9988a9a0f9c26b099b93a7cd41ff0969bc0697bf4b67c7a351fd092351cc8fad/globalmount\"" pod="openstack/mariadb-copy-data" Oct 09 14:51:50 crc kubenswrapper[4762]: I1009 14:51:50.396933 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cwdqn\" (UniqueName: \"kubernetes.io/projected/84d69cfe-07e8-45e6-95b4-05bf716a658c-kube-api-access-cwdqn\") pod \"mariadb-copy-data\" (UID: \"84d69cfe-07e8-45e6-95b4-05bf716a658c\") " pod="openstack/mariadb-copy-data" Oct 09 14:51:50 crc kubenswrapper[4762]: I1009 14:51:50.417725 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-1858a751-5be1-4eb5-a226-0034f33f9342\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-1858a751-5be1-4eb5-a226-0034f33f9342\") pod \"mariadb-copy-data\" (UID: \"84d69cfe-07e8-45e6-95b4-05bf716a658c\") " pod="openstack/mariadb-copy-data" Oct 09 14:51:50 crc kubenswrapper[4762]: I1009 14:51:50.553913 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-copy-data" Oct 09 14:51:51 crc kubenswrapper[4762]: I1009 14:51:51.049317 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-copy-data"] Oct 09 14:51:51 crc kubenswrapper[4762]: W1009 14:51:51.056235 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod84d69cfe_07e8_45e6_95b4_05bf716a658c.slice/crio-244c31ca3dd8b35c1dd50d5526e5b265715da2563d1740bf3bbaa2d1b66349ea WatchSource:0}: Error finding container 244c31ca3dd8b35c1dd50d5526e5b265715da2563d1740bf3bbaa2d1b66349ea: Status 404 returned error can't find the container with id 244c31ca3dd8b35c1dd50d5526e5b265715da2563d1740bf3bbaa2d1b66349ea Oct 09 14:51:51 crc kubenswrapper[4762]: I1009 14:51:51.527323 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-copy-data" event={"ID":"84d69cfe-07e8-45e6-95b4-05bf716a658c","Type":"ContainerStarted","Data":"f4a43743f4f7a7348f4eafe0b64bb93d7aa3234ef74884c26b4ac82672aa6f74"} Oct 09 14:51:51 crc kubenswrapper[4762]: I1009 14:51:51.527605 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-copy-data" event={"ID":"84d69cfe-07e8-45e6-95b4-05bf716a658c","Type":"ContainerStarted","Data":"244c31ca3dd8b35c1dd50d5526e5b265715da2563d1740bf3bbaa2d1b66349ea"} Oct 09 14:51:51 crc kubenswrapper[4762]: I1009 14:51:51.548167 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/mariadb-copy-data" podStartSLOduration=2.548146292 podStartE2EDuration="2.548146292s" podCreationTimestamp="2025-10-09 14:51:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 14:51:51.543257884 +0000 UTC m=+5187.317048963" watchObservedRunningTime="2025-10-09 14:51:51.548146292 +0000 UTC m=+5187.321937331" Oct 09 14:51:53 crc kubenswrapper[4762]: I1009 14:51:53.293384 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client"] Oct 09 14:51:53 
crc kubenswrapper[4762]: I1009 14:51:53.304082 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client" Oct 09 14:51:53 crc kubenswrapper[4762]: I1009 14:51:53.325609 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client"] Oct 09 14:51:53 crc kubenswrapper[4762]: I1009 14:51:53.429550 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fmpq7\" (UniqueName: \"kubernetes.io/projected/cb2927a6-2c62-429a-b7cf-089dd1f24186-kube-api-access-fmpq7\") pod \"mariadb-client\" (UID: \"cb2927a6-2c62-429a-b7cf-089dd1f24186\") " pod="openstack/mariadb-client" Oct 09 14:51:53 crc kubenswrapper[4762]: I1009 14:51:53.531509 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fmpq7\" (UniqueName: \"kubernetes.io/projected/cb2927a6-2c62-429a-b7cf-089dd1f24186-kube-api-access-fmpq7\") pod \"mariadb-client\" (UID: \"cb2927a6-2c62-429a-b7cf-089dd1f24186\") " pod="openstack/mariadb-client" Oct 09 14:51:53 crc kubenswrapper[4762]: I1009 14:51:53.556993 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fmpq7\" (UniqueName: \"kubernetes.io/projected/cb2927a6-2c62-429a-b7cf-089dd1f24186-kube-api-access-fmpq7\") pod \"mariadb-client\" (UID: \"cb2927a6-2c62-429a-b7cf-089dd1f24186\") " pod="openstack/mariadb-client" Oct 09 14:51:53 crc kubenswrapper[4762]: I1009 14:51:53.633367 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client" Oct 09 14:51:53 crc kubenswrapper[4762]: I1009 14:51:53.830947 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client"] Oct 09 14:51:54 crc kubenswrapper[4762]: I1009 14:51:54.549570 4762 generic.go:334] "Generic (PLEG): container finished" podID="cb2927a6-2c62-429a-b7cf-089dd1f24186" containerID="37f67fdb760e1fac6c232f660ec7778d29cf95da2c5b3ec9fa69c69d67ce2335" exitCode=0 Oct 09 14:51:54 crc kubenswrapper[4762]: I1009 14:51:54.549618 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"cb2927a6-2c62-429a-b7cf-089dd1f24186","Type":"ContainerDied","Data":"37f67fdb760e1fac6c232f660ec7778d29cf95da2c5b3ec9fa69c69d67ce2335"} Oct 09 14:51:54 crc kubenswrapper[4762]: I1009 14:51:54.549696 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"cb2927a6-2c62-429a-b7cf-089dd1f24186","Type":"ContainerStarted","Data":"7da476913a4954b0427564248920b26b32bbad299bcd77901c87bf3b109f8640"} Oct 09 14:51:55 crc kubenswrapper[4762]: I1009 14:51:55.876620 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client" Oct 09 14:51:55 crc kubenswrapper[4762]: I1009 14:51:55.903186 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client_cb2927a6-2c62-429a-b7cf-089dd1f24186/mariadb-client/0.log" Oct 09 14:51:55 crc kubenswrapper[4762]: I1009 14:51:55.926604 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client"] Oct 09 14:51:55 crc kubenswrapper[4762]: I1009 14:51:55.933380 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client"] Oct 09 14:51:55 crc kubenswrapper[4762]: I1009 14:51:55.968038 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fmpq7\" (UniqueName: \"kubernetes.io/projected/cb2927a6-2c62-429a-b7cf-089dd1f24186-kube-api-access-fmpq7\") pod \"cb2927a6-2c62-429a-b7cf-089dd1f24186\" (UID: \"cb2927a6-2c62-429a-b7cf-089dd1f24186\") " Oct 09 14:51:55 crc kubenswrapper[4762]: I1009 14:51:55.973212 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cb2927a6-2c62-429a-b7cf-089dd1f24186-kube-api-access-fmpq7" (OuterVolumeSpecName: "kube-api-access-fmpq7") pod "cb2927a6-2c62-429a-b7cf-089dd1f24186" (UID: "cb2927a6-2c62-429a-b7cf-089dd1f24186"). InnerVolumeSpecName "kube-api-access-fmpq7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 14:51:56 crc kubenswrapper[4762]: I1009 14:51:56.047536 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client"] Oct 09 14:51:56 crc kubenswrapper[4762]: E1009 14:51:56.048172 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cb2927a6-2c62-429a-b7cf-089dd1f24186" containerName="mariadb-client" Oct 09 14:51:56 crc kubenswrapper[4762]: I1009 14:51:56.048194 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb2927a6-2c62-429a-b7cf-089dd1f24186" containerName="mariadb-client" Oct 09 14:51:56 crc kubenswrapper[4762]: I1009 14:51:56.048341 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="cb2927a6-2c62-429a-b7cf-089dd1f24186" containerName="mariadb-client" Oct 09 14:51:56 crc kubenswrapper[4762]: I1009 14:51:56.049039 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client" Oct 09 14:51:56 crc kubenswrapper[4762]: I1009 14:51:56.064732 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client"] Oct 09 14:51:56 crc kubenswrapper[4762]: I1009 14:51:56.069847 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fmpq7\" (UniqueName: \"kubernetes.io/projected/cb2927a6-2c62-429a-b7cf-089dd1f24186-kube-api-access-fmpq7\") on node \"crc\" DevicePath \"\"" Oct 09 14:51:56 crc kubenswrapper[4762]: I1009 14:51:56.171772 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dpfwt\" (UniqueName: \"kubernetes.io/projected/d0368d9d-d44c-4d87-8646-fb9daef43ee5-kube-api-access-dpfwt\") pod \"mariadb-client\" (UID: \"d0368d9d-d44c-4d87-8646-fb9daef43ee5\") " pod="openstack/mariadb-client" Oct 09 14:51:56 crc kubenswrapper[4762]: I1009 14:51:56.273378 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dpfwt\" (UniqueName: \"kubernetes.io/projected/d0368d9d-d44c-4d87-8646-fb9daef43ee5-kube-api-access-dpfwt\") pod \"mariadb-client\" (UID: \"d0368d9d-d44c-4d87-8646-fb9daef43ee5\") " pod="openstack/mariadb-client" Oct 09 14:51:56 crc kubenswrapper[4762]: I1009 14:51:56.290857 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dpfwt\" (UniqueName: \"kubernetes.io/projected/d0368d9d-d44c-4d87-8646-fb9daef43ee5-kube-api-access-dpfwt\") pod \"mariadb-client\" (UID: \"d0368d9d-d44c-4d87-8646-fb9daef43ee5\") " pod="openstack/mariadb-client" Oct 09 14:51:56 crc kubenswrapper[4762]: I1009 14:51:56.371772 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client" Oct 09 14:51:56 crc kubenswrapper[4762]: I1009 14:51:56.568701 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7da476913a4954b0427564248920b26b32bbad299bcd77901c87bf3b109f8640" Oct 09 14:51:56 crc kubenswrapper[4762]: I1009 14:51:56.568829 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client" Oct 09 14:51:56 crc kubenswrapper[4762]: I1009 14:51:56.590440 4762 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/mariadb-client" oldPodUID="cb2927a6-2c62-429a-b7cf-089dd1f24186" podUID="d0368d9d-d44c-4d87-8646-fb9daef43ee5" Oct 09 14:51:56 crc kubenswrapper[4762]: I1009 14:51:56.841770 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client"] Oct 09 14:51:56 crc kubenswrapper[4762]: W1009 14:51:56.846318 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd0368d9d_d44c_4d87_8646_fb9daef43ee5.slice/crio-4f0381b9a27dcdbe9b9268724102caab93554390bde2da186f14268aad0c322a WatchSource:0}: Error finding container 4f0381b9a27dcdbe9b9268724102caab93554390bde2da186f14268aad0c322a: Status 404 returned error can't find the container with id 4f0381b9a27dcdbe9b9268724102caab93554390bde2da186f14268aad0c322a Oct 09 14:51:56 crc kubenswrapper[4762]: I1009 14:51:56.980250 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cb2927a6-2c62-429a-b7cf-089dd1f24186" path="/var/lib/kubelet/pods/cb2927a6-2c62-429a-b7cf-089dd1f24186/volumes" Oct 09 14:51:57 crc kubenswrapper[4762]: I1009 14:51:57.579604 4762 generic.go:334] "Generic (PLEG): container finished" podID="d0368d9d-d44c-4d87-8646-fb9daef43ee5" containerID="446ce32f085ff8ba0dbc0de3c89c47b0994580e08b0e9b95e5241f3ccbbf422c" exitCode=0 Oct 09 14:51:57 crc kubenswrapper[4762]: I1009 14:51:57.579676 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"d0368d9d-d44c-4d87-8646-fb9daef43ee5","Type":"ContainerDied","Data":"446ce32f085ff8ba0dbc0de3c89c47b0994580e08b0e9b95e5241f3ccbbf422c"} Oct 09 14:51:57 crc kubenswrapper[4762]: I1009 14:51:57.579719 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"d0368d9d-d44c-4d87-8646-fb9daef43ee5","Type":"ContainerStarted","Data":"4f0381b9a27dcdbe9b9268724102caab93554390bde2da186f14268aad0c322a"} Oct 09 14:51:58 crc kubenswrapper[4762]: I1009 14:51:58.938002 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client" Oct 09 14:51:58 crc kubenswrapper[4762]: I1009 14:51:58.956717 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client_d0368d9d-d44c-4d87-8646-fb9daef43ee5/mariadb-client/0.log" Oct 09 14:51:58 crc kubenswrapper[4762]: I1009 14:51:58.980955 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client"] Oct 09 14:51:58 crc kubenswrapper[4762]: I1009 14:51:58.985335 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client"] Oct 09 14:51:59 crc kubenswrapper[4762]: I1009 14:51:59.018375 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dpfwt\" (UniqueName: \"kubernetes.io/projected/d0368d9d-d44c-4d87-8646-fb9daef43ee5-kube-api-access-dpfwt\") pod \"d0368d9d-d44c-4d87-8646-fb9daef43ee5\" (UID: \"d0368d9d-d44c-4d87-8646-fb9daef43ee5\") " Oct 09 14:51:59 crc kubenswrapper[4762]: I1009 14:51:59.023826 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d0368d9d-d44c-4d87-8646-fb9daef43ee5-kube-api-access-dpfwt" (OuterVolumeSpecName: "kube-api-access-dpfwt") pod "d0368d9d-d44c-4d87-8646-fb9daef43ee5" (UID: "d0368d9d-d44c-4d87-8646-fb9daef43ee5"). 
InnerVolumeSpecName "kube-api-access-dpfwt". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 14:51:59 crc kubenswrapper[4762]: I1009 14:51:59.120607 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dpfwt\" (UniqueName: \"kubernetes.io/projected/d0368d9d-d44c-4d87-8646-fb9daef43ee5-kube-api-access-dpfwt\") on node \"crc\" DevicePath \"\"" Oct 09 14:51:59 crc kubenswrapper[4762]: I1009 14:51:59.598135 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4f0381b9a27dcdbe9b9268724102caab93554390bde2da186f14268aad0c322a" Oct 09 14:51:59 crc kubenswrapper[4762]: I1009 14:51:59.598283 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client" Oct 09 14:52:00 crc kubenswrapper[4762]: I1009 14:52:00.985154 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d0368d9d-d44c-4d87-8646-fb9daef43ee5" path="/var/lib/kubelet/pods/d0368d9d-d44c-4d87-8646-fb9daef43ee5/volumes" Oct 09 14:52:41 crc kubenswrapper[4762]: I1009 14:52:41.969297 4762 patch_prober.go:28] interesting pod/machine-config-daemon-5v6hv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 14:52:41 crc kubenswrapper[4762]: I1009 14:52:41.969882 4762 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.493574 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"] Oct 09 14:53:08 crc kubenswrapper[4762]: E1009 14:53:08.494575 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d0368d9d-d44c-4d87-8646-fb9daef43ee5" containerName="mariadb-client" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.494599 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="d0368d9d-d44c-4d87-8646-fb9daef43ee5" containerName="mariadb-client" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.494799 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="d0368d9d-d44c-4d87-8646-fb9daef43ee5" containerName="mariadb-client" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.495716 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-0" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.499817 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.501479 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.502087 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-cvmcv" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.506198 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.513482 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-2"] Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.515133 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-2" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.521391 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-1"] Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.522947 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-1" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.531215 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-2"] Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.538857 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-1"] Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.601809 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-ba73de0a-ed67-4f76-8162-2b29104ab229\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ba73de0a-ed67-4f76-8162-2b29104ab229\") pod \"ovsdbserver-sb-0\" (UID: \"18f94f81-220f-4213-a416-5a1c2b4a5f3d\") " pod="openstack/ovsdbserver-sb-0" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.601869 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/18f94f81-220f-4213-a416-5a1c2b4a5f3d-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"18f94f81-220f-4213-a416-5a1c2b4a5f3d\") " pod="openstack/ovsdbserver-sb-0" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.601885 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/18f94f81-220f-4213-a416-5a1c2b4a5f3d-config\") pod \"ovsdbserver-sb-0\" (UID: \"18f94f81-220f-4213-a416-5a1c2b4a5f3d\") " pod="openstack/ovsdbserver-sb-0" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.601929 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/18f94f81-220f-4213-a416-5a1c2b4a5f3d-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"18f94f81-220f-4213-a416-5a1c2b4a5f3d\") " pod="openstack/ovsdbserver-sb-0" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.601950 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b46fx\" (UniqueName: \"kubernetes.io/projected/18f94f81-220f-4213-a416-5a1c2b4a5f3d-kube-api-access-b46fx\") pod \"ovsdbserver-sb-0\" 
(UID: \"18f94f81-220f-4213-a416-5a1c2b4a5f3d\") " pod="openstack/ovsdbserver-sb-0" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.601974 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/18f94f81-220f-4213-a416-5a1c2b4a5f3d-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"18f94f81-220f-4213-a416-5a1c2b4a5f3d\") " pod="openstack/ovsdbserver-sb-0" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.679903 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"] Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.681449 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.686073 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.686785 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-k92cl" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.688005 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.694470 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.703019 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a94dcf0b-969d-4d7f-b3f4-ab97475e5bf2-combined-ca-bundle\") pod \"ovsdbserver-sb-2\" (UID: \"a94dcf0b-969d-4d7f-b3f4-ab97475e5bf2\") " pod="openstack/ovsdbserver-sb-2" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.703093 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/a94dcf0b-969d-4d7f-b3f4-ab97475e5bf2-ovsdb-rundir\") pod \"ovsdbserver-sb-2\" (UID: \"a94dcf0b-969d-4d7f-b3f4-ab97475e5bf2\") " pod="openstack/ovsdbserver-sb-2" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.703115 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zmvp6\" (UniqueName: \"kubernetes.io/projected/d4d58842-51b7-4201-b882-ce8f78057a12-kube-api-access-zmvp6\") pod \"ovsdbserver-sb-1\" (UID: \"d4d58842-51b7-4201-b882-ce8f78057a12\") " pod="openstack/ovsdbserver-sb-1" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.703140 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-b28733f4-25ce-4cac-9301-0b211f9ad8bf\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b28733f4-25ce-4cac-9301-0b211f9ad8bf\") pod \"ovsdbserver-sb-1\" (UID: \"d4d58842-51b7-4201-b882-ce8f78057a12\") " pod="openstack/ovsdbserver-sb-1" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.703168 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/d4d58842-51b7-4201-b882-ce8f78057a12-ovsdb-rundir\") pod \"ovsdbserver-sb-1\" (UID: \"d4d58842-51b7-4201-b882-ce8f78057a12\") " pod="openstack/ovsdbserver-sb-1" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.703210 4762 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"pvc-ba73de0a-ed67-4f76-8162-2b29104ab229\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ba73de0a-ed67-4f76-8162-2b29104ab229\") pod \"ovsdbserver-sb-0\" (UID: \"18f94f81-220f-4213-a416-5a1c2b4a5f3d\") " pod="openstack/ovsdbserver-sb-0" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.703233 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-7d44b765-cd82-4f33-b01a-b87368ebf775\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-7d44b765-cd82-4f33-b01a-b87368ebf775\") pod \"ovsdbserver-sb-2\" (UID: \"a94dcf0b-969d-4d7f-b3f4-ab97475e5bf2\") " pod="openstack/ovsdbserver-sb-2" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.703259 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/18f94f81-220f-4213-a416-5a1c2b4a5f3d-config\") pod \"ovsdbserver-sb-0\" (UID: \"18f94f81-220f-4213-a416-5a1c2b4a5f3d\") " pod="openstack/ovsdbserver-sb-0" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.703282 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/18f94f81-220f-4213-a416-5a1c2b4a5f3d-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"18f94f81-220f-4213-a416-5a1c2b4a5f3d\") " pod="openstack/ovsdbserver-sb-0" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.703305 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d4d58842-51b7-4201-b882-ce8f78057a12-scripts\") pod \"ovsdbserver-sb-1\" (UID: \"d4d58842-51b7-4201-b882-ce8f78057a12\") " pod="openstack/ovsdbserver-sb-1" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.703337 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a94dcf0b-969d-4d7f-b3f4-ab97475e5bf2-scripts\") pod \"ovsdbserver-sb-2\" (UID: \"a94dcf0b-969d-4d7f-b3f4-ab97475e5bf2\") " pod="openstack/ovsdbserver-sb-2" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.703360 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4d58842-51b7-4201-b882-ce8f78057a12-combined-ca-bundle\") pod \"ovsdbserver-sb-1\" (UID: \"d4d58842-51b7-4201-b882-ce8f78057a12\") " pod="openstack/ovsdbserver-sb-1" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.703386 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/18f94f81-220f-4213-a416-5a1c2b4a5f3d-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"18f94f81-220f-4213-a416-5a1c2b4a5f3d\") " pod="openstack/ovsdbserver-sb-0" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.703405 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a94dcf0b-969d-4d7f-b3f4-ab97475e5bf2-config\") pod \"ovsdbserver-sb-2\" (UID: \"a94dcf0b-969d-4d7f-b3f4-ab97475e5bf2\") " pod="openstack/ovsdbserver-sb-2" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.703430 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b46fx\" (UniqueName: 
\"kubernetes.io/projected/18f94f81-220f-4213-a416-5a1c2b4a5f3d-kube-api-access-b46fx\") pod \"ovsdbserver-sb-0\" (UID: \"18f94f81-220f-4213-a416-5a1c2b4a5f3d\") " pod="openstack/ovsdbserver-sb-0" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.703453 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f2cdx\" (UniqueName: \"kubernetes.io/projected/a94dcf0b-969d-4d7f-b3f4-ab97475e5bf2-kube-api-access-f2cdx\") pod \"ovsdbserver-sb-2\" (UID: \"a94dcf0b-969d-4d7f-b3f4-ab97475e5bf2\") " pod="openstack/ovsdbserver-sb-2" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.703475 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d4d58842-51b7-4201-b882-ce8f78057a12-config\") pod \"ovsdbserver-sb-1\" (UID: \"d4d58842-51b7-4201-b882-ce8f78057a12\") " pod="openstack/ovsdbserver-sb-1" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.703499 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/18f94f81-220f-4213-a416-5a1c2b4a5f3d-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"18f94f81-220f-4213-a416-5a1c2b4a5f3d\") " pod="openstack/ovsdbserver-sb-0" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.704008 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/18f94f81-220f-4213-a416-5a1c2b4a5f3d-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"18f94f81-220f-4213-a416-5a1c2b4a5f3d\") " pod="openstack/ovsdbserver-sb-0" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.705407 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/18f94f81-220f-4213-a416-5a1c2b4a5f3d-config\") pod \"ovsdbserver-sb-0\" (UID: \"18f94f81-220f-4213-a416-5a1c2b4a5f3d\") " pod="openstack/ovsdbserver-sb-0" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.707439 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/18f94f81-220f-4213-a416-5a1c2b4a5f3d-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"18f94f81-220f-4213-a416-5a1c2b4a5f3d\") " pod="openstack/ovsdbserver-sb-0" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.711360 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-2"] Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.712818 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-2" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.713224 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/18f94f81-220f-4213-a416-5a1c2b4a5f3d-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"18f94f81-220f-4213-a416-5a1c2b4a5f3d\") " pod="openstack/ovsdbserver-sb-0" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.718316 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-1"] Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.719724 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-1" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.728849 4762 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. 
Skipping MountDevice... Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.728895 4762 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-ba73de0a-ed67-4f76-8162-2b29104ab229\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ba73de0a-ed67-4f76-8162-2b29104ab229\") pod \"ovsdbserver-sb-0\" (UID: \"18f94f81-220f-4213-a416-5a1c2b4a5f3d\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/2e883b4295d51dd652de43d92b8c191b0019d65899ceb28f5ef3a11b3ed6d7c1/globalmount\"" pod="openstack/ovsdbserver-sb-0" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.730240 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b46fx\" (UniqueName: \"kubernetes.io/projected/18f94f81-220f-4213-a416-5a1c2b4a5f3d-kube-api-access-b46fx\") pod \"ovsdbserver-sb-0\" (UID: \"18f94f81-220f-4213-a416-5a1c2b4a5f3d\") " pod="openstack/ovsdbserver-sb-0" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.732600 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-2"] Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.745482 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-1"] Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.771310 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-ba73de0a-ed67-4f76-8162-2b29104ab229\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ba73de0a-ed67-4f76-8162-2b29104ab229\") pod \"ovsdbserver-sb-0\" (UID: \"18f94f81-220f-4213-a416-5a1c2b4a5f3d\") " pod="openstack/ovsdbserver-sb-0" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.804325 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/50181ec3-4201-4f45-aa83-956765556089-ovsdb-rundir\") pod \"ovsdbserver-nb-2\" (UID: \"50181ec3-4201-4f45-aa83-956765556089\") " pod="openstack/ovsdbserver-nb-2" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.804376 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/a94dcf0b-969d-4d7f-b3f4-ab97475e5bf2-ovsdb-rundir\") pod \"ovsdbserver-sb-2\" (UID: \"a94dcf0b-969d-4d7f-b3f4-ab97475e5bf2\") " pod="openstack/ovsdbserver-sb-2" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.804399 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zmvp6\" (UniqueName: \"kubernetes.io/projected/d4d58842-51b7-4201-b882-ce8f78057a12-kube-api-access-zmvp6\") pod \"ovsdbserver-sb-1\" (UID: \"d4d58842-51b7-4201-b882-ce8f78057a12\") " pod="openstack/ovsdbserver-sb-1" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.804541 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/50735fb3-df1e-4342-ad26-07c3d0122688-config\") pod \"ovsdbserver-nb-1\" (UID: \"50735fb3-df1e-4342-ad26-07c3d0122688\") " pod="openstack/ovsdbserver-nb-1" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.804678 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pwqxj\" (UniqueName: \"kubernetes.io/projected/3e89ac68-797b-44dc-9cae-772ebfc5eca6-kube-api-access-pwqxj\") pod \"ovsdbserver-nb-0\" (UID: \"3e89ac68-797b-44dc-9cae-772ebfc5eca6\") " 
pod="openstack/ovsdbserver-nb-0" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.804737 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/50735fb3-df1e-4342-ad26-07c3d0122688-ovsdb-rundir\") pod \"ovsdbserver-nb-1\" (UID: \"50735fb3-df1e-4342-ad26-07c3d0122688\") " pod="openstack/ovsdbserver-nb-1" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.804790 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-b28733f4-25ce-4cac-9301-0b211f9ad8bf\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b28733f4-25ce-4cac-9301-0b211f9ad8bf\") pod \"ovsdbserver-sb-1\" (UID: \"d4d58842-51b7-4201-b882-ce8f78057a12\") " pod="openstack/ovsdbserver-sb-1" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.804825 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/d4d58842-51b7-4201-b882-ce8f78057a12-ovsdb-rundir\") pod \"ovsdbserver-sb-1\" (UID: \"d4d58842-51b7-4201-b882-ce8f78057a12\") " pod="openstack/ovsdbserver-sb-1" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.804863 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3e89ac68-797b-44dc-9cae-772ebfc5eca6-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"3e89ac68-797b-44dc-9cae-772ebfc5eca6\") " pod="openstack/ovsdbserver-nb-0" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.804902 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e89ac68-797b-44dc-9cae-772ebfc5eca6-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"3e89ac68-797b-44dc-9cae-772ebfc5eca6\") " pod="openstack/ovsdbserver-nb-0" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.804927 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/50735fb3-df1e-4342-ad26-07c3d0122688-scripts\") pod \"ovsdbserver-nb-1\" (UID: \"50735fb3-df1e-4342-ad26-07c3d0122688\") " pod="openstack/ovsdbserver-nb-1" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.804959 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50181ec3-4201-4f45-aa83-956765556089-combined-ca-bundle\") pod \"ovsdbserver-nb-2\" (UID: \"50181ec3-4201-4f45-aa83-956765556089\") " pod="openstack/ovsdbserver-nb-2" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.805012 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-7d44b765-cd82-4f33-b01a-b87368ebf775\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-7d44b765-cd82-4f33-b01a-b87368ebf775\") pod \"ovsdbserver-sb-2\" (UID: \"a94dcf0b-969d-4d7f-b3f4-ab97475e5bf2\") " pod="openstack/ovsdbserver-sb-2" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.805052 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/a94dcf0b-969d-4d7f-b3f4-ab97475e5bf2-ovsdb-rundir\") pod \"ovsdbserver-sb-2\" (UID: \"a94dcf0b-969d-4d7f-b3f4-ab97475e5bf2\") " pod="openstack/ovsdbserver-sb-2" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.805069 4762 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-03d4931e-1ba8-4e12-a058-339735c752c1\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-03d4931e-1ba8-4e12-a058-339735c752c1\") pod \"ovsdbserver-nb-0\" (UID: \"3e89ac68-797b-44dc-9cae-772ebfc5eca6\") " pod="openstack/ovsdbserver-nb-0" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.805122 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kdk95\" (UniqueName: \"kubernetes.io/projected/50735fb3-df1e-4342-ad26-07c3d0122688-kube-api-access-kdk95\") pod \"ovsdbserver-nb-1\" (UID: \"50735fb3-df1e-4342-ad26-07c3d0122688\") " pod="openstack/ovsdbserver-nb-1" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.805895 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/d4d58842-51b7-4201-b882-ce8f78057a12-ovsdb-rundir\") pod \"ovsdbserver-sb-1\" (UID: \"d4d58842-51b7-4201-b882-ce8f78057a12\") " pod="openstack/ovsdbserver-sb-1" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.805940 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d4d58842-51b7-4201-b882-ce8f78057a12-scripts\") pod \"ovsdbserver-sb-1\" (UID: \"d4d58842-51b7-4201-b882-ce8f78057a12\") " pod="openstack/ovsdbserver-sb-1" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.806007 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50735fb3-df1e-4342-ad26-07c3d0122688-combined-ca-bundle\") pod \"ovsdbserver-nb-1\" (UID: \"50735fb3-df1e-4342-ad26-07c3d0122688\") " pod="openstack/ovsdbserver-nb-1" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.806058 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3e89ac68-797b-44dc-9cae-772ebfc5eca6-config\") pod \"ovsdbserver-nb-0\" (UID: \"3e89ac68-797b-44dc-9cae-772ebfc5eca6\") " pod="openstack/ovsdbserver-nb-0" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.806100 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a94dcf0b-969d-4d7f-b3f4-ab97475e5bf2-scripts\") pod \"ovsdbserver-sb-2\" (UID: \"a94dcf0b-969d-4d7f-b3f4-ab97475e5bf2\") " pod="openstack/ovsdbserver-sb-2" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.806123 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-06b7fb74-a3c3-4139-87b7-9bf7e5acc72c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-06b7fb74-a3c3-4139-87b7-9bf7e5acc72c\") pod \"ovsdbserver-nb-1\" (UID: \"50735fb3-df1e-4342-ad26-07c3d0122688\") " pod="openstack/ovsdbserver-nb-1" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.806148 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4d58842-51b7-4201-b882-ce8f78057a12-combined-ca-bundle\") pod \"ovsdbserver-sb-1\" (UID: \"d4d58842-51b7-4201-b882-ce8f78057a12\") " pod="openstack/ovsdbserver-sb-1" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.806205 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-shhsl\" (UniqueName: 
\"kubernetes.io/projected/50181ec3-4201-4f45-aa83-956765556089-kube-api-access-shhsl\") pod \"ovsdbserver-nb-2\" (UID: \"50181ec3-4201-4f45-aa83-956765556089\") " pod="openstack/ovsdbserver-nb-2" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.806226 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a94dcf0b-969d-4d7f-b3f4-ab97475e5bf2-config\") pod \"ovsdbserver-sb-2\" (UID: \"a94dcf0b-969d-4d7f-b3f4-ab97475e5bf2\") " pod="openstack/ovsdbserver-sb-2" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.806270 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/3e89ac68-797b-44dc-9cae-772ebfc5eca6-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"3e89ac68-797b-44dc-9cae-772ebfc5eca6\") " pod="openstack/ovsdbserver-nb-0" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.806295 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f2cdx\" (UniqueName: \"kubernetes.io/projected/a94dcf0b-969d-4d7f-b3f4-ab97475e5bf2-kube-api-access-f2cdx\") pod \"ovsdbserver-sb-2\" (UID: \"a94dcf0b-969d-4d7f-b3f4-ab97475e5bf2\") " pod="openstack/ovsdbserver-sb-2" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.806335 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d4d58842-51b7-4201-b882-ce8f78057a12-config\") pod \"ovsdbserver-sb-1\" (UID: \"d4d58842-51b7-4201-b882-ce8f78057a12\") " pod="openstack/ovsdbserver-sb-1" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.806365 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a94dcf0b-969d-4d7f-b3f4-ab97475e5bf2-combined-ca-bundle\") pod \"ovsdbserver-sb-2\" (UID: \"a94dcf0b-969d-4d7f-b3f4-ab97475e5bf2\") " pod="openstack/ovsdbserver-sb-2" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.806388 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/50181ec3-4201-4f45-aa83-956765556089-scripts\") pod \"ovsdbserver-nb-2\" (UID: \"50181ec3-4201-4f45-aa83-956765556089\") " pod="openstack/ovsdbserver-nb-2" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.806448 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-598896e8-5145-47d1-897f-42a310dfc842\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-598896e8-5145-47d1-897f-42a310dfc842\") pod \"ovsdbserver-nb-2\" (UID: \"50181ec3-4201-4f45-aa83-956765556089\") " pod="openstack/ovsdbserver-nb-2" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.806869 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/50181ec3-4201-4f45-aa83-956765556089-config\") pod \"ovsdbserver-nb-2\" (UID: \"50181ec3-4201-4f45-aa83-956765556089\") " pod="openstack/ovsdbserver-nb-2" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.807447 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d4d58842-51b7-4201-b882-ce8f78057a12-scripts\") pod \"ovsdbserver-sb-1\" (UID: \"d4d58842-51b7-4201-b882-ce8f78057a12\") " pod="openstack/ovsdbserver-sb-1" Oct 09 14:53:08 crc 
kubenswrapper[4762]: I1009 14:53:08.807900 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a94dcf0b-969d-4d7f-b3f4-ab97475e5bf2-scripts\") pod \"ovsdbserver-sb-2\" (UID: \"a94dcf0b-969d-4d7f-b3f4-ab97475e5bf2\") " pod="openstack/ovsdbserver-sb-2" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.809153 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a94dcf0b-969d-4d7f-b3f4-ab97475e5bf2-config\") pod \"ovsdbserver-sb-2\" (UID: \"a94dcf0b-969d-4d7f-b3f4-ab97475e5bf2\") " pod="openstack/ovsdbserver-sb-2" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.810336 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d4d58842-51b7-4201-b882-ce8f78057a12-config\") pod \"ovsdbserver-sb-1\" (UID: \"d4d58842-51b7-4201-b882-ce8f78057a12\") " pod="openstack/ovsdbserver-sb-1" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.810903 4762 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.810941 4762 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-7d44b765-cd82-4f33-b01a-b87368ebf775\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-7d44b765-cd82-4f33-b01a-b87368ebf775\") pod \"ovsdbserver-sb-2\" (UID: \"a94dcf0b-969d-4d7f-b3f4-ab97475e5bf2\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/e7597c9307183f802cd680b40e2aa8bc9e0e814dd050e8e651242cc5ece3f1a2/globalmount\"" pod="openstack/ovsdbserver-sb-2" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.811267 4762 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.811314 4762 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-b28733f4-25ce-4cac-9301-0b211f9ad8bf\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b28733f4-25ce-4cac-9301-0b211f9ad8bf\") pod \"ovsdbserver-sb-1\" (UID: \"d4d58842-51b7-4201-b882-ce8f78057a12\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/15ebbfe6f22100baddfab562fc2ddd26f332afee38123307d2e42dae8809c589/globalmount\"" pod="openstack/ovsdbserver-sb-1" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.811954 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4d58842-51b7-4201-b882-ce8f78057a12-combined-ca-bundle\") pod \"ovsdbserver-sb-1\" (UID: \"d4d58842-51b7-4201-b882-ce8f78057a12\") " pod="openstack/ovsdbserver-sb-1" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.811978 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a94dcf0b-969d-4d7f-b3f4-ab97475e5bf2-combined-ca-bundle\") pod \"ovsdbserver-sb-2\" (UID: \"a94dcf0b-969d-4d7f-b3f4-ab97475e5bf2\") " pod="openstack/ovsdbserver-sb-2" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.817166 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-0" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.820616 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zmvp6\" (UniqueName: \"kubernetes.io/projected/d4d58842-51b7-4201-b882-ce8f78057a12-kube-api-access-zmvp6\") pod \"ovsdbserver-sb-1\" (UID: \"d4d58842-51b7-4201-b882-ce8f78057a12\") " pod="openstack/ovsdbserver-sb-1" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.828017 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f2cdx\" (UniqueName: \"kubernetes.io/projected/a94dcf0b-969d-4d7f-b3f4-ab97475e5bf2-kube-api-access-f2cdx\") pod \"ovsdbserver-sb-2\" (UID: \"a94dcf0b-969d-4d7f-b3f4-ab97475e5bf2\") " pod="openstack/ovsdbserver-sb-2" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.861075 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-b28733f4-25ce-4cac-9301-0b211f9ad8bf\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b28733f4-25ce-4cac-9301-0b211f9ad8bf\") pod \"ovsdbserver-sb-1\" (UID: \"d4d58842-51b7-4201-b882-ce8f78057a12\") " pod="openstack/ovsdbserver-sb-1" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.865118 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-7d44b765-cd82-4f33-b01a-b87368ebf775\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-7d44b765-cd82-4f33-b01a-b87368ebf775\") pod \"ovsdbserver-sb-2\" (UID: \"a94dcf0b-969d-4d7f-b3f4-ab97475e5bf2\") " pod="openstack/ovsdbserver-sb-2" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.916526 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/50181ec3-4201-4f45-aa83-956765556089-scripts\") pod \"ovsdbserver-nb-2\" (UID: \"50181ec3-4201-4f45-aa83-956765556089\") " pod="openstack/ovsdbserver-nb-2" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.916596 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-598896e8-5145-47d1-897f-42a310dfc842\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-598896e8-5145-47d1-897f-42a310dfc842\") pod \"ovsdbserver-nb-2\" (UID: \"50181ec3-4201-4f45-aa83-956765556089\") " pod="openstack/ovsdbserver-nb-2" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.916653 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/50181ec3-4201-4f45-aa83-956765556089-config\") pod \"ovsdbserver-nb-2\" (UID: \"50181ec3-4201-4f45-aa83-956765556089\") " pod="openstack/ovsdbserver-nb-2" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.916693 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/50181ec3-4201-4f45-aa83-956765556089-ovsdb-rundir\") pod \"ovsdbserver-nb-2\" (UID: \"50181ec3-4201-4f45-aa83-956765556089\") " pod="openstack/ovsdbserver-nb-2" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.916740 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/50735fb3-df1e-4342-ad26-07c3d0122688-config\") pod \"ovsdbserver-nb-1\" (UID: \"50735fb3-df1e-4342-ad26-07c3d0122688\") " pod="openstack/ovsdbserver-nb-1" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.916767 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"kube-api-access-pwqxj\" (UniqueName: \"kubernetes.io/projected/3e89ac68-797b-44dc-9cae-772ebfc5eca6-kube-api-access-pwqxj\") pod \"ovsdbserver-nb-0\" (UID: \"3e89ac68-797b-44dc-9cae-772ebfc5eca6\") " pod="openstack/ovsdbserver-nb-0" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.917429 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/50735fb3-df1e-4342-ad26-07c3d0122688-ovsdb-rundir\") pod \"ovsdbserver-nb-1\" (UID: \"50735fb3-df1e-4342-ad26-07c3d0122688\") " pod="openstack/ovsdbserver-nb-1" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.917472 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3e89ac68-797b-44dc-9cae-772ebfc5eca6-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"3e89ac68-797b-44dc-9cae-772ebfc5eca6\") " pod="openstack/ovsdbserver-nb-0" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.917501 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e89ac68-797b-44dc-9cae-772ebfc5eca6-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"3e89ac68-797b-44dc-9cae-772ebfc5eca6\") " pod="openstack/ovsdbserver-nb-0" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.917524 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/50735fb3-df1e-4342-ad26-07c3d0122688-scripts\") pod \"ovsdbserver-nb-1\" (UID: \"50735fb3-df1e-4342-ad26-07c3d0122688\") " pod="openstack/ovsdbserver-nb-1" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.917547 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50181ec3-4201-4f45-aa83-956765556089-combined-ca-bundle\") pod \"ovsdbserver-nb-2\" (UID: \"50181ec3-4201-4f45-aa83-956765556089\") " pod="openstack/ovsdbserver-nb-2" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.917585 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-03d4931e-1ba8-4e12-a058-339735c752c1\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-03d4931e-1ba8-4e12-a058-339735c752c1\") pod \"ovsdbserver-nb-0\" (UID: \"3e89ac68-797b-44dc-9cae-772ebfc5eca6\") " pod="openstack/ovsdbserver-nb-0" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.917612 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kdk95\" (UniqueName: \"kubernetes.io/projected/50735fb3-df1e-4342-ad26-07c3d0122688-kube-api-access-kdk95\") pod \"ovsdbserver-nb-1\" (UID: \"50735fb3-df1e-4342-ad26-07c3d0122688\") " pod="openstack/ovsdbserver-nb-1" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.917659 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50735fb3-df1e-4342-ad26-07c3d0122688-combined-ca-bundle\") pod \"ovsdbserver-nb-1\" (UID: \"50735fb3-df1e-4342-ad26-07c3d0122688\") " pod="openstack/ovsdbserver-nb-1" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.917684 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3e89ac68-797b-44dc-9cae-772ebfc5eca6-config\") pod \"ovsdbserver-nb-0\" (UID: \"3e89ac68-797b-44dc-9cae-772ebfc5eca6\") " pod="openstack/ovsdbserver-nb-0" Oct 
09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.917722 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-06b7fb74-a3c3-4139-87b7-9bf7e5acc72c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-06b7fb74-a3c3-4139-87b7-9bf7e5acc72c\") pod \"ovsdbserver-nb-1\" (UID: \"50735fb3-df1e-4342-ad26-07c3d0122688\") " pod="openstack/ovsdbserver-nb-1" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.917756 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-shhsl\" (UniqueName: \"kubernetes.io/projected/50181ec3-4201-4f45-aa83-956765556089-kube-api-access-shhsl\") pod \"ovsdbserver-nb-2\" (UID: \"50181ec3-4201-4f45-aa83-956765556089\") " pod="openstack/ovsdbserver-nb-2" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.917783 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/3e89ac68-797b-44dc-9cae-772ebfc5eca6-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"3e89ac68-797b-44dc-9cae-772ebfc5eca6\") " pod="openstack/ovsdbserver-nb-0" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.919802 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/50181ec3-4201-4f45-aa83-956765556089-ovsdb-rundir\") pod \"ovsdbserver-nb-2\" (UID: \"50181ec3-4201-4f45-aa83-956765556089\") " pod="openstack/ovsdbserver-nb-2" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.921386 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/50181ec3-4201-4f45-aa83-956765556089-scripts\") pod \"ovsdbserver-nb-2\" (UID: \"50181ec3-4201-4f45-aa83-956765556089\") " pod="openstack/ovsdbserver-nb-2" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.921572 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/50735fb3-df1e-4342-ad26-07c3d0122688-scripts\") pod \"ovsdbserver-nb-1\" (UID: \"50735fb3-df1e-4342-ad26-07c3d0122688\") " pod="openstack/ovsdbserver-nb-1" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.921670 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/50735fb3-df1e-4342-ad26-07c3d0122688-ovsdb-rundir\") pod \"ovsdbserver-nb-1\" (UID: \"50735fb3-df1e-4342-ad26-07c3d0122688\") " pod="openstack/ovsdbserver-nb-1" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.921834 4762 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.921899 4762 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-598896e8-5145-47d1-897f-42a310dfc842\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-598896e8-5145-47d1-897f-42a310dfc842\") pod \"ovsdbserver-nb-2\" (UID: \"50181ec3-4201-4f45-aa83-956765556089\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/a772320fe96c59406b5caa00e24a1f833cfda16b34ccaca50441711f6187dabd/globalmount\"" pod="openstack/ovsdbserver-nb-2" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.922068 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/50181ec3-4201-4f45-aa83-956765556089-config\") pod \"ovsdbserver-nb-2\" (UID: \"50181ec3-4201-4f45-aa83-956765556089\") " pod="openstack/ovsdbserver-nb-2" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.922282 4762 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.922307 4762 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-06b7fb74-a3c3-4139-87b7-9bf7e5acc72c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-06b7fb74-a3c3-4139-87b7-9bf7e5acc72c\") pod \"ovsdbserver-nb-1\" (UID: \"50735fb3-df1e-4342-ad26-07c3d0122688\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/5504ca1c2a6eb42c7929f95dc56f1cd05434587c19448eee175e507634c5ee8d/globalmount\"" pod="openstack/ovsdbserver-nb-1" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.922459 4762 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.922488 4762 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-03d4931e-1ba8-4e12-a058-339735c752c1\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-03d4931e-1ba8-4e12-a058-339735c752c1\") pod \"ovsdbserver-nb-0\" (UID: \"3e89ac68-797b-44dc-9cae-772ebfc5eca6\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/5fd4e1311e887a2b9a94f7418a3122324db2a74b6eca0a2a9bb67b5689b1330a/globalmount\"" pod="openstack/ovsdbserver-nb-0" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.922917 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3e89ac68-797b-44dc-9cae-772ebfc5eca6-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"3e89ac68-797b-44dc-9cae-772ebfc5eca6\") " pod="openstack/ovsdbserver-nb-0" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.923116 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/3e89ac68-797b-44dc-9cae-772ebfc5eca6-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"3e89ac68-797b-44dc-9cae-772ebfc5eca6\") " pod="openstack/ovsdbserver-nb-0" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.923438 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3e89ac68-797b-44dc-9cae-772ebfc5eca6-config\") pod \"ovsdbserver-nb-0\" (UID: \"3e89ac68-797b-44dc-9cae-772ebfc5eca6\") " pod="openstack/ovsdbserver-nb-0" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.925438 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50735fb3-df1e-4342-ad26-07c3d0122688-combined-ca-bundle\") pod \"ovsdbserver-nb-1\" (UID: \"50735fb3-df1e-4342-ad26-07c3d0122688\") " pod="openstack/ovsdbserver-nb-1" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.925565 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e89ac68-797b-44dc-9cae-772ebfc5eca6-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"3e89ac68-797b-44dc-9cae-772ebfc5eca6\") " pod="openstack/ovsdbserver-nb-0" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.928799 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/50735fb3-df1e-4342-ad26-07c3d0122688-config\") pod \"ovsdbserver-nb-1\" (UID: \"50735fb3-df1e-4342-ad26-07c3d0122688\") " pod="openstack/ovsdbserver-nb-1" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.929516 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50181ec3-4201-4f45-aa83-956765556089-combined-ca-bundle\") pod \"ovsdbserver-nb-2\" (UID: \"50181ec3-4201-4f45-aa83-956765556089\") " pod="openstack/ovsdbserver-nb-2" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.937904 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pwqxj\" (UniqueName: \"kubernetes.io/projected/3e89ac68-797b-44dc-9cae-772ebfc5eca6-kube-api-access-pwqxj\") pod \"ovsdbserver-nb-0\" (UID: \"3e89ac68-797b-44dc-9cae-772ebfc5eca6\") " pod="openstack/ovsdbserver-nb-0" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.943232 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-shhsl\" (UniqueName: \"kubernetes.io/projected/50181ec3-4201-4f45-aa83-956765556089-kube-api-access-shhsl\") pod \"ovsdbserver-nb-2\" (UID: \"50181ec3-4201-4f45-aa83-956765556089\") " pod="openstack/ovsdbserver-nb-2" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.944824 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kdk95\" (UniqueName: \"kubernetes.io/projected/50735fb3-df1e-4342-ad26-07c3d0122688-kube-api-access-kdk95\") pod \"ovsdbserver-nb-1\" (UID: \"50735fb3-df1e-4342-ad26-07c3d0122688\") " pod="openstack/ovsdbserver-nb-1" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.955533 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-03d4931e-1ba8-4e12-a058-339735c752c1\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-03d4931e-1ba8-4e12-a058-339735c752c1\") pod \"ovsdbserver-nb-0\" (UID: \"3e89ac68-797b-44dc-9cae-772ebfc5eca6\") " pod="openstack/ovsdbserver-nb-0" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.956506 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-06b7fb74-a3c3-4139-87b7-9bf7e5acc72c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-06b7fb74-a3c3-4139-87b7-9bf7e5acc72c\") pod \"ovsdbserver-nb-1\" (UID: \"50735fb3-df1e-4342-ad26-07c3d0122688\") " pod="openstack/ovsdbserver-nb-1" Oct 09 14:53:08 crc kubenswrapper[4762]: I1009 14:53:08.960349 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-598896e8-5145-47d1-897f-42a310dfc842\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-598896e8-5145-47d1-897f-42a310dfc842\") pod \"ovsdbserver-nb-2\" (UID: \"50181ec3-4201-4f45-aa83-956765556089\") " pod="openstack/ovsdbserver-nb-2" Oct 09 14:53:09 crc kubenswrapper[4762]: I1009 14:53:09.004651 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Oct 09 14:53:09 crc kubenswrapper[4762]: I1009 14:53:09.115245 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-2" Oct 09 14:53:09 crc kubenswrapper[4762]: I1009 14:53:09.136571 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-2" Oct 09 14:53:09 crc kubenswrapper[4762]: I1009 14:53:09.149150 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-1" Oct 09 14:53:09 crc kubenswrapper[4762]: I1009 14:53:09.199125 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-1" Oct 09 14:53:09 crc kubenswrapper[4762]: I1009 14:53:09.386607 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Oct 09 14:53:09 crc kubenswrapper[4762]: I1009 14:53:09.556495 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Oct 09 14:53:09 crc kubenswrapper[4762]: W1009 14:53:09.644395 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd4d58842_51b7_4201_b882_ce8f78057a12.slice/crio-7b149d6feb2a232108e82e0a30b750ef2eafd38e19df66069e0331367a0bf864 WatchSource:0}: Error finding container 7b149d6feb2a232108e82e0a30b750ef2eafd38e19df66069e0331367a0bf864: Status 404 returned error can't find the container with id 7b149d6feb2a232108e82e0a30b750ef2eafd38e19df66069e0331367a0bf864 Oct 09 14:53:09 crc kubenswrapper[4762]: I1009 14:53:09.645484 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-1"] Oct 09 14:53:09 crc kubenswrapper[4762]: I1009 14:53:09.704544 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-1"] Oct 09 14:53:09 crc kubenswrapper[4762]: W1009 14:53:09.709168 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod50735fb3_df1e_4342_ad26_07c3d0122688.slice/crio-a52ef32da075943e6b3f85eb5fba91772f59be874bcb06a3849a2ca64dbbe60c WatchSource:0}: Error finding container a52ef32da075943e6b3f85eb5fba91772f59be874bcb06a3849a2ca64dbbe60c: Status 404 returned error can't find the container with id a52ef32da075943e6b3f85eb5fba91772f59be874bcb06a3849a2ca64dbbe60c Oct 09 14:53:10 crc kubenswrapper[4762]: I1009 14:53:10.219027 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"3e89ac68-797b-44dc-9cae-772ebfc5eca6","Type":"ContainerStarted","Data":"1f56fa9debd13909b2ed3957e8335fcac201e34d7d39305ab2b70bd1436872f3"} Oct 09 14:53:10 crc kubenswrapper[4762]: I1009 14:53:10.219078 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"3e89ac68-797b-44dc-9cae-772ebfc5eca6","Type":"ContainerStarted","Data":"7253430c2b55a55ee3df656da6961933465bbb42d370aed99878c934d01bacad"} Oct 09 14:53:10 crc kubenswrapper[4762]: I1009 14:53:10.219090 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"3e89ac68-797b-44dc-9cae-772ebfc5eca6","Type":"ContainerStarted","Data":"1461823e4a7010f23a6678aaba859d52333d631f03d0671f1fac6764b6fc1c48"} Oct 09 14:53:10 crc kubenswrapper[4762]: I1009 14:53:10.224316 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"18f94f81-220f-4213-a416-5a1c2b4a5f3d","Type":"ContainerStarted","Data":"d729b8019d613139044380c477da5ed5bbb42a61810df85e4ac2fc7f083dfac5"} Oct 09 14:53:10 crc kubenswrapper[4762]: I1009 14:53:10.224569 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"18f94f81-220f-4213-a416-5a1c2b4a5f3d","Type":"ContainerStarted","Data":"bcb7718fa8bc6ea6d0417ea453f7cebe9e0cace2e81037b8040208cba79014e3"} Oct 09 14:53:10 crc kubenswrapper[4762]: I1009 14:53:10.224692 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" 
event={"ID":"18f94f81-220f-4213-a416-5a1c2b4a5f3d","Type":"ContainerStarted","Data":"cd0920dd842360b086319877d5a338c883acf5d18e63bdd7ed9e5a6db8eb2e14"} Oct 09 14:53:10 crc kubenswrapper[4762]: I1009 14:53:10.228021 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-1" event={"ID":"50735fb3-df1e-4342-ad26-07c3d0122688","Type":"ContainerStarted","Data":"f14ebde32e210bf7b4304870ae9b5fb2ee4bd153cba722201ec54a75904599b1"} Oct 09 14:53:10 crc kubenswrapper[4762]: I1009 14:53:10.228151 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-1" event={"ID":"50735fb3-df1e-4342-ad26-07c3d0122688","Type":"ContainerStarted","Data":"a52ef32da075943e6b3f85eb5fba91772f59be874bcb06a3849a2ca64dbbe60c"} Oct 09 14:53:10 crc kubenswrapper[4762]: I1009 14:53:10.235793 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-1" event={"ID":"d4d58842-51b7-4201-b882-ce8f78057a12","Type":"ContainerStarted","Data":"88a3faa4a6bb8c635b43846ba090cdda9fa6886770bb7bd91b2c94132031271c"} Oct 09 14:53:10 crc kubenswrapper[4762]: I1009 14:53:10.235901 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-1" event={"ID":"d4d58842-51b7-4201-b882-ce8f78057a12","Type":"ContainerStarted","Data":"7b149d6feb2a232108e82e0a30b750ef2eafd38e19df66069e0331367a0bf864"} Oct 09 14:53:10 crc kubenswrapper[4762]: I1009 14:53:10.252426 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=3.252400823 podStartE2EDuration="3.252400823s" podCreationTimestamp="2025-10-09 14:53:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 14:53:10.248167523 +0000 UTC m=+5266.021958562" watchObservedRunningTime="2025-10-09 14:53:10.252400823 +0000 UTC m=+5266.026191862" Oct 09 14:53:10 crc kubenswrapper[4762]: I1009 14:53:10.280620 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=3.280595069 podStartE2EDuration="3.280595069s" podCreationTimestamp="2025-10-09 14:53:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 14:53:10.275519856 +0000 UTC m=+5266.049310905" watchObservedRunningTime="2025-10-09 14:53:10.280595069 +0000 UTC m=+5266.054386108" Oct 09 14:53:10 crc kubenswrapper[4762]: W1009 14:53:10.332051 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda94dcf0b_969d_4d7f_b3f4_ab97475e5bf2.slice/crio-0c5def2cff86dcc36f12440a2e3e390e034521d43ee64360cb99767152314d6e WatchSource:0}: Error finding container 0c5def2cff86dcc36f12440a2e3e390e034521d43ee64360cb99767152314d6e: Status 404 returned error can't find the container with id 0c5def2cff86dcc36f12440a2e3e390e034521d43ee64360cb99767152314d6e Oct 09 14:53:10 crc kubenswrapper[4762]: I1009 14:53:10.332281 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-2"] Oct 09 14:53:10 crc kubenswrapper[4762]: I1009 14:53:10.444968 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-2"] Oct 09 14:53:10 crc kubenswrapper[4762]: W1009 14:53:10.447489 4762 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod50181ec3_4201_4f45_aa83_956765556089.slice/crio-7eea04ce1c0d2fc0f134a100073762d598f84b220ccb4ad8a657dbfca57cdc7a WatchSource:0}: Error finding container 7eea04ce1c0d2fc0f134a100073762d598f84b220ccb4ad8a657dbfca57cdc7a: Status 404 returned error can't find the container with id 7eea04ce1c0d2fc0f134a100073762d598f84b220ccb4ad8a657dbfca57cdc7a Oct 09 14:53:11 crc kubenswrapper[4762]: I1009 14:53:11.246412 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-1" event={"ID":"50735fb3-df1e-4342-ad26-07c3d0122688","Type":"ContainerStarted","Data":"d22b3d71b1d8a62aa6df9f2e8daccb06d7d4093847f6d667780b63437664c428"} Oct 09 14:53:11 crc kubenswrapper[4762]: I1009 14:53:11.248915 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-1" event={"ID":"d4d58842-51b7-4201-b882-ce8f78057a12","Type":"ContainerStarted","Data":"3041564a1fd1eeeff86b639cb2326b3bb13550e10a41f44de85e5c9b166b7dcf"} Oct 09 14:53:11 crc kubenswrapper[4762]: I1009 14:53:11.252841 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-2" event={"ID":"50181ec3-4201-4f45-aa83-956765556089","Type":"ContainerStarted","Data":"4b71ce3f5bb9bf7c28ca613bfc4c5dd85fe59700ec6af2098ebe91d7285e3ba1"} Oct 09 14:53:11 crc kubenswrapper[4762]: I1009 14:53:11.252916 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-2" event={"ID":"50181ec3-4201-4f45-aa83-956765556089","Type":"ContainerStarted","Data":"1f21b5f565b482a5272aaf184bd2ef1d28be9790c2f652a95328e075027e2e9e"} Oct 09 14:53:11 crc kubenswrapper[4762]: I1009 14:53:11.252932 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-2" event={"ID":"50181ec3-4201-4f45-aa83-956765556089","Type":"ContainerStarted","Data":"7eea04ce1c0d2fc0f134a100073762d598f84b220ccb4ad8a657dbfca57cdc7a"} Oct 09 14:53:11 crc kubenswrapper[4762]: I1009 14:53:11.255848 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-2" event={"ID":"a94dcf0b-969d-4d7f-b3f4-ab97475e5bf2","Type":"ContainerStarted","Data":"303797ad2009817237c1446e485e71109061b044e61ebf50a6d3f520a9c2ed23"} Oct 09 14:53:11 crc kubenswrapper[4762]: I1009 14:53:11.255919 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-2" event={"ID":"a94dcf0b-969d-4d7f-b3f4-ab97475e5bf2","Type":"ContainerStarted","Data":"9c7a983b5614ddaf2acf80c3c8d6f75dbb847afa7859c7e732959916afe5915b"} Oct 09 14:53:11 crc kubenswrapper[4762]: I1009 14:53:11.255947 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-2" event={"ID":"a94dcf0b-969d-4d7f-b3f4-ab97475e5bf2","Type":"ContainerStarted","Data":"0c5def2cff86dcc36f12440a2e3e390e034521d43ee64360cb99767152314d6e"} Oct 09 14:53:11 crc kubenswrapper[4762]: I1009 14:53:11.273339 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-1" podStartSLOduration=4.273319501 podStartE2EDuration="4.273319501s" podCreationTimestamp="2025-10-09 14:53:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 14:53:11.270131037 +0000 UTC m=+5267.043922116" watchObservedRunningTime="2025-10-09 14:53:11.273319501 +0000 UTC m=+5267.047110540" Oct 09 14:53:11 crc kubenswrapper[4762]: I1009 14:53:11.298200 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack/ovsdbserver-sb-1" podStartSLOduration=4.298176368 podStartE2EDuration="4.298176368s" podCreationTimestamp="2025-10-09 14:53:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 14:53:11.292227644 +0000 UTC m=+5267.066018703" watchObservedRunningTime="2025-10-09 14:53:11.298176368 +0000 UTC m=+5267.071967427" Oct 09 14:53:11 crc kubenswrapper[4762]: I1009 14:53:11.335815 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-2" podStartSLOduration=4.335795799 podStartE2EDuration="4.335795799s" podCreationTimestamp="2025-10-09 14:53:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 14:53:11.33389466 +0000 UTC m=+5267.107685699" watchObservedRunningTime="2025-10-09 14:53:11.335795799 +0000 UTC m=+5267.109586838" Oct 09 14:53:11 crc kubenswrapper[4762]: I1009 14:53:11.337715 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-2" podStartSLOduration=4.337707359 podStartE2EDuration="4.337707359s" podCreationTimestamp="2025-10-09 14:53:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 14:53:11.320460089 +0000 UTC m=+5267.094251128" watchObservedRunningTime="2025-10-09 14:53:11.337707359 +0000 UTC m=+5267.111498398" Oct 09 14:53:11 crc kubenswrapper[4762]: I1009 14:53:11.818096 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0" Oct 09 14:53:11 crc kubenswrapper[4762]: I1009 14:53:11.969149 4762 patch_prober.go:28] interesting pod/machine-config-daemon-5v6hv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 14:53:11 crc kubenswrapper[4762]: I1009 14:53:11.969253 4762 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 14:53:12 crc kubenswrapper[4762]: I1009 14:53:12.005472 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0" Oct 09 14:53:12 crc kubenswrapper[4762]: I1009 14:53:12.050226 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0" Oct 09 14:53:12 crc kubenswrapper[4762]: I1009 14:53:12.116690 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-2" Oct 09 14:53:12 crc kubenswrapper[4762]: I1009 14:53:12.137452 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-2" Oct 09 14:53:12 crc kubenswrapper[4762]: I1009 14:53:12.148779 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-1" Oct 09 14:53:12 crc kubenswrapper[4762]: I1009 14:53:12.200556 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-1" Oct 09 14:53:12 crc kubenswrapper[4762]: I1009 14:53:12.266245 4762 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0" Oct 09 14:53:13 crc kubenswrapper[4762]: I1009 14:53:13.817721 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0" Oct 09 14:53:14 crc kubenswrapper[4762]: I1009 14:53:14.051819 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0" Oct 09 14:53:14 crc kubenswrapper[4762]: I1009 14:53:14.116818 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-2" Oct 09 14:53:14 crc kubenswrapper[4762]: I1009 14:53:14.137223 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-2" Oct 09 14:53:14 crc kubenswrapper[4762]: I1009 14:53:14.149248 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-1" Oct 09 14:53:14 crc kubenswrapper[4762]: I1009 14:53:14.199850 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-1" Oct 09 14:53:14 crc kubenswrapper[4762]: I1009 14:53:14.312273 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5f7b874789-66mtd"] Oct 09 14:53:14 crc kubenswrapper[4762]: I1009 14:53:14.316181 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5f7b874789-66mtd" Oct 09 14:53:14 crc kubenswrapper[4762]: I1009 14:53:14.320290 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb" Oct 09 14:53:14 crc kubenswrapper[4762]: I1009 14:53:14.324904 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5f7b874789-66mtd"] Oct 09 14:53:14 crc kubenswrapper[4762]: I1009 14:53:14.428591 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6bm25\" (UniqueName: \"kubernetes.io/projected/75e025b4-af51-46a2-ac29-2517cf416f96-kube-api-access-6bm25\") pod \"dnsmasq-dns-5f7b874789-66mtd\" (UID: \"75e025b4-af51-46a2-ac29-2517cf416f96\") " pod="openstack/dnsmasq-dns-5f7b874789-66mtd" Oct 09 14:53:14 crc kubenswrapper[4762]: I1009 14:53:14.429016 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/75e025b4-af51-46a2-ac29-2517cf416f96-dns-svc\") pod \"dnsmasq-dns-5f7b874789-66mtd\" (UID: \"75e025b4-af51-46a2-ac29-2517cf416f96\") " pod="openstack/dnsmasq-dns-5f7b874789-66mtd" Oct 09 14:53:14 crc kubenswrapper[4762]: I1009 14:53:14.429090 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/75e025b4-af51-46a2-ac29-2517cf416f96-ovsdbserver-nb\") pod \"dnsmasq-dns-5f7b874789-66mtd\" (UID: \"75e025b4-af51-46a2-ac29-2517cf416f96\") " pod="openstack/dnsmasq-dns-5f7b874789-66mtd" Oct 09 14:53:14 crc kubenswrapper[4762]: I1009 14:53:14.429145 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/75e025b4-af51-46a2-ac29-2517cf416f96-config\") pod \"dnsmasq-dns-5f7b874789-66mtd\" (UID: \"75e025b4-af51-46a2-ac29-2517cf416f96\") " pod="openstack/dnsmasq-dns-5f7b874789-66mtd" Oct 09 14:53:14 crc kubenswrapper[4762]: I1009 14:53:14.530348 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: 
\"kubernetes.io/configmap/75e025b4-af51-46a2-ac29-2517cf416f96-dns-svc\") pod \"dnsmasq-dns-5f7b874789-66mtd\" (UID: \"75e025b4-af51-46a2-ac29-2517cf416f96\") " pod="openstack/dnsmasq-dns-5f7b874789-66mtd" Oct 09 14:53:14 crc kubenswrapper[4762]: I1009 14:53:14.530404 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/75e025b4-af51-46a2-ac29-2517cf416f96-ovsdbserver-nb\") pod \"dnsmasq-dns-5f7b874789-66mtd\" (UID: \"75e025b4-af51-46a2-ac29-2517cf416f96\") " pod="openstack/dnsmasq-dns-5f7b874789-66mtd" Oct 09 14:53:14 crc kubenswrapper[4762]: I1009 14:53:14.530438 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/75e025b4-af51-46a2-ac29-2517cf416f96-config\") pod \"dnsmasq-dns-5f7b874789-66mtd\" (UID: \"75e025b4-af51-46a2-ac29-2517cf416f96\") " pod="openstack/dnsmasq-dns-5f7b874789-66mtd" Oct 09 14:53:14 crc kubenswrapper[4762]: I1009 14:53:14.530465 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6bm25\" (UniqueName: \"kubernetes.io/projected/75e025b4-af51-46a2-ac29-2517cf416f96-kube-api-access-6bm25\") pod \"dnsmasq-dns-5f7b874789-66mtd\" (UID: \"75e025b4-af51-46a2-ac29-2517cf416f96\") " pod="openstack/dnsmasq-dns-5f7b874789-66mtd" Oct 09 14:53:14 crc kubenswrapper[4762]: I1009 14:53:14.532862 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/75e025b4-af51-46a2-ac29-2517cf416f96-dns-svc\") pod \"dnsmasq-dns-5f7b874789-66mtd\" (UID: \"75e025b4-af51-46a2-ac29-2517cf416f96\") " pod="openstack/dnsmasq-dns-5f7b874789-66mtd" Oct 09 14:53:14 crc kubenswrapper[4762]: I1009 14:53:14.534274 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/75e025b4-af51-46a2-ac29-2517cf416f96-ovsdbserver-nb\") pod \"dnsmasq-dns-5f7b874789-66mtd\" (UID: \"75e025b4-af51-46a2-ac29-2517cf416f96\") " pod="openstack/dnsmasq-dns-5f7b874789-66mtd" Oct 09 14:53:14 crc kubenswrapper[4762]: I1009 14:53:14.534728 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/75e025b4-af51-46a2-ac29-2517cf416f96-config\") pod \"dnsmasq-dns-5f7b874789-66mtd\" (UID: \"75e025b4-af51-46a2-ac29-2517cf416f96\") " pod="openstack/dnsmasq-dns-5f7b874789-66mtd" Oct 09 14:53:14 crc kubenswrapper[4762]: I1009 14:53:14.554468 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6bm25\" (UniqueName: \"kubernetes.io/projected/75e025b4-af51-46a2-ac29-2517cf416f96-kube-api-access-6bm25\") pod \"dnsmasq-dns-5f7b874789-66mtd\" (UID: \"75e025b4-af51-46a2-ac29-2517cf416f96\") " pod="openstack/dnsmasq-dns-5f7b874789-66mtd" Oct 09 14:53:14 crc kubenswrapper[4762]: I1009 14:53:14.644345 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5f7b874789-66mtd" Oct 09 14:53:14 crc kubenswrapper[4762]: I1009 14:53:14.869370 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0" Oct 09 14:53:14 crc kubenswrapper[4762]: I1009 14:53:14.918379 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0" Oct 09 14:53:15 crc kubenswrapper[4762]: I1009 14:53:15.071686 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5f7b874789-66mtd"] Oct 09 14:53:15 crc kubenswrapper[4762]: I1009 14:53:15.188451 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-2" Oct 09 14:53:15 crc kubenswrapper[4762]: I1009 14:53:15.189164 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-2" Oct 09 14:53:15 crc kubenswrapper[4762]: I1009 14:53:15.230971 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-1" Oct 09 14:53:15 crc kubenswrapper[4762]: I1009 14:53:15.247563 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5f7b874789-66mtd"] Oct 09 14:53:15 crc kubenswrapper[4762]: I1009 14:53:15.265331 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7c9c99d997-d6f7h"] Oct 09 14:53:15 crc kubenswrapper[4762]: I1009 14:53:15.266876 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7c9c99d997-d6f7h" Oct 09 14:53:15 crc kubenswrapper[4762]: I1009 14:53:15.271928 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb" Oct 09 14:53:15 crc kubenswrapper[4762]: I1009 14:53:15.282892 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7c9c99d997-d6f7h"] Oct 09 14:53:15 crc kubenswrapper[4762]: I1009 14:53:15.302508 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f7b874789-66mtd" event={"ID":"75e025b4-af51-46a2-ac29-2517cf416f96","Type":"ContainerStarted","Data":"f6cd7248e610fcebcfa92389ccd58b0dd4e5b5333193a4cbfec696d007fb542a"} Oct 09 14:53:15 crc kubenswrapper[4762]: I1009 14:53:15.324095 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-1" Oct 09 14:53:15 crc kubenswrapper[4762]: I1009 14:53:15.344133 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f2f20c73-29a3-48a9-bbf4-3a8677b631ee-dns-svc\") pod \"dnsmasq-dns-7c9c99d997-d6f7h\" (UID: \"f2f20c73-29a3-48a9-bbf4-3a8677b631ee\") " pod="openstack/dnsmasq-dns-7c9c99d997-d6f7h" Oct 09 14:53:15 crc kubenswrapper[4762]: I1009 14:53:15.344470 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f2f20c73-29a3-48a9-bbf4-3a8677b631ee-ovsdbserver-sb\") pod \"dnsmasq-dns-7c9c99d997-d6f7h\" (UID: \"f2f20c73-29a3-48a9-bbf4-3a8677b631ee\") " pod="openstack/dnsmasq-dns-7c9c99d997-d6f7h" Oct 09 14:53:15 crc kubenswrapper[4762]: I1009 14:53:15.344579 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z2h2f\" (UniqueName: \"kubernetes.io/projected/f2f20c73-29a3-48a9-bbf4-3a8677b631ee-kube-api-access-z2h2f\") pod \"dnsmasq-dns-7c9c99d997-d6f7h\" (UID: 
\"f2f20c73-29a3-48a9-bbf4-3a8677b631ee\") " pod="openstack/dnsmasq-dns-7c9c99d997-d6f7h" Oct 09 14:53:15 crc kubenswrapper[4762]: I1009 14:53:15.344720 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f2f20c73-29a3-48a9-bbf4-3a8677b631ee-config\") pod \"dnsmasq-dns-7c9c99d997-d6f7h\" (UID: \"f2f20c73-29a3-48a9-bbf4-3a8677b631ee\") " pod="openstack/dnsmasq-dns-7c9c99d997-d6f7h" Oct 09 14:53:15 crc kubenswrapper[4762]: I1009 14:53:15.345277 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f2f20c73-29a3-48a9-bbf4-3a8677b631ee-ovsdbserver-nb\") pod \"dnsmasq-dns-7c9c99d997-d6f7h\" (UID: \"f2f20c73-29a3-48a9-bbf4-3a8677b631ee\") " pod="openstack/dnsmasq-dns-7c9c99d997-d6f7h" Oct 09 14:53:15 crc kubenswrapper[4762]: I1009 14:53:15.395019 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-1" Oct 09 14:53:15 crc kubenswrapper[4762]: I1009 14:53:15.397147 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-2" Oct 09 14:53:15 crc kubenswrapper[4762]: I1009 14:53:15.408576 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-2" Oct 09 14:53:15 crc kubenswrapper[4762]: I1009 14:53:15.412159 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-1" Oct 09 14:53:15 crc kubenswrapper[4762]: I1009 14:53:15.451429 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f2f20c73-29a3-48a9-bbf4-3a8677b631ee-config\") pod \"dnsmasq-dns-7c9c99d997-d6f7h\" (UID: \"f2f20c73-29a3-48a9-bbf4-3a8677b631ee\") " pod="openstack/dnsmasq-dns-7c9c99d997-d6f7h" Oct 09 14:53:15 crc kubenswrapper[4762]: I1009 14:53:15.451510 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f2f20c73-29a3-48a9-bbf4-3a8677b631ee-ovsdbserver-nb\") pod \"dnsmasq-dns-7c9c99d997-d6f7h\" (UID: \"f2f20c73-29a3-48a9-bbf4-3a8677b631ee\") " pod="openstack/dnsmasq-dns-7c9c99d997-d6f7h" Oct 09 14:53:15 crc kubenswrapper[4762]: I1009 14:53:15.451790 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f2f20c73-29a3-48a9-bbf4-3a8677b631ee-dns-svc\") pod \"dnsmasq-dns-7c9c99d997-d6f7h\" (UID: \"f2f20c73-29a3-48a9-bbf4-3a8677b631ee\") " pod="openstack/dnsmasq-dns-7c9c99d997-d6f7h" Oct 09 14:53:15 crc kubenswrapper[4762]: I1009 14:53:15.451828 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f2f20c73-29a3-48a9-bbf4-3a8677b631ee-ovsdbserver-sb\") pod \"dnsmasq-dns-7c9c99d997-d6f7h\" (UID: \"f2f20c73-29a3-48a9-bbf4-3a8677b631ee\") " pod="openstack/dnsmasq-dns-7c9c99d997-d6f7h" Oct 09 14:53:15 crc kubenswrapper[4762]: I1009 14:53:15.451866 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z2h2f\" (UniqueName: \"kubernetes.io/projected/f2f20c73-29a3-48a9-bbf4-3a8677b631ee-kube-api-access-z2h2f\") pod \"dnsmasq-dns-7c9c99d997-d6f7h\" (UID: \"f2f20c73-29a3-48a9-bbf4-3a8677b631ee\") " pod="openstack/dnsmasq-dns-7c9c99d997-d6f7h" Oct 09 14:53:15 crc kubenswrapper[4762]: I1009 
14:53:15.452828 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f2f20c73-29a3-48a9-bbf4-3a8677b631ee-config\") pod \"dnsmasq-dns-7c9c99d997-d6f7h\" (UID: \"f2f20c73-29a3-48a9-bbf4-3a8677b631ee\") " pod="openstack/dnsmasq-dns-7c9c99d997-d6f7h" Oct 09 14:53:15 crc kubenswrapper[4762]: I1009 14:53:15.453002 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f2f20c73-29a3-48a9-bbf4-3a8677b631ee-ovsdbserver-nb\") pod \"dnsmasq-dns-7c9c99d997-d6f7h\" (UID: \"f2f20c73-29a3-48a9-bbf4-3a8677b631ee\") " pod="openstack/dnsmasq-dns-7c9c99d997-d6f7h" Oct 09 14:53:15 crc kubenswrapper[4762]: I1009 14:53:15.453748 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f2f20c73-29a3-48a9-bbf4-3a8677b631ee-ovsdbserver-sb\") pod \"dnsmasq-dns-7c9c99d997-d6f7h\" (UID: \"f2f20c73-29a3-48a9-bbf4-3a8677b631ee\") " pod="openstack/dnsmasq-dns-7c9c99d997-d6f7h" Oct 09 14:53:15 crc kubenswrapper[4762]: I1009 14:53:15.453805 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f2f20c73-29a3-48a9-bbf4-3a8677b631ee-dns-svc\") pod \"dnsmasq-dns-7c9c99d997-d6f7h\" (UID: \"f2f20c73-29a3-48a9-bbf4-3a8677b631ee\") " pod="openstack/dnsmasq-dns-7c9c99d997-d6f7h" Oct 09 14:53:15 crc kubenswrapper[4762]: I1009 14:53:15.473147 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z2h2f\" (UniqueName: \"kubernetes.io/projected/f2f20c73-29a3-48a9-bbf4-3a8677b631ee-kube-api-access-z2h2f\") pod \"dnsmasq-dns-7c9c99d997-d6f7h\" (UID: \"f2f20c73-29a3-48a9-bbf4-3a8677b631ee\") " pod="openstack/dnsmasq-dns-7c9c99d997-d6f7h" Oct 09 14:53:15 crc kubenswrapper[4762]: I1009 14:53:15.588761 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7c9c99d997-d6f7h" Oct 09 14:53:16 crc kubenswrapper[4762]: I1009 14:53:16.039156 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7c9c99d997-d6f7h"] Oct 09 14:53:16 crc kubenswrapper[4762]: I1009 14:53:16.310920 4762 generic.go:334] "Generic (PLEG): container finished" podID="75e025b4-af51-46a2-ac29-2517cf416f96" containerID="3e0f7017fcaf8d93268b5f0a1a1d28a9a8a2cec0250d2ae690a4a82544989886" exitCode=0 Oct 09 14:53:16 crc kubenswrapper[4762]: I1009 14:53:16.311039 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f7b874789-66mtd" event={"ID":"75e025b4-af51-46a2-ac29-2517cf416f96","Type":"ContainerDied","Data":"3e0f7017fcaf8d93268b5f0a1a1d28a9a8a2cec0250d2ae690a4a82544989886"} Oct 09 14:53:16 crc kubenswrapper[4762]: I1009 14:53:16.314625 4762 generic.go:334] "Generic (PLEG): container finished" podID="f2f20c73-29a3-48a9-bbf4-3a8677b631ee" containerID="28babd730a097fafc9ec51cf64e42fd5df1a8b727d1b7952076d72161a3b3198" exitCode=0 Oct 09 14:53:16 crc kubenswrapper[4762]: I1009 14:53:16.314756 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7c9c99d997-d6f7h" event={"ID":"f2f20c73-29a3-48a9-bbf4-3a8677b631ee","Type":"ContainerDied","Data":"28babd730a097fafc9ec51cf64e42fd5df1a8b727d1b7952076d72161a3b3198"} Oct 09 14:53:16 crc kubenswrapper[4762]: I1009 14:53:16.314804 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7c9c99d997-d6f7h" event={"ID":"f2f20c73-29a3-48a9-bbf4-3a8677b631ee","Type":"ContainerStarted","Data":"a9d1657c4e2c3116a8718d6c574c92ba6cbbff3d9c89b86f44b69e025629af84"} Oct 09 14:53:16 crc kubenswrapper[4762]: I1009 14:53:16.551262 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5f7b874789-66mtd" Oct 09 14:53:16 crc kubenswrapper[4762]: I1009 14:53:16.669257 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/75e025b4-af51-46a2-ac29-2517cf416f96-config\") pod \"75e025b4-af51-46a2-ac29-2517cf416f96\" (UID: \"75e025b4-af51-46a2-ac29-2517cf416f96\") " Oct 09 14:53:16 crc kubenswrapper[4762]: I1009 14:53:16.669338 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/75e025b4-af51-46a2-ac29-2517cf416f96-dns-svc\") pod \"75e025b4-af51-46a2-ac29-2517cf416f96\" (UID: \"75e025b4-af51-46a2-ac29-2517cf416f96\") " Oct 09 14:53:16 crc kubenswrapper[4762]: I1009 14:53:16.669370 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/75e025b4-af51-46a2-ac29-2517cf416f96-ovsdbserver-nb\") pod \"75e025b4-af51-46a2-ac29-2517cf416f96\" (UID: \"75e025b4-af51-46a2-ac29-2517cf416f96\") " Oct 09 14:53:16 crc kubenswrapper[4762]: I1009 14:53:16.669453 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6bm25\" (UniqueName: \"kubernetes.io/projected/75e025b4-af51-46a2-ac29-2517cf416f96-kube-api-access-6bm25\") pod \"75e025b4-af51-46a2-ac29-2517cf416f96\" (UID: \"75e025b4-af51-46a2-ac29-2517cf416f96\") " Oct 09 14:53:16 crc kubenswrapper[4762]: I1009 14:53:16.673144 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/75e025b4-af51-46a2-ac29-2517cf416f96-kube-api-access-6bm25" (OuterVolumeSpecName: "kube-api-access-6bm25") pod "75e025b4-af51-46a2-ac29-2517cf416f96" (UID: "75e025b4-af51-46a2-ac29-2517cf416f96"). InnerVolumeSpecName "kube-api-access-6bm25". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 14:53:16 crc kubenswrapper[4762]: I1009 14:53:16.689000 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/75e025b4-af51-46a2-ac29-2517cf416f96-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "75e025b4-af51-46a2-ac29-2517cf416f96" (UID: "75e025b4-af51-46a2-ac29-2517cf416f96"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 14:53:16 crc kubenswrapper[4762]: I1009 14:53:16.689985 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/75e025b4-af51-46a2-ac29-2517cf416f96-config" (OuterVolumeSpecName: "config") pod "75e025b4-af51-46a2-ac29-2517cf416f96" (UID: "75e025b4-af51-46a2-ac29-2517cf416f96"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 14:53:16 crc kubenswrapper[4762]: I1009 14:53:16.692808 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/75e025b4-af51-46a2-ac29-2517cf416f96-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "75e025b4-af51-46a2-ac29-2517cf416f96" (UID: "75e025b4-af51-46a2-ac29-2517cf416f96"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 14:53:16 crc kubenswrapper[4762]: I1009 14:53:16.771679 4762 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/75e025b4-af51-46a2-ac29-2517cf416f96-config\") on node \"crc\" DevicePath \"\"" Oct 09 14:53:16 crc kubenswrapper[4762]: I1009 14:53:16.771712 4762 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/75e025b4-af51-46a2-ac29-2517cf416f96-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 09 14:53:16 crc kubenswrapper[4762]: I1009 14:53:16.771726 4762 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/75e025b4-af51-46a2-ac29-2517cf416f96-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 09 14:53:16 crc kubenswrapper[4762]: I1009 14:53:16.771741 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6bm25\" (UniqueName: \"kubernetes.io/projected/75e025b4-af51-46a2-ac29-2517cf416f96-kube-api-access-6bm25\") on node \"crc\" DevicePath \"\"" Oct 09 14:53:17 crc kubenswrapper[4762]: I1009 14:53:17.325142 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f7b874789-66mtd" event={"ID":"75e025b4-af51-46a2-ac29-2517cf416f96","Type":"ContainerDied","Data":"f6cd7248e610fcebcfa92389ccd58b0dd4e5b5333193a4cbfec696d007fb542a"} Oct 09 14:53:17 crc kubenswrapper[4762]: I1009 14:53:17.325203 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5f7b874789-66mtd" Oct 09 14:53:17 crc kubenswrapper[4762]: I1009 14:53:17.325577 4762 scope.go:117] "RemoveContainer" containerID="3e0f7017fcaf8d93268b5f0a1a1d28a9a8a2cec0250d2ae690a4a82544989886" Oct 09 14:53:17 crc kubenswrapper[4762]: I1009 14:53:17.328092 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7c9c99d997-d6f7h" event={"ID":"f2f20c73-29a3-48a9-bbf4-3a8677b631ee","Type":"ContainerStarted","Data":"02adad4bf412503c556a0a26c6455a6ec4f87c56b89a8567590effd52a2207a3"} Oct 09 14:53:17 crc kubenswrapper[4762]: I1009 14:53:17.329126 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7c9c99d997-d6f7h" Oct 09 14:53:17 crc kubenswrapper[4762]: I1009 14:53:17.354001 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7c9c99d997-d6f7h" podStartSLOduration=2.353981296 podStartE2EDuration="2.353981296s" podCreationTimestamp="2025-10-09 14:53:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 14:53:17.352736454 +0000 UTC m=+5273.126527493" watchObservedRunningTime="2025-10-09 14:53:17.353981296 +0000 UTC m=+5273.127772335" Oct 09 14:53:17 crc kubenswrapper[4762]: I1009 14:53:17.408762 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5f7b874789-66mtd"] Oct 09 14:53:17 crc kubenswrapper[4762]: I1009 14:53:17.415068 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5f7b874789-66mtd"] Oct 09 14:53:18 crc kubenswrapper[4762]: I1009 14:53:18.393199 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-copy-data"] Oct 09 14:53:18 crc kubenswrapper[4762]: E1009 14:53:18.393540 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="75e025b4-af51-46a2-ac29-2517cf416f96" containerName="init" Oct 09 14:53:18 crc 
Oct 09 14:53:18 crc kubenswrapper[4762]: I1009 14:53:18.393199 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-copy-data"]
Oct 09 14:53:18 crc kubenswrapper[4762]: E1009 14:53:18.393540 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="75e025b4-af51-46a2-ac29-2517cf416f96" containerName="init"
Oct 09 14:53:18 crc kubenswrapper[4762]: I1009 14:53:18.393554 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="75e025b4-af51-46a2-ac29-2517cf416f96" containerName="init"
Oct 09 14:53:18 crc kubenswrapper[4762]: I1009 14:53:18.393727 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="75e025b4-af51-46a2-ac29-2517cf416f96" containerName="init"
Oct 09 14:53:18 crc kubenswrapper[4762]: I1009 14:53:18.394264 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-copy-data"
Oct 09 14:53:18 crc kubenswrapper[4762]: I1009 14:53:18.396751 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovn-data-cert"
Oct 09 14:53:18 crc kubenswrapper[4762]: I1009 14:53:18.409091 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-copy-data"]
Oct 09 14:53:18 crc kubenswrapper[4762]: I1009 14:53:18.514929 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-ec7d5c9e-6e84-4d3d-911b-2e8899e052cc\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ec7d5c9e-6e84-4d3d-911b-2e8899e052cc\") pod \"ovn-copy-data\" (UID: \"1df9a5a3-088f-428d-873a-30b7b82507e9\") " pod="openstack/ovn-copy-data"
Oct 09 14:53:18 crc kubenswrapper[4762]: I1009 14:53:18.514982 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-data-cert\" (UniqueName: \"kubernetes.io/secret/1df9a5a3-088f-428d-873a-30b7b82507e9-ovn-data-cert\") pod \"ovn-copy-data\" (UID: \"1df9a5a3-088f-428d-873a-30b7b82507e9\") " pod="openstack/ovn-copy-data"
Oct 09 14:53:18 crc kubenswrapper[4762]: I1009 14:53:18.515075 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j2cw8\" (UniqueName: \"kubernetes.io/projected/1df9a5a3-088f-428d-873a-30b7b82507e9-kube-api-access-j2cw8\") pod \"ovn-copy-data\" (UID: \"1df9a5a3-088f-428d-873a-30b7b82507e9\") " pod="openstack/ovn-copy-data"
Oct 09 14:53:18 crc kubenswrapper[4762]: I1009 14:53:18.616506 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j2cw8\" (UniqueName: \"kubernetes.io/projected/1df9a5a3-088f-428d-873a-30b7b82507e9-kube-api-access-j2cw8\") pod \"ovn-copy-data\" (UID: \"1df9a5a3-088f-428d-873a-30b7b82507e9\") " pod="openstack/ovn-copy-data"
Oct 09 14:53:18 crc kubenswrapper[4762]: I1009 14:53:18.616647 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-ec7d5c9e-6e84-4d3d-911b-2e8899e052cc\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ec7d5c9e-6e84-4d3d-911b-2e8899e052cc\") pod \"ovn-copy-data\" (UID: \"1df9a5a3-088f-428d-873a-30b7b82507e9\") " pod="openstack/ovn-copy-data"
Oct 09 14:53:18 crc kubenswrapper[4762]: I1009 14:53:18.616674 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-data-cert\" (UniqueName: \"kubernetes.io/secret/1df9a5a3-088f-428d-873a-30b7b82507e9-ovn-data-cert\") pod \"ovn-copy-data\" (UID: \"1df9a5a3-088f-428d-873a-30b7b82507e9\") " pod="openstack/ovn-copy-data"
Oct 09 14:53:18 crc kubenswrapper[4762]: I1009 14:53:18.619833 4762 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Oct 09 14:53:18 crc kubenswrapper[4762]: I1009 14:53:18.619881 4762 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-ec7d5c9e-6e84-4d3d-911b-2e8899e052cc\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ec7d5c9e-6e84-4d3d-911b-2e8899e052cc\") pod \"ovn-copy-data\" (UID: \"1df9a5a3-088f-428d-873a-30b7b82507e9\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/f4dd068daa186d1c08338f7a3d1cfe5d69030a6614cde717d2769d999f3763b5/globalmount\"" pod="openstack/ovn-copy-data"
Oct 09 14:53:18 crc kubenswrapper[4762]: I1009 14:53:18.626043 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-data-cert\" (UniqueName: \"kubernetes.io/secret/1df9a5a3-088f-428d-873a-30b7b82507e9-ovn-data-cert\") pod \"ovn-copy-data\" (UID: \"1df9a5a3-088f-428d-873a-30b7b82507e9\") " pod="openstack/ovn-copy-data"
Oct 09 14:53:18 crc kubenswrapper[4762]: I1009 14:53:18.634690 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j2cw8\" (UniqueName: \"kubernetes.io/projected/1df9a5a3-088f-428d-873a-30b7b82507e9-kube-api-access-j2cw8\") pod \"ovn-copy-data\" (UID: \"1df9a5a3-088f-428d-873a-30b7b82507e9\") " pod="openstack/ovn-copy-data"
Oct 09 14:53:18 crc kubenswrapper[4762]: I1009 14:53:18.672071 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-ec7d5c9e-6e84-4d3d-911b-2e8899e052cc\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ec7d5c9e-6e84-4d3d-911b-2e8899e052cc\") pod \"ovn-copy-data\" (UID: \"1df9a5a3-088f-428d-873a-30b7b82507e9\") " pod="openstack/ovn-copy-data"
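The CSI mount above happens in two phases: MountDevice stages the volume at a node-global path, then SetUp bind-mounts it into the pod directory. Because the kubevirt.io.hostpath-provisioner driver does not advertise STAGE_UNSTAGE_VOLUME, the attacher skips the staging step, as the csi_attacher.go line records. A sketch of that decision (illustrative types and paths, not kubelet internals):

package main

import "fmt"

// mountCSIVolume models the two-phase mount seen above: node-level staging
// (MountDevice) runs only when the driver reports STAGE_UNSTAGE_VOLUME;
// the per-pod SetUp bind mount always runs.
func mountCSIVolume(stageUnstage bool, vol, globalMount, podDir string) {
	if stageUnstage {
		fmt.Printf("MountVolume.MountDevice: staging %s at %s\n", vol, globalMount)
	} else {
		fmt.Println("attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...")
	}
	fmt.Printf("MountVolume.SetUp: bind-mounting %s into %s\n", vol, podDir)
}

func main() {
	mountCSIVolume(false,
		"pvc-ec7d5c9e-6e84-4d3d-911b-2e8899e052cc",
		"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/.../globalmount",
		"/var/lib/kubelet/pods/1df9a5a3-088f-428d-873a-30b7b82507e9/volumes/")
}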
Oct 09 14:53:18 crc kubenswrapper[4762]: I1009 14:53:18.734866 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-copy-data"
Oct 09 14:53:18 crc kubenswrapper[4762]: I1009 14:53:18.978324 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="75e025b4-af51-46a2-ac29-2517cf416f96" path="/var/lib/kubelet/pods/75e025b4-af51-46a2-ac29-2517cf416f96/volumes"
Oct 09 14:53:19 crc kubenswrapper[4762]: I1009 14:53:19.233150 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-copy-data"]
Oct 09 14:53:19 crc kubenswrapper[4762]: I1009 14:53:19.347435 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-copy-data" event={"ID":"1df9a5a3-088f-428d-873a-30b7b82507e9","Type":"ContainerStarted","Data":"12599279f93c2bffdf711cfcaf3998c2ba61cabf1a4951dbef74458167500bd4"}
Oct 09 14:53:20 crc kubenswrapper[4762]: I1009 14:53:20.356675 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-copy-data" event={"ID":"1df9a5a3-088f-428d-873a-30b7b82507e9","Type":"ContainerStarted","Data":"79be7ffe98523fed7899eef46ecc9d0bbe47f0a54c1f2cce7eafe7eb8f6dd344"}
Oct 09 14:53:20 crc kubenswrapper[4762]: I1009 14:53:20.371725 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-copy-data" podStartSLOduration=3.371709586 podStartE2EDuration="3.371709586s" podCreationTimestamp="2025-10-09 14:53:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 14:53:20.368192824 +0000 UTC m=+5276.141983863" watchObservedRunningTime="2025-10-09 14:53:20.371709586 +0000 UTC m=+5276.145500625"
Oct 09 14:53:25 crc kubenswrapper[4762]: I1009 14:53:25.589782 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-7c9c99d997-d6f7h"
Oct 09 14:53:25 crc kubenswrapper[4762]: I1009 14:53:25.648591 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5b7946d7b9-gxm4j"]
Oct 09 14:53:25 crc kubenswrapper[4762]: I1009 14:53:25.648903 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5b7946d7b9-gxm4j" podUID="a2a2847d-785a-4b3d-a7b6-852724bb6e22" containerName="dnsmasq-dns" containerID="cri-o://a32732444341e17901a04f2b273af76d2111680797c492c5663f3ed076d93da6" gracePeriod=10
Oct 09 14:53:25 crc kubenswrapper[4762]: I1009 14:53:25.792546 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"]
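The pod_startup_latency_tracker entry for ovn-copy-data above reports podStartSLOduration as the gap between podCreationTimestamp and the watch-observed running time. Replaying its numbers in Go (the layout string is Go's default time.Time text format):

package main

import (
	"fmt"
	"time"
)

func main() {
	const layout = "2006-01-02 15:04:05.999999999 -0700 MST"
	created, err := time.Parse(layout, "2025-10-09 14:53:17 +0000 UTC")
	if err != nil {
		panic(err)
	}
	observed, err := time.Parse(layout, "2025-10-09 14:53:20.371709586 +0000 UTC")
	if err != nil {
		panic(err)
	}
	// Prints 3.371709586, matching podStartSLOduration in the entry above.
	fmt.Println(observed.Sub(created).Seconds())
}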
Oct 09 14:53:25 crc kubenswrapper[4762]: I1009 14:53:25.804119 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0"
Oct 09 14:53:25 crc kubenswrapper[4762]: I1009 14:53:25.806593 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config"
Oct 09 14:53:25 crc kubenswrapper[4762]: I1009 14:53:25.807437 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-dz2f8"
Oct 09 14:53:25 crc kubenswrapper[4762]: I1009 14:53:25.819417 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts"
Oct 09 14:53:25 crc kubenswrapper[4762]: I1009 14:53:25.823661 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"]
Oct 09 14:53:25 crc kubenswrapper[4762]: I1009 14:53:25.833849 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa9c1c46-0da0-4f63-b219-9d716f3eb48f-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"aa9c1c46-0da0-4f63-b219-9d716f3eb48f\") " pod="openstack/ovn-northd-0"
Oct 09 14:53:25 crc kubenswrapper[4762]: I1009 14:53:25.833891 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/aa9c1c46-0da0-4f63-b219-9d716f3eb48f-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"aa9c1c46-0da0-4f63-b219-9d716f3eb48f\") " pod="openstack/ovn-northd-0"
Oct 09 14:53:25 crc kubenswrapper[4762]: I1009 14:53:25.833946 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8lcc4\" (UniqueName: \"kubernetes.io/projected/aa9c1c46-0da0-4f63-b219-9d716f3eb48f-kube-api-access-8lcc4\") pod \"ovn-northd-0\" (UID: \"aa9c1c46-0da0-4f63-b219-9d716f3eb48f\") " pod="openstack/ovn-northd-0"
Oct 09 14:53:25 crc kubenswrapper[4762]: I1009 14:53:25.833978 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aa9c1c46-0da0-4f63-b219-9d716f3eb48f-config\") pod \"ovn-northd-0\" (UID: \"aa9c1c46-0da0-4f63-b219-9d716f3eb48f\") " pod="openstack/ovn-northd-0"
Oct 09 14:53:25 crc kubenswrapper[4762]: I1009 14:53:25.834034 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/aa9c1c46-0da0-4f63-b219-9d716f3eb48f-scripts\") pod \"ovn-northd-0\" (UID: \"aa9c1c46-0da0-4f63-b219-9d716f3eb48f\") " pod="openstack/ovn-northd-0"
Oct 09 14:53:25 crc kubenswrapper[4762]: I1009 14:53:25.935944 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8lcc4\" (UniqueName: \"kubernetes.io/projected/aa9c1c46-0da0-4f63-b219-9d716f3eb48f-kube-api-access-8lcc4\") pod \"ovn-northd-0\" (UID: \"aa9c1c46-0da0-4f63-b219-9d716f3eb48f\") " pod="openstack/ovn-northd-0"
Oct 09 14:53:25 crc kubenswrapper[4762]: I1009 14:53:25.936015 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aa9c1c46-0da0-4f63-b219-9d716f3eb48f-config\") pod \"ovn-northd-0\" (UID: \"aa9c1c46-0da0-4f63-b219-9d716f3eb48f\") " pod="openstack/ovn-northd-0"
Oct 09 14:53:25 crc kubenswrapper[4762]: I1009 14:53:25.936090 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/aa9c1c46-0da0-4f63-b219-9d716f3eb48f-scripts\") pod \"ovn-northd-0\" (UID: \"aa9c1c46-0da0-4f63-b219-9d716f3eb48f\") " pod="openstack/ovn-northd-0"
Oct 09 14:53:25 crc kubenswrapper[4762]: I1009 14:53:25.936160 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa9c1c46-0da0-4f63-b219-9d716f3eb48f-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"aa9c1c46-0da0-4f63-b219-9d716f3eb48f\") " pod="openstack/ovn-northd-0"
Oct 09 14:53:25 crc kubenswrapper[4762]: I1009 14:53:25.936195 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/aa9c1c46-0da0-4f63-b219-9d716f3eb48f-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"aa9c1c46-0da0-4f63-b219-9d716f3eb48f\") " pod="openstack/ovn-northd-0"
Oct 09 14:53:25 crc kubenswrapper[4762]: I1009 14:53:25.937257 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/aa9c1c46-0da0-4f63-b219-9d716f3eb48f-scripts\") pod \"ovn-northd-0\" (UID: \"aa9c1c46-0da0-4f63-b219-9d716f3eb48f\") " pod="openstack/ovn-northd-0"
Oct 09 14:53:25 crc kubenswrapper[4762]: I1009 14:53:25.937771 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aa9c1c46-0da0-4f63-b219-9d716f3eb48f-config\") pod \"ovn-northd-0\" (UID: \"aa9c1c46-0da0-4f63-b219-9d716f3eb48f\") " pod="openstack/ovn-northd-0"
Oct 09 14:53:25 crc kubenswrapper[4762]: I1009 14:53:25.945551 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa9c1c46-0da0-4f63-b219-9d716f3eb48f-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"aa9c1c46-0da0-4f63-b219-9d716f3eb48f\") " pod="openstack/ovn-northd-0"
Oct 09 14:53:25 crc kubenswrapper[4762]: I1009 14:53:25.951852 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/aa9c1c46-0da0-4f63-b219-9d716f3eb48f-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"aa9c1c46-0da0-4f63-b219-9d716f3eb48f\") " pod="openstack/ovn-northd-0"
Oct 09 14:53:25 crc kubenswrapper[4762]: I1009 14:53:25.968726 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8lcc4\" (UniqueName: \"kubernetes.io/projected/aa9c1c46-0da0-4f63-b219-9d716f3eb48f-kube-api-access-8lcc4\") pod \"ovn-northd-0\" (UID: \"aa9c1c46-0da0-4f63-b219-9d716f3eb48f\") " pod="openstack/ovn-northd-0"
Oct 09 14:53:26 crc kubenswrapper[4762]: I1009 14:53:26.148053 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0"
Oct 09 14:53:26 crc kubenswrapper[4762]: I1009 14:53:26.184902 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5b7946d7b9-gxm4j"
Oct 09 14:53:26 crc kubenswrapper[4762]: I1009 14:53:26.240563 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a2a2847d-785a-4b3d-a7b6-852724bb6e22-dns-svc\") pod \"a2a2847d-785a-4b3d-a7b6-852724bb6e22\" (UID: \"a2a2847d-785a-4b3d-a7b6-852724bb6e22\") "
Oct 09 14:53:26 crc kubenswrapper[4762]: I1009 14:53:26.240624 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zjl4r\" (UniqueName: \"kubernetes.io/projected/a2a2847d-785a-4b3d-a7b6-852724bb6e22-kube-api-access-zjl4r\") pod \"a2a2847d-785a-4b3d-a7b6-852724bb6e22\" (UID: \"a2a2847d-785a-4b3d-a7b6-852724bb6e22\") "
Oct 09 14:53:26 crc kubenswrapper[4762]: I1009 14:53:26.240776 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a2a2847d-785a-4b3d-a7b6-852724bb6e22-config\") pod \"a2a2847d-785a-4b3d-a7b6-852724bb6e22\" (UID: \"a2a2847d-785a-4b3d-a7b6-852724bb6e22\") "
Oct 09 14:53:26 crc kubenswrapper[4762]: I1009 14:53:26.247161 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a2a2847d-785a-4b3d-a7b6-852724bb6e22-kube-api-access-zjl4r" (OuterVolumeSpecName: "kube-api-access-zjl4r") pod "a2a2847d-785a-4b3d-a7b6-852724bb6e22" (UID: "a2a2847d-785a-4b3d-a7b6-852724bb6e22"). InnerVolumeSpecName "kube-api-access-zjl4r". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 09 14:53:26 crc kubenswrapper[4762]: I1009 14:53:26.291215 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a2a2847d-785a-4b3d-a7b6-852724bb6e22-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "a2a2847d-785a-4b3d-a7b6-852724bb6e22" (UID: "a2a2847d-785a-4b3d-a7b6-852724bb6e22"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 09 14:53:26 crc kubenswrapper[4762]: I1009 14:53:26.295127 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a2a2847d-785a-4b3d-a7b6-852724bb6e22-config" (OuterVolumeSpecName: "config") pod "a2a2847d-785a-4b3d-a7b6-852724bb6e22" (UID: "a2a2847d-785a-4b3d-a7b6-852724bb6e22"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 09 14:53:26 crc kubenswrapper[4762]: I1009 14:53:26.342453 4762 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a2a2847d-785a-4b3d-a7b6-852724bb6e22-config\") on node \"crc\" DevicePath \"\""
Oct 09 14:53:26 crc kubenswrapper[4762]: I1009 14:53:26.342499 4762 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a2a2847d-785a-4b3d-a7b6-852724bb6e22-dns-svc\") on node \"crc\" DevicePath \"\""
Oct 09 14:53:26 crc kubenswrapper[4762]: I1009 14:53:26.342510 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zjl4r\" (UniqueName: \"kubernetes.io/projected/a2a2847d-785a-4b3d-a7b6-852724bb6e22-kube-api-access-zjl4r\") on node \"crc\" DevicePath \"\""
Oct 09 14:53:26 crc kubenswrapper[4762]: I1009 14:53:26.406530 4762 generic.go:334] "Generic (PLEG): container finished" podID="a2a2847d-785a-4b3d-a7b6-852724bb6e22" containerID="a32732444341e17901a04f2b273af76d2111680797c492c5663f3ed076d93da6" exitCode=0
Oct 09 14:53:26 crc kubenswrapper[4762]: I1009 14:53:26.406570 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b7946d7b9-gxm4j" event={"ID":"a2a2847d-785a-4b3d-a7b6-852724bb6e22","Type":"ContainerDied","Data":"a32732444341e17901a04f2b273af76d2111680797c492c5663f3ed076d93da6"}
Oct 09 14:53:26 crc kubenswrapper[4762]: I1009 14:53:26.406599 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b7946d7b9-gxm4j" event={"ID":"a2a2847d-785a-4b3d-a7b6-852724bb6e22","Type":"ContainerDied","Data":"3bff25c12c79fb0c7e632e4f9ee187dc3924c6b5fc5bd941da725fd2b164ae5f"}
Oct 09 14:53:26 crc kubenswrapper[4762]: I1009 14:53:26.406620 4762 scope.go:117] "RemoveContainer" containerID="a32732444341e17901a04f2b273af76d2111680797c492c5663f3ed076d93da6"
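The "SyncLoop (PLEG): event for pod" entries above are klog structured lines, so the pod, UID, event type, and container ID can be extracted mechanically. A small Go extractor run against one of the lines above:

package main

import (
	"fmt"
	"regexp"
)

// pleg matches the pod="..." and event={...} fields of a PLEG log line.
var pleg = regexp.MustCompile(`pod="([^"]+)" event=\{"ID":"([^"]+)","Type":"([^"]+)","Data":"([^"]+)"\}`)

func main() {
	line := `I1009 14:53:26.406570 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b7946d7b9-gxm4j" event={"ID":"a2a2847d-785a-4b3d-a7b6-852724bb6e22","Type":"ContainerDied","Data":"a32732444341e17901a04f2b273af76d2111680797c492c5663f3ed076d93da6"}`
	if m := pleg.FindStringSubmatch(line); m != nil {
		fmt.Printf("pod=%s uid=%s type=%s id=%s\n", m[1], m[2], m[3], m[4])
	}
}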
Oct 09 14:53:26 crc kubenswrapper[4762]: I1009 14:53:26.406664 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5b7946d7b9-gxm4j"
Oct 09 14:53:26 crc kubenswrapper[4762]: I1009 14:53:26.428011 4762 scope.go:117] "RemoveContainer" containerID="3fc9e30a7cf53b8e75a03c60923de9486d2ed2ce51b5f5253736028b327ed40c"
Oct 09 14:53:26 crc kubenswrapper[4762]: I1009 14:53:26.442418 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5b7946d7b9-gxm4j"]
Oct 09 14:53:26 crc kubenswrapper[4762]: I1009 14:53:26.447691 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5b7946d7b9-gxm4j"]
Oct 09 14:53:26 crc kubenswrapper[4762]: I1009 14:53:26.458079 4762 scope.go:117] "RemoveContainer" containerID="a32732444341e17901a04f2b273af76d2111680797c492c5663f3ed076d93da6"
Oct 09 14:53:26 crc kubenswrapper[4762]: E1009 14:53:26.458652 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a32732444341e17901a04f2b273af76d2111680797c492c5663f3ed076d93da6\": container with ID starting with a32732444341e17901a04f2b273af76d2111680797c492c5663f3ed076d93da6 not found: ID does not exist" containerID="a32732444341e17901a04f2b273af76d2111680797c492c5663f3ed076d93da6"
Oct 09 14:53:26 crc kubenswrapper[4762]: I1009 14:53:26.458718 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a32732444341e17901a04f2b273af76d2111680797c492c5663f3ed076d93da6"} err="failed to get container status \"a32732444341e17901a04f2b273af76d2111680797c492c5663f3ed076d93da6\": rpc error: code = NotFound desc = could not find container \"a32732444341e17901a04f2b273af76d2111680797c492c5663f3ed076d93da6\": container with ID starting with a32732444341e17901a04f2b273af76d2111680797c492c5663f3ed076d93da6 not found: ID does not exist"
Oct 09 14:53:26 crc kubenswrapper[4762]: I1009 14:53:26.458760 4762 scope.go:117] "RemoveContainer" containerID="3fc9e30a7cf53b8e75a03c60923de9486d2ed2ce51b5f5253736028b327ed40c"
Oct 09 14:53:26 crc kubenswrapper[4762]: E1009 14:53:26.459127 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3fc9e30a7cf53b8e75a03c60923de9486d2ed2ce51b5f5253736028b327ed40c\": container with ID starting with 3fc9e30a7cf53b8e75a03c60923de9486d2ed2ce51b5f5253736028b327ed40c not found: ID does not exist" containerID="3fc9e30a7cf53b8e75a03c60923de9486d2ed2ce51b5f5253736028b327ed40c"
Oct 09 14:53:26 crc kubenswrapper[4762]: I1009 14:53:26.459157 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3fc9e30a7cf53b8e75a03c60923de9486d2ed2ce51b5f5253736028b327ed40c"} err="failed to get container status \"3fc9e30a7cf53b8e75a03c60923de9486d2ed2ce51b5f5253736028b327ed40c\": rpc error: code = NotFound desc = could not find container \"3fc9e30a7cf53b8e75a03c60923de9486d2ed2ce51b5f5253736028b327ed40c\": container with ID starting with 3fc9e30a7cf53b8e75a03c60923de9486d2ed2ce51b5f5253736028b327ed40c not found: ID does not exist"
Oct 09 14:53:26 crc kubenswrapper[4762]: I1009 14:53:26.592326 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"]
Oct 09 14:53:26 crc kubenswrapper[4762]: W1009 14:53:26.595096 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podaa9c1c46_0da0_4f63_b219_9d716f3eb48f.slice/crio-6c57d7de6600a8abfd1ef09af8c070a71bf97bcc0ff5a66a243a8edbcdd96432 WatchSource:0}: Error finding container 6c57d7de6600a8abfd1ef09af8c070a71bf97bcc0ff5a66a243a8edbcdd96432: Status 404 returned error can't find the container with id 6c57d7de6600a8abfd1ef09af8c070a71bf97bcc0ff5a66a243a8edbcdd96432
Oct 09 14:53:26 crc kubenswrapper[4762]: I1009 14:53:26.981369 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a2a2847d-785a-4b3d-a7b6-852724bb6e22" path="/var/lib/kubelet/pods/a2a2847d-785a-4b3d-a7b6-852724bb6e22/volumes"
Oct 09 14:53:27 crc kubenswrapper[4762]: I1009 14:53:27.419994 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"aa9c1c46-0da0-4f63-b219-9d716f3eb48f","Type":"ContainerStarted","Data":"7f1e5e4169bd9fc92a2fefd650fbe19dee8aec6ef8f3bdbdbc09d86ff3f8ed0e"}
Oct 09 14:53:27 crc kubenswrapper[4762]: I1009 14:53:27.420909 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"aa9c1c46-0da0-4f63-b219-9d716f3eb48f","Type":"ContainerStarted","Data":"855b4001394e031ebfde94af898290d6ce1f60b949d65585bc710abd14fd56cd"}
Oct 09 14:53:27 crc kubenswrapper[4762]: I1009 14:53:27.420946 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"aa9c1c46-0da0-4f63-b219-9d716f3eb48f","Type":"ContainerStarted","Data":"6c57d7de6600a8abfd1ef09af8c070a71bf97bcc0ff5a66a243a8edbcdd96432"}
Oct 09 14:53:27 crc kubenswrapper[4762]: I1009 14:53:27.420991 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0"
Oct 09 14:53:27 crc kubenswrapper[4762]: I1009 14:53:27.443710 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=2.443691678 podStartE2EDuration="2.443691678s" podCreationTimestamp="2025-10-09 14:53:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 14:53:27.441428159 +0000 UTC m=+5283.215219238" watchObservedRunningTime="2025-10-09 14:53:27.443691678 +0000 UTC m=+5283.217482717"
Oct 09 14:53:31 crc kubenswrapper[4762]: I1009 14:53:31.118447 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-7bttc"]
Oct 09 14:53:31 crc kubenswrapper[4762]: E1009 14:53:31.119144 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a2a2847d-785a-4b3d-a7b6-852724bb6e22" containerName="dnsmasq-dns"
Oct 09 14:53:31 crc kubenswrapper[4762]: I1009 14:53:31.119160 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="a2a2847d-785a-4b3d-a7b6-852724bb6e22" containerName="dnsmasq-dns"
Oct 09 14:53:31 crc kubenswrapper[4762]: E1009 14:53:31.119176 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a2a2847d-785a-4b3d-a7b6-852724bb6e22" containerName="init"
Oct 09 14:53:31 crc kubenswrapper[4762]: I1009 14:53:31.119183 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="a2a2847d-785a-4b3d-a7b6-852724bb6e22" containerName="init"
Oct 09 14:53:31 crc kubenswrapper[4762]: I1009 14:53:31.119366 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="a2a2847d-785a-4b3d-a7b6-852724bb6e22" containerName="dnsmasq-dns"
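The two "ContainerStatus from runtime service failed ... NotFound" errors above are benign: the container was already removed, so the kubelet treats the deletion as complete rather than retrying. A stdlib-only sketch of that idempotent-removal pattern (hypothetical helper, not CRI code):

package main

import (
	"errors"
	"fmt"
)

var errNotFound = errors.New("container not found")

// removeContainer deletes id from a toy container table, reporting
// errNotFound when the container does not exist.
func removeContainer(id string, present map[string]bool) error {
	if !present[id] {
		return fmt.Errorf("could not find container %q: %w", id, errNotFound)
	}
	delete(present, id)
	return nil
}

func main() {
	present := map[string]bool{}
	err := removeContainer("a32732444341e1790", present)
	if errors.Is(err, errNotFound) {
		// Mirrors pod_container_deletor tolerating NotFound: already gone.
		fmt.Println("already gone; nothing to do")
	}
}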
Oct 09 14:53:31 crc kubenswrapper[4762]: I1009 14:53:31.120045 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-7bttc"
Oct 09 14:53:31 crc kubenswrapper[4762]: I1009 14:53:31.131217 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-7bttc"]
Oct 09 14:53:31 crc kubenswrapper[4762]: I1009 14:53:31.232248 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q7bb2\" (UniqueName: \"kubernetes.io/projected/5dcaedad-b38e-4fbb-ac6c-db2eea247d60-kube-api-access-q7bb2\") pod \"keystone-db-create-7bttc\" (UID: \"5dcaedad-b38e-4fbb-ac6c-db2eea247d60\") " pod="openstack/keystone-db-create-7bttc"
Oct 09 14:53:31 crc kubenswrapper[4762]: I1009 14:53:31.333446 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q7bb2\" (UniqueName: \"kubernetes.io/projected/5dcaedad-b38e-4fbb-ac6c-db2eea247d60-kube-api-access-q7bb2\") pod \"keystone-db-create-7bttc\" (UID: \"5dcaedad-b38e-4fbb-ac6c-db2eea247d60\") " pod="openstack/keystone-db-create-7bttc"
Oct 09 14:53:31 crc kubenswrapper[4762]: I1009 14:53:31.355247 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q7bb2\" (UniqueName: \"kubernetes.io/projected/5dcaedad-b38e-4fbb-ac6c-db2eea247d60-kube-api-access-q7bb2\") pod \"keystone-db-create-7bttc\" (UID: \"5dcaedad-b38e-4fbb-ac6c-db2eea247d60\") " pod="openstack/keystone-db-create-7bttc"
Oct 09 14:53:31 crc kubenswrapper[4762]: I1009 14:53:31.440168 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-7bttc"
Oct 09 14:53:31 crc kubenswrapper[4762]: I1009 14:53:31.767461 4762 scope.go:117] "RemoveContainer" containerID="dc4f313ab5d5a69920d1b58c713b06e5ed0a9e035ded3f48d92455732392f7f0"
Oct 09 14:53:31 crc kubenswrapper[4762]: I1009 14:53:31.835304 4762 scope.go:117] "RemoveContainer" containerID="6328c29e48f14036c15fc449ecfe34f22a9585dd28b2d2e9b9b85569ea9f4da8"
Oct 09 14:53:32 crc kubenswrapper[4762]: I1009 14:53:32.248665 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-7bttc"]
Oct 09 14:53:32 crc kubenswrapper[4762]: W1009 14:53:32.252990 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5dcaedad_b38e_4fbb_ac6c_db2eea247d60.slice/crio-70c505f40ece2d43693484774e8c10c2565d2aa6dee0b9e46e0f8a90e45e5200 WatchSource:0}: Error finding container 70c505f40ece2d43693484774e8c10c2565d2aa6dee0b9e46e0f8a90e45e5200: Status 404 returned error can't find the container with id 70c505f40ece2d43693484774e8c10c2565d2aa6dee0b9e46e0f8a90e45e5200
Oct 09 14:53:32 crc kubenswrapper[4762]: I1009 14:53:32.466218 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-7bttc" event={"ID":"5dcaedad-b38e-4fbb-ac6c-db2eea247d60","Type":"ContainerStarted","Data":"c542fb80f5c9fd84c36d89efb7ea6bf26e405f7f51f1f4db5c6cfa2fe8f17758"}
Oct 09 14:53:32 crc kubenswrapper[4762]: I1009 14:53:32.466274 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-7bttc" event={"ID":"5dcaedad-b38e-4fbb-ac6c-db2eea247d60","Type":"ContainerStarted","Data":"70c505f40ece2d43693484774e8c10c2565d2aa6dee0b9e46e0f8a90e45e5200"}
Oct 09 14:53:32 crc kubenswrapper[4762]: I1009 14:53:32.485759 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-create-7bttc" podStartSLOduration=1.485726955 podStartE2EDuration="1.485726955s" podCreationTimestamp="2025-10-09 14:53:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 14:53:32.4809286 +0000 UTC m=+5288.254719659" watchObservedRunningTime="2025-10-09 14:53:32.485726955 +0000 UTC m=+5288.259517994"
Oct 09 14:53:33 crc kubenswrapper[4762]: I1009 14:53:33.479278 4762 generic.go:334] "Generic (PLEG): container finished" podID="5dcaedad-b38e-4fbb-ac6c-db2eea247d60" containerID="c542fb80f5c9fd84c36d89efb7ea6bf26e405f7f51f1f4db5c6cfa2fe8f17758" exitCode=0
Oct 09 14:53:33 crc kubenswrapper[4762]: I1009 14:53:33.479378 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-7bttc" event={"ID":"5dcaedad-b38e-4fbb-ac6c-db2eea247d60","Type":"ContainerDied","Data":"c542fb80f5c9fd84c36d89efb7ea6bf26e405f7f51f1f4db5c6cfa2fe8f17758"}
Oct 09 14:53:34 crc kubenswrapper[4762]: I1009 14:53:34.850072 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-7bttc"
Oct 09 14:53:34 crc kubenswrapper[4762]: I1009 14:53:34.889670 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q7bb2\" (UniqueName: \"kubernetes.io/projected/5dcaedad-b38e-4fbb-ac6c-db2eea247d60-kube-api-access-q7bb2\") pod \"5dcaedad-b38e-4fbb-ac6c-db2eea247d60\" (UID: \"5dcaedad-b38e-4fbb-ac6c-db2eea247d60\") "
Oct 09 14:53:34 crc kubenswrapper[4762]: I1009 14:53:34.896810 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5dcaedad-b38e-4fbb-ac6c-db2eea247d60-kube-api-access-q7bb2" (OuterVolumeSpecName: "kube-api-access-q7bb2") pod "5dcaedad-b38e-4fbb-ac6c-db2eea247d60" (UID: "5dcaedad-b38e-4fbb-ac6c-db2eea247d60"). InnerVolumeSpecName "kube-api-access-q7bb2". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 09 14:53:34 crc kubenswrapper[4762]: I1009 14:53:34.991444 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q7bb2\" (UniqueName: \"kubernetes.io/projected/5dcaedad-b38e-4fbb-ac6c-db2eea247d60-kube-api-access-q7bb2\") on node \"crc\" DevicePath \"\""
Oct 09 14:53:35 crc kubenswrapper[4762]: I1009 14:53:35.501475 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-7bttc"
Oct 09 14:53:35 crc kubenswrapper[4762]: I1009 14:53:35.501465 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-7bttc" event={"ID":"5dcaedad-b38e-4fbb-ac6c-db2eea247d60","Type":"ContainerDied","Data":"70c505f40ece2d43693484774e8c10c2565d2aa6dee0b9e46e0f8a90e45e5200"}
Oct 09 14:53:35 crc kubenswrapper[4762]: I1009 14:53:35.501560 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="70c505f40ece2d43693484774e8c10c2565d2aa6dee0b9e46e0f8a90e45e5200"
Oct 09 14:53:36 crc kubenswrapper[4762]: I1009 14:53:36.224538 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0"
Oct 09 14:53:41 crc kubenswrapper[4762]: I1009 14:53:41.145128 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-49cd-account-create-gn5rt"]
Oct 09 14:53:41 crc kubenswrapper[4762]: E1009 14:53:41.145711 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5dcaedad-b38e-4fbb-ac6c-db2eea247d60" containerName="mariadb-database-create"
Oct 09 14:53:41 crc kubenswrapper[4762]: I1009 14:53:41.145724 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="5dcaedad-b38e-4fbb-ac6c-db2eea247d60" containerName="mariadb-database-create"
Oct 09 14:53:41 crc kubenswrapper[4762]: I1009 14:53:41.145887 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="5dcaedad-b38e-4fbb-ac6c-db2eea247d60" containerName="mariadb-database-create"
Oct 09 14:53:41 crc kubenswrapper[4762]: I1009 14:53:41.146449 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-49cd-account-create-gn5rt"
Oct 09 14:53:41 crc kubenswrapper[4762]: I1009 14:53:41.148954 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret"
Oct 09 14:53:41 crc kubenswrapper[4762]: I1009 14:53:41.162861 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-49cd-account-create-gn5rt"]
Oct 09 14:53:41 crc kubenswrapper[4762]: I1009 14:53:41.194937 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-scj4g\" (UniqueName: \"kubernetes.io/projected/fdba07a6-2fbb-454b-9d69-be7db43f4d3b-kube-api-access-scj4g\") pod \"keystone-49cd-account-create-gn5rt\" (UID: \"fdba07a6-2fbb-454b-9d69-be7db43f4d3b\") " pod="openstack/keystone-49cd-account-create-gn5rt"
Oct 09 14:53:41 crc kubenswrapper[4762]: I1009 14:53:41.295989 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-scj4g\" (UniqueName: \"kubernetes.io/projected/fdba07a6-2fbb-454b-9d69-be7db43f4d3b-kube-api-access-scj4g\") pod \"keystone-49cd-account-create-gn5rt\" (UID: \"fdba07a6-2fbb-454b-9d69-be7db43f4d3b\") " pod="openstack/keystone-49cd-account-create-gn5rt"
Oct 09 14:53:41 crc kubenswrapper[4762]: I1009 14:53:41.315516 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-scj4g\" (UniqueName: \"kubernetes.io/projected/fdba07a6-2fbb-454b-9d69-be7db43f4d3b-kube-api-access-scj4g\") pod \"keystone-49cd-account-create-gn5rt\" (UID: \"fdba07a6-2fbb-454b-9d69-be7db43f4d3b\") " pod="openstack/keystone-49cd-account-create-gn5rt"
Oct 09 14:53:41 crc kubenswrapper[4762]: I1009 14:53:41.481662 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-49cd-account-create-gn5rt"
Oct 09 14:53:41 crc kubenswrapper[4762]: I1009 14:53:41.940085 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-49cd-account-create-gn5rt"]
Oct 09 14:53:41 crc kubenswrapper[4762]: W1009 14:53:41.946837 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfdba07a6_2fbb_454b_9d69_be7db43f4d3b.slice/crio-7c3e236ab5b69b537d3b23dd52f2edd1486c4c288bcd48ff8750db527d8ebf32 WatchSource:0}: Error finding container 7c3e236ab5b69b537d3b23dd52f2edd1486c4c288bcd48ff8750db527d8ebf32: Status 404 returned error can't find the container with id 7c3e236ab5b69b537d3b23dd52f2edd1486c4c288bcd48ff8750db527d8ebf32
Oct 09 14:53:41 crc kubenswrapper[4762]: I1009 14:53:41.969155 4762 patch_prober.go:28] interesting pod/machine-config-daemon-5v6hv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 09 14:53:41 crc kubenswrapper[4762]: I1009 14:53:41.969227 4762 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 09 14:53:41 crc kubenswrapper[4762]: I1009 14:53:41.969267 4762 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv"
Oct 09 14:53:41 crc kubenswrapper[4762]: I1009 14:53:41.969608 4762 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"c75ef9853f5fe1f6bce3930681cb05899e816bea18fa88b6c554eafd348de735"} pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Oct 09 14:53:41 crc kubenswrapper[4762]: I1009 14:53:41.969674 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" containerID="cri-o://c75ef9853f5fe1f6bce3930681cb05899e816bea18fa88b6c554eafd348de735" gracePeriod=600
Oct 09 14:53:42 crc kubenswrapper[4762]: I1009 14:53:42.563796 4762 generic.go:334] "Generic (PLEG): container finished" podID="366049a3-acf6-488c-9f93-4557528d6d14" containerID="c75ef9853f5fe1f6bce3930681cb05899e816bea18fa88b6c554eafd348de735" exitCode=0
Oct 09 14:53:42 crc kubenswrapper[4762]: I1009 14:53:42.563866 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" event={"ID":"366049a3-acf6-488c-9f93-4557528d6d14","Type":"ContainerDied","Data":"c75ef9853f5fe1f6bce3930681cb05899e816bea18fa88b6c554eafd348de735"}
Oct 09 14:53:42 crc kubenswrapper[4762]: I1009 14:53:42.563900 4762 scope.go:117] "RemoveContainer" containerID="3000d51a4aadf3d2620258ddf3cdf3f8532d23cabadc7484a369784c4a6006b5"
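Above, the machine-config-daemon liveness probe fails with "connection refused" and the kubelet kills the container with gracePeriod=600 so it can be restarted. A bare-bones HTTP liveness check in the same spirit; the URL comes from the log, while the failure threshold and interval here are illustrative assumptions:

package main

import (
	"fmt"
	"net/http"
	"time"
)

func main() {
	client := &http.Client{Timeout: 2 * time.Second}
	failures := 0
	for i := 0; i < 3; i++ {
		// Probe the same endpoint the kubelet was checking above.
		resp, err := client.Get("http://127.0.0.1:8798/health")
		healthy := err == nil && resp.StatusCode == http.StatusOK
		if err == nil {
			resp.Body.Close()
		}
		if healthy {
			failures = 0
		} else {
			failures++
		}
		time.Sleep(time.Second)
	}
	if failures >= 3 {
		fmt.Println("liveness failed; container would be restarted")
	}
}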
Oct 09 14:53:42 crc kubenswrapper[4762]: I1009 14:53:42.567614 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-49cd-account-create-gn5rt" event={"ID":"fdba07a6-2fbb-454b-9d69-be7db43f4d3b","Type":"ContainerStarted","Data":"7e0f56b6876bdc835a0d3fcf23df65e00d7573a63f8e07a618a1dd4d25b7c338"}
Oct 09 14:53:42 crc kubenswrapper[4762]: I1009 14:53:42.567674 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-49cd-account-create-gn5rt" event={"ID":"fdba07a6-2fbb-454b-9d69-be7db43f4d3b","Type":"ContainerStarted","Data":"7c3e236ab5b69b537d3b23dd52f2edd1486c4c288bcd48ff8750db527d8ebf32"}
Oct 09 14:53:42 crc kubenswrapper[4762]: I1009 14:53:42.584460 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-49cd-account-create-gn5rt" podStartSLOduration=1.584436995 podStartE2EDuration="1.584436995s" podCreationTimestamp="2025-10-09 14:53:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 14:53:42.583232045 +0000 UTC m=+5298.357023094" watchObservedRunningTime="2025-10-09 14:53:42.584436995 +0000 UTC m=+5298.358228034"
Oct 09 14:53:42 crc kubenswrapper[4762]: E1009 14:53:42.696071 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14"
Oct 09 14:53:43 crc kubenswrapper[4762]: I1009 14:53:43.580306 4762 scope.go:117] "RemoveContainer" containerID="c75ef9853f5fe1f6bce3930681cb05899e816bea18fa88b6c554eafd348de735"
Oct 09 14:53:43 crc kubenswrapper[4762]: E1009 14:53:43.582356 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14"
Oct 09 14:53:43 crc kubenswrapper[4762]: I1009 14:53:43.589252 4762 generic.go:334] "Generic (PLEG): container finished" podID="fdba07a6-2fbb-454b-9d69-be7db43f4d3b" containerID="7e0f56b6876bdc835a0d3fcf23df65e00d7573a63f8e07a618a1dd4d25b7c338" exitCode=0
Oct 09 14:53:43 crc kubenswrapper[4762]: I1009 14:53:43.589355 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-49cd-account-create-gn5rt" event={"ID":"fdba07a6-2fbb-454b-9d69-be7db43f4d3b","Type":"ContainerDied","Data":"7e0f56b6876bdc835a0d3fcf23df65e00d7573a63f8e07a618a1dd4d25b7c338"}
Oct 09 14:53:44 crc kubenswrapper[4762]: I1009 14:53:44.934651 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-49cd-account-create-gn5rt"
Oct 09 14:53:45 crc kubenswrapper[4762]: I1009 14:53:45.061763 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-scj4g\" (UniqueName: \"kubernetes.io/projected/fdba07a6-2fbb-454b-9d69-be7db43f4d3b-kube-api-access-scj4g\") pod \"fdba07a6-2fbb-454b-9d69-be7db43f4d3b\" (UID: \"fdba07a6-2fbb-454b-9d69-be7db43f4d3b\") "
Oct 09 14:53:45 crc kubenswrapper[4762]: I1009 14:53:45.071219 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fdba07a6-2fbb-454b-9d69-be7db43f4d3b-kube-api-access-scj4g" (OuterVolumeSpecName: "kube-api-access-scj4g") pod "fdba07a6-2fbb-454b-9d69-be7db43f4d3b" (UID: "fdba07a6-2fbb-454b-9d69-be7db43f4d3b"). InnerVolumeSpecName "kube-api-access-scj4g". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 09 14:53:45 crc kubenswrapper[4762]: I1009 14:53:45.164527 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-scj4g\" (UniqueName: \"kubernetes.io/projected/fdba07a6-2fbb-454b-9d69-be7db43f4d3b-kube-api-access-scj4g\") on node \"crc\" DevicePath \"\""
Oct 09 14:53:45 crc kubenswrapper[4762]: I1009 14:53:45.616265 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-49cd-account-create-gn5rt" event={"ID":"fdba07a6-2fbb-454b-9d69-be7db43f4d3b","Type":"ContainerDied","Data":"7c3e236ab5b69b537d3b23dd52f2edd1486c4c288bcd48ff8750db527d8ebf32"}
Oct 09 14:53:45 crc kubenswrapper[4762]: I1009 14:53:45.616582 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7c3e236ab5b69b537d3b23dd52f2edd1486c4c288bcd48ff8750db527d8ebf32"
Oct 09 14:53:45 crc kubenswrapper[4762]: I1009 14:53:45.616377 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-49cd-account-create-gn5rt"
Oct 09 14:53:46 crc kubenswrapper[4762]: I1009 14:53:46.554653 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-4ztzj"]
Oct 09 14:53:46 crc kubenswrapper[4762]: E1009 14:53:46.555008 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fdba07a6-2fbb-454b-9d69-be7db43f4d3b" containerName="mariadb-account-create"
Oct 09 14:53:46 crc kubenswrapper[4762]: I1009 14:53:46.555021 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="fdba07a6-2fbb-454b-9d69-be7db43f4d3b" containerName="mariadb-account-create"
Oct 09 14:53:46 crc kubenswrapper[4762]: I1009 14:53:46.555186 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="fdba07a6-2fbb-454b-9d69-be7db43f4d3b" containerName="mariadb-account-create"
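The pod_workers errors above show CrashLoopBackOff holding restarts for 5m0s. The kubelet backs off exponentially per crash up to a cap; the sketch below assumes the commonly documented 10s initial delay, doubling, and five-minute cap:

package main

import (
	"fmt"
	"time"
)

func main() {
	backoff := 10 * time.Second
	const maxBackoff = 5 * time.Minute
	for restart := 1; restart <= 7; restart++ {
		fmt.Printf("restart %d: wait %s\n", restart, backoff)
		// Double the delay after each crash, clamped at the cap.
		backoff *= 2
		if backoff > maxBackoff {
			backoff = maxBackoff
		}
	}
}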
Oct 09 14:53:46 crc kubenswrapper[4762]: I1009 14:53:46.555719 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-4ztzj"
Oct 09 14:53:46 crc kubenswrapper[4762]: I1009 14:53:46.557455 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone"
Oct 09 14:53:46 crc kubenswrapper[4762]: I1009 14:53:46.558085 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-4n8qc"
Oct 09 14:53:46 crc kubenswrapper[4762]: I1009 14:53:46.560366 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts"
Oct 09 14:53:46 crc kubenswrapper[4762]: I1009 14:53:46.560600 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data"
Oct 09 14:53:46 crc kubenswrapper[4762]: I1009 14:53:46.566296 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-4ztzj"]
Oct 09 14:53:46 crc kubenswrapper[4762]: I1009 14:53:46.588748 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ptnlb\" (UniqueName: \"kubernetes.io/projected/5bf666c8-1160-4e16-9dc6-3ebcff957d3b-kube-api-access-ptnlb\") pod \"keystone-db-sync-4ztzj\" (UID: \"5bf666c8-1160-4e16-9dc6-3ebcff957d3b\") " pod="openstack/keystone-db-sync-4ztzj"
Oct 09 14:53:46 crc kubenswrapper[4762]: I1009 14:53:46.588795 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5bf666c8-1160-4e16-9dc6-3ebcff957d3b-config-data\") pod \"keystone-db-sync-4ztzj\" (UID: \"5bf666c8-1160-4e16-9dc6-3ebcff957d3b\") " pod="openstack/keystone-db-sync-4ztzj"
Oct 09 14:53:46 crc kubenswrapper[4762]: I1009 14:53:46.588887 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5bf666c8-1160-4e16-9dc6-3ebcff957d3b-combined-ca-bundle\") pod \"keystone-db-sync-4ztzj\" (UID: \"5bf666c8-1160-4e16-9dc6-3ebcff957d3b\") " pod="openstack/keystone-db-sync-4ztzj"
Oct 09 14:53:46 crc kubenswrapper[4762]: I1009 14:53:46.691295 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ptnlb\" (UniqueName: \"kubernetes.io/projected/5bf666c8-1160-4e16-9dc6-3ebcff957d3b-kube-api-access-ptnlb\") pod \"keystone-db-sync-4ztzj\" (UID: \"5bf666c8-1160-4e16-9dc6-3ebcff957d3b\") " pod="openstack/keystone-db-sync-4ztzj"
Oct 09 14:53:46 crc kubenswrapper[4762]: I1009 14:53:46.691359 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5bf666c8-1160-4e16-9dc6-3ebcff957d3b-config-data\") pod \"keystone-db-sync-4ztzj\" (UID: \"5bf666c8-1160-4e16-9dc6-3ebcff957d3b\") " pod="openstack/keystone-db-sync-4ztzj"
Oct 09 14:53:46 crc kubenswrapper[4762]: I1009 14:53:46.691437 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5bf666c8-1160-4e16-9dc6-3ebcff957d3b-combined-ca-bundle\") pod \"keystone-db-sync-4ztzj\" (UID: \"5bf666c8-1160-4e16-9dc6-3ebcff957d3b\") " pod="openstack/keystone-db-sync-4ztzj"
Oct 09 14:53:46 crc kubenswrapper[4762]: I1009 14:53:46.696538 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5bf666c8-1160-4e16-9dc6-3ebcff957d3b-config-data\") pod \"keystone-db-sync-4ztzj\" (UID: \"5bf666c8-1160-4e16-9dc6-3ebcff957d3b\") " pod="openstack/keystone-db-sync-4ztzj"
Oct 09 14:53:46 crc kubenswrapper[4762]: I1009 14:53:46.696660 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5bf666c8-1160-4e16-9dc6-3ebcff957d3b-combined-ca-bundle\") pod \"keystone-db-sync-4ztzj\" (UID: \"5bf666c8-1160-4e16-9dc6-3ebcff957d3b\") " pod="openstack/keystone-db-sync-4ztzj"
Oct 09 14:53:46 crc kubenswrapper[4762]: I1009 14:53:46.712722 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ptnlb\" (UniqueName: \"kubernetes.io/projected/5bf666c8-1160-4e16-9dc6-3ebcff957d3b-kube-api-access-ptnlb\") pod \"keystone-db-sync-4ztzj\" (UID: \"5bf666c8-1160-4e16-9dc6-3ebcff957d3b\") " pod="openstack/keystone-db-sync-4ztzj"
Oct 09 14:53:46 crc kubenswrapper[4762]: I1009 14:53:46.881363 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-4ztzj"
Oct 09 14:53:47 crc kubenswrapper[4762]: I1009 14:53:47.287294 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-4ztzj"]
Oct 09 14:53:47 crc kubenswrapper[4762]: W1009 14:53:47.289906 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5bf666c8_1160_4e16_9dc6_3ebcff957d3b.slice/crio-7820e88f95b150dacb1a3ed3f4a6c7d0d388f1b0f5eb810e7e959a57307665c9 WatchSource:0}: Error finding container 7820e88f95b150dacb1a3ed3f4a6c7d0d388f1b0f5eb810e7e959a57307665c9: Status 404 returned error can't find the container with id 7820e88f95b150dacb1a3ed3f4a6c7d0d388f1b0f5eb810e7e959a57307665c9
Oct 09 14:53:47 crc kubenswrapper[4762]: I1009 14:53:47.632491 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-4ztzj" event={"ID":"5bf666c8-1160-4e16-9dc6-3ebcff957d3b","Type":"ContainerStarted","Data":"1e54dee4e73901b9126d7b01c15f75cb3978e236c6017212be1ebd23df48dd5d"}
Oct 09 14:53:47 crc kubenswrapper[4762]: I1009 14:53:47.632872 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-4ztzj" event={"ID":"5bf666c8-1160-4e16-9dc6-3ebcff957d3b","Type":"ContainerStarted","Data":"7820e88f95b150dacb1a3ed3f4a6c7d0d388f1b0f5eb810e7e959a57307665c9"}
Oct 09 14:53:47 crc kubenswrapper[4762]: I1009 14:53:47.662303 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-4ztzj" podStartSLOduration=1.6622751839999999 podStartE2EDuration="1.662275184s" podCreationTimestamp="2025-10-09 14:53:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 14:53:47.650386735 +0000 UTC m=+5303.424177774" watchObservedRunningTime="2025-10-09 14:53:47.662275184 +0000 UTC m=+5303.436066233"
Oct 09 14:53:49 crc kubenswrapper[4762]: I1009 14:53:49.648025 4762 generic.go:334] "Generic (PLEG): container finished" podID="5bf666c8-1160-4e16-9dc6-3ebcff957d3b" containerID="1e54dee4e73901b9126d7b01c15f75cb3978e236c6017212be1ebd23df48dd5d" exitCode=0
Oct 09 14:53:49 crc kubenswrapper[4762]: I1009 14:53:49.648055 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-4ztzj" event={"ID":"5bf666c8-1160-4e16-9dc6-3ebcff957d3b","Type":"ContainerDied","Data":"1e54dee4e73901b9126d7b01c15f75cb3978e236c6017212be1ebd23df48dd5d"}
Oct 09 14:53:50 crc kubenswrapper[4762]: I1009 14:53:50.942183 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-4ztzj"
Oct 09 14:53:51 crc kubenswrapper[4762]: I1009 14:53:51.066864 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5bf666c8-1160-4e16-9dc6-3ebcff957d3b-config-data\") pod \"5bf666c8-1160-4e16-9dc6-3ebcff957d3b\" (UID: \"5bf666c8-1160-4e16-9dc6-3ebcff957d3b\") "
Oct 09 14:53:51 crc kubenswrapper[4762]: I1009 14:53:51.067014 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ptnlb\" (UniqueName: \"kubernetes.io/projected/5bf666c8-1160-4e16-9dc6-3ebcff957d3b-kube-api-access-ptnlb\") pod \"5bf666c8-1160-4e16-9dc6-3ebcff957d3b\" (UID: \"5bf666c8-1160-4e16-9dc6-3ebcff957d3b\") "
Oct 09 14:53:51 crc kubenswrapper[4762]: I1009 14:53:51.067063 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5bf666c8-1160-4e16-9dc6-3ebcff957d3b-combined-ca-bundle\") pod \"5bf666c8-1160-4e16-9dc6-3ebcff957d3b\" (UID: \"5bf666c8-1160-4e16-9dc6-3ebcff957d3b\") "
Oct 09 14:53:51 crc kubenswrapper[4762]: I1009 14:53:51.073255 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5bf666c8-1160-4e16-9dc6-3ebcff957d3b-kube-api-access-ptnlb" (OuterVolumeSpecName: "kube-api-access-ptnlb") pod "5bf666c8-1160-4e16-9dc6-3ebcff957d3b" (UID: "5bf666c8-1160-4e16-9dc6-3ebcff957d3b"). InnerVolumeSpecName "kube-api-access-ptnlb". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 09 14:53:51 crc kubenswrapper[4762]: I1009 14:53:51.097691 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5bf666c8-1160-4e16-9dc6-3ebcff957d3b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5bf666c8-1160-4e16-9dc6-3ebcff957d3b" (UID: "5bf666c8-1160-4e16-9dc6-3ebcff957d3b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 09 14:53:51 crc kubenswrapper[4762]: I1009 14:53:51.112527 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5bf666c8-1160-4e16-9dc6-3ebcff957d3b-config-data" (OuterVolumeSpecName: "config-data") pod "5bf666c8-1160-4e16-9dc6-3ebcff957d3b" (UID: "5bf666c8-1160-4e16-9dc6-3ebcff957d3b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 09 14:53:51 crc kubenswrapper[4762]: I1009 14:53:51.168784 4762 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5bf666c8-1160-4e16-9dc6-3ebcff957d3b-config-data\") on node \"crc\" DevicePath \"\""
Oct 09 14:53:51 crc kubenswrapper[4762]: I1009 14:53:51.168849 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ptnlb\" (UniqueName: \"kubernetes.io/projected/5bf666c8-1160-4e16-9dc6-3ebcff957d3b-kube-api-access-ptnlb\") on node \"crc\" DevicePath \"\""
Oct 09 14:53:51 crc kubenswrapper[4762]: I1009 14:53:51.168864 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5bf666c8-1160-4e16-9dc6-3ebcff957d3b-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 09 14:53:51 crc kubenswrapper[4762]: I1009 14:53:51.677009 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-4ztzj"
Oct 09 14:53:51 crc kubenswrapper[4762]: I1009 14:53:51.677000 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-4ztzj" event={"ID":"5bf666c8-1160-4e16-9dc6-3ebcff957d3b","Type":"ContainerDied","Data":"7820e88f95b150dacb1a3ed3f4a6c7d0d388f1b0f5eb810e7e959a57307665c9"}
Oct 09 14:53:51 crc kubenswrapper[4762]: I1009 14:53:51.677161 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7820e88f95b150dacb1a3ed3f4a6c7d0d388f1b0f5eb810e7e959a57307665c9"
Oct 09 14:53:51 crc kubenswrapper[4762]: I1009 14:53:51.926072 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6f8b9b55c-z9d5q"]
Oct 09 14:53:51 crc kubenswrapper[4762]: E1009 14:53:51.926482 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5bf666c8-1160-4e16-9dc6-3ebcff957d3b" containerName="keystone-db-sync"
Oct 09 14:53:51 crc kubenswrapper[4762]: I1009 14:53:51.926506 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="5bf666c8-1160-4e16-9dc6-3ebcff957d3b" containerName="keystone-db-sync"
Oct 09 14:53:51 crc kubenswrapper[4762]: I1009 14:53:51.926795 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="5bf666c8-1160-4e16-9dc6-3ebcff957d3b" containerName="keystone-db-sync"
Oct 09 14:53:51 crc kubenswrapper[4762]: I1009 14:53:51.927901 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6f8b9b55c-z9d5q"
Oct 09 14:53:51 crc kubenswrapper[4762]: I1009 14:53:51.946136 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6f8b9b55c-z9d5q"]
Oct 09 14:53:51 crc kubenswrapper[4762]: I1009 14:53:51.986252 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-sk88k"]
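The reflector.go "Caches populated" lines above are the kubelet filling local watch caches for the Secrets its new pods reference. client-go exposes the same reflector/informer machinery; a minimal out-of-cluster sketch that watches Secrets in the openstack namespace (assumes KUBECONFIG is set; this is not what the kubelet itself runs):

package main

import (
	"fmt"
	"os"
	"time"

	corev1 "k8s.io/api/core/v1"
	"k8s.io/client-go/informers"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/cache"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	cfg, err := clientcmd.BuildConfigFromFlags("", os.Getenv("KUBECONFIG"))
	if err != nil {
		panic(err)
	}
	cs, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		panic(err)
	}
	// Shared informer factory scoped to the namespace seen in the log.
	factory := informers.NewSharedInformerFactoryWithOptions(cs, 0, informers.WithNamespace("openstack"))
	inf := factory.Core().V1().Secrets().Informer()
	inf.AddEventHandler(cache.ResourceEventHandlerFuncs{
		AddFunc: func(obj interface{}) {
			fmt.Println("cache populated for secret", obj.(*corev1.Secret).Name)
		},
	})
	stop := make(chan struct{})
	factory.Start(stop)
	cache.WaitForCacheSync(stop, inf.HasSynced)
	time.Sleep(2 * time.Second)
	close(stop)
}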
Oct 09 14:53:51 crc kubenswrapper[4762]: I1009 14:53:51.987992 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-sk88k"
Oct 09 14:53:51 crc kubenswrapper[4762]: I1009 14:53:51.991139 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts"
Oct 09 14:53:51 crc kubenswrapper[4762]: I1009 14:53:51.991394 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-4n8qc"
Oct 09 14:53:51 crc kubenswrapper[4762]: I1009 14:53:51.991474 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data"
Oct 09 14:53:51 crc kubenswrapper[4762]: I1009 14:53:51.991438 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone"
Oct 09 14:53:52 crc kubenswrapper[4762]: I1009 14:53:52.006554 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-sk88k"]
Oct 09 14:53:52 crc kubenswrapper[4762]: I1009 14:53:52.082764 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/499e5912-ebea-47bd-9a52-2b9d51b7bfe2-dns-svc\") pod \"dnsmasq-dns-6f8b9b55c-z9d5q\" (UID: \"499e5912-ebea-47bd-9a52-2b9d51b7bfe2\") " pod="openstack/dnsmasq-dns-6f8b9b55c-z9d5q"
Oct 09 14:53:52 crc kubenswrapper[4762]: I1009 14:53:52.082840 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/499e5912-ebea-47bd-9a52-2b9d51b7bfe2-ovsdbserver-sb\") pod \"dnsmasq-dns-6f8b9b55c-z9d5q\" (UID: \"499e5912-ebea-47bd-9a52-2b9d51b7bfe2\") " pod="openstack/dnsmasq-dns-6f8b9b55c-z9d5q"
Oct 09 14:53:52 crc kubenswrapper[4762]: I1009 14:53:52.082861 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/499e5912-ebea-47bd-9a52-2b9d51b7bfe2-ovsdbserver-nb\") pod \"dnsmasq-dns-6f8b9b55c-z9d5q\" (UID: \"499e5912-ebea-47bd-9a52-2b9d51b7bfe2\") " pod="openstack/dnsmasq-dns-6f8b9b55c-z9d5q"
Oct 09 14:53:52 crc kubenswrapper[4762]: I1009 14:53:52.083035 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rhh6g\" (UniqueName: \"kubernetes.io/projected/499e5912-ebea-47bd-9a52-2b9d51b7bfe2-kube-api-access-rhh6g\") pod \"dnsmasq-dns-6f8b9b55c-z9d5q\" (UID: \"499e5912-ebea-47bd-9a52-2b9d51b7bfe2\") " pod="openstack/dnsmasq-dns-6f8b9b55c-z9d5q"
Oct 09 14:53:52 crc kubenswrapper[4762]: I1009 14:53:52.083223 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/499e5912-ebea-47bd-9a52-2b9d51b7bfe2-config\") pod \"dnsmasq-dns-6f8b9b55c-z9d5q\" (UID: \"499e5912-ebea-47bd-9a52-2b9d51b7bfe2\") " pod="openstack/dnsmasq-dns-6f8b9b55c-z9d5q"
Oct 09 14:53:52 crc kubenswrapper[4762]: I1009 14:53:52.184887 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/499e5912-ebea-47bd-9a52-2b9d51b7bfe2-ovsdbserver-sb\") pod \"dnsmasq-dns-6f8b9b55c-z9d5q\" (UID: \"499e5912-ebea-47bd-9a52-2b9d51b7bfe2\") " pod="openstack/dnsmasq-dns-6f8b9b55c-z9d5q"
Oct 09 14:53:52 crc kubenswrapper[4762]: I1009 14:53:52.184937 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/499e5912-ebea-47bd-9a52-2b9d51b7bfe2-ovsdbserver-nb\") pod \"dnsmasq-dns-6f8b9b55c-z9d5q\" (UID: \"499e5912-ebea-47bd-9a52-2b9d51b7bfe2\") " pod="openstack/dnsmasq-dns-6f8b9b55c-z9d5q"
Oct 09 14:53:52 crc kubenswrapper[4762]: I1009 14:53:52.184959 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0438fe8e-e20e-4fbf-91d4-6f26c6c964dc-config-data\") pod \"keystone-bootstrap-sk88k\" (UID: \"0438fe8e-e20e-4fbf-91d4-6f26c6c964dc\") " pod="openstack/keystone-bootstrap-sk88k"
Oct 09 14:53:52 crc kubenswrapper[4762]: I1009 14:53:52.184988 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0438fe8e-e20e-4fbf-91d4-6f26c6c964dc-combined-ca-bundle\") pod \"keystone-bootstrap-sk88k\" (UID: \"0438fe8e-e20e-4fbf-91d4-6f26c6c964dc\") " pod="openstack/keystone-bootstrap-sk88k"
Oct 09 14:53:52 crc kubenswrapper[4762]: I1009 14:53:52.185030 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rhh6g\" (UniqueName: \"kubernetes.io/projected/499e5912-ebea-47bd-9a52-2b9d51b7bfe2-kube-api-access-rhh6g\") pod \"dnsmasq-dns-6f8b9b55c-z9d5q\" (UID: \"499e5912-ebea-47bd-9a52-2b9d51b7bfe2\") " pod="openstack/dnsmasq-dns-6f8b9b55c-z9d5q"
Oct 09 14:53:52 crc kubenswrapper[4762]: I1009 14:53:52.185097 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/499e5912-ebea-47bd-9a52-2b9d51b7bfe2-config\") pod \"dnsmasq-dns-6f8b9b55c-z9d5q\" (UID: \"499e5912-ebea-47bd-9a52-2b9d51b7bfe2\") " pod="openstack/dnsmasq-dns-6f8b9b55c-z9d5q"
Oct 09 14:53:52 crc kubenswrapper[4762]: I1009 14:53:52.185139 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0438fe8e-e20e-4fbf-91d4-6f26c6c964dc-scripts\") pod \"keystone-bootstrap-sk88k\" (UID: \"0438fe8e-e20e-4fbf-91d4-6f26c6c964dc\") " pod="openstack/keystone-bootstrap-sk88k"
Oct 09 14:53:52 crc kubenswrapper[4762]: I1009 14:53:52.185164 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/0438fe8e-e20e-4fbf-91d4-6f26c6c964dc-fernet-keys\") pod \"keystone-bootstrap-sk88k\" (UID: \"0438fe8e-e20e-4fbf-91d4-6f26c6c964dc\") " pod="openstack/keystone-bootstrap-sk88k"
Oct 09 14:53:52 crc kubenswrapper[4762]: I1009 14:53:52.185237 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bn7ln\" (UniqueName: \"kubernetes.io/projected/0438fe8e-e20e-4fbf-91d4-6f26c6c964dc-kube-api-access-bn7ln\") pod \"keystone-bootstrap-sk88k\" (UID: \"0438fe8e-e20e-4fbf-91d4-6f26c6c964dc\") " pod="openstack/keystone-bootstrap-sk88k"
Oct 09 14:53:52 crc kubenswrapper[4762]: I1009 14:53:52.185262 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/499e5912-ebea-47bd-9a52-2b9d51b7bfe2-dns-svc\") pod \"dnsmasq-dns-6f8b9b55c-z9d5q\" (UID: \"499e5912-ebea-47bd-9a52-2b9d51b7bfe2\") " pod="openstack/dnsmasq-dns-6f8b9b55c-z9d5q"
Oct 09 14:53:52 crc kubenswrapper[4762]: I1009 14:53:52.185288 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/0438fe8e-e20e-4fbf-91d4-6f26c6c964dc-credential-keys\") pod \"keystone-bootstrap-sk88k\" (UID: \"0438fe8e-e20e-4fbf-91d4-6f26c6c964dc\") " pod="openstack/keystone-bootstrap-sk88k"
Oct 09 14:53:52 crc kubenswrapper[4762]: I1009 14:53:52.185913 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/499e5912-ebea-47bd-9a52-2b9d51b7bfe2-ovsdbserver-sb\") pod \"dnsmasq-dns-6f8b9b55c-z9d5q\" (UID: \"499e5912-ebea-47bd-9a52-2b9d51b7bfe2\") " pod="openstack/dnsmasq-dns-6f8b9b55c-z9d5q"
Oct 09 14:53:52 crc kubenswrapper[4762]: I1009 14:53:52.186077 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/499e5912-ebea-47bd-9a52-2b9d51b7bfe2-ovsdbserver-nb\") pod \"dnsmasq-dns-6f8b9b55c-z9d5q\" (UID: \"499e5912-ebea-47bd-9a52-2b9d51b7bfe2\") " pod="openstack/dnsmasq-dns-6f8b9b55c-z9d5q"
Oct 09 14:53:52 crc kubenswrapper[4762]: I1009 14:53:52.186144 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/499e5912-ebea-47bd-9a52-2b9d51b7bfe2-dns-svc\") pod \"dnsmasq-dns-6f8b9b55c-z9d5q\" (UID: \"499e5912-ebea-47bd-9a52-2b9d51b7bfe2\") " pod="openstack/dnsmasq-dns-6f8b9b55c-z9d5q"
Oct 09 14:53:52 crc kubenswrapper[4762]: I1009 14:53:52.186287 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/499e5912-ebea-47bd-9a52-2b9d51b7bfe2-config\") pod \"dnsmasq-dns-6f8b9b55c-z9d5q\" (UID: \"499e5912-ebea-47bd-9a52-2b9d51b7bfe2\") " pod="openstack/dnsmasq-dns-6f8b9b55c-z9d5q"
Oct 09 14:53:52 crc kubenswrapper[4762]: I1009 14:53:52.204579 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rhh6g\" (UniqueName: \"kubernetes.io/projected/499e5912-ebea-47bd-9a52-2b9d51b7bfe2-kube-api-access-rhh6g\") pod \"dnsmasq-dns-6f8b9b55c-z9d5q\" (UID: \"499e5912-ebea-47bd-9a52-2b9d51b7bfe2\") " pod="openstack/dnsmasq-dns-6f8b9b55c-z9d5q"
Oct 09 14:53:52 crc kubenswrapper[4762]: I1009 14:53:52.249033 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6f8b9b55c-z9d5q" Oct 09 14:53:52 crc kubenswrapper[4762]: I1009 14:53:52.286197 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0438fe8e-e20e-4fbf-91d4-6f26c6c964dc-scripts\") pod \"keystone-bootstrap-sk88k\" (UID: \"0438fe8e-e20e-4fbf-91d4-6f26c6c964dc\") " pod="openstack/keystone-bootstrap-sk88k" Oct 09 14:53:52 crc kubenswrapper[4762]: I1009 14:53:52.286243 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/0438fe8e-e20e-4fbf-91d4-6f26c6c964dc-fernet-keys\") pod \"keystone-bootstrap-sk88k\" (UID: \"0438fe8e-e20e-4fbf-91d4-6f26c6c964dc\") " pod="openstack/keystone-bootstrap-sk88k" Oct 09 14:53:52 crc kubenswrapper[4762]: I1009 14:53:52.286314 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bn7ln\" (UniqueName: \"kubernetes.io/projected/0438fe8e-e20e-4fbf-91d4-6f26c6c964dc-kube-api-access-bn7ln\") pod \"keystone-bootstrap-sk88k\" (UID: \"0438fe8e-e20e-4fbf-91d4-6f26c6c964dc\") " pod="openstack/keystone-bootstrap-sk88k" Oct 09 14:53:52 crc kubenswrapper[4762]: I1009 14:53:52.286342 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/0438fe8e-e20e-4fbf-91d4-6f26c6c964dc-credential-keys\") pod \"keystone-bootstrap-sk88k\" (UID: \"0438fe8e-e20e-4fbf-91d4-6f26c6c964dc\") " pod="openstack/keystone-bootstrap-sk88k" Oct 09 14:53:52 crc kubenswrapper[4762]: I1009 14:53:52.286398 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0438fe8e-e20e-4fbf-91d4-6f26c6c964dc-config-data\") pod \"keystone-bootstrap-sk88k\" (UID: \"0438fe8e-e20e-4fbf-91d4-6f26c6c964dc\") " pod="openstack/keystone-bootstrap-sk88k" Oct 09 14:53:52 crc kubenswrapper[4762]: I1009 14:53:52.286426 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0438fe8e-e20e-4fbf-91d4-6f26c6c964dc-combined-ca-bundle\") pod \"keystone-bootstrap-sk88k\" (UID: \"0438fe8e-e20e-4fbf-91d4-6f26c6c964dc\") " pod="openstack/keystone-bootstrap-sk88k" Oct 09 14:53:52 crc kubenswrapper[4762]: I1009 14:53:52.290728 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0438fe8e-e20e-4fbf-91d4-6f26c6c964dc-combined-ca-bundle\") pod \"keystone-bootstrap-sk88k\" (UID: \"0438fe8e-e20e-4fbf-91d4-6f26c6c964dc\") " pod="openstack/keystone-bootstrap-sk88k" Oct 09 14:53:52 crc kubenswrapper[4762]: I1009 14:53:52.291658 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/0438fe8e-e20e-4fbf-91d4-6f26c6c964dc-fernet-keys\") pod \"keystone-bootstrap-sk88k\" (UID: \"0438fe8e-e20e-4fbf-91d4-6f26c6c964dc\") " pod="openstack/keystone-bootstrap-sk88k" Oct 09 14:53:52 crc kubenswrapper[4762]: I1009 14:53:52.292150 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0438fe8e-e20e-4fbf-91d4-6f26c6c964dc-config-data\") pod \"keystone-bootstrap-sk88k\" (UID: \"0438fe8e-e20e-4fbf-91d4-6f26c6c964dc\") " pod="openstack/keystone-bootstrap-sk88k" Oct 09 14:53:52 crc kubenswrapper[4762]: I1009 14:53:52.295989 4762 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/0438fe8e-e20e-4fbf-91d4-6f26c6c964dc-credential-keys\") pod \"keystone-bootstrap-sk88k\" (UID: \"0438fe8e-e20e-4fbf-91d4-6f26c6c964dc\") " pod="openstack/keystone-bootstrap-sk88k" Oct 09 14:53:52 crc kubenswrapper[4762]: I1009 14:53:52.297184 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0438fe8e-e20e-4fbf-91d4-6f26c6c964dc-scripts\") pod \"keystone-bootstrap-sk88k\" (UID: \"0438fe8e-e20e-4fbf-91d4-6f26c6c964dc\") " pod="openstack/keystone-bootstrap-sk88k" Oct 09 14:53:52 crc kubenswrapper[4762]: I1009 14:53:52.307907 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bn7ln\" (UniqueName: \"kubernetes.io/projected/0438fe8e-e20e-4fbf-91d4-6f26c6c964dc-kube-api-access-bn7ln\") pod \"keystone-bootstrap-sk88k\" (UID: \"0438fe8e-e20e-4fbf-91d4-6f26c6c964dc\") " pod="openstack/keystone-bootstrap-sk88k" Oct 09 14:53:52 crc kubenswrapper[4762]: I1009 14:53:52.606567 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-sk88k" Oct 09 14:53:52 crc kubenswrapper[4762]: W1009 14:53:52.728205 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod499e5912_ebea_47bd_9a52_2b9d51b7bfe2.slice/crio-a7f3c6a05a904a99c24ff04e6c955fe43955a3f237d28a2d84057b509a79fd17 WatchSource:0}: Error finding container a7f3c6a05a904a99c24ff04e6c955fe43955a3f237d28a2d84057b509a79fd17: Status 404 returned error can't find the container with id a7f3c6a05a904a99c24ff04e6c955fe43955a3f237d28a2d84057b509a79fd17 Oct 09 14:53:52 crc kubenswrapper[4762]: I1009 14:53:52.731424 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6f8b9b55c-z9d5q"] Oct 09 14:53:53 crc kubenswrapper[4762]: I1009 14:53:53.101338 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-sk88k"] Oct 09 14:53:53 crc kubenswrapper[4762]: W1009 14:53:53.109066 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0438fe8e_e20e_4fbf_91d4_6f26c6c964dc.slice/crio-adc035110412c36b99caa7e0ed8d48e9986f09f23bfd339f1bf0192e8e10a282 WatchSource:0}: Error finding container adc035110412c36b99caa7e0ed8d48e9986f09f23bfd339f1bf0192e8e10a282: Status 404 returned error can't find the container with id adc035110412c36b99caa7e0ed8d48e9986f09f23bfd339f1bf0192e8e10a282 Oct 09 14:53:53 crc kubenswrapper[4762]: I1009 14:53:53.728898 4762 generic.go:334] "Generic (PLEG): container finished" podID="499e5912-ebea-47bd-9a52-2b9d51b7bfe2" containerID="2c3ceb634c42f3e3752c1d8c9cb3a7021251f88235822fc719e1e3bb36ff73e8" exitCode=0 Oct 09 14:53:53 crc kubenswrapper[4762]: I1009 14:53:53.728990 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6f8b9b55c-z9d5q" event={"ID":"499e5912-ebea-47bd-9a52-2b9d51b7bfe2","Type":"ContainerDied","Data":"2c3ceb634c42f3e3752c1d8c9cb3a7021251f88235822fc719e1e3bb36ff73e8"} Oct 09 14:53:53 crc kubenswrapper[4762]: I1009 14:53:53.729387 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6f8b9b55c-z9d5q" event={"ID":"499e5912-ebea-47bd-9a52-2b9d51b7bfe2","Type":"ContainerStarted","Data":"a7f3c6a05a904a99c24ff04e6c955fe43955a3f237d28a2d84057b509a79fd17"} Oct 09 14:53:53 crc kubenswrapper[4762]: I1009 14:53:53.734861 4762 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-sk88k" event={"ID":"0438fe8e-e20e-4fbf-91d4-6f26c6c964dc","Type":"ContainerStarted","Data":"efb3e8262647e061b5c096751e3d1c2ac620fc86b42be91d3719f7b0b106f1c6"} Oct 09 14:53:53 crc kubenswrapper[4762]: I1009 14:53:53.734920 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-sk88k" event={"ID":"0438fe8e-e20e-4fbf-91d4-6f26c6c964dc","Type":"ContainerStarted","Data":"adc035110412c36b99caa7e0ed8d48e9986f09f23bfd339f1bf0192e8e10a282"} Oct 09 14:53:54 crc kubenswrapper[4762]: I1009 14:53:54.746503 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6f8b9b55c-z9d5q" event={"ID":"499e5912-ebea-47bd-9a52-2b9d51b7bfe2","Type":"ContainerStarted","Data":"134cd8e0b9e042a3a888d3300b1a346ee8f94afb5ddc0db2077a9c336c51f519"} Oct 09 14:53:54 crc kubenswrapper[4762]: I1009 14:53:54.778907 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6f8b9b55c-z9d5q" podStartSLOduration=3.7788720590000002 podStartE2EDuration="3.778872059s" podCreationTimestamp="2025-10-09 14:53:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 14:53:54.767689828 +0000 UTC m=+5310.541480867" watchObservedRunningTime="2025-10-09 14:53:54.778872059 +0000 UTC m=+5310.552663108" Oct 09 14:53:54 crc kubenswrapper[4762]: I1009 14:53:54.782386 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-sk88k" podStartSLOduration=3.7823729 podStartE2EDuration="3.7823729s" podCreationTimestamp="2025-10-09 14:53:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 14:53:53.790078189 +0000 UTC m=+5309.563869228" watchObservedRunningTime="2025-10-09 14:53:54.7823729 +0000 UTC m=+5310.556163939" Oct 09 14:53:55 crc kubenswrapper[4762]: I1009 14:53:55.752971 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6f8b9b55c-z9d5q" Oct 09 14:53:55 crc kubenswrapper[4762]: I1009 14:53:55.965018 4762 scope.go:117] "RemoveContainer" containerID="c75ef9853f5fe1f6bce3930681cb05899e816bea18fa88b6c554eafd348de735" Oct 09 14:53:55 crc kubenswrapper[4762]: E1009 14:53:55.965366 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 14:53:59 crc kubenswrapper[4762]: I1009 14:53:59.783866 4762 generic.go:334] "Generic (PLEG): container finished" podID="0438fe8e-e20e-4fbf-91d4-6f26c6c964dc" containerID="efb3e8262647e061b5c096751e3d1c2ac620fc86b42be91d3719f7b0b106f1c6" exitCode=0 Oct 09 14:53:59 crc kubenswrapper[4762]: I1009 14:53:59.783960 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-sk88k" event={"ID":"0438fe8e-e20e-4fbf-91d4-6f26c6c964dc","Type":"ContainerDied","Data":"efb3e8262647e061b5c096751e3d1c2ac620fc86b42be91d3719f7b0b106f1c6"} Oct 09 14:54:01 crc kubenswrapper[4762]: I1009 14:54:01.373932 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-sk88k" Oct 09 14:54:01 crc kubenswrapper[4762]: I1009 14:54:01.554835 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bn7ln\" (UniqueName: \"kubernetes.io/projected/0438fe8e-e20e-4fbf-91d4-6f26c6c964dc-kube-api-access-bn7ln\") pod \"0438fe8e-e20e-4fbf-91d4-6f26c6c964dc\" (UID: \"0438fe8e-e20e-4fbf-91d4-6f26c6c964dc\") " Oct 09 14:54:01 crc kubenswrapper[4762]: I1009 14:54:01.555256 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/0438fe8e-e20e-4fbf-91d4-6f26c6c964dc-fernet-keys\") pod \"0438fe8e-e20e-4fbf-91d4-6f26c6c964dc\" (UID: \"0438fe8e-e20e-4fbf-91d4-6f26c6c964dc\") " Oct 09 14:54:01 crc kubenswrapper[4762]: I1009 14:54:01.555348 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0438fe8e-e20e-4fbf-91d4-6f26c6c964dc-scripts\") pod \"0438fe8e-e20e-4fbf-91d4-6f26c6c964dc\" (UID: \"0438fe8e-e20e-4fbf-91d4-6f26c6c964dc\") " Oct 09 14:54:01 crc kubenswrapper[4762]: I1009 14:54:01.555417 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0438fe8e-e20e-4fbf-91d4-6f26c6c964dc-combined-ca-bundle\") pod \"0438fe8e-e20e-4fbf-91d4-6f26c6c964dc\" (UID: \"0438fe8e-e20e-4fbf-91d4-6f26c6c964dc\") " Oct 09 14:54:01 crc kubenswrapper[4762]: I1009 14:54:01.555494 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0438fe8e-e20e-4fbf-91d4-6f26c6c964dc-config-data\") pod \"0438fe8e-e20e-4fbf-91d4-6f26c6c964dc\" (UID: \"0438fe8e-e20e-4fbf-91d4-6f26c6c964dc\") " Oct 09 14:54:01 crc kubenswrapper[4762]: I1009 14:54:01.555530 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/0438fe8e-e20e-4fbf-91d4-6f26c6c964dc-credential-keys\") pod \"0438fe8e-e20e-4fbf-91d4-6f26c6c964dc\" (UID: \"0438fe8e-e20e-4fbf-91d4-6f26c6c964dc\") " Oct 09 14:54:01 crc kubenswrapper[4762]: I1009 14:54:01.560855 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0438fe8e-e20e-4fbf-91d4-6f26c6c964dc-scripts" (OuterVolumeSpecName: "scripts") pod "0438fe8e-e20e-4fbf-91d4-6f26c6c964dc" (UID: "0438fe8e-e20e-4fbf-91d4-6f26c6c964dc"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 14:54:01 crc kubenswrapper[4762]: I1009 14:54:01.561407 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0438fe8e-e20e-4fbf-91d4-6f26c6c964dc-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "0438fe8e-e20e-4fbf-91d4-6f26c6c964dc" (UID: "0438fe8e-e20e-4fbf-91d4-6f26c6c964dc"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 14:54:01 crc kubenswrapper[4762]: I1009 14:54:01.561452 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0438fe8e-e20e-4fbf-91d4-6f26c6c964dc-kube-api-access-bn7ln" (OuterVolumeSpecName: "kube-api-access-bn7ln") pod "0438fe8e-e20e-4fbf-91d4-6f26c6c964dc" (UID: "0438fe8e-e20e-4fbf-91d4-6f26c6c964dc"). InnerVolumeSpecName "kube-api-access-bn7ln". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 14:54:01 crc kubenswrapper[4762]: I1009 14:54:01.562162 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0438fe8e-e20e-4fbf-91d4-6f26c6c964dc-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "0438fe8e-e20e-4fbf-91d4-6f26c6c964dc" (UID: "0438fe8e-e20e-4fbf-91d4-6f26c6c964dc"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 14:54:01 crc kubenswrapper[4762]: I1009 14:54:01.579398 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0438fe8e-e20e-4fbf-91d4-6f26c6c964dc-config-data" (OuterVolumeSpecName: "config-data") pod "0438fe8e-e20e-4fbf-91d4-6f26c6c964dc" (UID: "0438fe8e-e20e-4fbf-91d4-6f26c6c964dc"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 14:54:01 crc kubenswrapper[4762]: I1009 14:54:01.581321 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0438fe8e-e20e-4fbf-91d4-6f26c6c964dc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0438fe8e-e20e-4fbf-91d4-6f26c6c964dc" (UID: "0438fe8e-e20e-4fbf-91d4-6f26c6c964dc"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 14:54:01 crc kubenswrapper[4762]: I1009 14:54:01.657572 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bn7ln\" (UniqueName: \"kubernetes.io/projected/0438fe8e-e20e-4fbf-91d4-6f26c6c964dc-kube-api-access-bn7ln\") on node \"crc\" DevicePath \"\"" Oct 09 14:54:01 crc kubenswrapper[4762]: I1009 14:54:01.657609 4762 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/0438fe8e-e20e-4fbf-91d4-6f26c6c964dc-fernet-keys\") on node \"crc\" DevicePath \"\"" Oct 09 14:54:01 crc kubenswrapper[4762]: I1009 14:54:01.657621 4762 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0438fe8e-e20e-4fbf-91d4-6f26c6c964dc-scripts\") on node \"crc\" DevicePath \"\"" Oct 09 14:54:01 crc kubenswrapper[4762]: I1009 14:54:01.657631 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0438fe8e-e20e-4fbf-91d4-6f26c6c964dc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 14:54:01 crc kubenswrapper[4762]: I1009 14:54:01.657707 4762 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0438fe8e-e20e-4fbf-91d4-6f26c6c964dc-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 14:54:01 crc kubenswrapper[4762]: I1009 14:54:01.657717 4762 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/0438fe8e-e20e-4fbf-91d4-6f26c6c964dc-credential-keys\") on node \"crc\" DevicePath \"\"" Oct 09 14:54:01 crc kubenswrapper[4762]: I1009 14:54:01.801672 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-sk88k" event={"ID":"0438fe8e-e20e-4fbf-91d4-6f26c6c964dc","Type":"ContainerDied","Data":"adc035110412c36b99caa7e0ed8d48e9986f09f23bfd339f1bf0192e8e10a282"} Oct 09 14:54:01 crc kubenswrapper[4762]: I1009 14:54:01.801700 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-sk88k" Oct 09 14:54:01 crc kubenswrapper[4762]: I1009 14:54:01.801715 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="adc035110412c36b99caa7e0ed8d48e9986f09f23bfd339f1bf0192e8e10a282" Oct 09 14:54:01 crc kubenswrapper[4762]: I1009 14:54:01.892564 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-sk88k"] Oct 09 14:54:01 crc kubenswrapper[4762]: I1009 14:54:01.897989 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-sk88k"] Oct 09 14:54:01 crc kubenswrapper[4762]: I1009 14:54:01.974075 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-h2vz7"] Oct 09 14:54:01 crc kubenswrapper[4762]: E1009 14:54:01.974553 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0438fe8e-e20e-4fbf-91d4-6f26c6c964dc" containerName="keystone-bootstrap" Oct 09 14:54:01 crc kubenswrapper[4762]: I1009 14:54:01.974579 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="0438fe8e-e20e-4fbf-91d4-6f26c6c964dc" containerName="keystone-bootstrap" Oct 09 14:54:01 crc kubenswrapper[4762]: I1009 14:54:01.974801 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="0438fe8e-e20e-4fbf-91d4-6f26c6c964dc" containerName="keystone-bootstrap" Oct 09 14:54:01 crc kubenswrapper[4762]: I1009 14:54:01.975595 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-h2vz7" Oct 09 14:54:01 crc kubenswrapper[4762]: I1009 14:54:01.978626 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-4n8qc" Oct 09 14:54:01 crc kubenswrapper[4762]: I1009 14:54:01.978820 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Oct 09 14:54:01 crc kubenswrapper[4762]: I1009 14:54:01.978859 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Oct 09 14:54:01 crc kubenswrapper[4762]: I1009 14:54:01.979921 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Oct 09 14:54:01 crc kubenswrapper[4762]: I1009 14:54:01.984228 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-h2vz7"] Oct 09 14:54:02 crc kubenswrapper[4762]: I1009 14:54:02.168073 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/525c4acb-a6b7-41d8-88d3-228e8833a6cc-config-data\") pod \"keystone-bootstrap-h2vz7\" (UID: \"525c4acb-a6b7-41d8-88d3-228e8833a6cc\") " pod="openstack/keystone-bootstrap-h2vz7" Oct 09 14:54:02 crc kubenswrapper[4762]: I1009 14:54:02.168216 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2cl48\" (UniqueName: \"kubernetes.io/projected/525c4acb-a6b7-41d8-88d3-228e8833a6cc-kube-api-access-2cl48\") pod \"keystone-bootstrap-h2vz7\" (UID: \"525c4acb-a6b7-41d8-88d3-228e8833a6cc\") " pod="openstack/keystone-bootstrap-h2vz7" Oct 09 14:54:02 crc kubenswrapper[4762]: I1009 14:54:02.168255 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/525c4acb-a6b7-41d8-88d3-228e8833a6cc-combined-ca-bundle\") pod \"keystone-bootstrap-h2vz7\" (UID: \"525c4acb-a6b7-41d8-88d3-228e8833a6cc\") " 
pod="openstack/keystone-bootstrap-h2vz7" Oct 09 14:54:02 crc kubenswrapper[4762]: I1009 14:54:02.168450 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/525c4acb-a6b7-41d8-88d3-228e8833a6cc-scripts\") pod \"keystone-bootstrap-h2vz7\" (UID: \"525c4acb-a6b7-41d8-88d3-228e8833a6cc\") " pod="openstack/keystone-bootstrap-h2vz7" Oct 09 14:54:02 crc kubenswrapper[4762]: I1009 14:54:02.168480 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/525c4acb-a6b7-41d8-88d3-228e8833a6cc-fernet-keys\") pod \"keystone-bootstrap-h2vz7\" (UID: \"525c4acb-a6b7-41d8-88d3-228e8833a6cc\") " pod="openstack/keystone-bootstrap-h2vz7" Oct 09 14:54:02 crc kubenswrapper[4762]: I1009 14:54:02.168507 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/525c4acb-a6b7-41d8-88d3-228e8833a6cc-credential-keys\") pod \"keystone-bootstrap-h2vz7\" (UID: \"525c4acb-a6b7-41d8-88d3-228e8833a6cc\") " pod="openstack/keystone-bootstrap-h2vz7" Oct 09 14:54:02 crc kubenswrapper[4762]: I1009 14:54:02.251285 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6f8b9b55c-z9d5q" Oct 09 14:54:02 crc kubenswrapper[4762]: I1009 14:54:02.269548 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/525c4acb-a6b7-41d8-88d3-228e8833a6cc-config-data\") pod \"keystone-bootstrap-h2vz7\" (UID: \"525c4acb-a6b7-41d8-88d3-228e8833a6cc\") " pod="openstack/keystone-bootstrap-h2vz7" Oct 09 14:54:02 crc kubenswrapper[4762]: I1009 14:54:02.269663 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2cl48\" (UniqueName: \"kubernetes.io/projected/525c4acb-a6b7-41d8-88d3-228e8833a6cc-kube-api-access-2cl48\") pod \"keystone-bootstrap-h2vz7\" (UID: \"525c4acb-a6b7-41d8-88d3-228e8833a6cc\") " pod="openstack/keystone-bootstrap-h2vz7" Oct 09 14:54:02 crc kubenswrapper[4762]: I1009 14:54:02.269688 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/525c4acb-a6b7-41d8-88d3-228e8833a6cc-combined-ca-bundle\") pod \"keystone-bootstrap-h2vz7\" (UID: \"525c4acb-a6b7-41d8-88d3-228e8833a6cc\") " pod="openstack/keystone-bootstrap-h2vz7" Oct 09 14:54:02 crc kubenswrapper[4762]: I1009 14:54:02.269791 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/525c4acb-a6b7-41d8-88d3-228e8833a6cc-scripts\") pod \"keystone-bootstrap-h2vz7\" (UID: \"525c4acb-a6b7-41d8-88d3-228e8833a6cc\") " pod="openstack/keystone-bootstrap-h2vz7" Oct 09 14:54:02 crc kubenswrapper[4762]: I1009 14:54:02.269809 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/525c4acb-a6b7-41d8-88d3-228e8833a6cc-fernet-keys\") pod \"keystone-bootstrap-h2vz7\" (UID: \"525c4acb-a6b7-41d8-88d3-228e8833a6cc\") " pod="openstack/keystone-bootstrap-h2vz7" Oct 09 14:54:02 crc kubenswrapper[4762]: I1009 14:54:02.269828 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/525c4acb-a6b7-41d8-88d3-228e8833a6cc-credential-keys\") pod 
\"keystone-bootstrap-h2vz7\" (UID: \"525c4acb-a6b7-41d8-88d3-228e8833a6cc\") " pod="openstack/keystone-bootstrap-h2vz7" Oct 09 14:54:02 crc kubenswrapper[4762]: I1009 14:54:02.275542 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/525c4acb-a6b7-41d8-88d3-228e8833a6cc-scripts\") pod \"keystone-bootstrap-h2vz7\" (UID: \"525c4acb-a6b7-41d8-88d3-228e8833a6cc\") " pod="openstack/keystone-bootstrap-h2vz7" Oct 09 14:54:02 crc kubenswrapper[4762]: I1009 14:54:02.275729 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/525c4acb-a6b7-41d8-88d3-228e8833a6cc-fernet-keys\") pod \"keystone-bootstrap-h2vz7\" (UID: \"525c4acb-a6b7-41d8-88d3-228e8833a6cc\") " pod="openstack/keystone-bootstrap-h2vz7" Oct 09 14:54:02 crc kubenswrapper[4762]: I1009 14:54:02.276285 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/525c4acb-a6b7-41d8-88d3-228e8833a6cc-combined-ca-bundle\") pod \"keystone-bootstrap-h2vz7\" (UID: \"525c4acb-a6b7-41d8-88d3-228e8833a6cc\") " pod="openstack/keystone-bootstrap-h2vz7" Oct 09 14:54:02 crc kubenswrapper[4762]: I1009 14:54:02.277683 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/525c4acb-a6b7-41d8-88d3-228e8833a6cc-config-data\") pod \"keystone-bootstrap-h2vz7\" (UID: \"525c4acb-a6b7-41d8-88d3-228e8833a6cc\") " pod="openstack/keystone-bootstrap-h2vz7" Oct 09 14:54:02 crc kubenswrapper[4762]: I1009 14:54:02.280033 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/525c4acb-a6b7-41d8-88d3-228e8833a6cc-credential-keys\") pod \"keystone-bootstrap-h2vz7\" (UID: \"525c4acb-a6b7-41d8-88d3-228e8833a6cc\") " pod="openstack/keystone-bootstrap-h2vz7" Oct 09 14:54:02 crc kubenswrapper[4762]: I1009 14:54:02.288375 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2cl48\" (UniqueName: \"kubernetes.io/projected/525c4acb-a6b7-41d8-88d3-228e8833a6cc-kube-api-access-2cl48\") pod \"keystone-bootstrap-h2vz7\" (UID: \"525c4acb-a6b7-41d8-88d3-228e8833a6cc\") " pod="openstack/keystone-bootstrap-h2vz7" Oct 09 14:54:02 crc kubenswrapper[4762]: I1009 14:54:02.330483 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7c9c99d997-d6f7h"] Oct 09 14:54:02 crc kubenswrapper[4762]: I1009 14:54:02.331086 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7c9c99d997-d6f7h" podUID="f2f20c73-29a3-48a9-bbf4-3a8677b631ee" containerName="dnsmasq-dns" containerID="cri-o://02adad4bf412503c556a0a26c6455a6ec4f87c56b89a8567590effd52a2207a3" gracePeriod=10 Oct 09 14:54:02 crc kubenswrapper[4762]: I1009 14:54:02.332404 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-h2vz7" Oct 09 14:54:02 crc kubenswrapper[4762]: I1009 14:54:02.810583 4762 generic.go:334] "Generic (PLEG): container finished" podID="f2f20c73-29a3-48a9-bbf4-3a8677b631ee" containerID="02adad4bf412503c556a0a26c6455a6ec4f87c56b89a8567590effd52a2207a3" exitCode=0 Oct 09 14:54:02 crc kubenswrapper[4762]: I1009 14:54:02.810654 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7c9c99d997-d6f7h" event={"ID":"f2f20c73-29a3-48a9-bbf4-3a8677b631ee","Type":"ContainerDied","Data":"02adad4bf412503c556a0a26c6455a6ec4f87c56b89a8567590effd52a2207a3"} Oct 09 14:54:02 crc kubenswrapper[4762]: I1009 14:54:02.954555 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-h2vz7"] Oct 09 14:54:02 crc kubenswrapper[4762]: I1009 14:54:02.974208 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0438fe8e-e20e-4fbf-91d4-6f26c6c964dc" path="/var/lib/kubelet/pods/0438fe8e-e20e-4fbf-91d4-6f26c6c964dc/volumes" Oct 09 14:54:03 crc kubenswrapper[4762]: I1009 14:54:03.305917 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7c9c99d997-d6f7h" Oct 09 14:54:03 crc kubenswrapper[4762]: I1009 14:54:03.489074 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f2f20c73-29a3-48a9-bbf4-3a8677b631ee-ovsdbserver-nb\") pod \"f2f20c73-29a3-48a9-bbf4-3a8677b631ee\" (UID: \"f2f20c73-29a3-48a9-bbf4-3a8677b631ee\") " Oct 09 14:54:03 crc kubenswrapper[4762]: I1009 14:54:03.489143 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z2h2f\" (UniqueName: \"kubernetes.io/projected/f2f20c73-29a3-48a9-bbf4-3a8677b631ee-kube-api-access-z2h2f\") pod \"f2f20c73-29a3-48a9-bbf4-3a8677b631ee\" (UID: \"f2f20c73-29a3-48a9-bbf4-3a8677b631ee\") " Oct 09 14:54:03 crc kubenswrapper[4762]: I1009 14:54:03.489368 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f2f20c73-29a3-48a9-bbf4-3a8677b631ee-ovsdbserver-sb\") pod \"f2f20c73-29a3-48a9-bbf4-3a8677b631ee\" (UID: \"f2f20c73-29a3-48a9-bbf4-3a8677b631ee\") " Oct 09 14:54:03 crc kubenswrapper[4762]: I1009 14:54:03.489491 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f2f20c73-29a3-48a9-bbf4-3a8677b631ee-config\") pod \"f2f20c73-29a3-48a9-bbf4-3a8677b631ee\" (UID: \"f2f20c73-29a3-48a9-bbf4-3a8677b631ee\") " Oct 09 14:54:03 crc kubenswrapper[4762]: I1009 14:54:03.489525 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f2f20c73-29a3-48a9-bbf4-3a8677b631ee-dns-svc\") pod \"f2f20c73-29a3-48a9-bbf4-3a8677b631ee\" (UID: \"f2f20c73-29a3-48a9-bbf4-3a8677b631ee\") " Oct 09 14:54:03 crc kubenswrapper[4762]: I1009 14:54:03.496405 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f2f20c73-29a3-48a9-bbf4-3a8677b631ee-kube-api-access-z2h2f" (OuterVolumeSpecName: "kube-api-access-z2h2f") pod "f2f20c73-29a3-48a9-bbf4-3a8677b631ee" (UID: "f2f20c73-29a3-48a9-bbf4-3a8677b631ee"). InnerVolumeSpecName "kube-api-access-z2h2f". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 14:54:03 crc kubenswrapper[4762]: I1009 14:54:03.536135 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f2f20c73-29a3-48a9-bbf4-3a8677b631ee-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "f2f20c73-29a3-48a9-bbf4-3a8677b631ee" (UID: "f2f20c73-29a3-48a9-bbf4-3a8677b631ee"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 14:54:03 crc kubenswrapper[4762]: I1009 14:54:03.536390 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f2f20c73-29a3-48a9-bbf4-3a8677b631ee-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "f2f20c73-29a3-48a9-bbf4-3a8677b631ee" (UID: "f2f20c73-29a3-48a9-bbf4-3a8677b631ee"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 14:54:03 crc kubenswrapper[4762]: I1009 14:54:03.537964 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f2f20c73-29a3-48a9-bbf4-3a8677b631ee-config" (OuterVolumeSpecName: "config") pod "f2f20c73-29a3-48a9-bbf4-3a8677b631ee" (UID: "f2f20c73-29a3-48a9-bbf4-3a8677b631ee"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 14:54:03 crc kubenswrapper[4762]: I1009 14:54:03.552436 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f2f20c73-29a3-48a9-bbf4-3a8677b631ee-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "f2f20c73-29a3-48a9-bbf4-3a8677b631ee" (UID: "f2f20c73-29a3-48a9-bbf4-3a8677b631ee"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 14:54:03 crc kubenswrapper[4762]: I1009 14:54:03.591580 4762 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f2f20c73-29a3-48a9-bbf4-3a8677b631ee-config\") on node \"crc\" DevicePath \"\"" Oct 09 14:54:03 crc kubenswrapper[4762]: I1009 14:54:03.591621 4762 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f2f20c73-29a3-48a9-bbf4-3a8677b631ee-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 09 14:54:03 crc kubenswrapper[4762]: I1009 14:54:03.591654 4762 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f2f20c73-29a3-48a9-bbf4-3a8677b631ee-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 09 14:54:03 crc kubenswrapper[4762]: I1009 14:54:03.591670 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z2h2f\" (UniqueName: \"kubernetes.io/projected/f2f20c73-29a3-48a9-bbf4-3a8677b631ee-kube-api-access-z2h2f\") on node \"crc\" DevicePath \"\"" Oct 09 14:54:03 crc kubenswrapper[4762]: I1009 14:54:03.591682 4762 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f2f20c73-29a3-48a9-bbf4-3a8677b631ee-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 09 14:54:03 crc kubenswrapper[4762]: I1009 14:54:03.823106 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-h2vz7" event={"ID":"525c4acb-a6b7-41d8-88d3-228e8833a6cc","Type":"ContainerStarted","Data":"dd40c729d048bb87fb25830d2b9cd9b8a791b91dc3d2291420821cdc7671d43e"} Oct 09 14:54:03 crc kubenswrapper[4762]: I1009 14:54:03.823185 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/keystone-bootstrap-h2vz7" event={"ID":"525c4acb-a6b7-41d8-88d3-228e8833a6cc","Type":"ContainerStarted","Data":"936d62587b7437cc5be23ee30ee32e508aa5a0158d2c4573bce6f560c017335e"} Oct 09 14:54:03 crc kubenswrapper[4762]: I1009 14:54:03.825473 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7c9c99d997-d6f7h" event={"ID":"f2f20c73-29a3-48a9-bbf4-3a8677b631ee","Type":"ContainerDied","Data":"a9d1657c4e2c3116a8718d6c574c92ba6cbbff3d9c89b86f44b69e025629af84"} Oct 09 14:54:03 crc kubenswrapper[4762]: I1009 14:54:03.825522 4762 scope.go:117] "RemoveContainer" containerID="02adad4bf412503c556a0a26c6455a6ec4f87c56b89a8567590effd52a2207a3" Oct 09 14:54:03 crc kubenswrapper[4762]: I1009 14:54:03.825650 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7c9c99d997-d6f7h" Oct 09 14:54:03 crc kubenswrapper[4762]: I1009 14:54:03.851022 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-h2vz7" podStartSLOduration=2.850999759 podStartE2EDuration="2.850999759s" podCreationTimestamp="2025-10-09 14:54:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 14:54:03.847247072 +0000 UTC m=+5319.621038131" watchObservedRunningTime="2025-10-09 14:54:03.850999759 +0000 UTC m=+5319.624790798" Oct 09 14:54:03 crc kubenswrapper[4762]: I1009 14:54:03.861937 4762 scope.go:117] "RemoveContainer" containerID="28babd730a097fafc9ec51cf64e42fd5df1a8b727d1b7952076d72161a3b3198" Oct 09 14:54:03 crc kubenswrapper[4762]: I1009 14:54:03.876682 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7c9c99d997-d6f7h"] Oct 09 14:54:03 crc kubenswrapper[4762]: I1009 14:54:03.884475 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7c9c99d997-d6f7h"] Oct 09 14:54:04 crc kubenswrapper[4762]: I1009 14:54:04.978974 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f2f20c73-29a3-48a9-bbf4-3a8677b631ee" path="/var/lib/kubelet/pods/f2f20c73-29a3-48a9-bbf4-3a8677b631ee/volumes" Oct 09 14:54:06 crc kubenswrapper[4762]: I1009 14:54:06.860576 4762 generic.go:334] "Generic (PLEG): container finished" podID="525c4acb-a6b7-41d8-88d3-228e8833a6cc" containerID="dd40c729d048bb87fb25830d2b9cd9b8a791b91dc3d2291420821cdc7671d43e" exitCode=0 Oct 09 14:54:06 crc kubenswrapper[4762]: I1009 14:54:06.860840 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-h2vz7" event={"ID":"525c4acb-a6b7-41d8-88d3-228e8833a6cc","Type":"ContainerDied","Data":"dd40c729d048bb87fb25830d2b9cd9b8a791b91dc3d2291420821cdc7671d43e"} Oct 09 14:54:08 crc kubenswrapper[4762]: I1009 14:54:08.173854 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-h2vz7" Oct 09 14:54:08 crc kubenswrapper[4762]: I1009 14:54:08.263919 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/525c4acb-a6b7-41d8-88d3-228e8833a6cc-fernet-keys\") pod \"525c4acb-a6b7-41d8-88d3-228e8833a6cc\" (UID: \"525c4acb-a6b7-41d8-88d3-228e8833a6cc\") " Oct 09 14:54:08 crc kubenswrapper[4762]: I1009 14:54:08.263977 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2cl48\" (UniqueName: \"kubernetes.io/projected/525c4acb-a6b7-41d8-88d3-228e8833a6cc-kube-api-access-2cl48\") pod \"525c4acb-a6b7-41d8-88d3-228e8833a6cc\" (UID: \"525c4acb-a6b7-41d8-88d3-228e8833a6cc\") " Oct 09 14:54:08 crc kubenswrapper[4762]: I1009 14:54:08.264038 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/525c4acb-a6b7-41d8-88d3-228e8833a6cc-config-data\") pod \"525c4acb-a6b7-41d8-88d3-228e8833a6cc\" (UID: \"525c4acb-a6b7-41d8-88d3-228e8833a6cc\") " Oct 09 14:54:08 crc kubenswrapper[4762]: I1009 14:54:08.264759 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/525c4acb-a6b7-41d8-88d3-228e8833a6cc-combined-ca-bundle\") pod \"525c4acb-a6b7-41d8-88d3-228e8833a6cc\" (UID: \"525c4acb-a6b7-41d8-88d3-228e8833a6cc\") " Oct 09 14:54:08 crc kubenswrapper[4762]: I1009 14:54:08.265053 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/525c4acb-a6b7-41d8-88d3-228e8833a6cc-scripts\") pod \"525c4acb-a6b7-41d8-88d3-228e8833a6cc\" (UID: \"525c4acb-a6b7-41d8-88d3-228e8833a6cc\") " Oct 09 14:54:08 crc kubenswrapper[4762]: I1009 14:54:08.265090 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/525c4acb-a6b7-41d8-88d3-228e8833a6cc-credential-keys\") pod \"525c4acb-a6b7-41d8-88d3-228e8833a6cc\" (UID: \"525c4acb-a6b7-41d8-88d3-228e8833a6cc\") " Oct 09 14:54:08 crc kubenswrapper[4762]: I1009 14:54:08.268612 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/525c4acb-a6b7-41d8-88d3-228e8833a6cc-scripts" (OuterVolumeSpecName: "scripts") pod "525c4acb-a6b7-41d8-88d3-228e8833a6cc" (UID: "525c4acb-a6b7-41d8-88d3-228e8833a6cc"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 14:54:08 crc kubenswrapper[4762]: I1009 14:54:08.268763 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/525c4acb-a6b7-41d8-88d3-228e8833a6cc-kube-api-access-2cl48" (OuterVolumeSpecName: "kube-api-access-2cl48") pod "525c4acb-a6b7-41d8-88d3-228e8833a6cc" (UID: "525c4acb-a6b7-41d8-88d3-228e8833a6cc"). InnerVolumeSpecName "kube-api-access-2cl48". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 14:54:08 crc kubenswrapper[4762]: I1009 14:54:08.269121 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/525c4acb-a6b7-41d8-88d3-228e8833a6cc-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "525c4acb-a6b7-41d8-88d3-228e8833a6cc" (UID: "525c4acb-a6b7-41d8-88d3-228e8833a6cc"). InnerVolumeSpecName "fernet-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 14:54:08 crc kubenswrapper[4762]: I1009 14:54:08.276648 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/525c4acb-a6b7-41d8-88d3-228e8833a6cc-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "525c4acb-a6b7-41d8-88d3-228e8833a6cc" (UID: "525c4acb-a6b7-41d8-88d3-228e8833a6cc"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 14:54:08 crc kubenswrapper[4762]: I1009 14:54:08.287754 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/525c4acb-a6b7-41d8-88d3-228e8833a6cc-config-data" (OuterVolumeSpecName: "config-data") pod "525c4acb-a6b7-41d8-88d3-228e8833a6cc" (UID: "525c4acb-a6b7-41d8-88d3-228e8833a6cc"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 14:54:08 crc kubenswrapper[4762]: I1009 14:54:08.294361 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/525c4acb-a6b7-41d8-88d3-228e8833a6cc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "525c4acb-a6b7-41d8-88d3-228e8833a6cc" (UID: "525c4acb-a6b7-41d8-88d3-228e8833a6cc"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 14:54:08 crc kubenswrapper[4762]: I1009 14:54:08.367460 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2cl48\" (UniqueName: \"kubernetes.io/projected/525c4acb-a6b7-41d8-88d3-228e8833a6cc-kube-api-access-2cl48\") on node \"crc\" DevicePath \"\"" Oct 09 14:54:08 crc kubenswrapper[4762]: I1009 14:54:08.367492 4762 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/525c4acb-a6b7-41d8-88d3-228e8833a6cc-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 14:54:08 crc kubenswrapper[4762]: I1009 14:54:08.367502 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/525c4acb-a6b7-41d8-88d3-228e8833a6cc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 14:54:08 crc kubenswrapper[4762]: I1009 14:54:08.367511 4762 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/525c4acb-a6b7-41d8-88d3-228e8833a6cc-scripts\") on node \"crc\" DevicePath \"\"" Oct 09 14:54:08 crc kubenswrapper[4762]: I1009 14:54:08.367519 4762 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/525c4acb-a6b7-41d8-88d3-228e8833a6cc-credential-keys\") on node \"crc\" DevicePath \"\"" Oct 09 14:54:08 crc kubenswrapper[4762]: I1009 14:54:08.367527 4762 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/525c4acb-a6b7-41d8-88d3-228e8833a6cc-fernet-keys\") on node \"crc\" DevicePath \"\"" Oct 09 14:54:08 crc kubenswrapper[4762]: I1009 14:54:08.879791 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-h2vz7" event={"ID":"525c4acb-a6b7-41d8-88d3-228e8833a6cc","Type":"ContainerDied","Data":"936d62587b7437cc5be23ee30ee32e508aa5a0158d2c4573bce6f560c017335e"} Oct 09 14:54:08 crc kubenswrapper[4762]: I1009 14:54:08.879830 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-h2vz7" Oct 09 14:54:08 crc kubenswrapper[4762]: I1009 14:54:08.879841 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="936d62587b7437cc5be23ee30ee32e508aa5a0158d2c4573bce6f560c017335e" Oct 09 14:54:08 crc kubenswrapper[4762]: I1009 14:54:08.974515 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-7f9d7865f9-j75xk"] Oct 09 14:54:08 crc kubenswrapper[4762]: E1009 14:54:08.974784 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f2f20c73-29a3-48a9-bbf4-3a8677b631ee" containerName="dnsmasq-dns" Oct 09 14:54:08 crc kubenswrapper[4762]: I1009 14:54:08.974800 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="f2f20c73-29a3-48a9-bbf4-3a8677b631ee" containerName="dnsmasq-dns" Oct 09 14:54:08 crc kubenswrapper[4762]: E1009 14:54:08.974815 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f2f20c73-29a3-48a9-bbf4-3a8677b631ee" containerName="init" Oct 09 14:54:08 crc kubenswrapper[4762]: I1009 14:54:08.974822 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="f2f20c73-29a3-48a9-bbf4-3a8677b631ee" containerName="init" Oct 09 14:54:08 crc kubenswrapper[4762]: E1009 14:54:08.974840 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="525c4acb-a6b7-41d8-88d3-228e8833a6cc" containerName="keystone-bootstrap" Oct 09 14:54:08 crc kubenswrapper[4762]: I1009 14:54:08.974846 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="525c4acb-a6b7-41d8-88d3-228e8833a6cc" containerName="keystone-bootstrap" Oct 09 14:54:08 crc kubenswrapper[4762]: I1009 14:54:08.975004 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="f2f20c73-29a3-48a9-bbf4-3a8677b631ee" containerName="dnsmasq-dns" Oct 09 14:54:08 crc kubenswrapper[4762]: I1009 14:54:08.975023 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="525c4acb-a6b7-41d8-88d3-228e8833a6cc" containerName="keystone-bootstrap" Oct 09 14:54:08 crc kubenswrapper[4762]: I1009 14:54:08.975652 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-7f9d7865f9-j75xk" Oct 09 14:54:08 crc kubenswrapper[4762]: I1009 14:54:08.977809 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Oct 09 14:54:08 crc kubenswrapper[4762]: I1009 14:54:08.977886 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Oct 09 14:54:08 crc kubenswrapper[4762]: I1009 14:54:08.977890 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-7f9d7865f9-j75xk"] Oct 09 14:54:08 crc kubenswrapper[4762]: I1009 14:54:08.977886 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-4n8qc" Oct 09 14:54:08 crc kubenswrapper[4762]: I1009 14:54:08.978015 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Oct 09 14:54:09 crc kubenswrapper[4762]: I1009 14:54:09.077548 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j9bm2\" (UniqueName: \"kubernetes.io/projected/445db2dd-8394-428c-9859-34882f87cefa-kube-api-access-j9bm2\") pod \"keystone-7f9d7865f9-j75xk\" (UID: \"445db2dd-8394-428c-9859-34882f87cefa\") " pod="openstack/keystone-7f9d7865f9-j75xk" Oct 09 14:54:09 crc kubenswrapper[4762]: I1009 14:54:09.077833 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/445db2dd-8394-428c-9859-34882f87cefa-credential-keys\") pod \"keystone-7f9d7865f9-j75xk\" (UID: \"445db2dd-8394-428c-9859-34882f87cefa\") " pod="openstack/keystone-7f9d7865f9-j75xk" Oct 09 14:54:09 crc kubenswrapper[4762]: I1009 14:54:09.077868 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/445db2dd-8394-428c-9859-34882f87cefa-scripts\") pod \"keystone-7f9d7865f9-j75xk\" (UID: \"445db2dd-8394-428c-9859-34882f87cefa\") " pod="openstack/keystone-7f9d7865f9-j75xk" Oct 09 14:54:09 crc kubenswrapper[4762]: I1009 14:54:09.077942 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/445db2dd-8394-428c-9859-34882f87cefa-combined-ca-bundle\") pod \"keystone-7f9d7865f9-j75xk\" (UID: \"445db2dd-8394-428c-9859-34882f87cefa\") " pod="openstack/keystone-7f9d7865f9-j75xk" Oct 09 14:54:09 crc kubenswrapper[4762]: I1009 14:54:09.077963 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/445db2dd-8394-428c-9859-34882f87cefa-fernet-keys\") pod \"keystone-7f9d7865f9-j75xk\" (UID: \"445db2dd-8394-428c-9859-34882f87cefa\") " pod="openstack/keystone-7f9d7865f9-j75xk" Oct 09 14:54:09 crc kubenswrapper[4762]: I1009 14:54:09.078314 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/445db2dd-8394-428c-9859-34882f87cefa-config-data\") pod \"keystone-7f9d7865f9-j75xk\" (UID: \"445db2dd-8394-428c-9859-34882f87cefa\") " pod="openstack/keystone-7f9d7865f9-j75xk" Oct 09 14:54:09 crc kubenswrapper[4762]: I1009 14:54:09.180198 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j9bm2\" (UniqueName: \"kubernetes.io/projected/445db2dd-8394-428c-9859-34882f87cefa-kube-api-access-j9bm2\") pod 
\"keystone-7f9d7865f9-j75xk\" (UID: \"445db2dd-8394-428c-9859-34882f87cefa\") " pod="openstack/keystone-7f9d7865f9-j75xk" Oct 09 14:54:09 crc kubenswrapper[4762]: I1009 14:54:09.180250 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/445db2dd-8394-428c-9859-34882f87cefa-credential-keys\") pod \"keystone-7f9d7865f9-j75xk\" (UID: \"445db2dd-8394-428c-9859-34882f87cefa\") " pod="openstack/keystone-7f9d7865f9-j75xk" Oct 09 14:54:09 crc kubenswrapper[4762]: I1009 14:54:09.180310 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/445db2dd-8394-428c-9859-34882f87cefa-scripts\") pod \"keystone-7f9d7865f9-j75xk\" (UID: \"445db2dd-8394-428c-9859-34882f87cefa\") " pod="openstack/keystone-7f9d7865f9-j75xk" Oct 09 14:54:09 crc kubenswrapper[4762]: I1009 14:54:09.180360 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/445db2dd-8394-428c-9859-34882f87cefa-combined-ca-bundle\") pod \"keystone-7f9d7865f9-j75xk\" (UID: \"445db2dd-8394-428c-9859-34882f87cefa\") " pod="openstack/keystone-7f9d7865f9-j75xk" Oct 09 14:54:09 crc kubenswrapper[4762]: I1009 14:54:09.180384 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/445db2dd-8394-428c-9859-34882f87cefa-fernet-keys\") pod \"keystone-7f9d7865f9-j75xk\" (UID: \"445db2dd-8394-428c-9859-34882f87cefa\") " pod="openstack/keystone-7f9d7865f9-j75xk" Oct 09 14:54:09 crc kubenswrapper[4762]: I1009 14:54:09.180445 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/445db2dd-8394-428c-9859-34882f87cefa-config-data\") pod \"keystone-7f9d7865f9-j75xk\" (UID: \"445db2dd-8394-428c-9859-34882f87cefa\") " pod="openstack/keystone-7f9d7865f9-j75xk" Oct 09 14:54:09 crc kubenswrapper[4762]: I1009 14:54:09.184739 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/445db2dd-8394-428c-9859-34882f87cefa-combined-ca-bundle\") pod \"keystone-7f9d7865f9-j75xk\" (UID: \"445db2dd-8394-428c-9859-34882f87cefa\") " pod="openstack/keystone-7f9d7865f9-j75xk" Oct 09 14:54:09 crc kubenswrapper[4762]: I1009 14:54:09.185883 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/445db2dd-8394-428c-9859-34882f87cefa-scripts\") pod \"keystone-7f9d7865f9-j75xk\" (UID: \"445db2dd-8394-428c-9859-34882f87cefa\") " pod="openstack/keystone-7f9d7865f9-j75xk" Oct 09 14:54:09 crc kubenswrapper[4762]: I1009 14:54:09.186613 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/445db2dd-8394-428c-9859-34882f87cefa-credential-keys\") pod \"keystone-7f9d7865f9-j75xk\" (UID: \"445db2dd-8394-428c-9859-34882f87cefa\") " pod="openstack/keystone-7f9d7865f9-j75xk" Oct 09 14:54:09 crc kubenswrapper[4762]: I1009 14:54:09.186792 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/445db2dd-8394-428c-9859-34882f87cefa-config-data\") pod \"keystone-7f9d7865f9-j75xk\" (UID: \"445db2dd-8394-428c-9859-34882f87cefa\") " pod="openstack/keystone-7f9d7865f9-j75xk" Oct 09 14:54:09 crc kubenswrapper[4762]: I1009 14:54:09.186939 4762 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/445db2dd-8394-428c-9859-34882f87cefa-fernet-keys\") pod \"keystone-7f9d7865f9-j75xk\" (UID: \"445db2dd-8394-428c-9859-34882f87cefa\") " pod="openstack/keystone-7f9d7865f9-j75xk" Oct 09 14:54:09 crc kubenswrapper[4762]: I1009 14:54:09.199835 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j9bm2\" (UniqueName: \"kubernetes.io/projected/445db2dd-8394-428c-9859-34882f87cefa-kube-api-access-j9bm2\") pod \"keystone-7f9d7865f9-j75xk\" (UID: \"445db2dd-8394-428c-9859-34882f87cefa\") " pod="openstack/keystone-7f9d7865f9-j75xk" Oct 09 14:54:09 crc kubenswrapper[4762]: I1009 14:54:09.292659 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-7f9d7865f9-j75xk" Oct 09 14:54:09 crc kubenswrapper[4762]: I1009 14:54:09.739859 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-7f9d7865f9-j75xk"] Oct 09 14:54:09 crc kubenswrapper[4762]: I1009 14:54:09.888401 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-7f9d7865f9-j75xk" event={"ID":"445db2dd-8394-428c-9859-34882f87cefa","Type":"ContainerStarted","Data":"25e75a0790f1c15d42dd0f6c2bb6f0adf4dd7f80d9f5b348cb760c545cd07123"} Oct 09 14:54:10 crc kubenswrapper[4762]: I1009 14:54:10.897023 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-7f9d7865f9-j75xk" event={"ID":"445db2dd-8394-428c-9859-34882f87cefa","Type":"ContainerStarted","Data":"286a3cf4d2b798c2f483b18a5bce7cc3ed17eba94fc6861a7d04b5d89d5ae00c"} Oct 09 14:54:10 crc kubenswrapper[4762]: I1009 14:54:10.897386 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-7f9d7865f9-j75xk" Oct 09 14:54:10 crc kubenswrapper[4762]: I1009 14:54:10.965337 4762 scope.go:117] "RemoveContainer" containerID="c75ef9853f5fe1f6bce3930681cb05899e816bea18fa88b6c554eafd348de735" Oct 09 14:54:10 crc kubenswrapper[4762]: E1009 14:54:10.965601 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 14:54:21 crc kubenswrapper[4762]: I1009 14:54:21.965243 4762 scope.go:117] "RemoveContainer" containerID="c75ef9853f5fe1f6bce3930681cb05899e816bea18fa88b6c554eafd348de735" Oct 09 14:54:21 crc kubenswrapper[4762]: E1009 14:54:21.966139 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 14:54:32 crc kubenswrapper[4762]: I1009 14:54:32.040575 4762 scope.go:117] "RemoveContainer" containerID="c6e7f1be2eab9b2ec2521169e4430dfbc9c38012054b456a66d40cf929577bf1" Oct 09 14:54:32 crc kubenswrapper[4762]: I1009 14:54:32.072481 4762 scope.go:117] "RemoveContainer" containerID="ee89763b25466905e8dd8baf2edb3e80825ee66aa884f9eb56d914103cb24cea" Oct 09 14:54:32 crc 
kubenswrapper[4762]: I1009 14:54:32.092613 4762 scope.go:117] "RemoveContainer" containerID="6a88fae016ca2e7c1536581d6d36c2458cdeea0d6afb1390f73f7bcaaded3dcd" Oct 09 14:54:36 crc kubenswrapper[4762]: I1009 14:54:36.965303 4762 scope.go:117] "RemoveContainer" containerID="c75ef9853f5fe1f6bce3930681cb05899e816bea18fa88b6c554eafd348de735" Oct 09 14:54:36 crc kubenswrapper[4762]: E1009 14:54:36.967344 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 14:54:40 crc kubenswrapper[4762]: I1009 14:54:40.767162 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-7f9d7865f9-j75xk" Oct 09 14:54:40 crc kubenswrapper[4762]: I1009 14:54:40.787290 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-7f9d7865f9-j75xk" podStartSLOduration=32.78727032 podStartE2EDuration="32.78727032s" podCreationTimestamp="2025-10-09 14:54:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 14:54:10.91257739 +0000 UTC m=+5326.686368429" watchObservedRunningTime="2025-10-09 14:54:40.78727032 +0000 UTC m=+5356.561061359" Oct 09 14:54:41 crc kubenswrapper[4762]: I1009 14:54:41.186036 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Oct 09 14:54:41 crc kubenswrapper[4762]: I1009 14:54:41.187155 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Oct 09 14:54:41 crc kubenswrapper[4762]: I1009 14:54:41.190814 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Oct 09 14:54:41 crc kubenswrapper[4762]: I1009 14:54:41.191030 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Oct 09 14:54:41 crc kubenswrapper[4762]: I1009 14:54:41.191225 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-vrk6q" Oct 09 14:54:41 crc kubenswrapper[4762]: I1009 14:54:41.197127 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Oct 09 14:54:41 crc kubenswrapper[4762]: I1009 14:54:41.213717 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstackclient"] Oct 09 14:54:41 crc kubenswrapper[4762]: E1009 14:54:41.214258 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[kube-api-access-v27qj openstack-config openstack-config-secret], unattached volumes=[], failed to process volumes=[kube-api-access-v27qj openstack-config openstack-config-secret]: context canceled" pod="openstack/openstackclient" podUID="95e92abe-018f-42a0-8a3a-cf8d985aaef2" Oct 09 14:54:41 crc kubenswrapper[4762]: I1009 14:54:41.221122 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstackclient"] Oct 09 14:54:41 crc kubenswrapper[4762]: I1009 14:54:41.247920 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Oct 09 14:54:41 crc kubenswrapper[4762]: I1009 14:54:41.249081 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Oct 09 14:54:41 crc kubenswrapper[4762]: I1009 14:54:41.257595 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Oct 09 14:54:41 crc kubenswrapper[4762]: I1009 14:54:41.307982 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nt472\" (UniqueName: \"kubernetes.io/projected/8ccbdaa6-45c2-47c8-94aa-2f0a49e14fb5-kube-api-access-nt472\") pod \"openstackclient\" (UID: \"8ccbdaa6-45c2-47c8-94aa-2f0a49e14fb5\") " pod="openstack/openstackclient" Oct 09 14:54:41 crc kubenswrapper[4762]: I1009 14:54:41.308083 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/8ccbdaa6-45c2-47c8-94aa-2f0a49e14fb5-openstack-config\") pod \"openstackclient\" (UID: \"8ccbdaa6-45c2-47c8-94aa-2f0a49e14fb5\") " pod="openstack/openstackclient" Oct 09 14:54:41 crc kubenswrapper[4762]: I1009 14:54:41.308114 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/8ccbdaa6-45c2-47c8-94aa-2f0a49e14fb5-openstack-config-secret\") pod \"openstackclient\" (UID: \"8ccbdaa6-45c2-47c8-94aa-2f0a49e14fb5\") " pod="openstack/openstackclient" Oct 09 14:54:41 crc kubenswrapper[4762]: I1009 14:54:41.409957 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nt472\" (UniqueName: \"kubernetes.io/projected/8ccbdaa6-45c2-47c8-94aa-2f0a49e14fb5-kube-api-access-nt472\") pod \"openstackclient\" (UID: \"8ccbdaa6-45c2-47c8-94aa-2f0a49e14fb5\") " pod="openstack/openstackclient" Oct 09 14:54:41 crc kubenswrapper[4762]: I1009 14:54:41.410138 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/8ccbdaa6-45c2-47c8-94aa-2f0a49e14fb5-openstack-config\") pod \"openstackclient\" (UID: \"8ccbdaa6-45c2-47c8-94aa-2f0a49e14fb5\") " pod="openstack/openstackclient" Oct 09 14:54:41 crc kubenswrapper[4762]: I1009 14:54:41.410195 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/8ccbdaa6-45c2-47c8-94aa-2f0a49e14fb5-openstack-config-secret\") pod \"openstackclient\" (UID: \"8ccbdaa6-45c2-47c8-94aa-2f0a49e14fb5\") " pod="openstack/openstackclient" Oct 09 14:54:41 crc kubenswrapper[4762]: I1009 14:54:41.410998 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/8ccbdaa6-45c2-47c8-94aa-2f0a49e14fb5-openstack-config\") pod \"openstackclient\" (UID: \"8ccbdaa6-45c2-47c8-94aa-2f0a49e14fb5\") " pod="openstack/openstackclient" Oct 09 14:54:41 crc kubenswrapper[4762]: I1009 14:54:41.416824 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/8ccbdaa6-45c2-47c8-94aa-2f0a49e14fb5-openstack-config-secret\") pod \"openstackclient\" (UID: \"8ccbdaa6-45c2-47c8-94aa-2f0a49e14fb5\") " pod="openstack/openstackclient" Oct 09 14:54:41 crc kubenswrapper[4762]: I1009 14:54:41.426377 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nt472\" (UniqueName: \"kubernetes.io/projected/8ccbdaa6-45c2-47c8-94aa-2f0a49e14fb5-kube-api-access-nt472\") pod \"openstackclient\" (UID: 
\"8ccbdaa6-45c2-47c8-94aa-2f0a49e14fb5\") " pod="openstack/openstackclient" Oct 09 14:54:41 crc kubenswrapper[4762]: I1009 14:54:41.568078 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Oct 09 14:54:41 crc kubenswrapper[4762]: I1009 14:54:41.992395 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Oct 09 14:54:42 crc kubenswrapper[4762]: I1009 14:54:42.173844 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Oct 09 14:54:42 crc kubenswrapper[4762]: I1009 14:54:42.174571 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"8ccbdaa6-45c2-47c8-94aa-2f0a49e14fb5","Type":"ContainerStarted","Data":"3cfda98f02d2427b4040cad2756539afd0b9bd6ed7d6195ea221a93e7af5366c"} Oct 09 14:54:42 crc kubenswrapper[4762]: I1009 14:54:42.181307 4762 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="95e92abe-018f-42a0-8a3a-cf8d985aaef2" podUID="8ccbdaa6-45c2-47c8-94aa-2f0a49e14fb5" Oct 09 14:54:42 crc kubenswrapper[4762]: I1009 14:54:42.190624 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Oct 09 14:54:42 crc kubenswrapper[4762]: I1009 14:54:42.976624 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="95e92abe-018f-42a0-8a3a-cf8d985aaef2" path="/var/lib/kubelet/pods/95e92abe-018f-42a0-8a3a-cf8d985aaef2/volumes" Oct 09 14:54:43 crc kubenswrapper[4762]: I1009 14:54:43.181834 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Oct 09 14:54:43 crc kubenswrapper[4762]: I1009 14:54:43.181900 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"8ccbdaa6-45c2-47c8-94aa-2f0a49e14fb5","Type":"ContainerStarted","Data":"576e3d47d7e70c632cd8c49f5a52afacab0d69c3ef73368f5e6c0c2c366e78d9"} Oct 09 14:54:43 crc kubenswrapper[4762]: I1009 14:54:43.203409 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=2.203390429 podStartE2EDuration="2.203390429s" podCreationTimestamp="2025-10-09 14:54:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 14:54:43.202594489 +0000 UTC m=+5358.976385528" watchObservedRunningTime="2025-10-09 14:54:43.203390429 +0000 UTC m=+5358.977181468" Oct 09 14:54:43 crc kubenswrapper[4762]: I1009 14:54:43.206727 4762 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="95e92abe-018f-42a0-8a3a-cf8d985aaef2" podUID="8ccbdaa6-45c2-47c8-94aa-2f0a49e14fb5" Oct 09 14:54:49 crc kubenswrapper[4762]: I1009 14:54:49.965610 4762 scope.go:117] "RemoveContainer" containerID="c75ef9853f5fe1f6bce3930681cb05899e816bea18fa88b6c554eafd348de735" Oct 09 14:54:49 crc kubenswrapper[4762]: E1009 14:54:49.967063 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" 
Oct 09 14:55:02 crc kubenswrapper[4762]: I1009 14:55:02.964661 4762 scope.go:117] "RemoveContainer" containerID="c75ef9853f5fe1f6bce3930681cb05899e816bea18fa88b6c554eafd348de735" Oct 09 14:55:02 crc kubenswrapper[4762]: E1009 14:55:02.965473 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 14:55:15 crc kubenswrapper[4762]: I1009 14:55:15.965316 4762 scope.go:117] "RemoveContainer" containerID="c75ef9853f5fe1f6bce3930681cb05899e816bea18fa88b6c554eafd348de735" Oct 09 14:55:15 crc kubenswrapper[4762]: E1009 14:55:15.966399 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 14:55:29 crc kubenswrapper[4762]: I1009 14:55:29.973229 4762 scope.go:117] "RemoveContainer" containerID="c75ef9853f5fe1f6bce3930681cb05899e816bea18fa88b6c554eafd348de735" Oct 09 14:55:29 crc kubenswrapper[4762]: E1009 14:55:29.974294 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 14:55:40 crc kubenswrapper[4762]: I1009 14:55:40.968951 4762 scope.go:117] "RemoveContainer" containerID="c75ef9853f5fe1f6bce3930681cb05899e816bea18fa88b6c554eafd348de735" Oct 09 14:55:40 crc kubenswrapper[4762]: E1009 14:55:40.969461 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 14:55:53 crc kubenswrapper[4762]: I1009 14:55:53.966805 4762 scope.go:117] "RemoveContainer" containerID="c75ef9853f5fe1f6bce3930681cb05899e816bea18fa88b6c554eafd348de735" Oct 09 14:55:53 crc kubenswrapper[4762]: E1009 14:55:53.967532 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 14:56:06 crc kubenswrapper[4762]: I1009 14:56:06.966026 4762 scope.go:117] "RemoveContainer" containerID="c75ef9853f5fe1f6bce3930681cb05899e816bea18fa88b6c554eafd348de735" Oct 09 14:56:06 
crc kubenswrapper[4762]: E1009 14:56:06.967184 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 14:56:19 crc kubenswrapper[4762]: I1009 14:56:19.966407 4762 scope.go:117] "RemoveContainer" containerID="c75ef9853f5fe1f6bce3930681cb05899e816bea18fa88b6c554eafd348de735" Oct 09 14:56:19 crc kubenswrapper[4762]: E1009 14:56:19.967465 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 14:56:27 crc kubenswrapper[4762]: I1009 14:56:27.610529 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-f6csc"] Oct 09 14:56:27 crc kubenswrapper[4762]: I1009 14:56:27.612380 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-f6csc" Oct 09 14:56:27 crc kubenswrapper[4762]: I1009 14:56:27.622233 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-f6csc"] Oct 09 14:56:27 crc kubenswrapper[4762]: I1009 14:56:27.669386 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-59ht2\" (UniqueName: \"kubernetes.io/projected/f6edccc4-0e19-4df1-818f-d78f24277b7a-kube-api-access-59ht2\") pod \"barbican-db-create-f6csc\" (UID: \"f6edccc4-0e19-4df1-818f-d78f24277b7a\") " pod="openstack/barbican-db-create-f6csc" Oct 09 14:56:27 crc kubenswrapper[4762]: I1009 14:56:27.770983 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-59ht2\" (UniqueName: \"kubernetes.io/projected/f6edccc4-0e19-4df1-818f-d78f24277b7a-kube-api-access-59ht2\") pod \"barbican-db-create-f6csc\" (UID: \"f6edccc4-0e19-4df1-818f-d78f24277b7a\") " pod="openstack/barbican-db-create-f6csc" Oct 09 14:56:27 crc kubenswrapper[4762]: I1009 14:56:27.798392 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-59ht2\" (UniqueName: \"kubernetes.io/projected/f6edccc4-0e19-4df1-818f-d78f24277b7a-kube-api-access-59ht2\") pod \"barbican-db-create-f6csc\" (UID: \"f6edccc4-0e19-4df1-818f-d78f24277b7a\") " pod="openstack/barbican-db-create-f6csc" Oct 09 14:56:27 crc kubenswrapper[4762]: I1009 14:56:27.955563 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-f6csc" Oct 09 14:56:28 crc kubenswrapper[4762]: I1009 14:56:28.407897 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-f6csc"] Oct 09 14:56:29 crc kubenswrapper[4762]: I1009 14:56:29.121064 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-f6csc" event={"ID":"f6edccc4-0e19-4df1-818f-d78f24277b7a","Type":"ContainerStarted","Data":"46f0bfbc887f24ecb585feaba4b2e1be8003d0e6c3082ffb0a2b2c2548083d6b"} Oct 09 14:56:29 crc kubenswrapper[4762]: I1009 14:56:29.121338 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-f6csc" event={"ID":"f6edccc4-0e19-4df1-818f-d78f24277b7a","Type":"ContainerStarted","Data":"6b57c481746a46ef5165555e6765d7bdd7f1c8a864aea56d1d42a2431ab5f0f6"} Oct 09 14:56:30 crc kubenswrapper[4762]: I1009 14:56:30.131732 4762 generic.go:334] "Generic (PLEG): container finished" podID="f6edccc4-0e19-4df1-818f-d78f24277b7a" containerID="46f0bfbc887f24ecb585feaba4b2e1be8003d0e6c3082ffb0a2b2c2548083d6b" exitCode=0 Oct 09 14:56:30 crc kubenswrapper[4762]: I1009 14:56:30.131916 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-f6csc" event={"ID":"f6edccc4-0e19-4df1-818f-d78f24277b7a","Type":"ContainerDied","Data":"46f0bfbc887f24ecb585feaba4b2e1be8003d0e6c3082ffb0a2b2c2548083d6b"} Oct 09 14:56:30 crc kubenswrapper[4762]: I1009 14:56:30.965393 4762 scope.go:117] "RemoveContainer" containerID="c75ef9853f5fe1f6bce3930681cb05899e816bea18fa88b6c554eafd348de735" Oct 09 14:56:30 crc kubenswrapper[4762]: E1009 14:56:30.965999 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 14:56:31 crc kubenswrapper[4762]: I1009 14:56:31.426466 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-f6csc" Oct 09 14:56:31 crc kubenswrapper[4762]: I1009 14:56:31.532373 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-59ht2\" (UniqueName: \"kubernetes.io/projected/f6edccc4-0e19-4df1-818f-d78f24277b7a-kube-api-access-59ht2\") pod \"f6edccc4-0e19-4df1-818f-d78f24277b7a\" (UID: \"f6edccc4-0e19-4df1-818f-d78f24277b7a\") " Oct 09 14:56:31 crc kubenswrapper[4762]: I1009 14:56:31.537369 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f6edccc4-0e19-4df1-818f-d78f24277b7a-kube-api-access-59ht2" (OuterVolumeSpecName: "kube-api-access-59ht2") pod "f6edccc4-0e19-4df1-818f-d78f24277b7a" (UID: "f6edccc4-0e19-4df1-818f-d78f24277b7a"). InnerVolumeSpecName "kube-api-access-59ht2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 14:56:31 crc kubenswrapper[4762]: I1009 14:56:31.634535 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-59ht2\" (UniqueName: \"kubernetes.io/projected/f6edccc4-0e19-4df1-818f-d78f24277b7a-kube-api-access-59ht2\") on node \"crc\" DevicePath \"\"" Oct 09 14:56:32 crc kubenswrapper[4762]: I1009 14:56:32.148217 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-f6csc" event={"ID":"f6edccc4-0e19-4df1-818f-d78f24277b7a","Type":"ContainerDied","Data":"6b57c481746a46ef5165555e6765d7bdd7f1c8a864aea56d1d42a2431ab5f0f6"} Oct 09 14:56:32 crc kubenswrapper[4762]: I1009 14:56:32.148462 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6b57c481746a46ef5165555e6765d7bdd7f1c8a864aea56d1d42a2431ab5f0f6" Oct 09 14:56:32 crc kubenswrapper[4762]: I1009 14:56:32.148305 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-f6csc" Oct 09 14:56:37 crc kubenswrapper[4762]: I1009 14:56:37.697695 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-9559-account-create-vwzr8"] Oct 09 14:56:37 crc kubenswrapper[4762]: E1009 14:56:37.698362 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f6edccc4-0e19-4df1-818f-d78f24277b7a" containerName="mariadb-database-create" Oct 09 14:56:37 crc kubenswrapper[4762]: I1009 14:56:37.698380 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="f6edccc4-0e19-4df1-818f-d78f24277b7a" containerName="mariadb-database-create" Oct 09 14:56:37 crc kubenswrapper[4762]: I1009 14:56:37.698581 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="f6edccc4-0e19-4df1-818f-d78f24277b7a" containerName="mariadb-database-create" Oct 09 14:56:37 crc kubenswrapper[4762]: I1009 14:56:37.699119 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-9559-account-create-vwzr8" Oct 09 14:56:37 crc kubenswrapper[4762]: I1009 14:56:37.703102 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Oct 09 14:56:37 crc kubenswrapper[4762]: I1009 14:56:37.720298 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-9559-account-create-vwzr8"] Oct 09 14:56:37 crc kubenswrapper[4762]: I1009 14:56:37.833008 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dxfnr\" (UniqueName: \"kubernetes.io/projected/df1781b9-04ea-4f11-af32-7af6bb2c6f07-kube-api-access-dxfnr\") pod \"barbican-9559-account-create-vwzr8\" (UID: \"df1781b9-04ea-4f11-af32-7af6bb2c6f07\") " pod="openstack/barbican-9559-account-create-vwzr8" Oct 09 14:56:37 crc kubenswrapper[4762]: I1009 14:56:37.934419 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dxfnr\" (UniqueName: \"kubernetes.io/projected/df1781b9-04ea-4f11-af32-7af6bb2c6f07-kube-api-access-dxfnr\") pod \"barbican-9559-account-create-vwzr8\" (UID: \"df1781b9-04ea-4f11-af32-7af6bb2c6f07\") " pod="openstack/barbican-9559-account-create-vwzr8" Oct 09 14:56:37 crc kubenswrapper[4762]: I1009 14:56:37.956516 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dxfnr\" (UniqueName: \"kubernetes.io/projected/df1781b9-04ea-4f11-af32-7af6bb2c6f07-kube-api-access-dxfnr\") pod \"barbican-9559-account-create-vwzr8\" (UID: \"df1781b9-04ea-4f11-af32-7af6bb2c6f07\") " pod="openstack/barbican-9559-account-create-vwzr8" Oct 09 14:56:38 crc kubenswrapper[4762]: I1009 14:56:38.022048 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-9559-account-create-vwzr8" Oct 09 14:56:38 crc kubenswrapper[4762]: I1009 14:56:38.473067 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-9559-account-create-vwzr8"] Oct 09 14:56:39 crc kubenswrapper[4762]: I1009 14:56:39.203788 4762 generic.go:334] "Generic (PLEG): container finished" podID="df1781b9-04ea-4f11-af32-7af6bb2c6f07" containerID="76a4aa6c17e32c18f840bcebadb8bb9329686c7c962ae308ef41b16b02d10316" exitCode=0 Oct 09 14:56:39 crc kubenswrapper[4762]: I1009 14:56:39.203994 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-9559-account-create-vwzr8" event={"ID":"df1781b9-04ea-4f11-af32-7af6bb2c6f07","Type":"ContainerDied","Data":"76a4aa6c17e32c18f840bcebadb8bb9329686c7c962ae308ef41b16b02d10316"} Oct 09 14:56:39 crc kubenswrapper[4762]: I1009 14:56:39.204211 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-9559-account-create-vwzr8" event={"ID":"df1781b9-04ea-4f11-af32-7af6bb2c6f07","Type":"ContainerStarted","Data":"1fc26d5c8dd7ef8a215bf8049c40bca4db50e3b103ffd2dc2a5fb3eb9a3ccc20"} Oct 09 14:56:40 crc kubenswrapper[4762]: I1009 14:56:40.495759 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-9559-account-create-vwzr8" Oct 09 14:56:40 crc kubenswrapper[4762]: I1009 14:56:40.679773 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dxfnr\" (UniqueName: \"kubernetes.io/projected/df1781b9-04ea-4f11-af32-7af6bb2c6f07-kube-api-access-dxfnr\") pod \"df1781b9-04ea-4f11-af32-7af6bb2c6f07\" (UID: \"df1781b9-04ea-4f11-af32-7af6bb2c6f07\") " Oct 09 14:56:40 crc kubenswrapper[4762]: I1009 14:56:40.690013 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/df1781b9-04ea-4f11-af32-7af6bb2c6f07-kube-api-access-dxfnr" (OuterVolumeSpecName: "kube-api-access-dxfnr") pod "df1781b9-04ea-4f11-af32-7af6bb2c6f07" (UID: "df1781b9-04ea-4f11-af32-7af6bb2c6f07"). InnerVolumeSpecName "kube-api-access-dxfnr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 14:56:40 crc kubenswrapper[4762]: I1009 14:56:40.782722 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dxfnr\" (UniqueName: \"kubernetes.io/projected/df1781b9-04ea-4f11-af32-7af6bb2c6f07-kube-api-access-dxfnr\") on node \"crc\" DevicePath \"\"" Oct 09 14:56:41 crc kubenswrapper[4762]: I1009 14:56:41.223556 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-9559-account-create-vwzr8" event={"ID":"df1781b9-04ea-4f11-af32-7af6bb2c6f07","Type":"ContainerDied","Data":"1fc26d5c8dd7ef8a215bf8049c40bca4db50e3b103ffd2dc2a5fb3eb9a3ccc20"} Oct 09 14:56:41 crc kubenswrapper[4762]: I1009 14:56:41.223861 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1fc26d5c8dd7ef8a215bf8049c40bca4db50e3b103ffd2dc2a5fb3eb9a3ccc20" Oct 09 14:56:41 crc kubenswrapper[4762]: I1009 14:56:41.223605 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-9559-account-create-vwzr8" Oct 09 14:56:43 crc kubenswrapper[4762]: I1009 14:56:43.002300 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-9ccf2"] Oct 09 14:56:43 crc kubenswrapper[4762]: E1009 14:56:43.002875 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="df1781b9-04ea-4f11-af32-7af6bb2c6f07" containerName="mariadb-account-create" Oct 09 14:56:43 crc kubenswrapper[4762]: I1009 14:56:43.002888 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="df1781b9-04ea-4f11-af32-7af6bb2c6f07" containerName="mariadb-account-create" Oct 09 14:56:43 crc kubenswrapper[4762]: I1009 14:56:43.003035 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="df1781b9-04ea-4f11-af32-7af6bb2c6f07" containerName="mariadb-account-create" Oct 09 14:56:43 crc kubenswrapper[4762]: I1009 14:56:43.003564 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-9ccf2" Oct 09 14:56:43 crc kubenswrapper[4762]: I1009 14:56:43.007126 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-92blk" Oct 09 14:56:43 crc kubenswrapper[4762]: I1009 14:56:43.007186 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Oct 09 14:56:43 crc kubenswrapper[4762]: I1009 14:56:43.022060 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-9ccf2"] Oct 09 14:56:43 crc kubenswrapper[4762]: I1009 14:56:43.022333 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/d6b92ff5-92c1-4a4a-ada1-d17af7ccb82d-db-sync-config-data\") pod \"barbican-db-sync-9ccf2\" (UID: \"d6b92ff5-92c1-4a4a-ada1-d17af7ccb82d\") " pod="openstack/barbican-db-sync-9ccf2" Oct 09 14:56:43 crc kubenswrapper[4762]: I1009 14:56:43.022421 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d6b92ff5-92c1-4a4a-ada1-d17af7ccb82d-combined-ca-bundle\") pod \"barbican-db-sync-9ccf2\" (UID: \"d6b92ff5-92c1-4a4a-ada1-d17af7ccb82d\") " pod="openstack/barbican-db-sync-9ccf2" Oct 09 14:56:43 crc kubenswrapper[4762]: I1009 14:56:43.022471 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6q8ms\" (UniqueName: \"kubernetes.io/projected/d6b92ff5-92c1-4a4a-ada1-d17af7ccb82d-kube-api-access-6q8ms\") pod \"barbican-db-sync-9ccf2\" (UID: \"d6b92ff5-92c1-4a4a-ada1-d17af7ccb82d\") " pod="openstack/barbican-db-sync-9ccf2" Oct 09 14:56:43 crc kubenswrapper[4762]: I1009 14:56:43.123896 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/d6b92ff5-92c1-4a4a-ada1-d17af7ccb82d-db-sync-config-data\") pod \"barbican-db-sync-9ccf2\" (UID: \"d6b92ff5-92c1-4a4a-ada1-d17af7ccb82d\") " pod="openstack/barbican-db-sync-9ccf2" Oct 09 14:56:43 crc kubenswrapper[4762]: I1009 14:56:43.123995 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d6b92ff5-92c1-4a4a-ada1-d17af7ccb82d-combined-ca-bundle\") pod \"barbican-db-sync-9ccf2\" (UID: \"d6b92ff5-92c1-4a4a-ada1-d17af7ccb82d\") " pod="openstack/barbican-db-sync-9ccf2" Oct 09 14:56:43 crc kubenswrapper[4762]: I1009 14:56:43.124033 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6q8ms\" (UniqueName: \"kubernetes.io/projected/d6b92ff5-92c1-4a4a-ada1-d17af7ccb82d-kube-api-access-6q8ms\") pod \"barbican-db-sync-9ccf2\" (UID: \"d6b92ff5-92c1-4a4a-ada1-d17af7ccb82d\") " pod="openstack/barbican-db-sync-9ccf2" Oct 09 14:56:43 crc kubenswrapper[4762]: I1009 14:56:43.128503 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/d6b92ff5-92c1-4a4a-ada1-d17af7ccb82d-db-sync-config-data\") pod \"barbican-db-sync-9ccf2\" (UID: \"d6b92ff5-92c1-4a4a-ada1-d17af7ccb82d\") " pod="openstack/barbican-db-sync-9ccf2" Oct 09 14:56:43 crc kubenswrapper[4762]: I1009 14:56:43.129309 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/d6b92ff5-92c1-4a4a-ada1-d17af7ccb82d-combined-ca-bundle\") pod \"barbican-db-sync-9ccf2\" (UID: \"d6b92ff5-92c1-4a4a-ada1-d17af7ccb82d\") " pod="openstack/barbican-db-sync-9ccf2" Oct 09 14:56:43 crc kubenswrapper[4762]: I1009 14:56:43.139895 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6q8ms\" (UniqueName: \"kubernetes.io/projected/d6b92ff5-92c1-4a4a-ada1-d17af7ccb82d-kube-api-access-6q8ms\") pod \"barbican-db-sync-9ccf2\" (UID: \"d6b92ff5-92c1-4a4a-ada1-d17af7ccb82d\") " pod="openstack/barbican-db-sync-9ccf2" Oct 09 14:56:43 crc kubenswrapper[4762]: I1009 14:56:43.321237 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-9ccf2" Oct 09 14:56:43 crc kubenswrapper[4762]: I1009 14:56:43.777954 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-9ccf2"] Oct 09 14:56:43 crc kubenswrapper[4762]: I1009 14:56:43.965162 4762 scope.go:117] "RemoveContainer" containerID="c75ef9853f5fe1f6bce3930681cb05899e816bea18fa88b6c554eafd348de735" Oct 09 14:56:43 crc kubenswrapper[4762]: E1009 14:56:43.965740 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 14:56:44 crc kubenswrapper[4762]: I1009 14:56:44.247955 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-9ccf2" event={"ID":"d6b92ff5-92c1-4a4a-ada1-d17af7ccb82d","Type":"ContainerStarted","Data":"82554243ec96fd4f77cb39fd64c953e2d7914c8fb29579f17b5cccf81d0b62c0"} Oct 09 14:56:44 crc kubenswrapper[4762]: I1009 14:56:44.248009 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-9ccf2" event={"ID":"d6b92ff5-92c1-4a4a-ada1-d17af7ccb82d","Type":"ContainerStarted","Data":"10fbf54baf60f0f67b6b784d34f51dd800d43d8f379fe0e2d44b5d951a415120"} Oct 09 14:56:44 crc kubenswrapper[4762]: I1009 14:56:44.266129 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-9ccf2" podStartSLOduration=2.266111532 podStartE2EDuration="2.266111532s" podCreationTimestamp="2025-10-09 14:56:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 14:56:44.262418346 +0000 UTC m=+5480.036209405" watchObservedRunningTime="2025-10-09 14:56:44.266111532 +0000 UTC m=+5480.039902571" Oct 09 14:56:47 crc kubenswrapper[4762]: I1009 14:56:47.271311 4762 generic.go:334] "Generic (PLEG): container finished" podID="d6b92ff5-92c1-4a4a-ada1-d17af7ccb82d" containerID="82554243ec96fd4f77cb39fd64c953e2d7914c8fb29579f17b5cccf81d0b62c0" exitCode=0 Oct 09 14:56:47 crc kubenswrapper[4762]: I1009 14:56:47.271425 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-9ccf2" event={"ID":"d6b92ff5-92c1-4a4a-ada1-d17af7ccb82d","Type":"ContainerDied","Data":"82554243ec96fd4f77cb39fd64c953e2d7914c8fb29579f17b5cccf81d0b62c0"} Oct 09 14:56:48 crc kubenswrapper[4762]: I1009 14:56:48.565569 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-9ccf2" Oct 09 14:56:48 crc kubenswrapper[4762]: I1009 14:56:48.708421 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6q8ms\" (UniqueName: \"kubernetes.io/projected/d6b92ff5-92c1-4a4a-ada1-d17af7ccb82d-kube-api-access-6q8ms\") pod \"d6b92ff5-92c1-4a4a-ada1-d17af7ccb82d\" (UID: \"d6b92ff5-92c1-4a4a-ada1-d17af7ccb82d\") " Oct 09 14:56:48 crc kubenswrapper[4762]: I1009 14:56:48.708965 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d6b92ff5-92c1-4a4a-ada1-d17af7ccb82d-combined-ca-bundle\") pod \"d6b92ff5-92c1-4a4a-ada1-d17af7ccb82d\" (UID: \"d6b92ff5-92c1-4a4a-ada1-d17af7ccb82d\") " Oct 09 14:56:48 crc kubenswrapper[4762]: I1009 14:56:48.709032 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/d6b92ff5-92c1-4a4a-ada1-d17af7ccb82d-db-sync-config-data\") pod \"d6b92ff5-92c1-4a4a-ada1-d17af7ccb82d\" (UID: \"d6b92ff5-92c1-4a4a-ada1-d17af7ccb82d\") " Oct 09 14:56:48 crc kubenswrapper[4762]: I1009 14:56:48.714326 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d6b92ff5-92c1-4a4a-ada1-d17af7ccb82d-kube-api-access-6q8ms" (OuterVolumeSpecName: "kube-api-access-6q8ms") pod "d6b92ff5-92c1-4a4a-ada1-d17af7ccb82d" (UID: "d6b92ff5-92c1-4a4a-ada1-d17af7ccb82d"). InnerVolumeSpecName "kube-api-access-6q8ms". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 14:56:48 crc kubenswrapper[4762]: I1009 14:56:48.722461 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d6b92ff5-92c1-4a4a-ada1-d17af7ccb82d-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "d6b92ff5-92c1-4a4a-ada1-d17af7ccb82d" (UID: "d6b92ff5-92c1-4a4a-ada1-d17af7ccb82d"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 14:56:48 crc kubenswrapper[4762]: I1009 14:56:48.732914 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d6b92ff5-92c1-4a4a-ada1-d17af7ccb82d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d6b92ff5-92c1-4a4a-ada1-d17af7ccb82d" (UID: "d6b92ff5-92c1-4a4a-ada1-d17af7ccb82d"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 14:56:48 crc kubenswrapper[4762]: I1009 14:56:48.810285 4762 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/d6b92ff5-92c1-4a4a-ada1-d17af7ccb82d-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 14:56:48 crc kubenswrapper[4762]: I1009 14:56:48.810327 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6q8ms\" (UniqueName: \"kubernetes.io/projected/d6b92ff5-92c1-4a4a-ada1-d17af7ccb82d-kube-api-access-6q8ms\") on node \"crc\" DevicePath \"\"" Oct 09 14:56:48 crc kubenswrapper[4762]: I1009 14:56:48.810338 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d6b92ff5-92c1-4a4a-ada1-d17af7ccb82d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 14:56:49 crc kubenswrapper[4762]: I1009 14:56:49.287958 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-9ccf2" event={"ID":"d6b92ff5-92c1-4a4a-ada1-d17af7ccb82d","Type":"ContainerDied","Data":"10fbf54baf60f0f67b6b784d34f51dd800d43d8f379fe0e2d44b5d951a415120"} Oct 09 14:56:49 crc kubenswrapper[4762]: I1009 14:56:49.288004 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="10fbf54baf60f0f67b6b784d34f51dd800d43d8f379fe0e2d44b5d951a415120" Oct 09 14:56:49 crc kubenswrapper[4762]: I1009 14:56:49.288034 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-9ccf2" Oct 09 14:56:49 crc kubenswrapper[4762]: I1009 14:56:49.547667 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-65686d9dff-wlj6h"] Oct 09 14:56:49 crc kubenswrapper[4762]: E1009 14:56:49.548036 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d6b92ff5-92c1-4a4a-ada1-d17af7ccb82d" containerName="barbican-db-sync" Oct 09 14:56:49 crc kubenswrapper[4762]: I1009 14:56:49.548050 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="d6b92ff5-92c1-4a4a-ada1-d17af7ccb82d" containerName="barbican-db-sync" Oct 09 14:56:49 crc kubenswrapper[4762]: I1009 14:56:49.548243 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="d6b92ff5-92c1-4a4a-ada1-d17af7ccb82d" containerName="barbican-db-sync" Oct 09 14:56:49 crc kubenswrapper[4762]: I1009 14:56:49.549087 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-65686d9dff-wlj6h" Oct 09 14:56:49 crc kubenswrapper[4762]: I1009 14:56:49.557746 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Oct 09 14:56:49 crc kubenswrapper[4762]: I1009 14:56:49.558239 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-92blk" Oct 09 14:56:49 crc kubenswrapper[4762]: I1009 14:56:49.558266 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Oct 09 14:56:49 crc kubenswrapper[4762]: I1009 14:56:49.563620 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-65686d9dff-wlj6h"] Oct 09 14:56:49 crc kubenswrapper[4762]: I1009 14:56:49.583314 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-6dcc6c5b54-fpnl8"] Oct 09 14:56:49 crc kubenswrapper[4762]: I1009 14:56:49.584584 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-keystone-listener-6dcc6c5b54-fpnl8" Oct 09 14:56:49 crc kubenswrapper[4762]: I1009 14:56:49.587842 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data" Oct 09 14:56:49 crc kubenswrapper[4762]: I1009 14:56:49.612398 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-6dcc6c5b54-fpnl8"] Oct 09 14:56:49 crc kubenswrapper[4762]: I1009 14:56:49.623159 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fmzk8\" (UniqueName: \"kubernetes.io/projected/d7d04d08-8a53-47ee-9a23-17a13763b032-kube-api-access-fmzk8\") pod \"barbican-keystone-listener-6dcc6c5b54-fpnl8\" (UID: \"d7d04d08-8a53-47ee-9a23-17a13763b032\") " pod="openstack/barbican-keystone-listener-6dcc6c5b54-fpnl8" Oct 09 14:56:49 crc kubenswrapper[4762]: I1009 14:56:49.623211 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6e87b828-195d-4441-9e8a-5075c9e29b05-config-data-custom\") pod \"barbican-worker-65686d9dff-wlj6h\" (UID: \"6e87b828-195d-4441-9e8a-5075c9e29b05\") " pod="openstack/barbican-worker-65686d9dff-wlj6h" Oct 09 14:56:49 crc kubenswrapper[4762]: I1009 14:56:49.623248 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6e87b828-195d-4441-9e8a-5075c9e29b05-config-data\") pod \"barbican-worker-65686d9dff-wlj6h\" (UID: \"6e87b828-195d-4441-9e8a-5075c9e29b05\") " pod="openstack/barbican-worker-65686d9dff-wlj6h" Oct 09 14:56:49 crc kubenswrapper[4762]: I1009 14:56:49.623267 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e87b828-195d-4441-9e8a-5075c9e29b05-combined-ca-bundle\") pod \"barbican-worker-65686d9dff-wlj6h\" (UID: \"6e87b828-195d-4441-9e8a-5075c9e29b05\") " pod="openstack/barbican-worker-65686d9dff-wlj6h" Oct 09 14:56:49 crc kubenswrapper[4762]: I1009 14:56:49.623305 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6e87b828-195d-4441-9e8a-5075c9e29b05-logs\") pod \"barbican-worker-65686d9dff-wlj6h\" (UID: \"6e87b828-195d-4441-9e8a-5075c9e29b05\") " pod="openstack/barbican-worker-65686d9dff-wlj6h" Oct 09 14:56:49 crc kubenswrapper[4762]: I1009 14:56:49.623346 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7d04d08-8a53-47ee-9a23-17a13763b032-combined-ca-bundle\") pod \"barbican-keystone-listener-6dcc6c5b54-fpnl8\" (UID: \"d7d04d08-8a53-47ee-9a23-17a13763b032\") " pod="openstack/barbican-keystone-listener-6dcc6c5b54-fpnl8" Oct 09 14:56:49 crc kubenswrapper[4762]: I1009 14:56:49.623364 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d7d04d08-8a53-47ee-9a23-17a13763b032-config-data\") pod \"barbican-keystone-listener-6dcc6c5b54-fpnl8\" (UID: \"d7d04d08-8a53-47ee-9a23-17a13763b032\") " pod="openstack/barbican-keystone-listener-6dcc6c5b54-fpnl8" Oct 09 14:56:49 crc kubenswrapper[4762]: I1009 14:56:49.623384 4762 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d7d04d08-8a53-47ee-9a23-17a13763b032-config-data-custom\") pod \"barbican-keystone-listener-6dcc6c5b54-fpnl8\" (UID: \"d7d04d08-8a53-47ee-9a23-17a13763b032\") " pod="openstack/barbican-keystone-listener-6dcc6c5b54-fpnl8" Oct 09 14:56:49 crc kubenswrapper[4762]: I1009 14:56:49.623418 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t4qnr\" (UniqueName: \"kubernetes.io/projected/6e87b828-195d-4441-9e8a-5075c9e29b05-kube-api-access-t4qnr\") pod \"barbican-worker-65686d9dff-wlj6h\" (UID: \"6e87b828-195d-4441-9e8a-5075c9e29b05\") " pod="openstack/barbican-worker-65686d9dff-wlj6h" Oct 09 14:56:49 crc kubenswrapper[4762]: I1009 14:56:49.623439 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d7d04d08-8a53-47ee-9a23-17a13763b032-logs\") pod \"barbican-keystone-listener-6dcc6c5b54-fpnl8\" (UID: \"d7d04d08-8a53-47ee-9a23-17a13763b032\") " pod="openstack/barbican-keystone-listener-6dcc6c5b54-fpnl8" Oct 09 14:56:49 crc kubenswrapper[4762]: I1009 14:56:49.625512 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-59d8787555-wbgq2"] Oct 09 14:56:49 crc kubenswrapper[4762]: I1009 14:56:49.626949 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-59d8787555-wbgq2" Oct 09 14:56:49 crc kubenswrapper[4762]: I1009 14:56:49.661368 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-59d8787555-wbgq2"] Oct 09 14:56:49 crc kubenswrapper[4762]: I1009 14:56:49.724621 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d7d04d08-8a53-47ee-9a23-17a13763b032-config-data\") pod \"barbican-keystone-listener-6dcc6c5b54-fpnl8\" (UID: \"d7d04d08-8a53-47ee-9a23-17a13763b032\") " pod="openstack/barbican-keystone-listener-6dcc6c5b54-fpnl8" Oct 09 14:56:49 crc kubenswrapper[4762]: I1009 14:56:49.724697 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d7d04d08-8a53-47ee-9a23-17a13763b032-config-data-custom\") pod \"barbican-keystone-listener-6dcc6c5b54-fpnl8\" (UID: \"d7d04d08-8a53-47ee-9a23-17a13763b032\") " pod="openstack/barbican-keystone-listener-6dcc6c5b54-fpnl8" Oct 09 14:56:49 crc kubenswrapper[4762]: I1009 14:56:49.724740 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t4qnr\" (UniqueName: \"kubernetes.io/projected/6e87b828-195d-4441-9e8a-5075c9e29b05-kube-api-access-t4qnr\") pod \"barbican-worker-65686d9dff-wlj6h\" (UID: \"6e87b828-195d-4441-9e8a-5075c9e29b05\") " pod="openstack/barbican-worker-65686d9dff-wlj6h" Oct 09 14:56:49 crc kubenswrapper[4762]: I1009 14:56:49.724766 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/69189058-9f42-4d05-9a54-c46dbb461485-ovsdbserver-nb\") pod \"dnsmasq-dns-59d8787555-wbgq2\" (UID: \"69189058-9f42-4d05-9a54-c46dbb461485\") " pod="openstack/dnsmasq-dns-59d8787555-wbgq2" Oct 09 14:56:49 crc kubenswrapper[4762]: I1009 14:56:49.724792 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/d7d04d08-8a53-47ee-9a23-17a13763b032-logs\") pod \"barbican-keystone-listener-6dcc6c5b54-fpnl8\" (UID: \"d7d04d08-8a53-47ee-9a23-17a13763b032\") " pod="openstack/barbican-keystone-listener-6dcc6c5b54-fpnl8" Oct 09 14:56:49 crc kubenswrapper[4762]: I1009 14:56:49.724829 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fmzk8\" (UniqueName: \"kubernetes.io/projected/d7d04d08-8a53-47ee-9a23-17a13763b032-kube-api-access-fmzk8\") pod \"barbican-keystone-listener-6dcc6c5b54-fpnl8\" (UID: \"d7d04d08-8a53-47ee-9a23-17a13763b032\") " pod="openstack/barbican-keystone-listener-6dcc6c5b54-fpnl8" Oct 09 14:56:49 crc kubenswrapper[4762]: I1009 14:56:49.724865 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6e87b828-195d-4441-9e8a-5075c9e29b05-config-data-custom\") pod \"barbican-worker-65686d9dff-wlj6h\" (UID: \"6e87b828-195d-4441-9e8a-5075c9e29b05\") " pod="openstack/barbican-worker-65686d9dff-wlj6h" Oct 09 14:56:49 crc kubenswrapper[4762]: I1009 14:56:49.724903 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/69189058-9f42-4d05-9a54-c46dbb461485-config\") pod \"dnsmasq-dns-59d8787555-wbgq2\" (UID: \"69189058-9f42-4d05-9a54-c46dbb461485\") " pod="openstack/dnsmasq-dns-59d8787555-wbgq2" Oct 09 14:56:49 crc kubenswrapper[4762]: I1009 14:56:49.724930 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6e87b828-195d-4441-9e8a-5075c9e29b05-config-data\") pod \"barbican-worker-65686d9dff-wlj6h\" (UID: \"6e87b828-195d-4441-9e8a-5075c9e29b05\") " pod="openstack/barbican-worker-65686d9dff-wlj6h" Oct 09 14:56:49 crc kubenswrapper[4762]: I1009 14:56:49.724949 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jft4c\" (UniqueName: \"kubernetes.io/projected/69189058-9f42-4d05-9a54-c46dbb461485-kube-api-access-jft4c\") pod \"dnsmasq-dns-59d8787555-wbgq2\" (UID: \"69189058-9f42-4d05-9a54-c46dbb461485\") " pod="openstack/dnsmasq-dns-59d8787555-wbgq2" Oct 09 14:56:49 crc kubenswrapper[4762]: I1009 14:56:49.724967 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e87b828-195d-4441-9e8a-5075c9e29b05-combined-ca-bundle\") pod \"barbican-worker-65686d9dff-wlj6h\" (UID: \"6e87b828-195d-4441-9e8a-5075c9e29b05\") " pod="openstack/barbican-worker-65686d9dff-wlj6h" Oct 09 14:56:49 crc kubenswrapper[4762]: I1009 14:56:49.724990 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/69189058-9f42-4d05-9a54-c46dbb461485-dns-svc\") pod \"dnsmasq-dns-59d8787555-wbgq2\" (UID: \"69189058-9f42-4d05-9a54-c46dbb461485\") " pod="openstack/dnsmasq-dns-59d8787555-wbgq2" Oct 09 14:56:49 crc kubenswrapper[4762]: I1009 14:56:49.725030 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6e87b828-195d-4441-9e8a-5075c9e29b05-logs\") pod \"barbican-worker-65686d9dff-wlj6h\" (UID: \"6e87b828-195d-4441-9e8a-5075c9e29b05\") " pod="openstack/barbican-worker-65686d9dff-wlj6h" Oct 09 14:56:49 crc kubenswrapper[4762]: I1009 14:56:49.725073 4762 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/69189058-9f42-4d05-9a54-c46dbb461485-ovsdbserver-sb\") pod \"dnsmasq-dns-59d8787555-wbgq2\" (UID: \"69189058-9f42-4d05-9a54-c46dbb461485\") " pod="openstack/dnsmasq-dns-59d8787555-wbgq2" Oct 09 14:56:49 crc kubenswrapper[4762]: I1009 14:56:49.725094 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7d04d08-8a53-47ee-9a23-17a13763b032-combined-ca-bundle\") pod \"barbican-keystone-listener-6dcc6c5b54-fpnl8\" (UID: \"d7d04d08-8a53-47ee-9a23-17a13763b032\") " pod="openstack/barbican-keystone-listener-6dcc6c5b54-fpnl8" Oct 09 14:56:49 crc kubenswrapper[4762]: I1009 14:56:49.726693 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6e87b828-195d-4441-9e8a-5075c9e29b05-logs\") pod \"barbican-worker-65686d9dff-wlj6h\" (UID: \"6e87b828-195d-4441-9e8a-5075c9e29b05\") " pod="openstack/barbican-worker-65686d9dff-wlj6h" Oct 09 14:56:49 crc kubenswrapper[4762]: I1009 14:56:49.727145 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d7d04d08-8a53-47ee-9a23-17a13763b032-logs\") pod \"barbican-keystone-listener-6dcc6c5b54-fpnl8\" (UID: \"d7d04d08-8a53-47ee-9a23-17a13763b032\") " pod="openstack/barbican-keystone-listener-6dcc6c5b54-fpnl8" Oct 09 14:56:49 crc kubenswrapper[4762]: I1009 14:56:49.751831 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e87b828-195d-4441-9e8a-5075c9e29b05-combined-ca-bundle\") pod \"barbican-worker-65686d9dff-wlj6h\" (UID: \"6e87b828-195d-4441-9e8a-5075c9e29b05\") " pod="openstack/barbican-worker-65686d9dff-wlj6h" Oct 09 14:56:49 crc kubenswrapper[4762]: I1009 14:56:49.752386 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d7d04d08-8a53-47ee-9a23-17a13763b032-config-data-custom\") pod \"barbican-keystone-listener-6dcc6c5b54-fpnl8\" (UID: \"d7d04d08-8a53-47ee-9a23-17a13763b032\") " pod="openstack/barbican-keystone-listener-6dcc6c5b54-fpnl8" Oct 09 14:56:49 crc kubenswrapper[4762]: I1009 14:56:49.752709 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6e87b828-195d-4441-9e8a-5075c9e29b05-config-data-custom\") pod \"barbican-worker-65686d9dff-wlj6h\" (UID: \"6e87b828-195d-4441-9e8a-5075c9e29b05\") " pod="openstack/barbican-worker-65686d9dff-wlj6h" Oct 09 14:56:49 crc kubenswrapper[4762]: I1009 14:56:49.753474 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d7d04d08-8a53-47ee-9a23-17a13763b032-config-data\") pod \"barbican-keystone-listener-6dcc6c5b54-fpnl8\" (UID: \"d7d04d08-8a53-47ee-9a23-17a13763b032\") " pod="openstack/barbican-keystone-listener-6dcc6c5b54-fpnl8" Oct 09 14:56:49 crc kubenswrapper[4762]: I1009 14:56:49.754343 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6e87b828-195d-4441-9e8a-5075c9e29b05-config-data\") pod \"barbican-worker-65686d9dff-wlj6h\" (UID: \"6e87b828-195d-4441-9e8a-5075c9e29b05\") " pod="openstack/barbican-worker-65686d9dff-wlj6h" Oct 09 14:56:49 crc kubenswrapper[4762]: I1009 14:56:49.768577 4762 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7d04d08-8a53-47ee-9a23-17a13763b032-combined-ca-bundle\") pod \"barbican-keystone-listener-6dcc6c5b54-fpnl8\" (UID: \"d7d04d08-8a53-47ee-9a23-17a13763b032\") " pod="openstack/barbican-keystone-listener-6dcc6c5b54-fpnl8" Oct 09 14:56:49 crc kubenswrapper[4762]: I1009 14:56:49.771119 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t4qnr\" (UniqueName: \"kubernetes.io/projected/6e87b828-195d-4441-9e8a-5075c9e29b05-kube-api-access-t4qnr\") pod \"barbican-worker-65686d9dff-wlj6h\" (UID: \"6e87b828-195d-4441-9e8a-5075c9e29b05\") " pod="openstack/barbican-worker-65686d9dff-wlj6h" Oct 09 14:56:49 crc kubenswrapper[4762]: I1009 14:56:49.771224 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-68978b84fd-g7kjs"] Oct 09 14:56:49 crc kubenswrapper[4762]: I1009 14:56:49.779545 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fmzk8\" (UniqueName: \"kubernetes.io/projected/d7d04d08-8a53-47ee-9a23-17a13763b032-kube-api-access-fmzk8\") pod \"barbican-keystone-listener-6dcc6c5b54-fpnl8\" (UID: \"d7d04d08-8a53-47ee-9a23-17a13763b032\") " pod="openstack/barbican-keystone-listener-6dcc6c5b54-fpnl8" Oct 09 14:56:49 crc kubenswrapper[4762]: I1009 14:56:49.781899 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-68978b84fd-g7kjs" Oct 09 14:56:49 crc kubenswrapper[4762]: I1009 14:56:49.786050 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Oct 09 14:56:49 crc kubenswrapper[4762]: I1009 14:56:49.811353 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-68978b84fd-g7kjs"] Oct 09 14:56:49 crc kubenswrapper[4762]: I1009 14:56:49.826250 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kdp8q\" (UniqueName: \"kubernetes.io/projected/1906dda0-58cf-428f-8c14-6d0a7385cc1f-kube-api-access-kdp8q\") pod \"barbican-api-68978b84fd-g7kjs\" (UID: \"1906dda0-58cf-428f-8c14-6d0a7385cc1f\") " pod="openstack/barbican-api-68978b84fd-g7kjs" Oct 09 14:56:49 crc kubenswrapper[4762]: I1009 14:56:49.826307 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/69189058-9f42-4d05-9a54-c46dbb461485-config\") pod \"dnsmasq-dns-59d8787555-wbgq2\" (UID: \"69189058-9f42-4d05-9a54-c46dbb461485\") " pod="openstack/dnsmasq-dns-59d8787555-wbgq2" Oct 09 14:56:49 crc kubenswrapper[4762]: I1009 14:56:49.826333 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jft4c\" (UniqueName: \"kubernetes.io/projected/69189058-9f42-4d05-9a54-c46dbb461485-kube-api-access-jft4c\") pod \"dnsmasq-dns-59d8787555-wbgq2\" (UID: \"69189058-9f42-4d05-9a54-c46dbb461485\") " pod="openstack/dnsmasq-dns-59d8787555-wbgq2" Oct 09 14:56:49 crc kubenswrapper[4762]: I1009 14:56:49.826397 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/69189058-9f42-4d05-9a54-c46dbb461485-dns-svc\") pod \"dnsmasq-dns-59d8787555-wbgq2\" (UID: \"69189058-9f42-4d05-9a54-c46dbb461485\") " pod="openstack/dnsmasq-dns-59d8787555-wbgq2" Oct 09 14:56:49 crc kubenswrapper[4762]: I1009 14:56:49.826421 4762 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1906dda0-58cf-428f-8c14-6d0a7385cc1f-combined-ca-bundle\") pod \"barbican-api-68978b84fd-g7kjs\" (UID: \"1906dda0-58cf-428f-8c14-6d0a7385cc1f\") " pod="openstack/barbican-api-68978b84fd-g7kjs" Oct 09 14:56:49 crc kubenswrapper[4762]: I1009 14:56:49.826456 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/69189058-9f42-4d05-9a54-c46dbb461485-ovsdbserver-sb\") pod \"dnsmasq-dns-59d8787555-wbgq2\" (UID: \"69189058-9f42-4d05-9a54-c46dbb461485\") " pod="openstack/dnsmasq-dns-59d8787555-wbgq2" Oct 09 14:56:49 crc kubenswrapper[4762]: I1009 14:56:49.826515 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1906dda0-58cf-428f-8c14-6d0a7385cc1f-config-data-custom\") pod \"barbican-api-68978b84fd-g7kjs\" (UID: \"1906dda0-58cf-428f-8c14-6d0a7385cc1f\") " pod="openstack/barbican-api-68978b84fd-g7kjs" Oct 09 14:56:49 crc kubenswrapper[4762]: I1009 14:56:49.826562 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/69189058-9f42-4d05-9a54-c46dbb461485-ovsdbserver-nb\") pod \"dnsmasq-dns-59d8787555-wbgq2\" (UID: \"69189058-9f42-4d05-9a54-c46dbb461485\") " pod="openstack/dnsmasq-dns-59d8787555-wbgq2" Oct 09 14:56:49 crc kubenswrapper[4762]: I1009 14:56:49.826580 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1906dda0-58cf-428f-8c14-6d0a7385cc1f-logs\") pod \"barbican-api-68978b84fd-g7kjs\" (UID: \"1906dda0-58cf-428f-8c14-6d0a7385cc1f\") " pod="openstack/barbican-api-68978b84fd-g7kjs" Oct 09 14:56:49 crc kubenswrapper[4762]: I1009 14:56:49.826605 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1906dda0-58cf-428f-8c14-6d0a7385cc1f-config-data\") pod \"barbican-api-68978b84fd-g7kjs\" (UID: \"1906dda0-58cf-428f-8c14-6d0a7385cc1f\") " pod="openstack/barbican-api-68978b84fd-g7kjs" Oct 09 14:56:49 crc kubenswrapper[4762]: I1009 14:56:49.828196 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/69189058-9f42-4d05-9a54-c46dbb461485-config\") pod \"dnsmasq-dns-59d8787555-wbgq2\" (UID: \"69189058-9f42-4d05-9a54-c46dbb461485\") " pod="openstack/dnsmasq-dns-59d8787555-wbgq2" Oct 09 14:56:49 crc kubenswrapper[4762]: I1009 14:56:49.828315 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/69189058-9f42-4d05-9a54-c46dbb461485-ovsdbserver-nb\") pod \"dnsmasq-dns-59d8787555-wbgq2\" (UID: \"69189058-9f42-4d05-9a54-c46dbb461485\") " pod="openstack/dnsmasq-dns-59d8787555-wbgq2" Oct 09 14:56:49 crc kubenswrapper[4762]: I1009 14:56:49.828354 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/69189058-9f42-4d05-9a54-c46dbb461485-dns-svc\") pod \"dnsmasq-dns-59d8787555-wbgq2\" (UID: \"69189058-9f42-4d05-9a54-c46dbb461485\") " pod="openstack/dnsmasq-dns-59d8787555-wbgq2" Oct 09 14:56:49 crc kubenswrapper[4762]: I1009 14:56:49.828669 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/69189058-9f42-4d05-9a54-c46dbb461485-ovsdbserver-sb\") pod \"dnsmasq-dns-59d8787555-wbgq2\" (UID: \"69189058-9f42-4d05-9a54-c46dbb461485\") " pod="openstack/dnsmasq-dns-59d8787555-wbgq2" Oct 09 14:56:49 crc kubenswrapper[4762]: I1009 14:56:49.850341 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jft4c\" (UniqueName: \"kubernetes.io/projected/69189058-9f42-4d05-9a54-c46dbb461485-kube-api-access-jft4c\") pod \"dnsmasq-dns-59d8787555-wbgq2\" (UID: \"69189058-9f42-4d05-9a54-c46dbb461485\") " pod="openstack/dnsmasq-dns-59d8787555-wbgq2" Oct 09 14:56:49 crc kubenswrapper[4762]: I1009 14:56:49.877267 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-65686d9dff-wlj6h" Oct 09 14:56:49 crc kubenswrapper[4762]: I1009 14:56:49.903310 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-6dcc6c5b54-fpnl8" Oct 09 14:56:49 crc kubenswrapper[4762]: I1009 14:56:49.928385 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1906dda0-58cf-428f-8c14-6d0a7385cc1f-config-data-custom\") pod \"barbican-api-68978b84fd-g7kjs\" (UID: \"1906dda0-58cf-428f-8c14-6d0a7385cc1f\") " pod="openstack/barbican-api-68978b84fd-g7kjs" Oct 09 14:56:49 crc kubenswrapper[4762]: I1009 14:56:49.928789 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1906dda0-58cf-428f-8c14-6d0a7385cc1f-logs\") pod \"barbican-api-68978b84fd-g7kjs\" (UID: \"1906dda0-58cf-428f-8c14-6d0a7385cc1f\") " pod="openstack/barbican-api-68978b84fd-g7kjs" Oct 09 14:56:49 crc kubenswrapper[4762]: I1009 14:56:49.928833 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1906dda0-58cf-428f-8c14-6d0a7385cc1f-config-data\") pod \"barbican-api-68978b84fd-g7kjs\" (UID: \"1906dda0-58cf-428f-8c14-6d0a7385cc1f\") " pod="openstack/barbican-api-68978b84fd-g7kjs" Oct 09 14:56:49 crc kubenswrapper[4762]: I1009 14:56:49.928883 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kdp8q\" (UniqueName: \"kubernetes.io/projected/1906dda0-58cf-428f-8c14-6d0a7385cc1f-kube-api-access-kdp8q\") pod \"barbican-api-68978b84fd-g7kjs\" (UID: \"1906dda0-58cf-428f-8c14-6d0a7385cc1f\") " pod="openstack/barbican-api-68978b84fd-g7kjs" Oct 09 14:56:49 crc kubenswrapper[4762]: I1009 14:56:49.928943 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1906dda0-58cf-428f-8c14-6d0a7385cc1f-combined-ca-bundle\") pod \"barbican-api-68978b84fd-g7kjs\" (UID: \"1906dda0-58cf-428f-8c14-6d0a7385cc1f\") " pod="openstack/barbican-api-68978b84fd-g7kjs" Oct 09 14:56:49 crc kubenswrapper[4762]: I1009 14:56:49.929684 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1906dda0-58cf-428f-8c14-6d0a7385cc1f-logs\") pod \"barbican-api-68978b84fd-g7kjs\" (UID: \"1906dda0-58cf-428f-8c14-6d0a7385cc1f\") " pod="openstack/barbican-api-68978b84fd-g7kjs" Oct 09 14:56:49 crc kubenswrapper[4762]: I1009 14:56:49.934592 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: 
\"kubernetes.io/secret/1906dda0-58cf-428f-8c14-6d0a7385cc1f-config-data-custom\") pod \"barbican-api-68978b84fd-g7kjs\" (UID: \"1906dda0-58cf-428f-8c14-6d0a7385cc1f\") " pod="openstack/barbican-api-68978b84fd-g7kjs" Oct 09 14:56:49 crc kubenswrapper[4762]: I1009 14:56:49.934941 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1906dda0-58cf-428f-8c14-6d0a7385cc1f-config-data\") pod \"barbican-api-68978b84fd-g7kjs\" (UID: \"1906dda0-58cf-428f-8c14-6d0a7385cc1f\") " pod="openstack/barbican-api-68978b84fd-g7kjs" Oct 09 14:56:49 crc kubenswrapper[4762]: I1009 14:56:49.935496 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1906dda0-58cf-428f-8c14-6d0a7385cc1f-combined-ca-bundle\") pod \"barbican-api-68978b84fd-g7kjs\" (UID: \"1906dda0-58cf-428f-8c14-6d0a7385cc1f\") " pod="openstack/barbican-api-68978b84fd-g7kjs" Oct 09 14:56:49 crc kubenswrapper[4762]: I1009 14:56:49.946060 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-59d8787555-wbgq2" Oct 09 14:56:49 crc kubenswrapper[4762]: I1009 14:56:49.948900 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kdp8q\" (UniqueName: \"kubernetes.io/projected/1906dda0-58cf-428f-8c14-6d0a7385cc1f-kube-api-access-kdp8q\") pod \"barbican-api-68978b84fd-g7kjs\" (UID: \"1906dda0-58cf-428f-8c14-6d0a7385cc1f\") " pod="openstack/barbican-api-68978b84fd-g7kjs" Oct 09 14:56:50 crc kubenswrapper[4762]: I1009 14:56:50.190021 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-68978b84fd-g7kjs" Oct 09 14:56:50 crc kubenswrapper[4762]: I1009 14:56:50.354282 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-65686d9dff-wlj6h"] Oct 09 14:56:50 crc kubenswrapper[4762]: I1009 14:56:50.424361 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-6dcc6c5b54-fpnl8"] Oct 09 14:56:50 crc kubenswrapper[4762]: I1009 14:56:50.484873 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-59d8787555-wbgq2"] Oct 09 14:56:50 crc kubenswrapper[4762]: W1009 14:56:50.496319 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod69189058_9f42_4d05_9a54_c46dbb461485.slice/crio-497c2ccfaef83525de1c4616bbb4732a745797369e7aec8da0aa7e94d81d6dad WatchSource:0}: Error finding container 497c2ccfaef83525de1c4616bbb4732a745797369e7aec8da0aa7e94d81d6dad: Status 404 returned error can't find the container with id 497c2ccfaef83525de1c4616bbb4732a745797369e7aec8da0aa7e94d81d6dad Oct 09 14:56:50 crc kubenswrapper[4762]: I1009 14:56:50.692777 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-68978b84fd-g7kjs"] Oct 09 14:56:50 crc kubenswrapper[4762]: W1009 14:56:50.702714 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1906dda0_58cf_428f_8c14_6d0a7385cc1f.slice/crio-472f54d7b915e734271530719a9963a95c02e4cd706a732e47133d0f9bd9f992 WatchSource:0}: Error finding container 472f54d7b915e734271530719a9963a95c02e4cd706a732e47133d0f9bd9f992: Status 404 returned error can't find the container with id 472f54d7b915e734271530719a9963a95c02e4cd706a732e47133d0f9bd9f992 Oct 09 14:56:51 crc kubenswrapper[4762]: I1009 
14:56:51.132194 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-7rfd9"] Oct 09 14:56:51 crc kubenswrapper[4762]: I1009 14:56:51.135528 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-7rfd9" Oct 09 14:56:51 crc kubenswrapper[4762]: I1009 14:56:51.140909 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-7rfd9"] Oct 09 14:56:51 crc kubenswrapper[4762]: I1009 14:56:51.249576 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8b8kl\" (UniqueName: \"kubernetes.io/projected/1a814409-d984-429a-b745-ca7981f4653e-kube-api-access-8b8kl\") pod \"redhat-operators-7rfd9\" (UID: \"1a814409-d984-429a-b745-ca7981f4653e\") " pod="openshift-marketplace/redhat-operators-7rfd9" Oct 09 14:56:51 crc kubenswrapper[4762]: I1009 14:56:51.249664 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1a814409-d984-429a-b745-ca7981f4653e-catalog-content\") pod \"redhat-operators-7rfd9\" (UID: \"1a814409-d984-429a-b745-ca7981f4653e\") " pod="openshift-marketplace/redhat-operators-7rfd9" Oct 09 14:56:51 crc kubenswrapper[4762]: I1009 14:56:51.249716 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1a814409-d984-429a-b745-ca7981f4653e-utilities\") pod \"redhat-operators-7rfd9\" (UID: \"1a814409-d984-429a-b745-ca7981f4653e\") " pod="openshift-marketplace/redhat-operators-7rfd9" Oct 09 14:56:51 crc kubenswrapper[4762]: I1009 14:56:51.308795 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-68978b84fd-g7kjs" event={"ID":"1906dda0-58cf-428f-8c14-6d0a7385cc1f","Type":"ContainerStarted","Data":"b149b835eeaceb5630215719f72d1e1cc50b7fad34476be639aea39c20155fab"} Oct 09 14:56:51 crc kubenswrapper[4762]: I1009 14:56:51.308839 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-68978b84fd-g7kjs" event={"ID":"1906dda0-58cf-428f-8c14-6d0a7385cc1f","Type":"ContainerStarted","Data":"472f54d7b915e734271530719a9963a95c02e4cd706a732e47133d0f9bd9f992"} Oct 09 14:56:51 crc kubenswrapper[4762]: I1009 14:56:51.310572 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-6dcc6c5b54-fpnl8" event={"ID":"d7d04d08-8a53-47ee-9a23-17a13763b032","Type":"ContainerStarted","Data":"fc111ba3afe3eb1bb7ebbde5c00d85c8ac83df7aca78fe36ec57e18944d353de"} Oct 09 14:56:51 crc kubenswrapper[4762]: I1009 14:56:51.310617 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-6dcc6c5b54-fpnl8" event={"ID":"d7d04d08-8a53-47ee-9a23-17a13763b032","Type":"ContainerStarted","Data":"acdd215bd3c5e4e14cac6d63e5a89bc20ad2929a7786701e41938aa4b84f1597"} Oct 09 14:56:51 crc kubenswrapper[4762]: I1009 14:56:51.312033 4762 generic.go:334] "Generic (PLEG): container finished" podID="69189058-9f42-4d05-9a54-c46dbb461485" containerID="3428f4984cb38000a5dad0b26963c4f82bed1b432b0655f009b7a24249669fa8" exitCode=0 Oct 09 14:56:51 crc kubenswrapper[4762]: I1009 14:56:51.312083 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59d8787555-wbgq2" 
event={"ID":"69189058-9f42-4d05-9a54-c46dbb461485","Type":"ContainerDied","Data":"3428f4984cb38000a5dad0b26963c4f82bed1b432b0655f009b7a24249669fa8"} Oct 09 14:56:51 crc kubenswrapper[4762]: I1009 14:56:51.312107 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59d8787555-wbgq2" event={"ID":"69189058-9f42-4d05-9a54-c46dbb461485","Type":"ContainerStarted","Data":"497c2ccfaef83525de1c4616bbb4732a745797369e7aec8da0aa7e94d81d6dad"} Oct 09 14:56:51 crc kubenswrapper[4762]: I1009 14:56:51.313901 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-65686d9dff-wlj6h" event={"ID":"6e87b828-195d-4441-9e8a-5075c9e29b05","Type":"ContainerStarted","Data":"ed54a384f6f9e7675f4cb98f9ab898aa9c3d30a0d21b5a3246e5f53acfcc521b"} Oct 09 14:56:51 crc kubenswrapper[4762]: I1009 14:56:51.313930 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-65686d9dff-wlj6h" event={"ID":"6e87b828-195d-4441-9e8a-5075c9e29b05","Type":"ContainerStarted","Data":"f6d265fb7b48c1ed6d52f4bba73233449213de3ec7fe36f4a4bd21cf155dbd15"} Oct 09 14:56:51 crc kubenswrapper[4762]: I1009 14:56:51.351185 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8b8kl\" (UniqueName: \"kubernetes.io/projected/1a814409-d984-429a-b745-ca7981f4653e-kube-api-access-8b8kl\") pod \"redhat-operators-7rfd9\" (UID: \"1a814409-d984-429a-b745-ca7981f4653e\") " pod="openshift-marketplace/redhat-operators-7rfd9" Oct 09 14:56:51 crc kubenswrapper[4762]: I1009 14:56:51.351249 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1a814409-d984-429a-b745-ca7981f4653e-catalog-content\") pod \"redhat-operators-7rfd9\" (UID: \"1a814409-d984-429a-b745-ca7981f4653e\") " pod="openshift-marketplace/redhat-operators-7rfd9" Oct 09 14:56:51 crc kubenswrapper[4762]: I1009 14:56:51.351298 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1a814409-d984-429a-b745-ca7981f4653e-utilities\") pod \"redhat-operators-7rfd9\" (UID: \"1a814409-d984-429a-b745-ca7981f4653e\") " pod="openshift-marketplace/redhat-operators-7rfd9" Oct 09 14:56:51 crc kubenswrapper[4762]: I1009 14:56:51.351842 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1a814409-d984-429a-b745-ca7981f4653e-utilities\") pod \"redhat-operators-7rfd9\" (UID: \"1a814409-d984-429a-b745-ca7981f4653e\") " pod="openshift-marketplace/redhat-operators-7rfd9" Oct 09 14:56:51 crc kubenswrapper[4762]: I1009 14:56:51.352712 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1a814409-d984-429a-b745-ca7981f4653e-catalog-content\") pod \"redhat-operators-7rfd9\" (UID: \"1a814409-d984-429a-b745-ca7981f4653e\") " pod="openshift-marketplace/redhat-operators-7rfd9" Oct 09 14:56:51 crc kubenswrapper[4762]: I1009 14:56:51.371020 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8b8kl\" (UniqueName: \"kubernetes.io/projected/1a814409-d984-429a-b745-ca7981f4653e-kube-api-access-8b8kl\") pod \"redhat-operators-7rfd9\" (UID: \"1a814409-d984-429a-b745-ca7981f4653e\") " pod="openshift-marketplace/redhat-operators-7rfd9" Oct 09 14:56:51 crc kubenswrapper[4762]: I1009 14:56:51.623993 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-7rfd9" Oct 09 14:56:52 crc kubenswrapper[4762]: I1009 14:56:52.145817 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-7rfd9"] Oct 09 14:56:52 crc kubenswrapper[4762]: I1009 14:56:52.337572 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59d8787555-wbgq2" event={"ID":"69189058-9f42-4d05-9a54-c46dbb461485","Type":"ContainerStarted","Data":"8f5fc5be820d8e647d109f6500555d3c6f872a5b9ac944d44eacecd99386d309"} Oct 09 14:56:52 crc kubenswrapper[4762]: I1009 14:56:52.339153 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-59d8787555-wbgq2" Oct 09 14:56:52 crc kubenswrapper[4762]: I1009 14:56:52.345762 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-65686d9dff-wlj6h" event={"ID":"6e87b828-195d-4441-9e8a-5075c9e29b05","Type":"ContainerStarted","Data":"ebf7194fc0add24b285ce7c996a110ec0f47f375c275b2d49d5a86c7e78d523d"} Oct 09 14:56:52 crc kubenswrapper[4762]: I1009 14:56:52.356513 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7rfd9" event={"ID":"1a814409-d984-429a-b745-ca7981f4653e","Type":"ContainerStarted","Data":"eea6228d6682f973c3fe983bfe3458e4a79bf66c67727d817d9ac33b43348570"} Oct 09 14:56:52 crc kubenswrapper[4762]: I1009 14:56:52.358927 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-68978b84fd-g7kjs" event={"ID":"1906dda0-58cf-428f-8c14-6d0a7385cc1f","Type":"ContainerStarted","Data":"8b3f689425eb201fa5042b42ddb1741fd6df408111372b26c865838260d133c0"} Oct 09 14:56:52 crc kubenswrapper[4762]: I1009 14:56:52.359052 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-68978b84fd-g7kjs" Oct 09 14:56:52 crc kubenswrapper[4762]: I1009 14:56:52.361837 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-6dcc6c5b54-fpnl8" event={"ID":"d7d04d08-8a53-47ee-9a23-17a13763b032","Type":"ContainerStarted","Data":"e1598726eba4197d568b4cf01e99048adce007ebba63bcea2e160319b15a4d6c"} Oct 09 14:56:52 crc kubenswrapper[4762]: I1009 14:56:52.372786 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-59d8787555-wbgq2" podStartSLOduration=3.37276507 podStartE2EDuration="3.37276507s" podCreationTimestamp="2025-10-09 14:56:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 14:56:52.365281365 +0000 UTC m=+5488.139072404" watchObservedRunningTime="2025-10-09 14:56:52.37276507 +0000 UTC m=+5488.146556119" Oct 09 14:56:52 crc kubenswrapper[4762]: I1009 14:56:52.389785 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-65686d9dff-wlj6h" podStartSLOduration=3.389747503 podStartE2EDuration="3.389747503s" podCreationTimestamp="2025-10-09 14:56:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 14:56:52.382609797 +0000 UTC m=+5488.156400846" watchObservedRunningTime="2025-10-09 14:56:52.389747503 +0000 UTC m=+5488.163538582" Oct 09 14:56:52 crc kubenswrapper[4762]: I1009 14:56:52.407563 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-6dcc6c5b54-fpnl8" 
podStartSLOduration=3.407540376 podStartE2EDuration="3.407540376s" podCreationTimestamp="2025-10-09 14:56:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 14:56:52.404948559 +0000 UTC m=+5488.178739598" watchObservedRunningTime="2025-10-09 14:56:52.407540376 +0000 UTC m=+5488.181331415" Oct 09 14:56:52 crc kubenswrapper[4762]: I1009 14:56:52.430454 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-68978b84fd-g7kjs" podStartSLOduration=3.430426863 podStartE2EDuration="3.430426863s" podCreationTimestamp="2025-10-09 14:56:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 14:56:52.423191794 +0000 UTC m=+5488.196982833" watchObservedRunningTime="2025-10-09 14:56:52.430426863 +0000 UTC m=+5488.204217902" Oct 09 14:56:53 crc kubenswrapper[4762]: I1009 14:56:53.387242 4762 generic.go:334] "Generic (PLEG): container finished" podID="1a814409-d984-429a-b745-ca7981f4653e" containerID="116b38b5f2c51f764f05c0c7abbbe12bc7d120788773086a753741cf28b0b654" exitCode=0 Oct 09 14:56:53 crc kubenswrapper[4762]: I1009 14:56:53.387290 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7rfd9" event={"ID":"1a814409-d984-429a-b745-ca7981f4653e","Type":"ContainerDied","Data":"116b38b5f2c51f764f05c0c7abbbe12bc7d120788773086a753741cf28b0b654"} Oct 09 14:56:53 crc kubenswrapper[4762]: I1009 14:56:53.389680 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-68978b84fd-g7kjs" Oct 09 14:56:53 crc kubenswrapper[4762]: I1009 14:56:53.391915 4762 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 09 14:56:55 crc kubenswrapper[4762]: I1009 14:56:55.410820 4762 generic.go:334] "Generic (PLEG): container finished" podID="1a814409-d984-429a-b745-ca7981f4653e" containerID="130822153e1d65f6b2548ce7dcf36dcddee823b9ce43db9688b6b8b37dee9a68" exitCode=0 Oct 09 14:56:55 crc kubenswrapper[4762]: I1009 14:56:55.410938 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7rfd9" event={"ID":"1a814409-d984-429a-b745-ca7981f4653e","Type":"ContainerDied","Data":"130822153e1d65f6b2548ce7dcf36dcddee823b9ce43db9688b6b8b37dee9a68"} Oct 09 14:56:57 crc kubenswrapper[4762]: I1009 14:56:57.444076 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7rfd9" event={"ID":"1a814409-d984-429a-b745-ca7981f4653e","Type":"ContainerStarted","Data":"1d0432bad18f587ddffd3bc58efe757e104235b395f4e8c666aff80ba548f5af"} Oct 09 14:56:57 crc kubenswrapper[4762]: I1009 14:56:57.467909 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-7rfd9" podStartSLOduration=3.471229329 podStartE2EDuration="6.467873498s" podCreationTimestamp="2025-10-09 14:56:51 +0000 UTC" firstStartedPulling="2025-10-09 14:56:53.391671244 +0000 UTC m=+5489.165462283" lastFinishedPulling="2025-10-09 14:56:56.388315413 +0000 UTC m=+5492.162106452" observedRunningTime="2025-10-09 14:56:57.467658311 +0000 UTC m=+5493.241449350" watchObservedRunningTime="2025-10-09 14:56:57.467873498 +0000 UTC m=+5493.241664537" Oct 09 14:56:57 crc kubenswrapper[4762]: I1009 14:56:57.965592 4762 scope.go:117] "RemoveContainer" 
containerID="c75ef9853f5fe1f6bce3930681cb05899e816bea18fa88b6c554eafd348de735" Oct 09 14:56:57 crc kubenswrapper[4762]: E1009 14:56:57.965909 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 14:56:59 crc kubenswrapper[4762]: I1009 14:56:59.951056 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-59d8787555-wbgq2" Oct 09 14:57:00 crc kubenswrapper[4762]: I1009 14:57:00.006167 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6f8b9b55c-z9d5q"] Oct 09 14:57:00 crc kubenswrapper[4762]: I1009 14:57:00.006407 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6f8b9b55c-z9d5q" podUID="499e5912-ebea-47bd-9a52-2b9d51b7bfe2" containerName="dnsmasq-dns" containerID="cri-o://134cd8e0b9e042a3a888d3300b1a346ee8f94afb5ddc0db2077a9c336c51f519" gracePeriod=10 Oct 09 14:57:00 crc kubenswrapper[4762]: I1009 14:57:00.472210 4762 generic.go:334] "Generic (PLEG): container finished" podID="499e5912-ebea-47bd-9a52-2b9d51b7bfe2" containerID="134cd8e0b9e042a3a888d3300b1a346ee8f94afb5ddc0db2077a9c336c51f519" exitCode=0 Oct 09 14:57:00 crc kubenswrapper[4762]: I1009 14:57:00.472267 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6f8b9b55c-z9d5q" event={"ID":"499e5912-ebea-47bd-9a52-2b9d51b7bfe2","Type":"ContainerDied","Data":"134cd8e0b9e042a3a888d3300b1a346ee8f94afb5ddc0db2077a9c336c51f519"} Oct 09 14:57:01 crc kubenswrapper[4762]: I1009 14:57:01.130579 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6f8b9b55c-z9d5q" Oct 09 14:57:01 crc kubenswrapper[4762]: I1009 14:57:01.230722 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/499e5912-ebea-47bd-9a52-2b9d51b7bfe2-dns-svc\") pod \"499e5912-ebea-47bd-9a52-2b9d51b7bfe2\" (UID: \"499e5912-ebea-47bd-9a52-2b9d51b7bfe2\") " Oct 09 14:57:01 crc kubenswrapper[4762]: I1009 14:57:01.230778 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/499e5912-ebea-47bd-9a52-2b9d51b7bfe2-ovsdbserver-nb\") pod \"499e5912-ebea-47bd-9a52-2b9d51b7bfe2\" (UID: \"499e5912-ebea-47bd-9a52-2b9d51b7bfe2\") " Oct 09 14:57:01 crc kubenswrapper[4762]: I1009 14:57:01.230883 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rhh6g\" (UniqueName: \"kubernetes.io/projected/499e5912-ebea-47bd-9a52-2b9d51b7bfe2-kube-api-access-rhh6g\") pod \"499e5912-ebea-47bd-9a52-2b9d51b7bfe2\" (UID: \"499e5912-ebea-47bd-9a52-2b9d51b7bfe2\") " Oct 09 14:57:01 crc kubenswrapper[4762]: I1009 14:57:01.230920 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/499e5912-ebea-47bd-9a52-2b9d51b7bfe2-ovsdbserver-sb\") pod \"499e5912-ebea-47bd-9a52-2b9d51b7bfe2\" (UID: \"499e5912-ebea-47bd-9a52-2b9d51b7bfe2\") " Oct 09 14:57:01 crc kubenswrapper[4762]: I1009 14:57:01.230999 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/499e5912-ebea-47bd-9a52-2b9d51b7bfe2-config\") pod \"499e5912-ebea-47bd-9a52-2b9d51b7bfe2\" (UID: \"499e5912-ebea-47bd-9a52-2b9d51b7bfe2\") " Oct 09 14:57:01 crc kubenswrapper[4762]: I1009 14:57:01.239401 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/499e5912-ebea-47bd-9a52-2b9d51b7bfe2-kube-api-access-rhh6g" (OuterVolumeSpecName: "kube-api-access-rhh6g") pod "499e5912-ebea-47bd-9a52-2b9d51b7bfe2" (UID: "499e5912-ebea-47bd-9a52-2b9d51b7bfe2"). InnerVolumeSpecName "kube-api-access-rhh6g". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 14:57:01 crc kubenswrapper[4762]: I1009 14:57:01.285026 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/499e5912-ebea-47bd-9a52-2b9d51b7bfe2-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "499e5912-ebea-47bd-9a52-2b9d51b7bfe2" (UID: "499e5912-ebea-47bd-9a52-2b9d51b7bfe2"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 14:57:01 crc kubenswrapper[4762]: I1009 14:57:01.294557 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/499e5912-ebea-47bd-9a52-2b9d51b7bfe2-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "499e5912-ebea-47bd-9a52-2b9d51b7bfe2" (UID: "499e5912-ebea-47bd-9a52-2b9d51b7bfe2"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 14:57:01 crc kubenswrapper[4762]: I1009 14:57:01.295066 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/499e5912-ebea-47bd-9a52-2b9d51b7bfe2-config" (OuterVolumeSpecName: "config") pod "499e5912-ebea-47bd-9a52-2b9d51b7bfe2" (UID: "499e5912-ebea-47bd-9a52-2b9d51b7bfe2"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 14:57:01 crc kubenswrapper[4762]: I1009 14:57:01.300719 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/499e5912-ebea-47bd-9a52-2b9d51b7bfe2-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "499e5912-ebea-47bd-9a52-2b9d51b7bfe2" (UID: "499e5912-ebea-47bd-9a52-2b9d51b7bfe2"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 14:57:01 crc kubenswrapper[4762]: I1009 14:57:01.333355 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rhh6g\" (UniqueName: \"kubernetes.io/projected/499e5912-ebea-47bd-9a52-2b9d51b7bfe2-kube-api-access-rhh6g\") on node \"crc\" DevicePath \"\"" Oct 09 14:57:01 crc kubenswrapper[4762]: I1009 14:57:01.333607 4762 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/499e5912-ebea-47bd-9a52-2b9d51b7bfe2-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 09 14:57:01 crc kubenswrapper[4762]: I1009 14:57:01.333702 4762 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/499e5912-ebea-47bd-9a52-2b9d51b7bfe2-config\") on node \"crc\" DevicePath \"\"" Oct 09 14:57:01 crc kubenswrapper[4762]: I1009 14:57:01.333769 4762 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/499e5912-ebea-47bd-9a52-2b9d51b7bfe2-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 09 14:57:01 crc kubenswrapper[4762]: I1009 14:57:01.333831 4762 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/499e5912-ebea-47bd-9a52-2b9d51b7bfe2-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 09 14:57:01 crc kubenswrapper[4762]: I1009 14:57:01.483796 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6f8b9b55c-z9d5q" event={"ID":"499e5912-ebea-47bd-9a52-2b9d51b7bfe2","Type":"ContainerDied","Data":"a7f3c6a05a904a99c24ff04e6c955fe43955a3f237d28a2d84057b509a79fd17"} Oct 09 14:57:01 crc kubenswrapper[4762]: I1009 14:57:01.483842 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6f8b9b55c-z9d5q" Oct 09 14:57:01 crc kubenswrapper[4762]: I1009 14:57:01.483858 4762 scope.go:117] "RemoveContainer" containerID="134cd8e0b9e042a3a888d3300b1a346ee8f94afb5ddc0db2077a9c336c51f519" Oct 09 14:57:01 crc kubenswrapper[4762]: I1009 14:57:01.506821 4762 scope.go:117] "RemoveContainer" containerID="2c3ceb634c42f3e3752c1d8c9cb3a7021251f88235822fc719e1e3bb36ff73e8" Oct 09 14:57:01 crc kubenswrapper[4762]: I1009 14:57:01.521383 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6f8b9b55c-z9d5q"] Oct 09 14:57:01 crc kubenswrapper[4762]: I1009 14:57:01.529119 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6f8b9b55c-z9d5q"] Oct 09 14:57:01 crc kubenswrapper[4762]: I1009 14:57:01.624749 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-7rfd9" Oct 09 14:57:01 crc kubenswrapper[4762]: I1009 14:57:01.626037 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-7rfd9" Oct 09 14:57:01 crc kubenswrapper[4762]: I1009 14:57:01.682823 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-7rfd9" Oct 09 14:57:01 crc kubenswrapper[4762]: I1009 14:57:01.697478 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-68978b84fd-g7kjs" Oct 09 14:57:01 crc kubenswrapper[4762]: I1009 14:57:01.790328 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-68978b84fd-g7kjs" Oct 09 14:57:02 crc kubenswrapper[4762]: I1009 14:57:02.553268 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-7rfd9" Oct 09 14:57:02 crc kubenswrapper[4762]: I1009 14:57:02.609259 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-7rfd9"] Oct 09 14:57:02 crc kubenswrapper[4762]: I1009 14:57:02.984309 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="499e5912-ebea-47bd-9a52-2b9d51b7bfe2" path="/var/lib/kubelet/pods/499e5912-ebea-47bd-9a52-2b9d51b7bfe2/volumes" Oct 09 14:57:04 crc kubenswrapper[4762]: I1009 14:57:04.511454 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-7rfd9" podUID="1a814409-d984-429a-b745-ca7981f4653e" containerName="registry-server" containerID="cri-o://1d0432bad18f587ddffd3bc58efe757e104235b395f4e8c666aff80ba548f5af" gracePeriod=2 Oct 09 14:57:04 crc kubenswrapper[4762]: I1009 14:57:04.958059 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-7rfd9" Oct 09 14:57:05 crc kubenswrapper[4762]: I1009 14:57:05.098148 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1a814409-d984-429a-b745-ca7981f4653e-catalog-content\") pod \"1a814409-d984-429a-b745-ca7981f4653e\" (UID: \"1a814409-d984-429a-b745-ca7981f4653e\") " Oct 09 14:57:05 crc kubenswrapper[4762]: I1009 14:57:05.105365 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8b8kl\" (UniqueName: \"kubernetes.io/projected/1a814409-d984-429a-b745-ca7981f4653e-kube-api-access-8b8kl\") pod \"1a814409-d984-429a-b745-ca7981f4653e\" (UID: \"1a814409-d984-429a-b745-ca7981f4653e\") " Oct 09 14:57:05 crc kubenswrapper[4762]: I1009 14:57:05.105532 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1a814409-d984-429a-b745-ca7981f4653e-utilities\") pod \"1a814409-d984-429a-b745-ca7981f4653e\" (UID: \"1a814409-d984-429a-b745-ca7981f4653e\") " Oct 09 14:57:05 crc kubenswrapper[4762]: I1009 14:57:05.106414 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1a814409-d984-429a-b745-ca7981f4653e-utilities" (OuterVolumeSpecName: "utilities") pod "1a814409-d984-429a-b745-ca7981f4653e" (UID: "1a814409-d984-429a-b745-ca7981f4653e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 14:57:05 crc kubenswrapper[4762]: I1009 14:57:05.107130 4762 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1a814409-d984-429a-b745-ca7981f4653e-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 14:57:05 crc kubenswrapper[4762]: I1009 14:57:05.112788 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1a814409-d984-429a-b745-ca7981f4653e-kube-api-access-8b8kl" (OuterVolumeSpecName: "kube-api-access-8b8kl") pod "1a814409-d984-429a-b745-ca7981f4653e" (UID: "1a814409-d984-429a-b745-ca7981f4653e"). InnerVolumeSpecName "kube-api-access-8b8kl". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 14:57:05 crc kubenswrapper[4762]: I1009 14:57:05.183449 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1a814409-d984-429a-b745-ca7981f4653e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1a814409-d984-429a-b745-ca7981f4653e" (UID: "1a814409-d984-429a-b745-ca7981f4653e"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 14:57:05 crc kubenswrapper[4762]: I1009 14:57:05.208464 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8b8kl\" (UniqueName: \"kubernetes.io/projected/1a814409-d984-429a-b745-ca7981f4653e-kube-api-access-8b8kl\") on node \"crc\" DevicePath \"\"" Oct 09 14:57:05 crc kubenswrapper[4762]: I1009 14:57:05.208490 4762 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1a814409-d984-429a-b745-ca7981f4653e-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 14:57:05 crc kubenswrapper[4762]: I1009 14:57:05.526474 4762 generic.go:334] "Generic (PLEG): container finished" podID="1a814409-d984-429a-b745-ca7981f4653e" containerID="1d0432bad18f587ddffd3bc58efe757e104235b395f4e8c666aff80ba548f5af" exitCode=0 Oct 09 14:57:05 crc kubenswrapper[4762]: I1009 14:57:05.526564 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7rfd9" event={"ID":"1a814409-d984-429a-b745-ca7981f4653e","Type":"ContainerDied","Data":"1d0432bad18f587ddffd3bc58efe757e104235b395f4e8c666aff80ba548f5af"} Oct 09 14:57:05 crc kubenswrapper[4762]: I1009 14:57:05.526618 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-7rfd9" Oct 09 14:57:05 crc kubenswrapper[4762]: I1009 14:57:05.526693 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7rfd9" event={"ID":"1a814409-d984-429a-b745-ca7981f4653e","Type":"ContainerDied","Data":"eea6228d6682f973c3fe983bfe3458e4a79bf66c67727d817d9ac33b43348570"} Oct 09 14:57:05 crc kubenswrapper[4762]: I1009 14:57:05.526727 4762 scope.go:117] "RemoveContainer" containerID="1d0432bad18f587ddffd3bc58efe757e104235b395f4e8c666aff80ba548f5af" Oct 09 14:57:05 crc kubenswrapper[4762]: I1009 14:57:05.571181 4762 scope.go:117] "RemoveContainer" containerID="130822153e1d65f6b2548ce7dcf36dcddee823b9ce43db9688b6b8b37dee9a68" Oct 09 14:57:05 crc kubenswrapper[4762]: I1009 14:57:05.584896 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-7rfd9"] Oct 09 14:57:05 crc kubenswrapper[4762]: I1009 14:57:05.592918 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-7rfd9"] Oct 09 14:57:05 crc kubenswrapper[4762]: I1009 14:57:05.597714 4762 scope.go:117] "RemoveContainer" containerID="116b38b5f2c51f764f05c0c7abbbe12bc7d120788773086a753741cf28b0b654" Oct 09 14:57:05 crc kubenswrapper[4762]: I1009 14:57:05.634209 4762 scope.go:117] "RemoveContainer" containerID="1d0432bad18f587ddffd3bc58efe757e104235b395f4e8c666aff80ba548f5af" Oct 09 14:57:05 crc kubenswrapper[4762]: E1009 14:57:05.634744 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1d0432bad18f587ddffd3bc58efe757e104235b395f4e8c666aff80ba548f5af\": container with ID starting with 1d0432bad18f587ddffd3bc58efe757e104235b395f4e8c666aff80ba548f5af not found: ID does not exist" containerID="1d0432bad18f587ddffd3bc58efe757e104235b395f4e8c666aff80ba548f5af" Oct 09 14:57:05 crc kubenswrapper[4762]: I1009 14:57:05.634790 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1d0432bad18f587ddffd3bc58efe757e104235b395f4e8c666aff80ba548f5af"} err="failed to get container status \"1d0432bad18f587ddffd3bc58efe757e104235b395f4e8c666aff80ba548f5af\": 
rpc error: code = NotFound desc = could not find container \"1d0432bad18f587ddffd3bc58efe757e104235b395f4e8c666aff80ba548f5af\": container with ID starting with 1d0432bad18f587ddffd3bc58efe757e104235b395f4e8c666aff80ba548f5af not found: ID does not exist" Oct 09 14:57:05 crc kubenswrapper[4762]: I1009 14:57:05.634821 4762 scope.go:117] "RemoveContainer" containerID="130822153e1d65f6b2548ce7dcf36dcddee823b9ce43db9688b6b8b37dee9a68" Oct 09 14:57:05 crc kubenswrapper[4762]: E1009 14:57:05.635393 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"130822153e1d65f6b2548ce7dcf36dcddee823b9ce43db9688b6b8b37dee9a68\": container with ID starting with 130822153e1d65f6b2548ce7dcf36dcddee823b9ce43db9688b6b8b37dee9a68 not found: ID does not exist" containerID="130822153e1d65f6b2548ce7dcf36dcddee823b9ce43db9688b6b8b37dee9a68" Oct 09 14:57:05 crc kubenswrapper[4762]: I1009 14:57:05.635507 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"130822153e1d65f6b2548ce7dcf36dcddee823b9ce43db9688b6b8b37dee9a68"} err="failed to get container status \"130822153e1d65f6b2548ce7dcf36dcddee823b9ce43db9688b6b8b37dee9a68\": rpc error: code = NotFound desc = could not find container \"130822153e1d65f6b2548ce7dcf36dcddee823b9ce43db9688b6b8b37dee9a68\": container with ID starting with 130822153e1d65f6b2548ce7dcf36dcddee823b9ce43db9688b6b8b37dee9a68 not found: ID does not exist" Oct 09 14:57:05 crc kubenswrapper[4762]: I1009 14:57:05.635546 4762 scope.go:117] "RemoveContainer" containerID="116b38b5f2c51f764f05c0c7abbbe12bc7d120788773086a753741cf28b0b654" Oct 09 14:57:05 crc kubenswrapper[4762]: E1009 14:57:05.636139 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"116b38b5f2c51f764f05c0c7abbbe12bc7d120788773086a753741cf28b0b654\": container with ID starting with 116b38b5f2c51f764f05c0c7abbbe12bc7d120788773086a753741cf28b0b654 not found: ID does not exist" containerID="116b38b5f2c51f764f05c0c7abbbe12bc7d120788773086a753741cf28b0b654" Oct 09 14:57:05 crc kubenswrapper[4762]: I1009 14:57:05.636164 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"116b38b5f2c51f764f05c0c7abbbe12bc7d120788773086a753741cf28b0b654"} err="failed to get container status \"116b38b5f2c51f764f05c0c7abbbe12bc7d120788773086a753741cf28b0b654\": rpc error: code = NotFound desc = could not find container \"116b38b5f2c51f764f05c0c7abbbe12bc7d120788773086a753741cf28b0b654\": container with ID starting with 116b38b5f2c51f764f05c0c7abbbe12bc7d120788773086a753741cf28b0b654 not found: ID does not exist" Oct 09 14:57:06 crc kubenswrapper[4762]: I1009 14:57:06.975624 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1a814409-d984-429a-b745-ca7981f4653e" path="/var/lib/kubelet/pods/1a814409-d984-429a-b745-ca7981f4653e/volumes" Oct 09 14:57:09 crc kubenswrapper[4762]: I1009 14:57:09.972678 4762 scope.go:117] "RemoveContainer" containerID="c75ef9853f5fe1f6bce3930681cb05899e816bea18fa88b6c554eafd348de735" Oct 09 14:57:09 crc kubenswrapper[4762]: E1009 14:57:09.973422 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 14:57:15 crc kubenswrapper[4762]: I1009 14:57:15.310190 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-hxl4x"] Oct 09 14:57:15 crc kubenswrapper[4762]: E1009 14:57:15.311236 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="499e5912-ebea-47bd-9a52-2b9d51b7bfe2" containerName="dnsmasq-dns" Oct 09 14:57:15 crc kubenswrapper[4762]: I1009 14:57:15.311254 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="499e5912-ebea-47bd-9a52-2b9d51b7bfe2" containerName="dnsmasq-dns" Oct 09 14:57:15 crc kubenswrapper[4762]: E1009 14:57:15.311270 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1a814409-d984-429a-b745-ca7981f4653e" containerName="registry-server" Oct 09 14:57:15 crc kubenswrapper[4762]: I1009 14:57:15.311276 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="1a814409-d984-429a-b745-ca7981f4653e" containerName="registry-server" Oct 09 14:57:15 crc kubenswrapper[4762]: E1009 14:57:15.311296 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1a814409-d984-429a-b745-ca7981f4653e" containerName="extract-content" Oct 09 14:57:15 crc kubenswrapper[4762]: I1009 14:57:15.311303 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="1a814409-d984-429a-b745-ca7981f4653e" containerName="extract-content" Oct 09 14:57:15 crc kubenswrapper[4762]: E1009 14:57:15.311312 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1a814409-d984-429a-b745-ca7981f4653e" containerName="extract-utilities" Oct 09 14:57:15 crc kubenswrapper[4762]: I1009 14:57:15.311319 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="1a814409-d984-429a-b745-ca7981f4653e" containerName="extract-utilities" Oct 09 14:57:15 crc kubenswrapper[4762]: E1009 14:57:15.311330 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="499e5912-ebea-47bd-9a52-2b9d51b7bfe2" containerName="init" Oct 09 14:57:15 crc kubenswrapper[4762]: I1009 14:57:15.311337 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="499e5912-ebea-47bd-9a52-2b9d51b7bfe2" containerName="init" Oct 09 14:57:15 crc kubenswrapper[4762]: I1009 14:57:15.311548 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="499e5912-ebea-47bd-9a52-2b9d51b7bfe2" containerName="dnsmasq-dns" Oct 09 14:57:15 crc kubenswrapper[4762]: I1009 14:57:15.311568 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="1a814409-d984-429a-b745-ca7981f4653e" containerName="registry-server" Oct 09 14:57:15 crc kubenswrapper[4762]: I1009 14:57:15.312280 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-hxl4x" Oct 09 14:57:15 crc kubenswrapper[4762]: I1009 14:57:15.321622 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-hxl4x"] Oct 09 14:57:15 crc kubenswrapper[4762]: I1009 14:57:15.482527 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v7g7c\" (UniqueName: \"kubernetes.io/projected/db783ecf-d652-4d2a-9f3f-f24914722aa8-kube-api-access-v7g7c\") pod \"neutron-db-create-hxl4x\" (UID: \"db783ecf-d652-4d2a-9f3f-f24914722aa8\") " pod="openstack/neutron-db-create-hxl4x" Oct 09 14:57:15 crc kubenswrapper[4762]: I1009 14:57:15.583940 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v7g7c\" (UniqueName: \"kubernetes.io/projected/db783ecf-d652-4d2a-9f3f-f24914722aa8-kube-api-access-v7g7c\") pod \"neutron-db-create-hxl4x\" (UID: \"db783ecf-d652-4d2a-9f3f-f24914722aa8\") " pod="openstack/neutron-db-create-hxl4x" Oct 09 14:57:15 crc kubenswrapper[4762]: I1009 14:57:15.604131 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v7g7c\" (UniqueName: \"kubernetes.io/projected/db783ecf-d652-4d2a-9f3f-f24914722aa8-kube-api-access-v7g7c\") pod \"neutron-db-create-hxl4x\" (UID: \"db783ecf-d652-4d2a-9f3f-f24914722aa8\") " pod="openstack/neutron-db-create-hxl4x" Oct 09 14:57:15 crc kubenswrapper[4762]: I1009 14:57:15.633911 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-hxl4x" Oct 09 14:57:16 crc kubenswrapper[4762]: I1009 14:57:16.075113 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-hxl4x"] Oct 09 14:57:16 crc kubenswrapper[4762]: I1009 14:57:16.641484 4762 generic.go:334] "Generic (PLEG): container finished" podID="db783ecf-d652-4d2a-9f3f-f24914722aa8" containerID="b4e08d50e4999114c3a7c9bfc2ed189dfb9cf8355fb5bc69672bcefcef912162" exitCode=0 Oct 09 14:57:16 crc kubenswrapper[4762]: I1009 14:57:16.641581 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-hxl4x" event={"ID":"db783ecf-d652-4d2a-9f3f-f24914722aa8","Type":"ContainerDied","Data":"b4e08d50e4999114c3a7c9bfc2ed189dfb9cf8355fb5bc69672bcefcef912162"} Oct 09 14:57:16 crc kubenswrapper[4762]: I1009 14:57:16.641887 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-hxl4x" event={"ID":"db783ecf-d652-4d2a-9f3f-f24914722aa8","Type":"ContainerStarted","Data":"c79a6499113e608180d09e92f4c9f180d42d8744862324e8685dbb8e69468b8d"} Oct 09 14:57:17 crc kubenswrapper[4762]: I1009 14:57:17.956622 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-hxl4x" Oct 09 14:57:18 crc kubenswrapper[4762]: I1009 14:57:18.036274 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v7g7c\" (UniqueName: \"kubernetes.io/projected/db783ecf-d652-4d2a-9f3f-f24914722aa8-kube-api-access-v7g7c\") pod \"db783ecf-d652-4d2a-9f3f-f24914722aa8\" (UID: \"db783ecf-d652-4d2a-9f3f-f24914722aa8\") " Oct 09 14:57:18 crc kubenswrapper[4762]: I1009 14:57:18.043358 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/db783ecf-d652-4d2a-9f3f-f24914722aa8-kube-api-access-v7g7c" (OuterVolumeSpecName: "kube-api-access-v7g7c") pod "db783ecf-d652-4d2a-9f3f-f24914722aa8" (UID: "db783ecf-d652-4d2a-9f3f-f24914722aa8"). 
InnerVolumeSpecName "kube-api-access-v7g7c". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 14:57:18 crc kubenswrapper[4762]: I1009 14:57:18.138480 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v7g7c\" (UniqueName: \"kubernetes.io/projected/db783ecf-d652-4d2a-9f3f-f24914722aa8-kube-api-access-v7g7c\") on node \"crc\" DevicePath \"\"" Oct 09 14:57:18 crc kubenswrapper[4762]: I1009 14:57:18.662286 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-hxl4x" event={"ID":"db783ecf-d652-4d2a-9f3f-f24914722aa8","Type":"ContainerDied","Data":"c79a6499113e608180d09e92f4c9f180d42d8744862324e8685dbb8e69468b8d"} Oct 09 14:57:18 crc kubenswrapper[4762]: I1009 14:57:18.662341 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c79a6499113e608180d09e92f4c9f180d42d8744862324e8685dbb8e69468b8d" Oct 09 14:57:18 crc kubenswrapper[4762]: I1009 14:57:18.662412 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-hxl4x" Oct 09 14:57:24 crc kubenswrapper[4762]: I1009 14:57:24.974107 4762 scope.go:117] "RemoveContainer" containerID="c75ef9853f5fe1f6bce3930681cb05899e816bea18fa88b6c554eafd348de735" Oct 09 14:57:24 crc kubenswrapper[4762]: E1009 14:57:24.975200 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 14:57:25 crc kubenswrapper[4762]: I1009 14:57:25.434239 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-11f8-account-create-8r2bz"] Oct 09 14:57:25 crc kubenswrapper[4762]: E1009 14:57:25.435276 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db783ecf-d652-4d2a-9f3f-f24914722aa8" containerName="mariadb-database-create" Oct 09 14:57:25 crc kubenswrapper[4762]: I1009 14:57:25.435388 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="db783ecf-d652-4d2a-9f3f-f24914722aa8" containerName="mariadb-database-create" Oct 09 14:57:25 crc kubenswrapper[4762]: I1009 14:57:25.435779 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="db783ecf-d652-4d2a-9f3f-f24914722aa8" containerName="mariadb-database-create" Oct 09 14:57:25 crc kubenswrapper[4762]: I1009 14:57:25.436910 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-11f8-account-create-8r2bz" Oct 09 14:57:25 crc kubenswrapper[4762]: I1009 14:57:25.441250 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Oct 09 14:57:25 crc kubenswrapper[4762]: I1009 14:57:25.447716 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-11f8-account-create-8r2bz"] Oct 09 14:57:25 crc kubenswrapper[4762]: I1009 14:57:25.612914 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2ff9t\" (UniqueName: \"kubernetes.io/projected/9f26744c-c79a-497e-86e3-671990b949cf-kube-api-access-2ff9t\") pod \"neutron-11f8-account-create-8r2bz\" (UID: \"9f26744c-c79a-497e-86e3-671990b949cf\") " pod="openstack/neutron-11f8-account-create-8r2bz" Oct 09 14:57:25 crc kubenswrapper[4762]: I1009 14:57:25.714597 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2ff9t\" (UniqueName: \"kubernetes.io/projected/9f26744c-c79a-497e-86e3-671990b949cf-kube-api-access-2ff9t\") pod \"neutron-11f8-account-create-8r2bz\" (UID: \"9f26744c-c79a-497e-86e3-671990b949cf\") " pod="openstack/neutron-11f8-account-create-8r2bz" Oct 09 14:57:25 crc kubenswrapper[4762]: I1009 14:57:25.744435 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2ff9t\" (UniqueName: \"kubernetes.io/projected/9f26744c-c79a-497e-86e3-671990b949cf-kube-api-access-2ff9t\") pod \"neutron-11f8-account-create-8r2bz\" (UID: \"9f26744c-c79a-497e-86e3-671990b949cf\") " pod="openstack/neutron-11f8-account-create-8r2bz" Oct 09 14:57:25 crc kubenswrapper[4762]: I1009 14:57:25.764394 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-11f8-account-create-8r2bz" Oct 09 14:57:26 crc kubenswrapper[4762]: I1009 14:57:26.179458 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-11f8-account-create-8r2bz"] Oct 09 14:57:26 crc kubenswrapper[4762]: I1009 14:57:26.746405 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-11f8-account-create-8r2bz" event={"ID":"9f26744c-c79a-497e-86e3-671990b949cf","Type":"ContainerStarted","Data":"027a2f57ca2d6c1c0fc8f14aefdcc01bf5afec285f78e7273dd8fbcea541ec35"} Oct 09 14:57:27 crc kubenswrapper[4762]: E1009 14:57:27.501767 4762 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9f26744c_c79a_497e_86e3_671990b949cf.slice/crio-b099ada6a97df818a3b684c9ee23e0829f81d05f2900ee0dad2f2c9f6aef4121.scope\": RecentStats: unable to find data in memory cache]" Oct 09 14:57:27 crc kubenswrapper[4762]: I1009 14:57:27.755918 4762 generic.go:334] "Generic (PLEG): container finished" podID="9f26744c-c79a-497e-86e3-671990b949cf" containerID="b099ada6a97df818a3b684c9ee23e0829f81d05f2900ee0dad2f2c9f6aef4121" exitCode=0 Oct 09 14:57:27 crc kubenswrapper[4762]: I1009 14:57:27.755968 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-11f8-account-create-8r2bz" event={"ID":"9f26744c-c79a-497e-86e3-671990b949cf","Type":"ContainerDied","Data":"b099ada6a97df818a3b684c9ee23e0829f81d05f2900ee0dad2f2c9f6aef4121"} Oct 09 14:57:29 crc kubenswrapper[4762]: I1009 14:57:29.100161 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-11f8-account-create-8r2bz" Oct 09 14:57:29 crc kubenswrapper[4762]: I1009 14:57:29.196217 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2ff9t\" (UniqueName: \"kubernetes.io/projected/9f26744c-c79a-497e-86e3-671990b949cf-kube-api-access-2ff9t\") pod \"9f26744c-c79a-497e-86e3-671990b949cf\" (UID: \"9f26744c-c79a-497e-86e3-671990b949cf\") " Oct 09 14:57:29 crc kubenswrapper[4762]: I1009 14:57:29.202765 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9f26744c-c79a-497e-86e3-671990b949cf-kube-api-access-2ff9t" (OuterVolumeSpecName: "kube-api-access-2ff9t") pod "9f26744c-c79a-497e-86e3-671990b949cf" (UID: "9f26744c-c79a-497e-86e3-671990b949cf"). InnerVolumeSpecName "kube-api-access-2ff9t". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 14:57:29 crc kubenswrapper[4762]: I1009 14:57:29.299010 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2ff9t\" (UniqueName: \"kubernetes.io/projected/9f26744c-c79a-497e-86e3-671990b949cf-kube-api-access-2ff9t\") on node \"crc\" DevicePath \"\"" Oct 09 14:57:29 crc kubenswrapper[4762]: I1009 14:57:29.781316 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-11f8-account-create-8r2bz" event={"ID":"9f26744c-c79a-497e-86e3-671990b949cf","Type":"ContainerDied","Data":"027a2f57ca2d6c1c0fc8f14aefdcc01bf5afec285f78e7273dd8fbcea541ec35"} Oct 09 14:57:29 crc kubenswrapper[4762]: I1009 14:57:29.781929 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="027a2f57ca2d6c1c0fc8f14aefdcc01bf5afec285f78e7273dd8fbcea541ec35" Oct 09 14:57:29 crc kubenswrapper[4762]: I1009 14:57:29.781378 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-11f8-account-create-8r2bz" Oct 09 14:57:30 crc kubenswrapper[4762]: I1009 14:57:30.674813 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-nd6kt"] Oct 09 14:57:30 crc kubenswrapper[4762]: E1009 14:57:30.675261 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f26744c-c79a-497e-86e3-671990b949cf" containerName="mariadb-account-create" Oct 09 14:57:30 crc kubenswrapper[4762]: I1009 14:57:30.675281 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f26744c-c79a-497e-86e3-671990b949cf" containerName="mariadb-account-create" Oct 09 14:57:30 crc kubenswrapper[4762]: I1009 14:57:30.675509 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="9f26744c-c79a-497e-86e3-671990b949cf" containerName="mariadb-account-create" Oct 09 14:57:30 crc kubenswrapper[4762]: I1009 14:57:30.676287 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-nd6kt" Oct 09 14:57:30 crc kubenswrapper[4762]: I1009 14:57:30.678736 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Oct 09 14:57:30 crc kubenswrapper[4762]: I1009 14:57:30.678950 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-nw48t" Oct 09 14:57:30 crc kubenswrapper[4762]: I1009 14:57:30.679169 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Oct 09 14:57:30 crc kubenswrapper[4762]: I1009 14:57:30.688155 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-nd6kt"] Oct 09 14:57:30 crc kubenswrapper[4762]: I1009 14:57:30.727595 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/e522e18c-0226-4f6b-bb52-74435e991373-config\") pod \"neutron-db-sync-nd6kt\" (UID: \"e522e18c-0226-4f6b-bb52-74435e991373\") " pod="openstack/neutron-db-sync-nd6kt" Oct 09 14:57:30 crc kubenswrapper[4762]: I1009 14:57:30.727681 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e522e18c-0226-4f6b-bb52-74435e991373-combined-ca-bundle\") pod \"neutron-db-sync-nd6kt\" (UID: \"e522e18c-0226-4f6b-bb52-74435e991373\") " pod="openstack/neutron-db-sync-nd6kt" Oct 09 14:57:30 crc kubenswrapper[4762]: I1009 14:57:30.727823 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5hc4b\" (UniqueName: \"kubernetes.io/projected/e522e18c-0226-4f6b-bb52-74435e991373-kube-api-access-5hc4b\") pod \"neutron-db-sync-nd6kt\" (UID: \"e522e18c-0226-4f6b-bb52-74435e991373\") " pod="openstack/neutron-db-sync-nd6kt" Oct 09 14:57:30 crc kubenswrapper[4762]: I1009 14:57:30.829166 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/e522e18c-0226-4f6b-bb52-74435e991373-config\") pod \"neutron-db-sync-nd6kt\" (UID: \"e522e18c-0226-4f6b-bb52-74435e991373\") " pod="openstack/neutron-db-sync-nd6kt" Oct 09 14:57:30 crc kubenswrapper[4762]: I1009 14:57:30.829228 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e522e18c-0226-4f6b-bb52-74435e991373-combined-ca-bundle\") pod \"neutron-db-sync-nd6kt\" (UID: \"e522e18c-0226-4f6b-bb52-74435e991373\") " pod="openstack/neutron-db-sync-nd6kt" Oct 09 14:57:30 crc kubenswrapper[4762]: I1009 14:57:30.829345 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5hc4b\" (UniqueName: \"kubernetes.io/projected/e522e18c-0226-4f6b-bb52-74435e991373-kube-api-access-5hc4b\") pod \"neutron-db-sync-nd6kt\" (UID: \"e522e18c-0226-4f6b-bb52-74435e991373\") " pod="openstack/neutron-db-sync-nd6kt" Oct 09 14:57:30 crc kubenswrapper[4762]: I1009 14:57:30.833815 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e522e18c-0226-4f6b-bb52-74435e991373-combined-ca-bundle\") pod \"neutron-db-sync-nd6kt\" (UID: \"e522e18c-0226-4f6b-bb52-74435e991373\") " pod="openstack/neutron-db-sync-nd6kt" Oct 09 14:57:30 crc kubenswrapper[4762]: I1009 14:57:30.834053 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" 
(UniqueName: \"kubernetes.io/secret/e522e18c-0226-4f6b-bb52-74435e991373-config\") pod \"neutron-db-sync-nd6kt\" (UID: \"e522e18c-0226-4f6b-bb52-74435e991373\") " pod="openstack/neutron-db-sync-nd6kt" Oct 09 14:57:30 crc kubenswrapper[4762]: I1009 14:57:30.857673 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5hc4b\" (UniqueName: \"kubernetes.io/projected/e522e18c-0226-4f6b-bb52-74435e991373-kube-api-access-5hc4b\") pod \"neutron-db-sync-nd6kt\" (UID: \"e522e18c-0226-4f6b-bb52-74435e991373\") " pod="openstack/neutron-db-sync-nd6kt" Oct 09 14:57:30 crc kubenswrapper[4762]: I1009 14:57:30.997187 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-nd6kt" Oct 09 14:57:31 crc kubenswrapper[4762]: W1009 14:57:31.575714 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode522e18c_0226_4f6b_bb52_74435e991373.slice/crio-863a32b0fe429325e19861c94e17a07b77351902eafbaf3681f062f1f074dc7d WatchSource:0}: Error finding container 863a32b0fe429325e19861c94e17a07b77351902eafbaf3681f062f1f074dc7d: Status 404 returned error can't find the container with id 863a32b0fe429325e19861c94e17a07b77351902eafbaf3681f062f1f074dc7d Oct 09 14:57:31 crc kubenswrapper[4762]: I1009 14:57:31.577799 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-nd6kt"] Oct 09 14:57:31 crc kubenswrapper[4762]: I1009 14:57:31.805598 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-nd6kt" event={"ID":"e522e18c-0226-4f6b-bb52-74435e991373","Type":"ContainerStarted","Data":"9c5462953cd08ce2e9fa02aa83040fac8cf875c8a7ad10f5ba77f43c1c4a623a"} Oct 09 14:57:31 crc kubenswrapper[4762]: I1009 14:57:31.806011 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-nd6kt" event={"ID":"e522e18c-0226-4f6b-bb52-74435e991373","Type":"ContainerStarted","Data":"863a32b0fe429325e19861c94e17a07b77351902eafbaf3681f062f1f074dc7d"} Oct 09 14:57:31 crc kubenswrapper[4762]: I1009 14:57:31.828188 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-nd6kt" podStartSLOduration=1.8281688040000001 podStartE2EDuration="1.828168804s" podCreationTimestamp="2025-10-09 14:57:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 14:57:31.821556511 +0000 UTC m=+5527.595347560" watchObservedRunningTime="2025-10-09 14:57:31.828168804 +0000 UTC m=+5527.601959843" Oct 09 14:57:36 crc kubenswrapper[4762]: I1009 14:57:36.855254 4762 generic.go:334] "Generic (PLEG): container finished" podID="e522e18c-0226-4f6b-bb52-74435e991373" containerID="9c5462953cd08ce2e9fa02aa83040fac8cf875c8a7ad10f5ba77f43c1c4a623a" exitCode=0 Oct 09 14:57:36 crc kubenswrapper[4762]: I1009 14:57:36.855419 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-nd6kt" event={"ID":"e522e18c-0226-4f6b-bb52-74435e991373","Type":"ContainerDied","Data":"9c5462953cd08ce2e9fa02aa83040fac8cf875c8a7ad10f5ba77f43c1c4a623a"} Oct 09 14:57:38 crc kubenswrapper[4762]: I1009 14:57:38.163278 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-nd6kt" Oct 09 14:57:38 crc kubenswrapper[4762]: I1009 14:57:38.279064 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/e522e18c-0226-4f6b-bb52-74435e991373-config\") pod \"e522e18c-0226-4f6b-bb52-74435e991373\" (UID: \"e522e18c-0226-4f6b-bb52-74435e991373\") " Oct 09 14:57:38 crc kubenswrapper[4762]: I1009 14:57:38.279198 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5hc4b\" (UniqueName: \"kubernetes.io/projected/e522e18c-0226-4f6b-bb52-74435e991373-kube-api-access-5hc4b\") pod \"e522e18c-0226-4f6b-bb52-74435e991373\" (UID: \"e522e18c-0226-4f6b-bb52-74435e991373\") " Oct 09 14:57:38 crc kubenswrapper[4762]: I1009 14:57:38.279281 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e522e18c-0226-4f6b-bb52-74435e991373-combined-ca-bundle\") pod \"e522e18c-0226-4f6b-bb52-74435e991373\" (UID: \"e522e18c-0226-4f6b-bb52-74435e991373\") " Oct 09 14:57:38 crc kubenswrapper[4762]: I1009 14:57:38.284954 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e522e18c-0226-4f6b-bb52-74435e991373-kube-api-access-5hc4b" (OuterVolumeSpecName: "kube-api-access-5hc4b") pod "e522e18c-0226-4f6b-bb52-74435e991373" (UID: "e522e18c-0226-4f6b-bb52-74435e991373"). InnerVolumeSpecName "kube-api-access-5hc4b". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 14:57:38 crc kubenswrapper[4762]: I1009 14:57:38.302912 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e522e18c-0226-4f6b-bb52-74435e991373-config" (OuterVolumeSpecName: "config") pod "e522e18c-0226-4f6b-bb52-74435e991373" (UID: "e522e18c-0226-4f6b-bb52-74435e991373"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 14:57:38 crc kubenswrapper[4762]: I1009 14:57:38.305061 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e522e18c-0226-4f6b-bb52-74435e991373-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e522e18c-0226-4f6b-bb52-74435e991373" (UID: "e522e18c-0226-4f6b-bb52-74435e991373"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 14:57:38 crc kubenswrapper[4762]: I1009 14:57:38.381739 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e522e18c-0226-4f6b-bb52-74435e991373-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 14:57:38 crc kubenswrapper[4762]: I1009 14:57:38.381781 4762 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/e522e18c-0226-4f6b-bb52-74435e991373-config\") on node \"crc\" DevicePath \"\"" Oct 09 14:57:38 crc kubenswrapper[4762]: I1009 14:57:38.381792 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5hc4b\" (UniqueName: \"kubernetes.io/projected/e522e18c-0226-4f6b-bb52-74435e991373-kube-api-access-5hc4b\") on node \"crc\" DevicePath \"\"" Oct 09 14:57:38 crc kubenswrapper[4762]: I1009 14:57:38.874937 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-nd6kt" event={"ID":"e522e18c-0226-4f6b-bb52-74435e991373","Type":"ContainerDied","Data":"863a32b0fe429325e19861c94e17a07b77351902eafbaf3681f062f1f074dc7d"} Oct 09 14:57:38 crc kubenswrapper[4762]: I1009 14:57:38.875533 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="863a32b0fe429325e19861c94e17a07b77351902eafbaf3681f062f1f074dc7d" Oct 09 14:57:38 crc kubenswrapper[4762]: I1009 14:57:38.875080 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-nd6kt" Oct 09 14:57:38 crc kubenswrapper[4762]: I1009 14:57:38.966337 4762 scope.go:117] "RemoveContainer" containerID="c75ef9853f5fe1f6bce3930681cb05899e816bea18fa88b6c554eafd348de735" Oct 09 14:57:38 crc kubenswrapper[4762]: E1009 14:57:38.966540 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 14:57:39 crc kubenswrapper[4762]: I1009 14:57:39.110120 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6fd4468769-4ghqc"] Oct 09 14:57:39 crc kubenswrapper[4762]: E1009 14:57:39.110708 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e522e18c-0226-4f6b-bb52-74435e991373" containerName="neutron-db-sync" Oct 09 14:57:39 crc kubenswrapper[4762]: I1009 14:57:39.110736 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="e522e18c-0226-4f6b-bb52-74435e991373" containerName="neutron-db-sync" Oct 09 14:57:39 crc kubenswrapper[4762]: I1009 14:57:39.110981 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="e522e18c-0226-4f6b-bb52-74435e991373" containerName="neutron-db-sync" Oct 09 14:57:39 crc kubenswrapper[4762]: I1009 14:57:39.112398 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6fd4468769-4ghqc" Oct 09 14:57:39 crc kubenswrapper[4762]: I1009 14:57:39.142666 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6fd4468769-4ghqc"] Oct 09 14:57:39 crc kubenswrapper[4762]: I1009 14:57:39.194438 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8dbb926d-7b4f-46f7-801b-5c02b1c6becd-dns-svc\") pod \"dnsmasq-dns-6fd4468769-4ghqc\" (UID: \"8dbb926d-7b4f-46f7-801b-5c02b1c6becd\") " pod="openstack/dnsmasq-dns-6fd4468769-4ghqc" Oct 09 14:57:39 crc kubenswrapper[4762]: I1009 14:57:39.194505 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8dbb926d-7b4f-46f7-801b-5c02b1c6becd-config\") pod \"dnsmasq-dns-6fd4468769-4ghqc\" (UID: \"8dbb926d-7b4f-46f7-801b-5c02b1c6becd\") " pod="openstack/dnsmasq-dns-6fd4468769-4ghqc" Oct 09 14:57:39 crc kubenswrapper[4762]: I1009 14:57:39.194545 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8dbb926d-7b4f-46f7-801b-5c02b1c6becd-ovsdbserver-sb\") pod \"dnsmasq-dns-6fd4468769-4ghqc\" (UID: \"8dbb926d-7b4f-46f7-801b-5c02b1c6becd\") " pod="openstack/dnsmasq-dns-6fd4468769-4ghqc" Oct 09 14:57:39 crc kubenswrapper[4762]: I1009 14:57:39.194581 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8dbb926d-7b4f-46f7-801b-5c02b1c6becd-ovsdbserver-nb\") pod \"dnsmasq-dns-6fd4468769-4ghqc\" (UID: \"8dbb926d-7b4f-46f7-801b-5c02b1c6becd\") " pod="openstack/dnsmasq-dns-6fd4468769-4ghqc" Oct 09 14:57:39 crc kubenswrapper[4762]: I1009 14:57:39.194610 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-25hnz\" (UniqueName: \"kubernetes.io/projected/8dbb926d-7b4f-46f7-801b-5c02b1c6becd-kube-api-access-25hnz\") pod \"dnsmasq-dns-6fd4468769-4ghqc\" (UID: \"8dbb926d-7b4f-46f7-801b-5c02b1c6becd\") " pod="openstack/dnsmasq-dns-6fd4468769-4ghqc" Oct 09 14:57:39 crc kubenswrapper[4762]: I1009 14:57:39.268179 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-7c86b5d68f-bbzc9"] Oct 09 14:57:39 crc kubenswrapper[4762]: I1009 14:57:39.269751 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-7c86b5d68f-bbzc9" Oct 09 14:57:39 crc kubenswrapper[4762]: I1009 14:57:39.272063 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Oct 09 14:57:39 crc kubenswrapper[4762]: I1009 14:57:39.272264 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-nw48t" Oct 09 14:57:39 crc kubenswrapper[4762]: I1009 14:57:39.272434 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Oct 09 14:57:39 crc kubenswrapper[4762]: I1009 14:57:39.293465 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-7c86b5d68f-bbzc9"] Oct 09 14:57:39 crc kubenswrapper[4762]: I1009 14:57:39.297557 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8dbb926d-7b4f-46f7-801b-5c02b1c6becd-ovsdbserver-nb\") pod \"dnsmasq-dns-6fd4468769-4ghqc\" (UID: \"8dbb926d-7b4f-46f7-801b-5c02b1c6becd\") " pod="openstack/dnsmasq-dns-6fd4468769-4ghqc" Oct 09 14:57:39 crc kubenswrapper[4762]: I1009 14:57:39.297602 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/28024491-18be-45b0-b38f-a5d3cc418127-httpd-config\") pod \"neutron-7c86b5d68f-bbzc9\" (UID: \"28024491-18be-45b0-b38f-a5d3cc418127\") " pod="openstack/neutron-7c86b5d68f-bbzc9" Oct 09 14:57:39 crc kubenswrapper[4762]: I1009 14:57:39.297680 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-25hnz\" (UniqueName: \"kubernetes.io/projected/8dbb926d-7b4f-46f7-801b-5c02b1c6becd-kube-api-access-25hnz\") pod \"dnsmasq-dns-6fd4468769-4ghqc\" (UID: \"8dbb926d-7b4f-46f7-801b-5c02b1c6becd\") " pod="openstack/dnsmasq-dns-6fd4468769-4ghqc" Oct 09 14:57:39 crc kubenswrapper[4762]: I1009 14:57:39.297743 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8dbb926d-7b4f-46f7-801b-5c02b1c6becd-dns-svc\") pod \"dnsmasq-dns-6fd4468769-4ghqc\" (UID: \"8dbb926d-7b4f-46f7-801b-5c02b1c6becd\") " pod="openstack/dnsmasq-dns-6fd4468769-4ghqc" Oct 09 14:57:39 crc kubenswrapper[4762]: I1009 14:57:39.297764 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2gn7c\" (UniqueName: \"kubernetes.io/projected/28024491-18be-45b0-b38f-a5d3cc418127-kube-api-access-2gn7c\") pod \"neutron-7c86b5d68f-bbzc9\" (UID: \"28024491-18be-45b0-b38f-a5d3cc418127\") " pod="openstack/neutron-7c86b5d68f-bbzc9" Oct 09 14:57:39 crc kubenswrapper[4762]: I1009 14:57:39.297786 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/28024491-18be-45b0-b38f-a5d3cc418127-config\") pod \"neutron-7c86b5d68f-bbzc9\" (UID: \"28024491-18be-45b0-b38f-a5d3cc418127\") " pod="openstack/neutron-7c86b5d68f-bbzc9" Oct 09 14:57:39 crc kubenswrapper[4762]: I1009 14:57:39.297823 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8dbb926d-7b4f-46f7-801b-5c02b1c6becd-config\") pod \"dnsmasq-dns-6fd4468769-4ghqc\" (UID: \"8dbb926d-7b4f-46f7-801b-5c02b1c6becd\") " pod="openstack/dnsmasq-dns-6fd4468769-4ghqc" Oct 09 14:57:39 crc kubenswrapper[4762]: I1009 14:57:39.297856 4762 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8dbb926d-7b4f-46f7-801b-5c02b1c6becd-ovsdbserver-sb\") pod \"dnsmasq-dns-6fd4468769-4ghqc\" (UID: \"8dbb926d-7b4f-46f7-801b-5c02b1c6becd\") " pod="openstack/dnsmasq-dns-6fd4468769-4ghqc" Oct 09 14:57:39 crc kubenswrapper[4762]: I1009 14:57:39.297882 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/28024491-18be-45b0-b38f-a5d3cc418127-combined-ca-bundle\") pod \"neutron-7c86b5d68f-bbzc9\" (UID: \"28024491-18be-45b0-b38f-a5d3cc418127\") " pod="openstack/neutron-7c86b5d68f-bbzc9" Oct 09 14:57:39 crc kubenswrapper[4762]: I1009 14:57:39.298703 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8dbb926d-7b4f-46f7-801b-5c02b1c6becd-ovsdbserver-nb\") pod \"dnsmasq-dns-6fd4468769-4ghqc\" (UID: \"8dbb926d-7b4f-46f7-801b-5c02b1c6becd\") " pod="openstack/dnsmasq-dns-6fd4468769-4ghqc" Oct 09 14:57:39 crc kubenswrapper[4762]: I1009 14:57:39.299454 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8dbb926d-7b4f-46f7-801b-5c02b1c6becd-dns-svc\") pod \"dnsmasq-dns-6fd4468769-4ghqc\" (UID: \"8dbb926d-7b4f-46f7-801b-5c02b1c6becd\") " pod="openstack/dnsmasq-dns-6fd4468769-4ghqc" Oct 09 14:57:39 crc kubenswrapper[4762]: I1009 14:57:39.301035 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8dbb926d-7b4f-46f7-801b-5c02b1c6becd-ovsdbserver-sb\") pod \"dnsmasq-dns-6fd4468769-4ghqc\" (UID: \"8dbb926d-7b4f-46f7-801b-5c02b1c6becd\") " pod="openstack/dnsmasq-dns-6fd4468769-4ghqc" Oct 09 14:57:39 crc kubenswrapper[4762]: I1009 14:57:39.301405 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8dbb926d-7b4f-46f7-801b-5c02b1c6becd-config\") pod \"dnsmasq-dns-6fd4468769-4ghqc\" (UID: \"8dbb926d-7b4f-46f7-801b-5c02b1c6becd\") " pod="openstack/dnsmasq-dns-6fd4468769-4ghqc" Oct 09 14:57:39 crc kubenswrapper[4762]: I1009 14:57:39.317429 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-25hnz\" (UniqueName: \"kubernetes.io/projected/8dbb926d-7b4f-46f7-801b-5c02b1c6becd-kube-api-access-25hnz\") pod \"dnsmasq-dns-6fd4468769-4ghqc\" (UID: \"8dbb926d-7b4f-46f7-801b-5c02b1c6becd\") " pod="openstack/dnsmasq-dns-6fd4468769-4ghqc" Oct 09 14:57:39 crc kubenswrapper[4762]: I1009 14:57:39.398770 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/28024491-18be-45b0-b38f-a5d3cc418127-config\") pod \"neutron-7c86b5d68f-bbzc9\" (UID: \"28024491-18be-45b0-b38f-a5d3cc418127\") " pod="openstack/neutron-7c86b5d68f-bbzc9" Oct 09 14:57:39 crc kubenswrapper[4762]: I1009 14:57:39.399078 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/28024491-18be-45b0-b38f-a5d3cc418127-combined-ca-bundle\") pod \"neutron-7c86b5d68f-bbzc9\" (UID: \"28024491-18be-45b0-b38f-a5d3cc418127\") " pod="openstack/neutron-7c86b5d68f-bbzc9" Oct 09 14:57:39 crc kubenswrapper[4762]: I1009 14:57:39.399105 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: 
\"kubernetes.io/secret/28024491-18be-45b0-b38f-a5d3cc418127-httpd-config\") pod \"neutron-7c86b5d68f-bbzc9\" (UID: \"28024491-18be-45b0-b38f-a5d3cc418127\") " pod="openstack/neutron-7c86b5d68f-bbzc9" Oct 09 14:57:39 crc kubenswrapper[4762]: I1009 14:57:39.399175 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2gn7c\" (UniqueName: \"kubernetes.io/projected/28024491-18be-45b0-b38f-a5d3cc418127-kube-api-access-2gn7c\") pod \"neutron-7c86b5d68f-bbzc9\" (UID: \"28024491-18be-45b0-b38f-a5d3cc418127\") " pod="openstack/neutron-7c86b5d68f-bbzc9" Oct 09 14:57:39 crc kubenswrapper[4762]: I1009 14:57:39.403712 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/28024491-18be-45b0-b38f-a5d3cc418127-httpd-config\") pod \"neutron-7c86b5d68f-bbzc9\" (UID: \"28024491-18be-45b0-b38f-a5d3cc418127\") " pod="openstack/neutron-7c86b5d68f-bbzc9" Oct 09 14:57:39 crc kubenswrapper[4762]: I1009 14:57:39.404859 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/28024491-18be-45b0-b38f-a5d3cc418127-combined-ca-bundle\") pod \"neutron-7c86b5d68f-bbzc9\" (UID: \"28024491-18be-45b0-b38f-a5d3cc418127\") " pod="openstack/neutron-7c86b5d68f-bbzc9" Oct 09 14:57:39 crc kubenswrapper[4762]: I1009 14:57:39.406495 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/28024491-18be-45b0-b38f-a5d3cc418127-config\") pod \"neutron-7c86b5d68f-bbzc9\" (UID: \"28024491-18be-45b0-b38f-a5d3cc418127\") " pod="openstack/neutron-7c86b5d68f-bbzc9" Oct 09 14:57:39 crc kubenswrapper[4762]: I1009 14:57:39.420325 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2gn7c\" (UniqueName: \"kubernetes.io/projected/28024491-18be-45b0-b38f-a5d3cc418127-kube-api-access-2gn7c\") pod \"neutron-7c86b5d68f-bbzc9\" (UID: \"28024491-18be-45b0-b38f-a5d3cc418127\") " pod="openstack/neutron-7c86b5d68f-bbzc9" Oct 09 14:57:39 crc kubenswrapper[4762]: I1009 14:57:39.432418 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6fd4468769-4ghqc" Oct 09 14:57:39 crc kubenswrapper[4762]: I1009 14:57:39.591515 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-7c86b5d68f-bbzc9" Oct 09 14:57:39 crc kubenswrapper[4762]: I1009 14:57:39.890689 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6fd4468769-4ghqc"] Oct 09 14:57:40 crc kubenswrapper[4762]: I1009 14:57:40.146720 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-7c86b5d68f-bbzc9"] Oct 09 14:57:40 crc kubenswrapper[4762]: W1009 14:57:40.178486 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod28024491_18be_45b0_b38f_a5d3cc418127.slice/crio-686f11be32d02ef8db5ef5d6f73958fe5a2e83d7f629487431c613e303d3e4b7 WatchSource:0}: Error finding container 686f11be32d02ef8db5ef5d6f73958fe5a2e83d7f629487431c613e303d3e4b7: Status 404 returned error can't find the container with id 686f11be32d02ef8db5ef5d6f73958fe5a2e83d7f629487431c613e303d3e4b7 Oct 09 14:57:40 crc kubenswrapper[4762]: I1009 14:57:40.891088 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7c86b5d68f-bbzc9" event={"ID":"28024491-18be-45b0-b38f-a5d3cc418127","Type":"ContainerStarted","Data":"f37225ec13db9e368d3e2afd5276a3bdf30cfcb7bde357c2d5fbf8f5d0f11635"} Oct 09 14:57:40 crc kubenswrapper[4762]: I1009 14:57:40.892446 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7c86b5d68f-bbzc9" event={"ID":"28024491-18be-45b0-b38f-a5d3cc418127","Type":"ContainerStarted","Data":"e71bbddf7f2064d66c49b52d2bcaef193f66770ad1bb905e6d1180eb6c273ba5"} Oct 09 14:57:40 crc kubenswrapper[4762]: I1009 14:57:40.892589 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-7c86b5d68f-bbzc9" Oct 09 14:57:40 crc kubenswrapper[4762]: I1009 14:57:40.892683 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7c86b5d68f-bbzc9" event={"ID":"28024491-18be-45b0-b38f-a5d3cc418127","Type":"ContainerStarted","Data":"686f11be32d02ef8db5ef5d6f73958fe5a2e83d7f629487431c613e303d3e4b7"} Oct 09 14:57:40 crc kubenswrapper[4762]: I1009 14:57:40.893126 4762 generic.go:334] "Generic (PLEG): container finished" podID="8dbb926d-7b4f-46f7-801b-5c02b1c6becd" containerID="b39268e6efa97c31a30e20380f1a13b38b7fe6169103769591f3e5c125c9bf12" exitCode=0 Oct 09 14:57:40 crc kubenswrapper[4762]: I1009 14:57:40.893174 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6fd4468769-4ghqc" event={"ID":"8dbb926d-7b4f-46f7-801b-5c02b1c6becd","Type":"ContainerDied","Data":"b39268e6efa97c31a30e20380f1a13b38b7fe6169103769591f3e5c125c9bf12"} Oct 09 14:57:40 crc kubenswrapper[4762]: I1009 14:57:40.893228 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6fd4468769-4ghqc" event={"ID":"8dbb926d-7b4f-46f7-801b-5c02b1c6becd","Type":"ContainerStarted","Data":"21f76590ad653e6ac455d36ddae3749ef3122d8596e258992e01b1c8f75ade6f"} Oct 09 14:57:40 crc kubenswrapper[4762]: I1009 14:57:40.910655 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-7c86b5d68f-bbzc9" podStartSLOduration=1.910641322 podStartE2EDuration="1.910641322s" podCreationTimestamp="2025-10-09 14:57:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 14:57:40.909236475 +0000 UTC m=+5536.683027534" watchObservedRunningTime="2025-10-09 14:57:40.910641322 +0000 UTC m=+5536.684432361" Oct 09 14:57:41 crc kubenswrapper[4762]: I1009 14:57:41.902559 4762 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6fd4468769-4ghqc" event={"ID":"8dbb926d-7b4f-46f7-801b-5c02b1c6becd","Type":"ContainerStarted","Data":"f86933fff62c2da56ad3b5bc0f39ea605a80029b09cbd911eaaec1cd8ffbe60b"} Oct 09 14:57:41 crc kubenswrapper[4762]: I1009 14:57:41.924889 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6fd4468769-4ghqc" podStartSLOduration=2.924863654 podStartE2EDuration="2.924863654s" podCreationTimestamp="2025-10-09 14:57:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 14:57:41.917772479 +0000 UTC m=+5537.691563518" watchObservedRunningTime="2025-10-09 14:57:41.924863654 +0000 UTC m=+5537.698654703" Oct 09 14:57:42 crc kubenswrapper[4762]: I1009 14:57:42.911274 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6fd4468769-4ghqc" Oct 09 14:57:49 crc kubenswrapper[4762]: I1009 14:57:49.434678 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6fd4468769-4ghqc" Oct 09 14:57:49 crc kubenswrapper[4762]: I1009 14:57:49.507200 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-59d8787555-wbgq2"] Oct 09 14:57:49 crc kubenswrapper[4762]: I1009 14:57:49.507452 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-59d8787555-wbgq2" podUID="69189058-9f42-4d05-9a54-c46dbb461485" containerName="dnsmasq-dns" containerID="cri-o://8f5fc5be820d8e647d109f6500555d3c6f872a5b9ac944d44eacecd99386d309" gracePeriod=10 Oct 09 14:57:49 crc kubenswrapper[4762]: I1009 14:57:49.965373 4762 scope.go:117] "RemoveContainer" containerID="c75ef9853f5fe1f6bce3930681cb05899e816bea18fa88b6c554eafd348de735" Oct 09 14:57:49 crc kubenswrapper[4762]: E1009 14:57:49.966001 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 14:57:49 crc kubenswrapper[4762]: I1009 14:57:49.977698 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-59d8787555-wbgq2" Oct 09 14:57:49 crc kubenswrapper[4762]: I1009 14:57:49.978139 4762 generic.go:334] "Generic (PLEG): container finished" podID="69189058-9f42-4d05-9a54-c46dbb461485" containerID="8f5fc5be820d8e647d109f6500555d3c6f872a5b9ac944d44eacecd99386d309" exitCode=0 Oct 09 14:57:49 crc kubenswrapper[4762]: I1009 14:57:49.978180 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59d8787555-wbgq2" event={"ID":"69189058-9f42-4d05-9a54-c46dbb461485","Type":"ContainerDied","Data":"8f5fc5be820d8e647d109f6500555d3c6f872a5b9ac944d44eacecd99386d309"} Oct 09 14:57:49 crc kubenswrapper[4762]: I1009 14:57:49.978220 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59d8787555-wbgq2" event={"ID":"69189058-9f42-4d05-9a54-c46dbb461485","Type":"ContainerDied","Data":"497c2ccfaef83525de1c4616bbb4732a745797369e7aec8da0aa7e94d81d6dad"} Oct 09 14:57:49 crc kubenswrapper[4762]: I1009 14:57:49.978235 4762 scope.go:117] "RemoveContainer" containerID="8f5fc5be820d8e647d109f6500555d3c6f872a5b9ac944d44eacecd99386d309" Oct 09 14:57:49 crc kubenswrapper[4762]: I1009 14:57:49.990514 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/69189058-9f42-4d05-9a54-c46dbb461485-dns-svc\") pod \"69189058-9f42-4d05-9a54-c46dbb461485\" (UID: \"69189058-9f42-4d05-9a54-c46dbb461485\") " Oct 09 14:57:49 crc kubenswrapper[4762]: I1009 14:57:49.990581 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jft4c\" (UniqueName: \"kubernetes.io/projected/69189058-9f42-4d05-9a54-c46dbb461485-kube-api-access-jft4c\") pod \"69189058-9f42-4d05-9a54-c46dbb461485\" (UID: \"69189058-9f42-4d05-9a54-c46dbb461485\") " Oct 09 14:57:49 crc kubenswrapper[4762]: I1009 14:57:49.990657 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/69189058-9f42-4d05-9a54-c46dbb461485-ovsdbserver-sb\") pod \"69189058-9f42-4d05-9a54-c46dbb461485\" (UID: \"69189058-9f42-4d05-9a54-c46dbb461485\") " Oct 09 14:57:49 crc kubenswrapper[4762]: I1009 14:57:49.990688 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/69189058-9f42-4d05-9a54-c46dbb461485-ovsdbserver-nb\") pod \"69189058-9f42-4d05-9a54-c46dbb461485\" (UID: \"69189058-9f42-4d05-9a54-c46dbb461485\") " Oct 09 14:57:49 crc kubenswrapper[4762]: I1009 14:57:49.990855 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/69189058-9f42-4d05-9a54-c46dbb461485-config\") pod \"69189058-9f42-4d05-9a54-c46dbb461485\" (UID: \"69189058-9f42-4d05-9a54-c46dbb461485\") " Oct 09 14:57:50 crc kubenswrapper[4762]: I1009 14:57:50.002321 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/69189058-9f42-4d05-9a54-c46dbb461485-kube-api-access-jft4c" (OuterVolumeSpecName: "kube-api-access-jft4c") pod "69189058-9f42-4d05-9a54-c46dbb461485" (UID: "69189058-9f42-4d05-9a54-c46dbb461485"). InnerVolumeSpecName "kube-api-access-jft4c". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 14:57:50 crc kubenswrapper[4762]: I1009 14:57:50.015274 4762 scope.go:117] "RemoveContainer" containerID="3428f4984cb38000a5dad0b26963c4f82bed1b432b0655f009b7a24249669fa8" Oct 09 14:57:50 crc kubenswrapper[4762]: I1009 14:57:50.051728 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/69189058-9f42-4d05-9a54-c46dbb461485-config" (OuterVolumeSpecName: "config") pod "69189058-9f42-4d05-9a54-c46dbb461485" (UID: "69189058-9f42-4d05-9a54-c46dbb461485"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 14:57:50 crc kubenswrapper[4762]: I1009 14:57:50.061317 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/69189058-9f42-4d05-9a54-c46dbb461485-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "69189058-9f42-4d05-9a54-c46dbb461485" (UID: "69189058-9f42-4d05-9a54-c46dbb461485"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 14:57:50 crc kubenswrapper[4762]: I1009 14:57:50.067565 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/69189058-9f42-4d05-9a54-c46dbb461485-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "69189058-9f42-4d05-9a54-c46dbb461485" (UID: "69189058-9f42-4d05-9a54-c46dbb461485"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 14:57:50 crc kubenswrapper[4762]: I1009 14:57:50.091422 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/69189058-9f42-4d05-9a54-c46dbb461485-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "69189058-9f42-4d05-9a54-c46dbb461485" (UID: "69189058-9f42-4d05-9a54-c46dbb461485"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 14:57:50 crc kubenswrapper[4762]: I1009 14:57:50.093081 4762 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/69189058-9f42-4d05-9a54-c46dbb461485-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 09 14:57:50 crc kubenswrapper[4762]: I1009 14:57:50.093111 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jft4c\" (UniqueName: \"kubernetes.io/projected/69189058-9f42-4d05-9a54-c46dbb461485-kube-api-access-jft4c\") on node \"crc\" DevicePath \"\"" Oct 09 14:57:50 crc kubenswrapper[4762]: I1009 14:57:50.093124 4762 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/69189058-9f42-4d05-9a54-c46dbb461485-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 09 14:57:50 crc kubenswrapper[4762]: I1009 14:57:50.093134 4762 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/69189058-9f42-4d05-9a54-c46dbb461485-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 09 14:57:50 crc kubenswrapper[4762]: I1009 14:57:50.093142 4762 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/69189058-9f42-4d05-9a54-c46dbb461485-config\") on node \"crc\" DevicePath \"\"" Oct 09 14:57:50 crc kubenswrapper[4762]: I1009 14:57:50.112625 4762 scope.go:117] "RemoveContainer" containerID="8f5fc5be820d8e647d109f6500555d3c6f872a5b9ac944d44eacecd99386d309" Oct 09 14:57:50 crc kubenswrapper[4762]: E1009 14:57:50.113063 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8f5fc5be820d8e647d109f6500555d3c6f872a5b9ac944d44eacecd99386d309\": container with ID starting with 8f5fc5be820d8e647d109f6500555d3c6f872a5b9ac944d44eacecd99386d309 not found: ID does not exist" containerID="8f5fc5be820d8e647d109f6500555d3c6f872a5b9ac944d44eacecd99386d309" Oct 09 14:57:50 crc kubenswrapper[4762]: I1009 14:57:50.113103 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8f5fc5be820d8e647d109f6500555d3c6f872a5b9ac944d44eacecd99386d309"} err="failed to get container status \"8f5fc5be820d8e647d109f6500555d3c6f872a5b9ac944d44eacecd99386d309\": rpc error: code = NotFound desc = could not find container \"8f5fc5be820d8e647d109f6500555d3c6f872a5b9ac944d44eacecd99386d309\": container with ID starting with 8f5fc5be820d8e647d109f6500555d3c6f872a5b9ac944d44eacecd99386d309 not found: ID does not exist" Oct 09 14:57:50 crc kubenswrapper[4762]: I1009 14:57:50.113135 4762 scope.go:117] "RemoveContainer" containerID="3428f4984cb38000a5dad0b26963c4f82bed1b432b0655f009b7a24249669fa8" Oct 09 14:57:50 crc kubenswrapper[4762]: E1009 14:57:50.115223 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3428f4984cb38000a5dad0b26963c4f82bed1b432b0655f009b7a24249669fa8\": container with ID starting with 3428f4984cb38000a5dad0b26963c4f82bed1b432b0655f009b7a24249669fa8 not found: ID does not exist" containerID="3428f4984cb38000a5dad0b26963c4f82bed1b432b0655f009b7a24249669fa8" Oct 09 14:57:50 crc kubenswrapper[4762]: I1009 14:57:50.115273 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3428f4984cb38000a5dad0b26963c4f82bed1b432b0655f009b7a24249669fa8"} err="failed to get container status 
\"3428f4984cb38000a5dad0b26963c4f82bed1b432b0655f009b7a24249669fa8\": rpc error: code = NotFound desc = could not find container \"3428f4984cb38000a5dad0b26963c4f82bed1b432b0655f009b7a24249669fa8\": container with ID starting with 3428f4984cb38000a5dad0b26963c4f82bed1b432b0655f009b7a24249669fa8 not found: ID does not exist" Oct 09 14:57:50 crc kubenswrapper[4762]: I1009 14:57:50.991407 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-59d8787555-wbgq2" Oct 09 14:57:51 crc kubenswrapper[4762]: I1009 14:57:51.025048 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-59d8787555-wbgq2"] Oct 09 14:57:51 crc kubenswrapper[4762]: I1009 14:57:51.039057 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-59d8787555-wbgq2"] Oct 09 14:57:52 crc kubenswrapper[4762]: I1009 14:57:52.977919 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="69189058-9f42-4d05-9a54-c46dbb461485" path="/var/lib/kubelet/pods/69189058-9f42-4d05-9a54-c46dbb461485/volumes" Oct 09 14:57:54 crc kubenswrapper[4762]: I1009 14:57:54.947475 4762 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-59d8787555-wbgq2" podUID="69189058-9f42-4d05-9a54-c46dbb461485" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.1.30:5353: i/o timeout" Oct 09 14:58:02 crc kubenswrapper[4762]: I1009 14:58:02.965235 4762 scope.go:117] "RemoveContainer" containerID="c75ef9853f5fe1f6bce3930681cb05899e816bea18fa88b6c554eafd348de735" Oct 09 14:58:02 crc kubenswrapper[4762]: E1009 14:58:02.966823 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 14:58:04 crc kubenswrapper[4762]: I1009 14:58:04.599091 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-qw482"] Oct 09 14:58:04 crc kubenswrapper[4762]: E1009 14:58:04.599822 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="69189058-9f42-4d05-9a54-c46dbb461485" containerName="init" Oct 09 14:58:04 crc kubenswrapper[4762]: I1009 14:58:04.599837 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="69189058-9f42-4d05-9a54-c46dbb461485" containerName="init" Oct 09 14:58:04 crc kubenswrapper[4762]: E1009 14:58:04.599875 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="69189058-9f42-4d05-9a54-c46dbb461485" containerName="dnsmasq-dns" Oct 09 14:58:04 crc kubenswrapper[4762]: I1009 14:58:04.599881 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="69189058-9f42-4d05-9a54-c46dbb461485" containerName="dnsmasq-dns" Oct 09 14:58:04 crc kubenswrapper[4762]: I1009 14:58:04.600036 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="69189058-9f42-4d05-9a54-c46dbb461485" containerName="dnsmasq-dns" Oct 09 14:58:04 crc kubenswrapper[4762]: I1009 14:58:04.601472 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-qw482" Oct 09 14:58:04 crc kubenswrapper[4762]: I1009 14:58:04.613057 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-qw482"] Oct 09 14:58:04 crc kubenswrapper[4762]: I1009 14:58:04.771625 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2c5e97af-42da-4ccc-b010-ce6a7fd4aab6-catalog-content\") pod \"certified-operators-qw482\" (UID: \"2c5e97af-42da-4ccc-b010-ce6a7fd4aab6\") " pod="openshift-marketplace/certified-operators-qw482" Oct 09 14:58:04 crc kubenswrapper[4762]: I1009 14:58:04.771771 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g4hkb\" (UniqueName: \"kubernetes.io/projected/2c5e97af-42da-4ccc-b010-ce6a7fd4aab6-kube-api-access-g4hkb\") pod \"certified-operators-qw482\" (UID: \"2c5e97af-42da-4ccc-b010-ce6a7fd4aab6\") " pod="openshift-marketplace/certified-operators-qw482" Oct 09 14:58:04 crc kubenswrapper[4762]: I1009 14:58:04.771796 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2c5e97af-42da-4ccc-b010-ce6a7fd4aab6-utilities\") pod \"certified-operators-qw482\" (UID: \"2c5e97af-42da-4ccc-b010-ce6a7fd4aab6\") " pod="openshift-marketplace/certified-operators-qw482" Oct 09 14:58:04 crc kubenswrapper[4762]: I1009 14:58:04.873152 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g4hkb\" (UniqueName: \"kubernetes.io/projected/2c5e97af-42da-4ccc-b010-ce6a7fd4aab6-kube-api-access-g4hkb\") pod \"certified-operators-qw482\" (UID: \"2c5e97af-42da-4ccc-b010-ce6a7fd4aab6\") " pod="openshift-marketplace/certified-operators-qw482" Oct 09 14:58:04 crc kubenswrapper[4762]: I1009 14:58:04.873231 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2c5e97af-42da-4ccc-b010-ce6a7fd4aab6-utilities\") pod \"certified-operators-qw482\" (UID: \"2c5e97af-42da-4ccc-b010-ce6a7fd4aab6\") " pod="openshift-marketplace/certified-operators-qw482" Oct 09 14:58:04 crc kubenswrapper[4762]: I1009 14:58:04.873340 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2c5e97af-42da-4ccc-b010-ce6a7fd4aab6-catalog-content\") pod \"certified-operators-qw482\" (UID: \"2c5e97af-42da-4ccc-b010-ce6a7fd4aab6\") " pod="openshift-marketplace/certified-operators-qw482" Oct 09 14:58:04 crc kubenswrapper[4762]: I1009 14:58:04.873761 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2c5e97af-42da-4ccc-b010-ce6a7fd4aab6-catalog-content\") pod \"certified-operators-qw482\" (UID: \"2c5e97af-42da-4ccc-b010-ce6a7fd4aab6\") " pod="openshift-marketplace/certified-operators-qw482" Oct 09 14:58:04 crc kubenswrapper[4762]: I1009 14:58:04.874008 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2c5e97af-42da-4ccc-b010-ce6a7fd4aab6-utilities\") pod \"certified-operators-qw482\" (UID: \"2c5e97af-42da-4ccc-b010-ce6a7fd4aab6\") " pod="openshift-marketplace/certified-operators-qw482" Oct 09 14:58:04 crc kubenswrapper[4762]: I1009 14:58:04.890729 4762 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-g4hkb\" (UniqueName: \"kubernetes.io/projected/2c5e97af-42da-4ccc-b010-ce6a7fd4aab6-kube-api-access-g4hkb\") pod \"certified-operators-qw482\" (UID: \"2c5e97af-42da-4ccc-b010-ce6a7fd4aab6\") " pod="openshift-marketplace/certified-operators-qw482" Oct 09 14:58:04 crc kubenswrapper[4762]: I1009 14:58:04.928108 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-qw482" Oct 09 14:58:05 crc kubenswrapper[4762]: I1009 14:58:05.434846 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-qw482"] Oct 09 14:58:06 crc kubenswrapper[4762]: I1009 14:58:06.137714 4762 generic.go:334] "Generic (PLEG): container finished" podID="2c5e97af-42da-4ccc-b010-ce6a7fd4aab6" containerID="9efee44cab0f34a5789d80e28f336b671802406c71666a366ff65db3ec9136e3" exitCode=0 Oct 09 14:58:06 crc kubenswrapper[4762]: I1009 14:58:06.137783 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qw482" event={"ID":"2c5e97af-42da-4ccc-b010-ce6a7fd4aab6","Type":"ContainerDied","Data":"9efee44cab0f34a5789d80e28f336b671802406c71666a366ff65db3ec9136e3"} Oct 09 14:58:06 crc kubenswrapper[4762]: I1009 14:58:06.138082 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qw482" event={"ID":"2c5e97af-42da-4ccc-b010-ce6a7fd4aab6","Type":"ContainerStarted","Data":"a4275402e512d9b191d0e223c0e9f287e9ecb1be99378cdd78fcf491566b9518"} Oct 09 14:58:07 crc kubenswrapper[4762]: I1009 14:58:07.149020 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qw482" event={"ID":"2c5e97af-42da-4ccc-b010-ce6a7fd4aab6","Type":"ContainerStarted","Data":"fe0ddf46f7127b1e6c890387841d273fb0cba027438c3073532d69b25b3c2690"} Oct 09 14:58:08 crc kubenswrapper[4762]: I1009 14:58:08.160230 4762 generic.go:334] "Generic (PLEG): container finished" podID="2c5e97af-42da-4ccc-b010-ce6a7fd4aab6" containerID="fe0ddf46f7127b1e6c890387841d273fb0cba027438c3073532d69b25b3c2690" exitCode=0 Oct 09 14:58:08 crc kubenswrapper[4762]: I1009 14:58:08.160298 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qw482" event={"ID":"2c5e97af-42da-4ccc-b010-ce6a7fd4aab6","Type":"ContainerDied","Data":"fe0ddf46f7127b1e6c890387841d273fb0cba027438c3073532d69b25b3c2690"} Oct 09 14:58:09 crc kubenswrapper[4762]: I1009 14:58:09.187914 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qw482" event={"ID":"2c5e97af-42da-4ccc-b010-ce6a7fd4aab6","Type":"ContainerStarted","Data":"89d1b7c10e88dc4373f490b5d80462e09295c37c0cacbb29ec6f50f39d3cdfce"} Oct 09 14:58:09 crc kubenswrapper[4762]: I1009 14:58:09.213160 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-qw482" podStartSLOduration=2.776837751 podStartE2EDuration="5.213137996s" podCreationTimestamp="2025-10-09 14:58:04 +0000 UTC" firstStartedPulling="2025-10-09 14:58:06.142296084 +0000 UTC m=+5561.916087133" lastFinishedPulling="2025-10-09 14:58:08.578596339 +0000 UTC m=+5564.352387378" observedRunningTime="2025-10-09 14:58:09.20751527 +0000 UTC m=+5564.981306309" watchObservedRunningTime="2025-10-09 14:58:09.213137996 +0000 UTC m=+5564.986929035" Oct 09 14:58:09 crc kubenswrapper[4762]: I1009 14:58:09.600489 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="ready" pod="openstack/neutron-7c86b5d68f-bbzc9" Oct 09 14:58:11 crc kubenswrapper[4762]: I1009 14:58:11.783129 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-b57dh"] Oct 09 14:58:11 crc kubenswrapper[4762]: I1009 14:58:11.785775 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-b57dh" Oct 09 14:58:11 crc kubenswrapper[4762]: I1009 14:58:11.804044 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-b57dh"] Oct 09 14:58:11 crc kubenswrapper[4762]: I1009 14:58:11.819384 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/09539a69-ab80-4742-ab2b-ddc15b687dd6-catalog-content\") pod \"redhat-marketplace-b57dh\" (UID: \"09539a69-ab80-4742-ab2b-ddc15b687dd6\") " pod="openshift-marketplace/redhat-marketplace-b57dh" Oct 09 14:58:11 crc kubenswrapper[4762]: I1009 14:58:11.819453 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/09539a69-ab80-4742-ab2b-ddc15b687dd6-utilities\") pod \"redhat-marketplace-b57dh\" (UID: \"09539a69-ab80-4742-ab2b-ddc15b687dd6\") " pod="openshift-marketplace/redhat-marketplace-b57dh" Oct 09 14:58:11 crc kubenswrapper[4762]: I1009 14:58:11.819528 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qn6rk\" (UniqueName: \"kubernetes.io/projected/09539a69-ab80-4742-ab2b-ddc15b687dd6-kube-api-access-qn6rk\") pod \"redhat-marketplace-b57dh\" (UID: \"09539a69-ab80-4742-ab2b-ddc15b687dd6\") " pod="openshift-marketplace/redhat-marketplace-b57dh" Oct 09 14:58:11 crc kubenswrapper[4762]: I1009 14:58:11.921736 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qn6rk\" (UniqueName: \"kubernetes.io/projected/09539a69-ab80-4742-ab2b-ddc15b687dd6-kube-api-access-qn6rk\") pod \"redhat-marketplace-b57dh\" (UID: \"09539a69-ab80-4742-ab2b-ddc15b687dd6\") " pod="openshift-marketplace/redhat-marketplace-b57dh" Oct 09 14:58:11 crc kubenswrapper[4762]: I1009 14:58:11.922235 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/09539a69-ab80-4742-ab2b-ddc15b687dd6-catalog-content\") pod \"redhat-marketplace-b57dh\" (UID: \"09539a69-ab80-4742-ab2b-ddc15b687dd6\") " pod="openshift-marketplace/redhat-marketplace-b57dh" Oct 09 14:58:11 crc kubenswrapper[4762]: I1009 14:58:11.922716 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/09539a69-ab80-4742-ab2b-ddc15b687dd6-catalog-content\") pod \"redhat-marketplace-b57dh\" (UID: \"09539a69-ab80-4742-ab2b-ddc15b687dd6\") " pod="openshift-marketplace/redhat-marketplace-b57dh" Oct 09 14:58:11 crc kubenswrapper[4762]: I1009 14:58:11.922808 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/09539a69-ab80-4742-ab2b-ddc15b687dd6-utilities\") pod \"redhat-marketplace-b57dh\" (UID: \"09539a69-ab80-4742-ab2b-ddc15b687dd6\") " pod="openshift-marketplace/redhat-marketplace-b57dh" Oct 09 14:58:11 crc kubenswrapper[4762]: I1009 14:58:11.923063 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" 
(UniqueName: \"kubernetes.io/empty-dir/09539a69-ab80-4742-ab2b-ddc15b687dd6-utilities\") pod \"redhat-marketplace-b57dh\" (UID: \"09539a69-ab80-4742-ab2b-ddc15b687dd6\") " pod="openshift-marketplace/redhat-marketplace-b57dh" Oct 09 14:58:11 crc kubenswrapper[4762]: I1009 14:58:11.943263 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qn6rk\" (UniqueName: \"kubernetes.io/projected/09539a69-ab80-4742-ab2b-ddc15b687dd6-kube-api-access-qn6rk\") pod \"redhat-marketplace-b57dh\" (UID: \"09539a69-ab80-4742-ab2b-ddc15b687dd6\") " pod="openshift-marketplace/redhat-marketplace-b57dh" Oct 09 14:58:12 crc kubenswrapper[4762]: I1009 14:58:12.119288 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-b57dh" Oct 09 14:58:12 crc kubenswrapper[4762]: I1009 14:58:12.608941 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-b57dh"] Oct 09 14:58:12 crc kubenswrapper[4762]: W1009 14:58:12.616107 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod09539a69_ab80_4742_ab2b_ddc15b687dd6.slice/crio-9dd9ec3025849136908bca8ae406353c320744a2bd2869b5de1bc82c0ce84b24 WatchSource:0}: Error finding container 9dd9ec3025849136908bca8ae406353c320744a2bd2869b5de1bc82c0ce84b24: Status 404 returned error can't find the container with id 9dd9ec3025849136908bca8ae406353c320744a2bd2869b5de1bc82c0ce84b24 Oct 09 14:58:13 crc kubenswrapper[4762]: I1009 14:58:13.225604 4762 generic.go:334] "Generic (PLEG): container finished" podID="09539a69-ab80-4742-ab2b-ddc15b687dd6" containerID="4f16749492acd157f86d1d5c71131ded005eeb81398ddcc50bd615421833fd6e" exitCode=0 Oct 09 14:58:13 crc kubenswrapper[4762]: I1009 14:58:13.225678 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-b57dh" event={"ID":"09539a69-ab80-4742-ab2b-ddc15b687dd6","Type":"ContainerDied","Data":"4f16749492acd157f86d1d5c71131ded005eeb81398ddcc50bd615421833fd6e"} Oct 09 14:58:13 crc kubenswrapper[4762]: I1009 14:58:13.225715 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-b57dh" event={"ID":"09539a69-ab80-4742-ab2b-ddc15b687dd6","Type":"ContainerStarted","Data":"9dd9ec3025849136908bca8ae406353c320744a2bd2869b5de1bc82c0ce84b24"} Oct 09 14:58:14 crc kubenswrapper[4762]: I1009 14:58:14.236013 4762 generic.go:334] "Generic (PLEG): container finished" podID="09539a69-ab80-4742-ab2b-ddc15b687dd6" containerID="bfba59b38446e1df2522cd9bbb7729ccdd7d3b276d530ef8e4ba6e68b1e09112" exitCode=0 Oct 09 14:58:14 crc kubenswrapper[4762]: I1009 14:58:14.236089 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-b57dh" event={"ID":"09539a69-ab80-4742-ab2b-ddc15b687dd6","Type":"ContainerDied","Data":"bfba59b38446e1df2522cd9bbb7729ccdd7d3b276d530ef8e4ba6e68b1e09112"} Oct 09 14:58:14 crc kubenswrapper[4762]: I1009 14:58:14.930016 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-qw482" Oct 09 14:58:14 crc kubenswrapper[4762]: I1009 14:58:14.931118 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-qw482" Oct 09 14:58:14 crc kubenswrapper[4762]: I1009 14:58:14.974006 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openshift-marketplace/certified-operators-qw482" Oct 09 14:58:15 crc kubenswrapper[4762]: I1009 14:58:15.246999 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-b57dh" event={"ID":"09539a69-ab80-4742-ab2b-ddc15b687dd6","Type":"ContainerStarted","Data":"56403465ff560e092d416ba69c950500b8efa2e4b88eb673470bf1129dfff6d9"} Oct 09 14:58:15 crc kubenswrapper[4762]: I1009 14:58:15.267207 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-b57dh" podStartSLOduration=2.726503674 podStartE2EDuration="4.267188097s" podCreationTimestamp="2025-10-09 14:58:11 +0000 UTC" firstStartedPulling="2025-10-09 14:58:13.22906265 +0000 UTC m=+5569.002853689" lastFinishedPulling="2025-10-09 14:58:14.769747063 +0000 UTC m=+5570.543538112" observedRunningTime="2025-10-09 14:58:15.261685804 +0000 UTC m=+5571.035476853" watchObservedRunningTime="2025-10-09 14:58:15.267188097 +0000 UTC m=+5571.040979136" Oct 09 14:58:15 crc kubenswrapper[4762]: I1009 14:58:15.302198 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-qw482" Oct 09 14:58:16 crc kubenswrapper[4762]: I1009 14:58:16.965222 4762 scope.go:117] "RemoveContainer" containerID="c75ef9853f5fe1f6bce3930681cb05899e816bea18fa88b6c554eafd348de735" Oct 09 14:58:16 crc kubenswrapper[4762]: E1009 14:58:16.965956 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 14:58:17 crc kubenswrapper[4762]: I1009 14:58:17.373265 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-qw482"] Oct 09 14:58:17 crc kubenswrapper[4762]: I1009 14:58:17.373481 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-qw482" podUID="2c5e97af-42da-4ccc-b010-ce6a7fd4aab6" containerName="registry-server" containerID="cri-o://89d1b7c10e88dc4373f490b5d80462e09295c37c0cacbb29ec6f50f39d3cdfce" gracePeriod=2 Oct 09 14:58:17 crc kubenswrapper[4762]: I1009 14:58:17.581071 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-7cjp5"] Oct 09 14:58:17 crc kubenswrapper[4762]: I1009 14:58:17.583256 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-7cjp5" Oct 09 14:58:17 crc kubenswrapper[4762]: I1009 14:58:17.595314 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-7cjp5"] Oct 09 14:58:17 crc kubenswrapper[4762]: I1009 14:58:17.630106 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-889dj\" (UniqueName: \"kubernetes.io/projected/5c97dad5-9737-43d5-9b06-6a6eab836b92-kube-api-access-889dj\") pod \"glance-db-create-7cjp5\" (UID: \"5c97dad5-9737-43d5-9b06-6a6eab836b92\") " pod="openstack/glance-db-create-7cjp5" Oct 09 14:58:17 crc kubenswrapper[4762]: I1009 14:58:17.731298 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-889dj\" (UniqueName: \"kubernetes.io/projected/5c97dad5-9737-43d5-9b06-6a6eab836b92-kube-api-access-889dj\") pod \"glance-db-create-7cjp5\" (UID: \"5c97dad5-9737-43d5-9b06-6a6eab836b92\") " pod="openstack/glance-db-create-7cjp5" Oct 09 14:58:17 crc kubenswrapper[4762]: I1009 14:58:17.772656 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-889dj\" (UniqueName: \"kubernetes.io/projected/5c97dad5-9737-43d5-9b06-6a6eab836b92-kube-api-access-889dj\") pod \"glance-db-create-7cjp5\" (UID: \"5c97dad5-9737-43d5-9b06-6a6eab836b92\") " pod="openstack/glance-db-create-7cjp5" Oct 09 14:58:17 crc kubenswrapper[4762]: I1009 14:58:17.840910 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-qw482" Oct 09 14:58:17 crc kubenswrapper[4762]: I1009 14:58:17.917956 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-7cjp5" Oct 09 14:58:17 crc kubenswrapper[4762]: I1009 14:58:17.933718 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g4hkb\" (UniqueName: \"kubernetes.io/projected/2c5e97af-42da-4ccc-b010-ce6a7fd4aab6-kube-api-access-g4hkb\") pod \"2c5e97af-42da-4ccc-b010-ce6a7fd4aab6\" (UID: \"2c5e97af-42da-4ccc-b010-ce6a7fd4aab6\") " Oct 09 14:58:17 crc kubenswrapper[4762]: I1009 14:58:17.933923 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2c5e97af-42da-4ccc-b010-ce6a7fd4aab6-catalog-content\") pod \"2c5e97af-42da-4ccc-b010-ce6a7fd4aab6\" (UID: \"2c5e97af-42da-4ccc-b010-ce6a7fd4aab6\") " Oct 09 14:58:17 crc kubenswrapper[4762]: I1009 14:58:17.933999 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2c5e97af-42da-4ccc-b010-ce6a7fd4aab6-utilities\") pod \"2c5e97af-42da-4ccc-b010-ce6a7fd4aab6\" (UID: \"2c5e97af-42da-4ccc-b010-ce6a7fd4aab6\") " Oct 09 14:58:17 crc kubenswrapper[4762]: I1009 14:58:17.936702 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2c5e97af-42da-4ccc-b010-ce6a7fd4aab6-utilities" (OuterVolumeSpecName: "utilities") pod "2c5e97af-42da-4ccc-b010-ce6a7fd4aab6" (UID: "2c5e97af-42da-4ccc-b010-ce6a7fd4aab6"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 14:58:17 crc kubenswrapper[4762]: I1009 14:58:17.945931 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2c5e97af-42da-4ccc-b010-ce6a7fd4aab6-kube-api-access-g4hkb" (OuterVolumeSpecName: "kube-api-access-g4hkb") pod "2c5e97af-42da-4ccc-b010-ce6a7fd4aab6" (UID: "2c5e97af-42da-4ccc-b010-ce6a7fd4aab6"). InnerVolumeSpecName "kube-api-access-g4hkb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 14:58:18 crc kubenswrapper[4762]: I1009 14:58:18.041913 4762 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2c5e97af-42da-4ccc-b010-ce6a7fd4aab6-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 14:58:18 crc kubenswrapper[4762]: I1009 14:58:18.041982 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g4hkb\" (UniqueName: \"kubernetes.io/projected/2c5e97af-42da-4ccc-b010-ce6a7fd4aab6-kube-api-access-g4hkb\") on node \"crc\" DevicePath \"\"" Oct 09 14:58:18 crc kubenswrapper[4762]: I1009 14:58:18.273767 4762 generic.go:334] "Generic (PLEG): container finished" podID="2c5e97af-42da-4ccc-b010-ce6a7fd4aab6" containerID="89d1b7c10e88dc4373f490b5d80462e09295c37c0cacbb29ec6f50f39d3cdfce" exitCode=0 Oct 09 14:58:18 crc kubenswrapper[4762]: I1009 14:58:18.273983 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qw482" event={"ID":"2c5e97af-42da-4ccc-b010-ce6a7fd4aab6","Type":"ContainerDied","Data":"89d1b7c10e88dc4373f490b5d80462e09295c37c0cacbb29ec6f50f39d3cdfce"} Oct 09 14:58:18 crc kubenswrapper[4762]: I1009 14:58:18.274108 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qw482" event={"ID":"2c5e97af-42da-4ccc-b010-ce6a7fd4aab6","Type":"ContainerDied","Data":"a4275402e512d9b191d0e223c0e9f287e9ecb1be99378cdd78fcf491566b9518"} Oct 09 14:58:18 crc kubenswrapper[4762]: I1009 14:58:18.274130 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-qw482" Oct 09 14:58:18 crc kubenswrapper[4762]: I1009 14:58:18.274133 4762 scope.go:117] "RemoveContainer" containerID="89d1b7c10e88dc4373f490b5d80462e09295c37c0cacbb29ec6f50f39d3cdfce" Oct 09 14:58:18 crc kubenswrapper[4762]: I1009 14:58:18.294348 4762 scope.go:117] "RemoveContainer" containerID="fe0ddf46f7127b1e6c890387841d273fb0cba027438c3073532d69b25b3c2690" Oct 09 14:58:18 crc kubenswrapper[4762]: I1009 14:58:18.316546 4762 scope.go:117] "RemoveContainer" containerID="9efee44cab0f34a5789d80e28f336b671802406c71666a366ff65db3ec9136e3" Oct 09 14:58:18 crc kubenswrapper[4762]: I1009 14:58:18.335989 4762 scope.go:117] "RemoveContainer" containerID="89d1b7c10e88dc4373f490b5d80462e09295c37c0cacbb29ec6f50f39d3cdfce" Oct 09 14:58:18 crc kubenswrapper[4762]: E1009 14:58:18.336369 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"89d1b7c10e88dc4373f490b5d80462e09295c37c0cacbb29ec6f50f39d3cdfce\": container with ID starting with 89d1b7c10e88dc4373f490b5d80462e09295c37c0cacbb29ec6f50f39d3cdfce not found: ID does not exist" containerID="89d1b7c10e88dc4373f490b5d80462e09295c37c0cacbb29ec6f50f39d3cdfce" Oct 09 14:58:18 crc kubenswrapper[4762]: I1009 14:58:18.336408 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"89d1b7c10e88dc4373f490b5d80462e09295c37c0cacbb29ec6f50f39d3cdfce"} err="failed to get container status \"89d1b7c10e88dc4373f490b5d80462e09295c37c0cacbb29ec6f50f39d3cdfce\": rpc error: code = NotFound desc = could not find container \"89d1b7c10e88dc4373f490b5d80462e09295c37c0cacbb29ec6f50f39d3cdfce\": container with ID starting with 89d1b7c10e88dc4373f490b5d80462e09295c37c0cacbb29ec6f50f39d3cdfce not found: ID does not exist" Oct 09 14:58:18 crc kubenswrapper[4762]: I1009 14:58:18.336434 4762 scope.go:117] "RemoveContainer" containerID="fe0ddf46f7127b1e6c890387841d273fb0cba027438c3073532d69b25b3c2690" Oct 09 14:58:18 crc kubenswrapper[4762]: E1009 14:58:18.337367 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fe0ddf46f7127b1e6c890387841d273fb0cba027438c3073532d69b25b3c2690\": container with ID starting with fe0ddf46f7127b1e6c890387841d273fb0cba027438c3073532d69b25b3c2690 not found: ID does not exist" containerID="fe0ddf46f7127b1e6c890387841d273fb0cba027438c3073532d69b25b3c2690" Oct 09 14:58:18 crc kubenswrapper[4762]: I1009 14:58:18.337392 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fe0ddf46f7127b1e6c890387841d273fb0cba027438c3073532d69b25b3c2690"} err="failed to get container status \"fe0ddf46f7127b1e6c890387841d273fb0cba027438c3073532d69b25b3c2690\": rpc error: code = NotFound desc = could not find container \"fe0ddf46f7127b1e6c890387841d273fb0cba027438c3073532d69b25b3c2690\": container with ID starting with fe0ddf46f7127b1e6c890387841d273fb0cba027438c3073532d69b25b3c2690 not found: ID does not exist" Oct 09 14:58:18 crc kubenswrapper[4762]: I1009 14:58:18.337410 4762 scope.go:117] "RemoveContainer" containerID="9efee44cab0f34a5789d80e28f336b671802406c71666a366ff65db3ec9136e3" Oct 09 14:58:18 crc kubenswrapper[4762]: E1009 14:58:18.337758 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9efee44cab0f34a5789d80e28f336b671802406c71666a366ff65db3ec9136e3\": container with ID starting 
with 9efee44cab0f34a5789d80e28f336b671802406c71666a366ff65db3ec9136e3 not found: ID does not exist" containerID="9efee44cab0f34a5789d80e28f336b671802406c71666a366ff65db3ec9136e3" Oct 09 14:58:18 crc kubenswrapper[4762]: I1009 14:58:18.337784 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9efee44cab0f34a5789d80e28f336b671802406c71666a366ff65db3ec9136e3"} err="failed to get container status \"9efee44cab0f34a5789d80e28f336b671802406c71666a366ff65db3ec9136e3\": rpc error: code = NotFound desc = could not find container \"9efee44cab0f34a5789d80e28f336b671802406c71666a366ff65db3ec9136e3\": container with ID starting with 9efee44cab0f34a5789d80e28f336b671802406c71666a366ff65db3ec9136e3 not found: ID does not exist" Oct 09 14:58:18 crc kubenswrapper[4762]: I1009 14:58:18.353500 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-7cjp5"] Oct 09 14:58:18 crc kubenswrapper[4762]: W1009 14:58:18.357259 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5c97dad5_9737_43d5_9b06_6a6eab836b92.slice/crio-c7ba5f124d4a1abbeb2dc44a377c39c2bc4e7560f6c1c229c666d14fa6b42fd0 WatchSource:0}: Error finding container c7ba5f124d4a1abbeb2dc44a377c39c2bc4e7560f6c1c229c666d14fa6b42fd0: Status 404 returned error can't find the container with id c7ba5f124d4a1abbeb2dc44a377c39c2bc4e7560f6c1c229c666d14fa6b42fd0 Oct 09 14:58:18 crc kubenswrapper[4762]: I1009 14:58:18.558457 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2c5e97af-42da-4ccc-b010-ce6a7fd4aab6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2c5e97af-42da-4ccc-b010-ce6a7fd4aab6" (UID: "2c5e97af-42da-4ccc-b010-ce6a7fd4aab6"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 14:58:18 crc kubenswrapper[4762]: I1009 14:58:18.607763 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-qw482"] Oct 09 14:58:18 crc kubenswrapper[4762]: I1009 14:58:18.614598 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-qw482"] Oct 09 14:58:18 crc kubenswrapper[4762]: I1009 14:58:18.653021 4762 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2c5e97af-42da-4ccc-b010-ce6a7fd4aab6-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 14:58:18 crc kubenswrapper[4762]: I1009 14:58:18.975998 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2c5e97af-42da-4ccc-b010-ce6a7fd4aab6" path="/var/lib/kubelet/pods/2c5e97af-42da-4ccc-b010-ce6a7fd4aab6/volumes" Oct 09 14:58:19 crc kubenswrapper[4762]: I1009 14:58:19.286853 4762 generic.go:334] "Generic (PLEG): container finished" podID="5c97dad5-9737-43d5-9b06-6a6eab836b92" containerID="ddac7de601de87ea8deb21d5dede13585863054b7eb48bb8418be06df0854e33" exitCode=0 Oct 09 14:58:19 crc kubenswrapper[4762]: I1009 14:58:19.286897 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-7cjp5" event={"ID":"5c97dad5-9737-43d5-9b06-6a6eab836b92","Type":"ContainerDied","Data":"ddac7de601de87ea8deb21d5dede13585863054b7eb48bb8418be06df0854e33"} Oct 09 14:58:19 crc kubenswrapper[4762]: I1009 14:58:19.286924 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-7cjp5" event={"ID":"5c97dad5-9737-43d5-9b06-6a6eab836b92","Type":"ContainerStarted","Data":"c7ba5f124d4a1abbeb2dc44a377c39c2bc4e7560f6c1c229c666d14fa6b42fd0"} Oct 09 14:58:20 crc kubenswrapper[4762]: I1009 14:58:20.583497 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-7cjp5" Oct 09 14:58:20 crc kubenswrapper[4762]: I1009 14:58:20.686206 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-889dj\" (UniqueName: \"kubernetes.io/projected/5c97dad5-9737-43d5-9b06-6a6eab836b92-kube-api-access-889dj\") pod \"5c97dad5-9737-43d5-9b06-6a6eab836b92\" (UID: \"5c97dad5-9737-43d5-9b06-6a6eab836b92\") " Oct 09 14:58:20 crc kubenswrapper[4762]: I1009 14:58:20.691879 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5c97dad5-9737-43d5-9b06-6a6eab836b92-kube-api-access-889dj" (OuterVolumeSpecName: "kube-api-access-889dj") pod "5c97dad5-9737-43d5-9b06-6a6eab836b92" (UID: "5c97dad5-9737-43d5-9b06-6a6eab836b92"). InnerVolumeSpecName "kube-api-access-889dj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 14:58:20 crc kubenswrapper[4762]: I1009 14:58:20.788327 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-889dj\" (UniqueName: \"kubernetes.io/projected/5c97dad5-9737-43d5-9b06-6a6eab836b92-kube-api-access-889dj\") on node \"crc\" DevicePath \"\"" Oct 09 14:58:21 crc kubenswrapper[4762]: I1009 14:58:21.303303 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-7cjp5" event={"ID":"5c97dad5-9737-43d5-9b06-6a6eab836b92","Type":"ContainerDied","Data":"c7ba5f124d4a1abbeb2dc44a377c39c2bc4e7560f6c1c229c666d14fa6b42fd0"} Oct 09 14:58:21 crc kubenswrapper[4762]: I1009 14:58:21.303539 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c7ba5f124d4a1abbeb2dc44a377c39c2bc4e7560f6c1c229c666d14fa6b42fd0" Oct 09 14:58:21 crc kubenswrapper[4762]: I1009 14:58:21.303605 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-7cjp5" Oct 09 14:58:22 crc kubenswrapper[4762]: I1009 14:58:22.119525 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-b57dh" Oct 09 14:58:22 crc kubenswrapper[4762]: I1009 14:58:22.119957 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-b57dh" Oct 09 14:58:22 crc kubenswrapper[4762]: I1009 14:58:22.209323 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-b57dh" Oct 09 14:58:22 crc kubenswrapper[4762]: I1009 14:58:22.368424 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-b57dh" Oct 09 14:58:22 crc kubenswrapper[4762]: I1009 14:58:22.441859 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-b57dh"] Oct 09 14:58:24 crc kubenswrapper[4762]: I1009 14:58:24.333318 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-b57dh" podUID="09539a69-ab80-4742-ab2b-ddc15b687dd6" containerName="registry-server" containerID="cri-o://56403465ff560e092d416ba69c950500b8efa2e4b88eb673470bf1129dfff6d9" gracePeriod=2 Oct 09 14:58:24 crc kubenswrapper[4762]: I1009 14:58:24.831238 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-b57dh" Oct 09 14:58:24 crc kubenswrapper[4762]: I1009 14:58:24.960917 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/09539a69-ab80-4742-ab2b-ddc15b687dd6-catalog-content\") pod \"09539a69-ab80-4742-ab2b-ddc15b687dd6\" (UID: \"09539a69-ab80-4742-ab2b-ddc15b687dd6\") " Oct 09 14:58:24 crc kubenswrapper[4762]: I1009 14:58:24.961073 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qn6rk\" (UniqueName: \"kubernetes.io/projected/09539a69-ab80-4742-ab2b-ddc15b687dd6-kube-api-access-qn6rk\") pod \"09539a69-ab80-4742-ab2b-ddc15b687dd6\" (UID: \"09539a69-ab80-4742-ab2b-ddc15b687dd6\") " Oct 09 14:58:24 crc kubenswrapper[4762]: I1009 14:58:24.961127 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/09539a69-ab80-4742-ab2b-ddc15b687dd6-utilities\") pod \"09539a69-ab80-4742-ab2b-ddc15b687dd6\" (UID: \"09539a69-ab80-4742-ab2b-ddc15b687dd6\") " Oct 09 14:58:24 crc kubenswrapper[4762]: I1009 14:58:24.962006 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/09539a69-ab80-4742-ab2b-ddc15b687dd6-utilities" (OuterVolumeSpecName: "utilities") pod "09539a69-ab80-4742-ab2b-ddc15b687dd6" (UID: "09539a69-ab80-4742-ab2b-ddc15b687dd6"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 14:58:24 crc kubenswrapper[4762]: I1009 14:58:24.966294 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09539a69-ab80-4742-ab2b-ddc15b687dd6-kube-api-access-qn6rk" (OuterVolumeSpecName: "kube-api-access-qn6rk") pod "09539a69-ab80-4742-ab2b-ddc15b687dd6" (UID: "09539a69-ab80-4742-ab2b-ddc15b687dd6"). InnerVolumeSpecName "kube-api-access-qn6rk". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 14:58:24 crc kubenswrapper[4762]: I1009 14:58:24.974147 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/09539a69-ab80-4742-ab2b-ddc15b687dd6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "09539a69-ab80-4742-ab2b-ddc15b687dd6" (UID: "09539a69-ab80-4742-ab2b-ddc15b687dd6"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 14:58:25 crc kubenswrapper[4762]: I1009 14:58:25.063841 4762 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/09539a69-ab80-4742-ab2b-ddc15b687dd6-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 14:58:25 crc kubenswrapper[4762]: I1009 14:58:25.063902 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qn6rk\" (UniqueName: \"kubernetes.io/projected/09539a69-ab80-4742-ab2b-ddc15b687dd6-kube-api-access-qn6rk\") on node \"crc\" DevicePath \"\"" Oct 09 14:58:25 crc kubenswrapper[4762]: I1009 14:58:25.063918 4762 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/09539a69-ab80-4742-ab2b-ddc15b687dd6-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 14:58:25 crc kubenswrapper[4762]: I1009 14:58:25.343710 4762 generic.go:334] "Generic (PLEG): container finished" podID="09539a69-ab80-4742-ab2b-ddc15b687dd6" containerID="56403465ff560e092d416ba69c950500b8efa2e4b88eb673470bf1129dfff6d9" exitCode=0 Oct 09 14:58:25 crc kubenswrapper[4762]: I1009 14:58:25.343771 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-b57dh" event={"ID":"09539a69-ab80-4742-ab2b-ddc15b687dd6","Type":"ContainerDied","Data":"56403465ff560e092d416ba69c950500b8efa2e4b88eb673470bf1129dfff6d9"} Oct 09 14:58:25 crc kubenswrapper[4762]: I1009 14:58:25.343809 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-b57dh" event={"ID":"09539a69-ab80-4742-ab2b-ddc15b687dd6","Type":"ContainerDied","Data":"9dd9ec3025849136908bca8ae406353c320744a2bd2869b5de1bc82c0ce84b24"} Oct 09 14:58:25 crc kubenswrapper[4762]: I1009 14:58:25.343830 4762 scope.go:117] "RemoveContainer" containerID="56403465ff560e092d416ba69c950500b8efa2e4b88eb673470bf1129dfff6d9" Oct 09 14:58:25 crc kubenswrapper[4762]: I1009 14:58:25.343986 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-b57dh" Oct 09 14:58:25 crc kubenswrapper[4762]: I1009 14:58:25.370865 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-b57dh"] Oct 09 14:58:25 crc kubenswrapper[4762]: I1009 14:58:25.379188 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-b57dh"] Oct 09 14:58:25 crc kubenswrapper[4762]: I1009 14:58:25.391273 4762 scope.go:117] "RemoveContainer" containerID="bfba59b38446e1df2522cd9bbb7729ccdd7d3b276d530ef8e4ba6e68b1e09112" Oct 09 14:58:25 crc kubenswrapper[4762]: I1009 14:58:25.412846 4762 scope.go:117] "RemoveContainer" containerID="4f16749492acd157f86d1d5c71131ded005eeb81398ddcc50bd615421833fd6e" Oct 09 14:58:25 crc kubenswrapper[4762]: I1009 14:58:25.445060 4762 scope.go:117] "RemoveContainer" containerID="56403465ff560e092d416ba69c950500b8efa2e4b88eb673470bf1129dfff6d9" Oct 09 14:58:25 crc kubenswrapper[4762]: E1009 14:58:25.445713 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"56403465ff560e092d416ba69c950500b8efa2e4b88eb673470bf1129dfff6d9\": container with ID starting with 56403465ff560e092d416ba69c950500b8efa2e4b88eb673470bf1129dfff6d9 not found: ID does not exist" containerID="56403465ff560e092d416ba69c950500b8efa2e4b88eb673470bf1129dfff6d9" Oct 09 14:58:25 crc kubenswrapper[4762]: I1009 14:58:25.445774 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"56403465ff560e092d416ba69c950500b8efa2e4b88eb673470bf1129dfff6d9"} err="failed to get container status \"56403465ff560e092d416ba69c950500b8efa2e4b88eb673470bf1129dfff6d9\": rpc error: code = NotFound desc = could not find container \"56403465ff560e092d416ba69c950500b8efa2e4b88eb673470bf1129dfff6d9\": container with ID starting with 56403465ff560e092d416ba69c950500b8efa2e4b88eb673470bf1129dfff6d9 not found: ID does not exist" Oct 09 14:58:25 crc kubenswrapper[4762]: I1009 14:58:25.445810 4762 scope.go:117] "RemoveContainer" containerID="bfba59b38446e1df2522cd9bbb7729ccdd7d3b276d530ef8e4ba6e68b1e09112" Oct 09 14:58:25 crc kubenswrapper[4762]: E1009 14:58:25.446214 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bfba59b38446e1df2522cd9bbb7729ccdd7d3b276d530ef8e4ba6e68b1e09112\": container with ID starting with bfba59b38446e1df2522cd9bbb7729ccdd7d3b276d530ef8e4ba6e68b1e09112 not found: ID does not exist" containerID="bfba59b38446e1df2522cd9bbb7729ccdd7d3b276d530ef8e4ba6e68b1e09112" Oct 09 14:58:25 crc kubenswrapper[4762]: I1009 14:58:25.446241 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bfba59b38446e1df2522cd9bbb7729ccdd7d3b276d530ef8e4ba6e68b1e09112"} err="failed to get container status \"bfba59b38446e1df2522cd9bbb7729ccdd7d3b276d530ef8e4ba6e68b1e09112\": rpc error: code = NotFound desc = could not find container \"bfba59b38446e1df2522cd9bbb7729ccdd7d3b276d530ef8e4ba6e68b1e09112\": container with ID starting with bfba59b38446e1df2522cd9bbb7729ccdd7d3b276d530ef8e4ba6e68b1e09112 not found: ID does not exist" Oct 09 14:58:25 crc kubenswrapper[4762]: I1009 14:58:25.446256 4762 scope.go:117] "RemoveContainer" containerID="4f16749492acd157f86d1d5c71131ded005eeb81398ddcc50bd615421833fd6e" Oct 09 14:58:25 crc kubenswrapper[4762]: E1009 14:58:25.446588 4762 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"4f16749492acd157f86d1d5c71131ded005eeb81398ddcc50bd615421833fd6e\": container with ID starting with 4f16749492acd157f86d1d5c71131ded005eeb81398ddcc50bd615421833fd6e not found: ID does not exist" containerID="4f16749492acd157f86d1d5c71131ded005eeb81398ddcc50bd615421833fd6e" Oct 09 14:58:25 crc kubenswrapper[4762]: I1009 14:58:25.446609 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4f16749492acd157f86d1d5c71131ded005eeb81398ddcc50bd615421833fd6e"} err="failed to get container status \"4f16749492acd157f86d1d5c71131ded005eeb81398ddcc50bd615421833fd6e\": rpc error: code = NotFound desc = could not find container \"4f16749492acd157f86d1d5c71131ded005eeb81398ddcc50bd615421833fd6e\": container with ID starting with 4f16749492acd157f86d1d5c71131ded005eeb81398ddcc50bd615421833fd6e not found: ID does not exist" Oct 09 14:58:26 crc kubenswrapper[4762]: I1009 14:58:26.977984 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09539a69-ab80-4742-ab2b-ddc15b687dd6" path="/var/lib/kubelet/pods/09539a69-ab80-4742-ab2b-ddc15b687dd6/volumes" Oct 09 14:58:27 crc kubenswrapper[4762]: I1009 14:58:27.716731 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-06b1-account-create-ccvjs"] Oct 09 14:58:27 crc kubenswrapper[4762]: E1009 14:58:27.717248 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09539a69-ab80-4742-ab2b-ddc15b687dd6" containerName="extract-utilities" Oct 09 14:58:27 crc kubenswrapper[4762]: I1009 14:58:27.717271 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="09539a69-ab80-4742-ab2b-ddc15b687dd6" containerName="extract-utilities" Oct 09 14:58:27 crc kubenswrapper[4762]: E1009 14:58:27.717298 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c5e97af-42da-4ccc-b010-ce6a7fd4aab6" containerName="extract-utilities" Oct 09 14:58:27 crc kubenswrapper[4762]: I1009 14:58:27.717307 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c5e97af-42da-4ccc-b010-ce6a7fd4aab6" containerName="extract-utilities" Oct 09 14:58:27 crc kubenswrapper[4762]: E1009 14:58:27.717322 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c5e97af-42da-4ccc-b010-ce6a7fd4aab6" containerName="extract-content" Oct 09 14:58:27 crc kubenswrapper[4762]: I1009 14:58:27.717329 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c5e97af-42da-4ccc-b010-ce6a7fd4aab6" containerName="extract-content" Oct 09 14:58:27 crc kubenswrapper[4762]: E1009 14:58:27.717353 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c5e97af-42da-4ccc-b010-ce6a7fd4aab6" containerName="registry-server" Oct 09 14:58:27 crc kubenswrapper[4762]: I1009 14:58:27.717363 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c5e97af-42da-4ccc-b010-ce6a7fd4aab6" containerName="registry-server" Oct 09 14:58:27 crc kubenswrapper[4762]: E1009 14:58:27.717379 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09539a69-ab80-4742-ab2b-ddc15b687dd6" containerName="registry-server" Oct 09 14:58:27 crc kubenswrapper[4762]: I1009 14:58:27.717389 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="09539a69-ab80-4742-ab2b-ddc15b687dd6" containerName="registry-server" Oct 09 14:58:27 crc kubenswrapper[4762]: E1009 14:58:27.717402 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09539a69-ab80-4742-ab2b-ddc15b687dd6" containerName="extract-content" 
Oct 09 14:58:27 crc kubenswrapper[4762]: I1009 14:58:27.717408 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="09539a69-ab80-4742-ab2b-ddc15b687dd6" containerName="extract-content" Oct 09 14:58:27 crc kubenswrapper[4762]: E1009 14:58:27.717423 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c97dad5-9737-43d5-9b06-6a6eab836b92" containerName="mariadb-database-create" Oct 09 14:58:27 crc kubenswrapper[4762]: I1009 14:58:27.717430 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c97dad5-9737-43d5-9b06-6a6eab836b92" containerName="mariadb-database-create" Oct 09 14:58:27 crc kubenswrapper[4762]: I1009 14:58:27.717621 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="2c5e97af-42da-4ccc-b010-ce6a7fd4aab6" containerName="registry-server" Oct 09 14:58:27 crc kubenswrapper[4762]: I1009 14:58:27.717657 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="5c97dad5-9737-43d5-9b06-6a6eab836b92" containerName="mariadb-database-create" Oct 09 14:58:27 crc kubenswrapper[4762]: I1009 14:58:27.717670 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="09539a69-ab80-4742-ab2b-ddc15b687dd6" containerName="registry-server" Oct 09 14:58:27 crc kubenswrapper[4762]: I1009 14:58:27.718470 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-06b1-account-create-ccvjs" Oct 09 14:58:27 crc kubenswrapper[4762]: I1009 14:58:27.720964 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret" Oct 09 14:58:27 crc kubenswrapper[4762]: I1009 14:58:27.726261 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-06b1-account-create-ccvjs"] Oct 09 14:58:27 crc kubenswrapper[4762]: I1009 14:58:27.820806 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vz8c4\" (UniqueName: \"kubernetes.io/projected/75969a22-cb73-4249-94b8-03e1fcbf4c4f-kube-api-access-vz8c4\") pod \"glance-06b1-account-create-ccvjs\" (UID: \"75969a22-cb73-4249-94b8-03e1fcbf4c4f\") " pod="openstack/glance-06b1-account-create-ccvjs" Oct 09 14:58:27 crc kubenswrapper[4762]: I1009 14:58:27.855568 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-fjclb"] Oct 09 14:58:27 crc kubenswrapper[4762]: I1009 14:58:27.864797 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-fjclb" Oct 09 14:58:27 crc kubenswrapper[4762]: I1009 14:58:27.869950 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-fjclb"] Oct 09 14:58:27 crc kubenswrapper[4762]: I1009 14:58:27.922840 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vz8c4\" (UniqueName: \"kubernetes.io/projected/75969a22-cb73-4249-94b8-03e1fcbf4c4f-kube-api-access-vz8c4\") pod \"glance-06b1-account-create-ccvjs\" (UID: \"75969a22-cb73-4249-94b8-03e1fcbf4c4f\") " pod="openstack/glance-06b1-account-create-ccvjs" Oct 09 14:58:27 crc kubenswrapper[4762]: I1009 14:58:27.942611 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vz8c4\" (UniqueName: \"kubernetes.io/projected/75969a22-cb73-4249-94b8-03e1fcbf4c4f-kube-api-access-vz8c4\") pod \"glance-06b1-account-create-ccvjs\" (UID: \"75969a22-cb73-4249-94b8-03e1fcbf4c4f\") " pod="openstack/glance-06b1-account-create-ccvjs" Oct 09 14:58:27 crc kubenswrapper[4762]: I1009 14:58:27.965301 4762 scope.go:117] "RemoveContainer" containerID="c75ef9853f5fe1f6bce3930681cb05899e816bea18fa88b6c554eafd348de735" Oct 09 14:58:27 crc kubenswrapper[4762]: E1009 14:58:27.965542 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 14:58:28 crc kubenswrapper[4762]: I1009 14:58:28.024233 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bc1c3fa6-6bbc-4990-8679-567f609f7652-utilities\") pod \"community-operators-fjclb\" (UID: \"bc1c3fa6-6bbc-4990-8679-567f609f7652\") " pod="openshift-marketplace/community-operators-fjclb" Oct 09 14:58:28 crc kubenswrapper[4762]: I1009 14:58:28.024323 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bc1c3fa6-6bbc-4990-8679-567f609f7652-catalog-content\") pod \"community-operators-fjclb\" (UID: \"bc1c3fa6-6bbc-4990-8679-567f609f7652\") " pod="openshift-marketplace/community-operators-fjclb" Oct 09 14:58:28 crc kubenswrapper[4762]: I1009 14:58:28.024719 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7mq67\" (UniqueName: \"kubernetes.io/projected/bc1c3fa6-6bbc-4990-8679-567f609f7652-kube-api-access-7mq67\") pod \"community-operators-fjclb\" (UID: \"bc1c3fa6-6bbc-4990-8679-567f609f7652\") " pod="openshift-marketplace/community-operators-fjclb" Oct 09 14:58:28 crc kubenswrapper[4762]: I1009 14:58:28.038860 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-06b1-account-create-ccvjs" Oct 09 14:58:28 crc kubenswrapper[4762]: I1009 14:58:28.126887 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bc1c3fa6-6bbc-4990-8679-567f609f7652-catalog-content\") pod \"community-operators-fjclb\" (UID: \"bc1c3fa6-6bbc-4990-8679-567f609f7652\") " pod="openshift-marketplace/community-operators-fjclb" Oct 09 14:58:28 crc kubenswrapper[4762]: I1009 14:58:28.127281 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7mq67\" (UniqueName: \"kubernetes.io/projected/bc1c3fa6-6bbc-4990-8679-567f609f7652-kube-api-access-7mq67\") pod \"community-operators-fjclb\" (UID: \"bc1c3fa6-6bbc-4990-8679-567f609f7652\") " pod="openshift-marketplace/community-operators-fjclb" Oct 09 14:58:28 crc kubenswrapper[4762]: I1009 14:58:28.127426 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bc1c3fa6-6bbc-4990-8679-567f609f7652-catalog-content\") pod \"community-operators-fjclb\" (UID: \"bc1c3fa6-6bbc-4990-8679-567f609f7652\") " pod="openshift-marketplace/community-operators-fjclb" Oct 09 14:58:28 crc kubenswrapper[4762]: I1009 14:58:28.127827 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bc1c3fa6-6bbc-4990-8679-567f609f7652-utilities\") pod \"community-operators-fjclb\" (UID: \"bc1c3fa6-6bbc-4990-8679-567f609f7652\") " pod="openshift-marketplace/community-operators-fjclb" Oct 09 14:58:28 crc kubenswrapper[4762]: I1009 14:58:28.128117 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bc1c3fa6-6bbc-4990-8679-567f609f7652-utilities\") pod \"community-operators-fjclb\" (UID: \"bc1c3fa6-6bbc-4990-8679-567f609f7652\") " pod="openshift-marketplace/community-operators-fjclb" Oct 09 14:58:28 crc kubenswrapper[4762]: I1009 14:58:28.145752 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7mq67\" (UniqueName: \"kubernetes.io/projected/bc1c3fa6-6bbc-4990-8679-567f609f7652-kube-api-access-7mq67\") pod \"community-operators-fjclb\" (UID: \"bc1c3fa6-6bbc-4990-8679-567f609f7652\") " pod="openshift-marketplace/community-operators-fjclb" Oct 09 14:58:28 crc kubenswrapper[4762]: I1009 14:58:28.186520 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-fjclb" Oct 09 14:58:28 crc kubenswrapper[4762]: I1009 14:58:28.553745 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-06b1-account-create-ccvjs"] Oct 09 14:58:28 crc kubenswrapper[4762]: W1009 14:58:28.557793 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod75969a22_cb73_4249_94b8_03e1fcbf4c4f.slice/crio-5b1242fb6b5889dc7a672ad098be67a6f8679804b32484c2e4d3148caa6c29a2 WatchSource:0}: Error finding container 5b1242fb6b5889dc7a672ad098be67a6f8679804b32484c2e4d3148caa6c29a2: Status 404 returned error can't find the container with id 5b1242fb6b5889dc7a672ad098be67a6f8679804b32484c2e4d3148caa6c29a2 Oct 09 14:58:28 crc kubenswrapper[4762]: W1009 14:58:28.697409 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbc1c3fa6_6bbc_4990_8679_567f609f7652.slice/crio-1089547e3e830f35e723d923ce171a51d20590b2572ea665ef6101c41ae482c9 WatchSource:0}: Error finding container 1089547e3e830f35e723d923ce171a51d20590b2572ea665ef6101c41ae482c9: Status 404 returned error can't find the container with id 1089547e3e830f35e723d923ce171a51d20590b2572ea665ef6101c41ae482c9 Oct 09 14:58:28 crc kubenswrapper[4762]: I1009 14:58:28.699549 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-fjclb"] Oct 09 14:58:29 crc kubenswrapper[4762]: I1009 14:58:29.392608 4762 generic.go:334] "Generic (PLEG): container finished" podID="75969a22-cb73-4249-94b8-03e1fcbf4c4f" containerID="d7d381eb1e6f629f3b92ec8ae1dc756a6d5556f3da26e3fce416995e83b59a7f" exitCode=0 Oct 09 14:58:29 crc kubenswrapper[4762]: I1009 14:58:29.392691 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-06b1-account-create-ccvjs" event={"ID":"75969a22-cb73-4249-94b8-03e1fcbf4c4f","Type":"ContainerDied","Data":"d7d381eb1e6f629f3b92ec8ae1dc756a6d5556f3da26e3fce416995e83b59a7f"} Oct 09 14:58:29 crc kubenswrapper[4762]: I1009 14:58:29.393244 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-06b1-account-create-ccvjs" event={"ID":"75969a22-cb73-4249-94b8-03e1fcbf4c4f","Type":"ContainerStarted","Data":"5b1242fb6b5889dc7a672ad098be67a6f8679804b32484c2e4d3148caa6c29a2"} Oct 09 14:58:29 crc kubenswrapper[4762]: I1009 14:58:29.398716 4762 generic.go:334] "Generic (PLEG): container finished" podID="bc1c3fa6-6bbc-4990-8679-567f609f7652" containerID="ed62c43833074ee952d4e0b4293f92062fe5d53e53a9c0c2e72d422f3c66059b" exitCode=0 Oct 09 14:58:29 crc kubenswrapper[4762]: I1009 14:58:29.398774 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fjclb" event={"ID":"bc1c3fa6-6bbc-4990-8679-567f609f7652","Type":"ContainerDied","Data":"ed62c43833074ee952d4e0b4293f92062fe5d53e53a9c0c2e72d422f3c66059b"} Oct 09 14:58:29 crc kubenswrapper[4762]: I1009 14:58:29.398802 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fjclb" event={"ID":"bc1c3fa6-6bbc-4990-8679-567f609f7652","Type":"ContainerStarted","Data":"1089547e3e830f35e723d923ce171a51d20590b2572ea665ef6101c41ae482c9"} Oct 09 14:58:30 crc kubenswrapper[4762]: I1009 14:58:30.409256 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fjclb" 
event={"ID":"bc1c3fa6-6bbc-4990-8679-567f609f7652","Type":"ContainerStarted","Data":"17e570635b2c9f7861675ecc790d3e131558fee0deb8eed25ec91d69113d92d3"} Oct 09 14:58:30 crc kubenswrapper[4762]: I1009 14:58:30.737020 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-06b1-account-create-ccvjs" Oct 09 14:58:30 crc kubenswrapper[4762]: I1009 14:58:30.887126 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vz8c4\" (UniqueName: \"kubernetes.io/projected/75969a22-cb73-4249-94b8-03e1fcbf4c4f-kube-api-access-vz8c4\") pod \"75969a22-cb73-4249-94b8-03e1fcbf4c4f\" (UID: \"75969a22-cb73-4249-94b8-03e1fcbf4c4f\") " Oct 09 14:58:30 crc kubenswrapper[4762]: I1009 14:58:30.895694 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/75969a22-cb73-4249-94b8-03e1fcbf4c4f-kube-api-access-vz8c4" (OuterVolumeSpecName: "kube-api-access-vz8c4") pod "75969a22-cb73-4249-94b8-03e1fcbf4c4f" (UID: "75969a22-cb73-4249-94b8-03e1fcbf4c4f"). InnerVolumeSpecName "kube-api-access-vz8c4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 14:58:30 crc kubenswrapper[4762]: I1009 14:58:30.990674 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vz8c4\" (UniqueName: \"kubernetes.io/projected/75969a22-cb73-4249-94b8-03e1fcbf4c4f-kube-api-access-vz8c4\") on node \"crc\" DevicePath \"\"" Oct 09 14:58:31 crc kubenswrapper[4762]: I1009 14:58:31.419963 4762 generic.go:334] "Generic (PLEG): container finished" podID="bc1c3fa6-6bbc-4990-8679-567f609f7652" containerID="17e570635b2c9f7861675ecc790d3e131558fee0deb8eed25ec91d69113d92d3" exitCode=0 Oct 09 14:58:31 crc kubenswrapper[4762]: I1009 14:58:31.420049 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fjclb" event={"ID":"bc1c3fa6-6bbc-4990-8679-567f609f7652","Type":"ContainerDied","Data":"17e570635b2c9f7861675ecc790d3e131558fee0deb8eed25ec91d69113d92d3"} Oct 09 14:58:31 crc kubenswrapper[4762]: I1009 14:58:31.422206 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-06b1-account-create-ccvjs" event={"ID":"75969a22-cb73-4249-94b8-03e1fcbf4c4f","Type":"ContainerDied","Data":"5b1242fb6b5889dc7a672ad098be67a6f8679804b32484c2e4d3148caa6c29a2"} Oct 09 14:58:31 crc kubenswrapper[4762]: I1009 14:58:31.422254 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5b1242fb6b5889dc7a672ad098be67a6f8679804b32484c2e4d3148caa6c29a2" Oct 09 14:58:31 crc kubenswrapper[4762]: I1009 14:58:31.422264 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-06b1-account-create-ccvjs" Oct 09 14:58:32 crc kubenswrapper[4762]: I1009 14:58:32.283958 4762 scope.go:117] "RemoveContainer" containerID="37f67fdb760e1fac6c232f660ec7778d29cf95da2c5b3ec9fa69c69d67ce2335" Oct 09 14:58:32 crc kubenswrapper[4762]: I1009 14:58:32.326424 4762 scope.go:117] "RemoveContainer" containerID="446ce32f085ff8ba0dbc0de3c89c47b0994580e08b0e9b95e5241f3ccbbf422c" Oct 09 14:58:32 crc kubenswrapper[4762]: I1009 14:58:32.434169 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fjclb" event={"ID":"bc1c3fa6-6bbc-4990-8679-567f609f7652","Type":"ContainerStarted","Data":"4fe34c891b2e1da0bebcd7c66141a1b99599fa179d54fdb47fd0b78d427a3004"} Oct 09 14:58:32 crc kubenswrapper[4762]: I1009 14:58:32.458760 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-fjclb" podStartSLOduration=3.021328213 podStartE2EDuration="5.458737527s" podCreationTimestamp="2025-10-09 14:58:27 +0000 UTC" firstStartedPulling="2025-10-09 14:58:29.401037606 +0000 UTC m=+5585.174828645" lastFinishedPulling="2025-10-09 14:58:31.83844691 +0000 UTC m=+5587.612237959" observedRunningTime="2025-10-09 14:58:32.45042688 +0000 UTC m=+5588.224217919" watchObservedRunningTime="2025-10-09 14:58:32.458737527 +0000 UTC m=+5588.232528566" Oct 09 14:58:32 crc kubenswrapper[4762]: I1009 14:58:32.850980 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-bm22n"] Oct 09 14:58:32 crc kubenswrapper[4762]: E1009 14:58:32.851409 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="75969a22-cb73-4249-94b8-03e1fcbf4c4f" containerName="mariadb-account-create" Oct 09 14:58:32 crc kubenswrapper[4762]: I1009 14:58:32.851424 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="75969a22-cb73-4249-94b8-03e1fcbf4c4f" containerName="mariadb-account-create" Oct 09 14:58:32 crc kubenswrapper[4762]: I1009 14:58:32.852317 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="75969a22-cb73-4249-94b8-03e1fcbf4c4f" containerName="mariadb-account-create" Oct 09 14:58:32 crc kubenswrapper[4762]: I1009 14:58:32.853018 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-bm22n" Oct 09 14:58:32 crc kubenswrapper[4762]: I1009 14:58:32.855375 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Oct 09 14:58:32 crc kubenswrapper[4762]: I1009 14:58:32.855386 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-zpb7m" Oct 09 14:58:32 crc kubenswrapper[4762]: I1009 14:58:32.866134 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-bm22n"] Oct 09 14:58:33 crc kubenswrapper[4762]: I1009 14:58:33.024976 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jjf7t\" (UniqueName: \"kubernetes.io/projected/d7419200-74d1-4b46-9d46-2d143ae77596-kube-api-access-jjf7t\") pod \"glance-db-sync-bm22n\" (UID: \"d7419200-74d1-4b46-9d46-2d143ae77596\") " pod="openstack/glance-db-sync-bm22n" Oct 09 14:58:33 crc kubenswrapper[4762]: I1009 14:58:33.025244 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7419200-74d1-4b46-9d46-2d143ae77596-combined-ca-bundle\") pod \"glance-db-sync-bm22n\" (UID: \"d7419200-74d1-4b46-9d46-2d143ae77596\") " pod="openstack/glance-db-sync-bm22n" Oct 09 14:58:33 crc kubenswrapper[4762]: I1009 14:58:33.025274 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/d7419200-74d1-4b46-9d46-2d143ae77596-db-sync-config-data\") pod \"glance-db-sync-bm22n\" (UID: \"d7419200-74d1-4b46-9d46-2d143ae77596\") " pod="openstack/glance-db-sync-bm22n" Oct 09 14:58:33 crc kubenswrapper[4762]: I1009 14:58:33.025312 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d7419200-74d1-4b46-9d46-2d143ae77596-config-data\") pod \"glance-db-sync-bm22n\" (UID: \"d7419200-74d1-4b46-9d46-2d143ae77596\") " pod="openstack/glance-db-sync-bm22n" Oct 09 14:58:33 crc kubenswrapper[4762]: I1009 14:58:33.127197 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jjf7t\" (UniqueName: \"kubernetes.io/projected/d7419200-74d1-4b46-9d46-2d143ae77596-kube-api-access-jjf7t\") pod \"glance-db-sync-bm22n\" (UID: \"d7419200-74d1-4b46-9d46-2d143ae77596\") " pod="openstack/glance-db-sync-bm22n" Oct 09 14:58:33 crc kubenswrapper[4762]: I1009 14:58:33.127256 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7419200-74d1-4b46-9d46-2d143ae77596-combined-ca-bundle\") pod \"glance-db-sync-bm22n\" (UID: \"d7419200-74d1-4b46-9d46-2d143ae77596\") " pod="openstack/glance-db-sync-bm22n" Oct 09 14:58:33 crc kubenswrapper[4762]: I1009 14:58:33.127322 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/d7419200-74d1-4b46-9d46-2d143ae77596-db-sync-config-data\") pod \"glance-db-sync-bm22n\" (UID: \"d7419200-74d1-4b46-9d46-2d143ae77596\") " pod="openstack/glance-db-sync-bm22n" Oct 09 14:58:33 crc kubenswrapper[4762]: I1009 14:58:33.127370 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d7419200-74d1-4b46-9d46-2d143ae77596-config-data\") pod 
\"glance-db-sync-bm22n\" (UID: \"d7419200-74d1-4b46-9d46-2d143ae77596\") " pod="openstack/glance-db-sync-bm22n" Oct 09 14:58:33 crc kubenswrapper[4762]: I1009 14:58:33.137598 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7419200-74d1-4b46-9d46-2d143ae77596-combined-ca-bundle\") pod \"glance-db-sync-bm22n\" (UID: \"d7419200-74d1-4b46-9d46-2d143ae77596\") " pod="openstack/glance-db-sync-bm22n" Oct 09 14:58:33 crc kubenswrapper[4762]: I1009 14:58:33.153176 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/d7419200-74d1-4b46-9d46-2d143ae77596-db-sync-config-data\") pod \"glance-db-sync-bm22n\" (UID: \"d7419200-74d1-4b46-9d46-2d143ae77596\") " pod="openstack/glance-db-sync-bm22n" Oct 09 14:58:33 crc kubenswrapper[4762]: I1009 14:58:33.155227 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d7419200-74d1-4b46-9d46-2d143ae77596-config-data\") pod \"glance-db-sync-bm22n\" (UID: \"d7419200-74d1-4b46-9d46-2d143ae77596\") " pod="openstack/glance-db-sync-bm22n" Oct 09 14:58:33 crc kubenswrapper[4762]: I1009 14:58:33.169433 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jjf7t\" (UniqueName: \"kubernetes.io/projected/d7419200-74d1-4b46-9d46-2d143ae77596-kube-api-access-jjf7t\") pod \"glance-db-sync-bm22n\" (UID: \"d7419200-74d1-4b46-9d46-2d143ae77596\") " pod="openstack/glance-db-sync-bm22n" Oct 09 14:58:33 crc kubenswrapper[4762]: I1009 14:58:33.173066 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-bm22n" Oct 09 14:58:33 crc kubenswrapper[4762]: I1009 14:58:33.747326 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-bm22n"] Oct 09 14:58:33 crc kubenswrapper[4762]: W1009 14:58:33.750037 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd7419200_74d1_4b46_9d46_2d143ae77596.slice/crio-1965a44baf4262d637fbea1efe728542351e769fb71a72ff9daa675f78c94c65 WatchSource:0}: Error finding container 1965a44baf4262d637fbea1efe728542351e769fb71a72ff9daa675f78c94c65: Status 404 returned error can't find the container with id 1965a44baf4262d637fbea1efe728542351e769fb71a72ff9daa675f78c94c65 Oct 09 14:58:34 crc kubenswrapper[4762]: I1009 14:58:34.454597 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-bm22n" event={"ID":"d7419200-74d1-4b46-9d46-2d143ae77596","Type":"ContainerStarted","Data":"2dfba6d76801e0075d34d7853c0c6081fce89e07b4709ddaf1234b73b404c0b0"} Oct 09 14:58:34 crc kubenswrapper[4762]: I1009 14:58:34.454978 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-bm22n" event={"ID":"d7419200-74d1-4b46-9d46-2d143ae77596","Type":"ContainerStarted","Data":"1965a44baf4262d637fbea1efe728542351e769fb71a72ff9daa675f78c94c65"} Oct 09 14:58:34 crc kubenswrapper[4762]: I1009 14:58:34.475143 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-bm22n" podStartSLOduration=2.475123738 podStartE2EDuration="2.475123738s" podCreationTimestamp="2025-10-09 14:58:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 14:58:34.468098265 +0000 UTC m=+5590.241889304" 
watchObservedRunningTime="2025-10-09 14:58:34.475123738 +0000 UTC m=+5590.248914777" Oct 09 14:58:38 crc kubenswrapper[4762]: I1009 14:58:38.187970 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-fjclb" Oct 09 14:58:38 crc kubenswrapper[4762]: I1009 14:58:38.188338 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-fjclb" Oct 09 14:58:38 crc kubenswrapper[4762]: I1009 14:58:38.234365 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-fjclb" Oct 09 14:58:38 crc kubenswrapper[4762]: I1009 14:58:38.494803 4762 generic.go:334] "Generic (PLEG): container finished" podID="d7419200-74d1-4b46-9d46-2d143ae77596" containerID="2dfba6d76801e0075d34d7853c0c6081fce89e07b4709ddaf1234b73b404c0b0" exitCode=0 Oct 09 14:58:38 crc kubenswrapper[4762]: I1009 14:58:38.494896 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-bm22n" event={"ID":"d7419200-74d1-4b46-9d46-2d143ae77596","Type":"ContainerDied","Data":"2dfba6d76801e0075d34d7853c0c6081fce89e07b4709ddaf1234b73b404c0b0"} Oct 09 14:58:38 crc kubenswrapper[4762]: I1009 14:58:38.544174 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-fjclb" Oct 09 14:58:38 crc kubenswrapper[4762]: I1009 14:58:38.585118 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-fjclb"] Oct 09 14:58:39 crc kubenswrapper[4762]: I1009 14:58:39.889627 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-bm22n" Oct 09 14:58:40 crc kubenswrapper[4762]: I1009 14:58:40.071129 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/d7419200-74d1-4b46-9d46-2d143ae77596-db-sync-config-data\") pod \"d7419200-74d1-4b46-9d46-2d143ae77596\" (UID: \"d7419200-74d1-4b46-9d46-2d143ae77596\") " Oct 09 14:58:40 crc kubenswrapper[4762]: I1009 14:58:40.071278 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jjf7t\" (UniqueName: \"kubernetes.io/projected/d7419200-74d1-4b46-9d46-2d143ae77596-kube-api-access-jjf7t\") pod \"d7419200-74d1-4b46-9d46-2d143ae77596\" (UID: \"d7419200-74d1-4b46-9d46-2d143ae77596\") " Oct 09 14:58:40 crc kubenswrapper[4762]: I1009 14:58:40.071331 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7419200-74d1-4b46-9d46-2d143ae77596-combined-ca-bundle\") pod \"d7419200-74d1-4b46-9d46-2d143ae77596\" (UID: \"d7419200-74d1-4b46-9d46-2d143ae77596\") " Oct 09 14:58:40 crc kubenswrapper[4762]: I1009 14:58:40.072106 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d7419200-74d1-4b46-9d46-2d143ae77596-config-data\") pod \"d7419200-74d1-4b46-9d46-2d143ae77596\" (UID: \"d7419200-74d1-4b46-9d46-2d143ae77596\") " Oct 09 14:58:40 crc kubenswrapper[4762]: I1009 14:58:40.076534 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d7419200-74d1-4b46-9d46-2d143ae77596-kube-api-access-jjf7t" (OuterVolumeSpecName: "kube-api-access-jjf7t") pod "d7419200-74d1-4b46-9d46-2d143ae77596" (UID: 
"d7419200-74d1-4b46-9d46-2d143ae77596"). InnerVolumeSpecName "kube-api-access-jjf7t". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 14:58:40 crc kubenswrapper[4762]: I1009 14:58:40.076649 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d7419200-74d1-4b46-9d46-2d143ae77596-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "d7419200-74d1-4b46-9d46-2d143ae77596" (UID: "d7419200-74d1-4b46-9d46-2d143ae77596"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 14:58:40 crc kubenswrapper[4762]: I1009 14:58:40.097607 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d7419200-74d1-4b46-9d46-2d143ae77596-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d7419200-74d1-4b46-9d46-2d143ae77596" (UID: "d7419200-74d1-4b46-9d46-2d143ae77596"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 14:58:40 crc kubenswrapper[4762]: I1009 14:58:40.113850 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d7419200-74d1-4b46-9d46-2d143ae77596-config-data" (OuterVolumeSpecName: "config-data") pod "d7419200-74d1-4b46-9d46-2d143ae77596" (UID: "d7419200-74d1-4b46-9d46-2d143ae77596"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 14:58:40 crc kubenswrapper[4762]: I1009 14:58:40.175235 4762 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/d7419200-74d1-4b46-9d46-2d143ae77596-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 14:58:40 crc kubenswrapper[4762]: I1009 14:58:40.175271 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jjf7t\" (UniqueName: \"kubernetes.io/projected/d7419200-74d1-4b46-9d46-2d143ae77596-kube-api-access-jjf7t\") on node \"crc\" DevicePath \"\"" Oct 09 14:58:40 crc kubenswrapper[4762]: I1009 14:58:40.175282 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7419200-74d1-4b46-9d46-2d143ae77596-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 14:58:40 crc kubenswrapper[4762]: I1009 14:58:40.175291 4762 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d7419200-74d1-4b46-9d46-2d143ae77596-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 14:58:40 crc kubenswrapper[4762]: I1009 14:58:40.511562 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-bm22n" event={"ID":"d7419200-74d1-4b46-9d46-2d143ae77596","Type":"ContainerDied","Data":"1965a44baf4262d637fbea1efe728542351e769fb71a72ff9daa675f78c94c65"} Oct 09 14:58:40 crc kubenswrapper[4762]: I1009 14:58:40.511621 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1965a44baf4262d637fbea1efe728542351e769fb71a72ff9daa675f78c94c65" Oct 09 14:58:40 crc kubenswrapper[4762]: I1009 14:58:40.511731 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-fjclb" podUID="bc1c3fa6-6bbc-4990-8679-567f609f7652" containerName="registry-server" containerID="cri-o://4fe34c891b2e1da0bebcd7c66141a1b99599fa179d54fdb47fd0b78d427a3004" gracePeriod=2 Oct 09 14:58:40 crc kubenswrapper[4762]: I1009 14:58:40.512150 4762 util.go:48] "No ready sandbox for 
pod can be found. Need to start a new one" pod="openstack/glance-db-sync-bm22n" Oct 09 14:58:40 crc kubenswrapper[4762]: I1009 14:58:40.903603 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Oct 09 14:58:40 crc kubenswrapper[4762]: E1009 14:58:40.904379 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d7419200-74d1-4b46-9d46-2d143ae77596" containerName="glance-db-sync" Oct 09 14:58:40 crc kubenswrapper[4762]: I1009 14:58:40.904393 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="d7419200-74d1-4b46-9d46-2d143ae77596" containerName="glance-db-sync" Oct 09 14:58:40 crc kubenswrapper[4762]: I1009 14:58:40.904549 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="d7419200-74d1-4b46-9d46-2d143ae77596" containerName="glance-db-sync" Oct 09 14:58:40 crc kubenswrapper[4762]: I1009 14:58:40.905525 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 09 14:58:40 crc kubenswrapper[4762]: I1009 14:58:40.907612 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Oct 09 14:58:40 crc kubenswrapper[4762]: I1009 14:58:40.910097 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Oct 09 14:58:40 crc kubenswrapper[4762]: I1009 14:58:40.916478 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Oct 09 14:58:40 crc kubenswrapper[4762]: I1009 14:58:40.916633 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-zpb7m" Oct 09 14:58:40 crc kubenswrapper[4762]: I1009 14:58:40.943598 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 09 14:58:40 crc kubenswrapper[4762]: I1009 14:58:40.959719 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-8455bb8bd7-nc52k"] Oct 09 14:58:40 crc kubenswrapper[4762]: I1009 14:58:40.961219 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8455bb8bd7-nc52k" Oct 09 14:58:40 crc kubenswrapper[4762]: I1009 14:58:40.983556 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8455bb8bd7-nc52k"] Oct 09 14:58:40 crc kubenswrapper[4762]: I1009 14:58:40.991701 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-fjclb" Oct 09 14:58:41 crc kubenswrapper[4762]: I1009 14:58:41.064715 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 09 14:58:41 crc kubenswrapper[4762]: E1009 14:58:41.065060 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bc1c3fa6-6bbc-4990-8679-567f609f7652" containerName="extract-content" Oct 09 14:58:41 crc kubenswrapper[4762]: I1009 14:58:41.065077 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="bc1c3fa6-6bbc-4990-8679-567f609f7652" containerName="extract-content" Oct 09 14:58:41 crc kubenswrapper[4762]: E1009 14:58:41.065092 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bc1c3fa6-6bbc-4990-8679-567f609f7652" containerName="registry-server" Oct 09 14:58:41 crc kubenswrapper[4762]: I1009 14:58:41.065099 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="bc1c3fa6-6bbc-4990-8679-567f609f7652" containerName="registry-server" Oct 09 14:58:41 crc kubenswrapper[4762]: E1009 14:58:41.065135 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bc1c3fa6-6bbc-4990-8679-567f609f7652" containerName="extract-utilities" Oct 09 14:58:41 crc kubenswrapper[4762]: I1009 14:58:41.065144 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="bc1c3fa6-6bbc-4990-8679-567f609f7652" containerName="extract-utilities" Oct 09 14:58:41 crc kubenswrapper[4762]: I1009 14:58:41.065689 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="bc1c3fa6-6bbc-4990-8679-567f609f7652" containerName="registry-server" Oct 09 14:58:41 crc kubenswrapper[4762]: I1009 14:58:41.066849 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 09 14:58:41 crc kubenswrapper[4762]: I1009 14:58:41.069334 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Oct 09 14:58:41 crc kubenswrapper[4762]: I1009 14:58:41.083354 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 09 14:58:41 crc kubenswrapper[4762]: I1009 14:58:41.089552 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7mq67\" (UniqueName: \"kubernetes.io/projected/bc1c3fa6-6bbc-4990-8679-567f609f7652-kube-api-access-7mq67\") pod \"bc1c3fa6-6bbc-4990-8679-567f609f7652\" (UID: \"bc1c3fa6-6bbc-4990-8679-567f609f7652\") " Oct 09 14:58:41 crc kubenswrapper[4762]: I1009 14:58:41.089695 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bc1c3fa6-6bbc-4990-8679-567f609f7652-utilities\") pod \"bc1c3fa6-6bbc-4990-8679-567f609f7652\" (UID: \"bc1c3fa6-6bbc-4990-8679-567f609f7652\") " Oct 09 14:58:41 crc kubenswrapper[4762]: I1009 14:58:41.089740 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bc1c3fa6-6bbc-4990-8679-567f609f7652-catalog-content\") pod \"bc1c3fa6-6bbc-4990-8679-567f609f7652\" (UID: \"bc1c3fa6-6bbc-4990-8679-567f609f7652\") " Oct 09 14:58:41 crc kubenswrapper[4762]: I1009 14:58:41.090047 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/34f7541e-028e-454c-9af5-1e8da71ddfc5-logs\") pod \"glance-default-external-api-0\" (UID: 
\"34f7541e-028e-454c-9af5-1e8da71ddfc5\") " pod="openstack/glance-default-external-api-0" Oct 09 14:58:41 crc kubenswrapper[4762]: I1009 14:58:41.090086 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/34f7541e-028e-454c-9af5-1e8da71ddfc5-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"34f7541e-028e-454c-9af5-1e8da71ddfc5\") " pod="openstack/glance-default-external-api-0" Oct 09 14:58:41 crc kubenswrapper[4762]: I1009 14:58:41.090120 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/34f7541e-028e-454c-9af5-1e8da71ddfc5-ceph\") pod \"glance-default-external-api-0\" (UID: \"34f7541e-028e-454c-9af5-1e8da71ddfc5\") " pod="openstack/glance-default-external-api-0" Oct 09 14:58:41 crc kubenswrapper[4762]: I1009 14:58:41.090149 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/537e6301-d98a-4829-b0e0-7d2b4dd95d6c-config\") pod \"dnsmasq-dns-8455bb8bd7-nc52k\" (UID: \"537e6301-d98a-4829-b0e0-7d2b4dd95d6c\") " pod="openstack/dnsmasq-dns-8455bb8bd7-nc52k" Oct 09 14:58:41 crc kubenswrapper[4762]: I1009 14:58:41.090178 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pbt6q\" (UniqueName: \"kubernetes.io/projected/34f7541e-028e-454c-9af5-1e8da71ddfc5-kube-api-access-pbt6q\") pod \"glance-default-external-api-0\" (UID: \"34f7541e-028e-454c-9af5-1e8da71ddfc5\") " pod="openstack/glance-default-external-api-0" Oct 09 14:58:41 crc kubenswrapper[4762]: I1009 14:58:41.090204 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/537e6301-d98a-4829-b0e0-7d2b4dd95d6c-dns-svc\") pod \"dnsmasq-dns-8455bb8bd7-nc52k\" (UID: \"537e6301-d98a-4829-b0e0-7d2b4dd95d6c\") " pod="openstack/dnsmasq-dns-8455bb8bd7-nc52k" Oct 09 14:58:41 crc kubenswrapper[4762]: I1009 14:58:41.090226 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/537e6301-d98a-4829-b0e0-7d2b4dd95d6c-ovsdbserver-nb\") pod \"dnsmasq-dns-8455bb8bd7-nc52k\" (UID: \"537e6301-d98a-4829-b0e0-7d2b4dd95d6c\") " pod="openstack/dnsmasq-dns-8455bb8bd7-nc52k" Oct 09 14:58:41 crc kubenswrapper[4762]: I1009 14:58:41.090248 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vbjpv\" (UniqueName: \"kubernetes.io/projected/537e6301-d98a-4829-b0e0-7d2b4dd95d6c-kube-api-access-vbjpv\") pod \"dnsmasq-dns-8455bb8bd7-nc52k\" (UID: \"537e6301-d98a-4829-b0e0-7d2b4dd95d6c\") " pod="openstack/dnsmasq-dns-8455bb8bd7-nc52k" Oct 09 14:58:41 crc kubenswrapper[4762]: I1009 14:58:41.090270 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/34f7541e-028e-454c-9af5-1e8da71ddfc5-scripts\") pod \"glance-default-external-api-0\" (UID: \"34f7541e-028e-454c-9af5-1e8da71ddfc5\") " pod="openstack/glance-default-external-api-0" Oct 09 14:58:41 crc kubenswrapper[4762]: I1009 14:58:41.090330 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/34f7541e-028e-454c-9af5-1e8da71ddfc5-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"34f7541e-028e-454c-9af5-1e8da71ddfc5\") " pod="openstack/glance-default-external-api-0" Oct 09 14:58:41 crc kubenswrapper[4762]: I1009 14:58:41.090378 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/537e6301-d98a-4829-b0e0-7d2b4dd95d6c-ovsdbserver-sb\") pod \"dnsmasq-dns-8455bb8bd7-nc52k\" (UID: \"537e6301-d98a-4829-b0e0-7d2b4dd95d6c\") " pod="openstack/dnsmasq-dns-8455bb8bd7-nc52k" Oct 09 14:58:41 crc kubenswrapper[4762]: I1009 14:58:41.090438 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/34f7541e-028e-454c-9af5-1e8da71ddfc5-config-data\") pod \"glance-default-external-api-0\" (UID: \"34f7541e-028e-454c-9af5-1e8da71ddfc5\") " pod="openstack/glance-default-external-api-0" Oct 09 14:58:41 crc kubenswrapper[4762]: I1009 14:58:41.090502 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc1c3fa6-6bbc-4990-8679-567f609f7652-utilities" (OuterVolumeSpecName: "utilities") pod "bc1c3fa6-6bbc-4990-8679-567f609f7652" (UID: "bc1c3fa6-6bbc-4990-8679-567f609f7652"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 14:58:41 crc kubenswrapper[4762]: I1009 14:58:41.091708 4762 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bc1c3fa6-6bbc-4990-8679-567f609f7652-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 14:58:41 crc kubenswrapper[4762]: I1009 14:58:41.094123 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc1c3fa6-6bbc-4990-8679-567f609f7652-kube-api-access-7mq67" (OuterVolumeSpecName: "kube-api-access-7mq67") pod "bc1c3fa6-6bbc-4990-8679-567f609f7652" (UID: "bc1c3fa6-6bbc-4990-8679-567f609f7652"). InnerVolumeSpecName "kube-api-access-7mq67". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 14:58:41 crc kubenswrapper[4762]: I1009 14:58:41.153796 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc1c3fa6-6bbc-4990-8679-567f609f7652-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "bc1c3fa6-6bbc-4990-8679-567f609f7652" (UID: "bc1c3fa6-6bbc-4990-8679-567f609f7652"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 14:58:41 crc kubenswrapper[4762]: I1009 14:58:41.194731 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0e79db9a-ff45-4f0e-ba46-517a0caa6ba7-scripts\") pod \"glance-default-internal-api-0\" (UID: \"0e79db9a-ff45-4f0e-ba46-517a0caa6ba7\") " pod="openstack/glance-default-internal-api-0" Oct 09 14:58:41 crc kubenswrapper[4762]: I1009 14:58:41.194801 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dc979\" (UniqueName: \"kubernetes.io/projected/0e79db9a-ff45-4f0e-ba46-517a0caa6ba7-kube-api-access-dc979\") pod \"glance-default-internal-api-0\" (UID: \"0e79db9a-ff45-4f0e-ba46-517a0caa6ba7\") " pod="openstack/glance-default-internal-api-0" Oct 09 14:58:41 crc kubenswrapper[4762]: I1009 14:58:41.194835 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/537e6301-d98a-4829-b0e0-7d2b4dd95d6c-ovsdbserver-sb\") pod \"dnsmasq-dns-8455bb8bd7-nc52k\" (UID: \"537e6301-d98a-4829-b0e0-7d2b4dd95d6c\") " pod="openstack/dnsmasq-dns-8455bb8bd7-nc52k" Oct 09 14:58:41 crc kubenswrapper[4762]: I1009 14:58:41.194882 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/34f7541e-028e-454c-9af5-1e8da71ddfc5-config-data\") pod \"glance-default-external-api-0\" (UID: \"34f7541e-028e-454c-9af5-1e8da71ddfc5\") " pod="openstack/glance-default-external-api-0" Oct 09 14:58:41 crc kubenswrapper[4762]: I1009 14:58:41.194930 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/0e79db9a-ff45-4f0e-ba46-517a0caa6ba7-ceph\") pod \"glance-default-internal-api-0\" (UID: \"0e79db9a-ff45-4f0e-ba46-517a0caa6ba7\") " pod="openstack/glance-default-internal-api-0" Oct 09 14:58:41 crc kubenswrapper[4762]: I1009 14:58:41.194966 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0e79db9a-ff45-4f0e-ba46-517a0caa6ba7-logs\") pod \"glance-default-internal-api-0\" (UID: \"0e79db9a-ff45-4f0e-ba46-517a0caa6ba7\") " pod="openstack/glance-default-internal-api-0" Oct 09 14:58:41 crc kubenswrapper[4762]: I1009 14:58:41.195036 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/34f7541e-028e-454c-9af5-1e8da71ddfc5-logs\") pod \"glance-default-external-api-0\" (UID: \"34f7541e-028e-454c-9af5-1e8da71ddfc5\") " pod="openstack/glance-default-external-api-0" Oct 09 14:58:41 crc kubenswrapper[4762]: I1009 14:58:41.195060 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/34f7541e-028e-454c-9af5-1e8da71ddfc5-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"34f7541e-028e-454c-9af5-1e8da71ddfc5\") " pod="openstack/glance-default-external-api-0" Oct 09 14:58:41 crc kubenswrapper[4762]: I1009 14:58:41.195096 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/34f7541e-028e-454c-9af5-1e8da71ddfc5-ceph\") pod \"glance-default-external-api-0\" (UID: \"34f7541e-028e-454c-9af5-1e8da71ddfc5\") " pod="openstack/glance-default-external-api-0" Oct 09 
14:58:41 crc kubenswrapper[4762]: I1009 14:58:41.195121 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0e79db9a-ff45-4f0e-ba46-517a0caa6ba7-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"0e79db9a-ff45-4f0e-ba46-517a0caa6ba7\") " pod="openstack/glance-default-internal-api-0" Oct 09 14:58:41 crc kubenswrapper[4762]: I1009 14:58:41.195152 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/537e6301-d98a-4829-b0e0-7d2b4dd95d6c-config\") pod \"dnsmasq-dns-8455bb8bd7-nc52k\" (UID: \"537e6301-d98a-4829-b0e0-7d2b4dd95d6c\") " pod="openstack/dnsmasq-dns-8455bb8bd7-nc52k" Oct 09 14:58:41 crc kubenswrapper[4762]: I1009 14:58:41.195179 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pbt6q\" (UniqueName: \"kubernetes.io/projected/34f7541e-028e-454c-9af5-1e8da71ddfc5-kube-api-access-pbt6q\") pod \"glance-default-external-api-0\" (UID: \"34f7541e-028e-454c-9af5-1e8da71ddfc5\") " pod="openstack/glance-default-external-api-0" Oct 09 14:58:41 crc kubenswrapper[4762]: I1009 14:58:41.195208 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/537e6301-d98a-4829-b0e0-7d2b4dd95d6c-dns-svc\") pod \"dnsmasq-dns-8455bb8bd7-nc52k\" (UID: \"537e6301-d98a-4829-b0e0-7d2b4dd95d6c\") " pod="openstack/dnsmasq-dns-8455bb8bd7-nc52k" Oct 09 14:58:41 crc kubenswrapper[4762]: I1009 14:58:41.195229 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vbjpv\" (UniqueName: \"kubernetes.io/projected/537e6301-d98a-4829-b0e0-7d2b4dd95d6c-kube-api-access-vbjpv\") pod \"dnsmasq-dns-8455bb8bd7-nc52k\" (UID: \"537e6301-d98a-4829-b0e0-7d2b4dd95d6c\") " pod="openstack/dnsmasq-dns-8455bb8bd7-nc52k" Oct 09 14:58:41 crc kubenswrapper[4762]: I1009 14:58:41.195251 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/34f7541e-028e-454c-9af5-1e8da71ddfc5-scripts\") pod \"glance-default-external-api-0\" (UID: \"34f7541e-028e-454c-9af5-1e8da71ddfc5\") " pod="openstack/glance-default-external-api-0" Oct 09 14:58:41 crc kubenswrapper[4762]: I1009 14:58:41.195273 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/537e6301-d98a-4829-b0e0-7d2b4dd95d6c-ovsdbserver-nb\") pod \"dnsmasq-dns-8455bb8bd7-nc52k\" (UID: \"537e6301-d98a-4829-b0e0-7d2b4dd95d6c\") " pod="openstack/dnsmasq-dns-8455bb8bd7-nc52k" Oct 09 14:58:41 crc kubenswrapper[4762]: I1009 14:58:41.195296 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0e79db9a-ff45-4f0e-ba46-517a0caa6ba7-config-data\") pod \"glance-default-internal-api-0\" (UID: \"0e79db9a-ff45-4f0e-ba46-517a0caa6ba7\") " pod="openstack/glance-default-internal-api-0" Oct 09 14:58:41 crc kubenswrapper[4762]: I1009 14:58:41.195333 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0e79db9a-ff45-4f0e-ba46-517a0caa6ba7-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"0e79db9a-ff45-4f0e-ba46-517a0caa6ba7\") " pod="openstack/glance-default-internal-api-0" Oct 09 14:58:41 crc 
kubenswrapper[4762]: I1009 14:58:41.195385 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/34f7541e-028e-454c-9af5-1e8da71ddfc5-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"34f7541e-028e-454c-9af5-1e8da71ddfc5\") " pod="openstack/glance-default-external-api-0" Oct 09 14:58:41 crc kubenswrapper[4762]: I1009 14:58:41.195451 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7mq67\" (UniqueName: \"kubernetes.io/projected/bc1c3fa6-6bbc-4990-8679-567f609f7652-kube-api-access-7mq67\") on node \"crc\" DevicePath \"\"" Oct 09 14:58:41 crc kubenswrapper[4762]: I1009 14:58:41.195469 4762 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bc1c3fa6-6bbc-4990-8679-567f609f7652-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 14:58:41 crc kubenswrapper[4762]: I1009 14:58:41.197527 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/34f7541e-028e-454c-9af5-1e8da71ddfc5-logs\") pod \"glance-default-external-api-0\" (UID: \"34f7541e-028e-454c-9af5-1e8da71ddfc5\") " pod="openstack/glance-default-external-api-0" Oct 09 14:58:41 crc kubenswrapper[4762]: I1009 14:58:41.197703 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/34f7541e-028e-454c-9af5-1e8da71ddfc5-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"34f7541e-028e-454c-9af5-1e8da71ddfc5\") " pod="openstack/glance-default-external-api-0" Oct 09 14:58:41 crc kubenswrapper[4762]: I1009 14:58:41.197743 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/537e6301-d98a-4829-b0e0-7d2b4dd95d6c-ovsdbserver-nb\") pod \"dnsmasq-dns-8455bb8bd7-nc52k\" (UID: \"537e6301-d98a-4829-b0e0-7d2b4dd95d6c\") " pod="openstack/dnsmasq-dns-8455bb8bd7-nc52k" Oct 09 14:58:41 crc kubenswrapper[4762]: I1009 14:58:41.198191 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/537e6301-d98a-4829-b0e0-7d2b4dd95d6c-ovsdbserver-sb\") pod \"dnsmasq-dns-8455bb8bd7-nc52k\" (UID: \"537e6301-d98a-4829-b0e0-7d2b4dd95d6c\") " pod="openstack/dnsmasq-dns-8455bb8bd7-nc52k" Oct 09 14:58:41 crc kubenswrapper[4762]: I1009 14:58:41.198334 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/537e6301-d98a-4829-b0e0-7d2b4dd95d6c-config\") pod \"dnsmasq-dns-8455bb8bd7-nc52k\" (UID: \"537e6301-d98a-4829-b0e0-7d2b4dd95d6c\") " pod="openstack/dnsmasq-dns-8455bb8bd7-nc52k" Oct 09 14:58:41 crc kubenswrapper[4762]: I1009 14:58:41.199054 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/537e6301-d98a-4829-b0e0-7d2b4dd95d6c-dns-svc\") pod \"dnsmasq-dns-8455bb8bd7-nc52k\" (UID: \"537e6301-d98a-4829-b0e0-7d2b4dd95d6c\") " pod="openstack/dnsmasq-dns-8455bb8bd7-nc52k" Oct 09 14:58:41 crc kubenswrapper[4762]: I1009 14:58:41.204412 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/34f7541e-028e-454c-9af5-1e8da71ddfc5-config-data\") pod \"glance-default-external-api-0\" (UID: \"34f7541e-028e-454c-9af5-1e8da71ddfc5\") " pod="openstack/glance-default-external-api-0" Oct 09 14:58:41 crc 
kubenswrapper[4762]: I1009 14:58:41.205961 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/34f7541e-028e-454c-9af5-1e8da71ddfc5-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"34f7541e-028e-454c-9af5-1e8da71ddfc5\") " pod="openstack/glance-default-external-api-0" Oct 09 14:58:41 crc kubenswrapper[4762]: I1009 14:58:41.208708 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/34f7541e-028e-454c-9af5-1e8da71ddfc5-scripts\") pod \"glance-default-external-api-0\" (UID: \"34f7541e-028e-454c-9af5-1e8da71ddfc5\") " pod="openstack/glance-default-external-api-0" Oct 09 14:58:41 crc kubenswrapper[4762]: I1009 14:58:41.215048 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vbjpv\" (UniqueName: \"kubernetes.io/projected/537e6301-d98a-4829-b0e0-7d2b4dd95d6c-kube-api-access-vbjpv\") pod \"dnsmasq-dns-8455bb8bd7-nc52k\" (UID: \"537e6301-d98a-4829-b0e0-7d2b4dd95d6c\") " pod="openstack/dnsmasq-dns-8455bb8bd7-nc52k" Oct 09 14:58:41 crc kubenswrapper[4762]: I1009 14:58:41.215313 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pbt6q\" (UniqueName: \"kubernetes.io/projected/34f7541e-028e-454c-9af5-1e8da71ddfc5-kube-api-access-pbt6q\") pod \"glance-default-external-api-0\" (UID: \"34f7541e-028e-454c-9af5-1e8da71ddfc5\") " pod="openstack/glance-default-external-api-0" Oct 09 14:58:41 crc kubenswrapper[4762]: I1009 14:58:41.217677 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/34f7541e-028e-454c-9af5-1e8da71ddfc5-ceph\") pod \"glance-default-external-api-0\" (UID: \"34f7541e-028e-454c-9af5-1e8da71ddfc5\") " pod="openstack/glance-default-external-api-0" Oct 09 14:58:41 crc kubenswrapper[4762]: I1009 14:58:41.296969 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0e79db9a-ff45-4f0e-ba46-517a0caa6ba7-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"0e79db9a-ff45-4f0e-ba46-517a0caa6ba7\") " pod="openstack/glance-default-internal-api-0" Oct 09 14:58:41 crc kubenswrapper[4762]: I1009 14:58:41.297038 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0e79db9a-ff45-4f0e-ba46-517a0caa6ba7-scripts\") pod \"glance-default-internal-api-0\" (UID: \"0e79db9a-ff45-4f0e-ba46-517a0caa6ba7\") " pod="openstack/glance-default-internal-api-0" Oct 09 14:58:41 crc kubenswrapper[4762]: I1009 14:58:41.297069 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dc979\" (UniqueName: \"kubernetes.io/projected/0e79db9a-ff45-4f0e-ba46-517a0caa6ba7-kube-api-access-dc979\") pod \"glance-default-internal-api-0\" (UID: \"0e79db9a-ff45-4f0e-ba46-517a0caa6ba7\") " pod="openstack/glance-default-internal-api-0" Oct 09 14:58:41 crc kubenswrapper[4762]: I1009 14:58:41.297117 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/0e79db9a-ff45-4f0e-ba46-517a0caa6ba7-ceph\") pod \"glance-default-internal-api-0\" (UID: \"0e79db9a-ff45-4f0e-ba46-517a0caa6ba7\") " pod="openstack/glance-default-internal-api-0" Oct 09 14:58:41 crc kubenswrapper[4762]: I1009 14:58:41.297139 4762 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0e79db9a-ff45-4f0e-ba46-517a0caa6ba7-logs\") pod \"glance-default-internal-api-0\" (UID: \"0e79db9a-ff45-4f0e-ba46-517a0caa6ba7\") " pod="openstack/glance-default-internal-api-0" Oct 09 14:58:41 crc kubenswrapper[4762]: I1009 14:58:41.297182 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0e79db9a-ff45-4f0e-ba46-517a0caa6ba7-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"0e79db9a-ff45-4f0e-ba46-517a0caa6ba7\") " pod="openstack/glance-default-internal-api-0" Oct 09 14:58:41 crc kubenswrapper[4762]: I1009 14:58:41.297209 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0e79db9a-ff45-4f0e-ba46-517a0caa6ba7-config-data\") pod \"glance-default-internal-api-0\" (UID: \"0e79db9a-ff45-4f0e-ba46-517a0caa6ba7\") " pod="openstack/glance-default-internal-api-0" Oct 09 14:58:41 crc kubenswrapper[4762]: I1009 14:58:41.298179 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0e79db9a-ff45-4f0e-ba46-517a0caa6ba7-logs\") pod \"glance-default-internal-api-0\" (UID: \"0e79db9a-ff45-4f0e-ba46-517a0caa6ba7\") " pod="openstack/glance-default-internal-api-0" Oct 09 14:58:41 crc kubenswrapper[4762]: I1009 14:58:41.298199 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0e79db9a-ff45-4f0e-ba46-517a0caa6ba7-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"0e79db9a-ff45-4f0e-ba46-517a0caa6ba7\") " pod="openstack/glance-default-internal-api-0" Oct 09 14:58:41 crc kubenswrapper[4762]: I1009 14:58:41.300941 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0e79db9a-ff45-4f0e-ba46-517a0caa6ba7-scripts\") pod \"glance-default-internal-api-0\" (UID: \"0e79db9a-ff45-4f0e-ba46-517a0caa6ba7\") " pod="openstack/glance-default-internal-api-0" Oct 09 14:58:41 crc kubenswrapper[4762]: I1009 14:58:41.301249 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/0e79db9a-ff45-4f0e-ba46-517a0caa6ba7-ceph\") pod \"glance-default-internal-api-0\" (UID: \"0e79db9a-ff45-4f0e-ba46-517a0caa6ba7\") " pod="openstack/glance-default-internal-api-0" Oct 09 14:58:41 crc kubenswrapper[4762]: I1009 14:58:41.301796 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0e79db9a-ff45-4f0e-ba46-517a0caa6ba7-config-data\") pod \"glance-default-internal-api-0\" (UID: \"0e79db9a-ff45-4f0e-ba46-517a0caa6ba7\") " pod="openstack/glance-default-internal-api-0" Oct 09 14:58:41 crc kubenswrapper[4762]: I1009 14:58:41.301964 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0e79db9a-ff45-4f0e-ba46-517a0caa6ba7-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"0e79db9a-ff45-4f0e-ba46-517a0caa6ba7\") " pod="openstack/glance-default-internal-api-0" Oct 09 14:58:41 crc kubenswrapper[4762]: I1009 14:58:41.311436 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 09 14:58:41 crc kubenswrapper[4762]: I1009 14:58:41.313705 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8455bb8bd7-nc52k" Oct 09 14:58:41 crc kubenswrapper[4762]: I1009 14:58:41.317463 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dc979\" (UniqueName: \"kubernetes.io/projected/0e79db9a-ff45-4f0e-ba46-517a0caa6ba7-kube-api-access-dc979\") pod \"glance-default-internal-api-0\" (UID: \"0e79db9a-ff45-4f0e-ba46-517a0caa6ba7\") " pod="openstack/glance-default-internal-api-0" Oct 09 14:58:41 crc kubenswrapper[4762]: I1009 14:58:41.389750 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 09 14:58:41 crc kubenswrapper[4762]: I1009 14:58:41.526577 4762 generic.go:334] "Generic (PLEG): container finished" podID="bc1c3fa6-6bbc-4990-8679-567f609f7652" containerID="4fe34c891b2e1da0bebcd7c66141a1b99599fa179d54fdb47fd0b78d427a3004" exitCode=0 Oct 09 14:58:41 crc kubenswrapper[4762]: I1009 14:58:41.526713 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fjclb" event={"ID":"bc1c3fa6-6bbc-4990-8679-567f609f7652","Type":"ContainerDied","Data":"4fe34c891b2e1da0bebcd7c66141a1b99599fa179d54fdb47fd0b78d427a3004"} Oct 09 14:58:41 crc kubenswrapper[4762]: I1009 14:58:41.526776 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fjclb" event={"ID":"bc1c3fa6-6bbc-4990-8679-567f609f7652","Type":"ContainerDied","Data":"1089547e3e830f35e723d923ce171a51d20590b2572ea665ef6101c41ae482c9"} Oct 09 14:58:41 crc kubenswrapper[4762]: I1009 14:58:41.526800 4762 scope.go:117] "RemoveContainer" containerID="4fe34c891b2e1da0bebcd7c66141a1b99599fa179d54fdb47fd0b78d427a3004" Oct 09 14:58:41 crc kubenswrapper[4762]: I1009 14:58:41.527014 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-fjclb" Oct 09 14:58:41 crc kubenswrapper[4762]: I1009 14:58:41.568307 4762 scope.go:117] "RemoveContainer" containerID="17e570635b2c9f7861675ecc790d3e131558fee0deb8eed25ec91d69113d92d3" Oct 09 14:58:41 crc kubenswrapper[4762]: I1009 14:58:41.578508 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-fjclb"] Oct 09 14:58:41 crc kubenswrapper[4762]: I1009 14:58:41.594271 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-fjclb"] Oct 09 14:58:41 crc kubenswrapper[4762]: I1009 14:58:41.599665 4762 scope.go:117] "RemoveContainer" containerID="ed62c43833074ee952d4e0b4293f92062fe5d53e53a9c0c2e72d422f3c66059b" Oct 09 14:58:41 crc kubenswrapper[4762]: I1009 14:58:41.622444 4762 scope.go:117] "RemoveContainer" containerID="4fe34c891b2e1da0bebcd7c66141a1b99599fa179d54fdb47fd0b78d427a3004" Oct 09 14:58:41 crc kubenswrapper[4762]: E1009 14:58:41.622878 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4fe34c891b2e1da0bebcd7c66141a1b99599fa179d54fdb47fd0b78d427a3004\": container with ID starting with 4fe34c891b2e1da0bebcd7c66141a1b99599fa179d54fdb47fd0b78d427a3004 not found: ID does not exist" containerID="4fe34c891b2e1da0bebcd7c66141a1b99599fa179d54fdb47fd0b78d427a3004" Oct 09 14:58:41 crc kubenswrapper[4762]: I1009 14:58:41.622913 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4fe34c891b2e1da0bebcd7c66141a1b99599fa179d54fdb47fd0b78d427a3004"} err="failed to get container status \"4fe34c891b2e1da0bebcd7c66141a1b99599fa179d54fdb47fd0b78d427a3004\": rpc error: code = NotFound desc = could not find container \"4fe34c891b2e1da0bebcd7c66141a1b99599fa179d54fdb47fd0b78d427a3004\": container with ID starting with 4fe34c891b2e1da0bebcd7c66141a1b99599fa179d54fdb47fd0b78d427a3004 not found: ID does not exist" Oct 09 14:58:41 crc kubenswrapper[4762]: I1009 14:58:41.622939 4762 scope.go:117] "RemoveContainer" containerID="17e570635b2c9f7861675ecc790d3e131558fee0deb8eed25ec91d69113d92d3" Oct 09 14:58:41 crc kubenswrapper[4762]: E1009 14:58:41.623171 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"17e570635b2c9f7861675ecc790d3e131558fee0deb8eed25ec91d69113d92d3\": container with ID starting with 17e570635b2c9f7861675ecc790d3e131558fee0deb8eed25ec91d69113d92d3 not found: ID does not exist" containerID="17e570635b2c9f7861675ecc790d3e131558fee0deb8eed25ec91d69113d92d3" Oct 09 14:58:41 crc kubenswrapper[4762]: I1009 14:58:41.623192 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"17e570635b2c9f7861675ecc790d3e131558fee0deb8eed25ec91d69113d92d3"} err="failed to get container status \"17e570635b2c9f7861675ecc790d3e131558fee0deb8eed25ec91d69113d92d3\": rpc error: code = NotFound desc = could not find container \"17e570635b2c9f7861675ecc790d3e131558fee0deb8eed25ec91d69113d92d3\": container with ID starting with 17e570635b2c9f7861675ecc790d3e131558fee0deb8eed25ec91d69113d92d3 not found: ID does not exist" Oct 09 14:58:41 crc kubenswrapper[4762]: I1009 14:58:41.623208 4762 scope.go:117] "RemoveContainer" containerID="ed62c43833074ee952d4e0b4293f92062fe5d53e53a9c0c2e72d422f3c66059b" Oct 09 14:58:41 crc kubenswrapper[4762]: E1009 14:58:41.623876 4762 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"ed62c43833074ee952d4e0b4293f92062fe5d53e53a9c0c2e72d422f3c66059b\": container with ID starting with ed62c43833074ee952d4e0b4293f92062fe5d53e53a9c0c2e72d422f3c66059b not found: ID does not exist" containerID="ed62c43833074ee952d4e0b4293f92062fe5d53e53a9c0c2e72d422f3c66059b" Oct 09 14:58:41 crc kubenswrapper[4762]: I1009 14:58:41.623906 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ed62c43833074ee952d4e0b4293f92062fe5d53e53a9c0c2e72d422f3c66059b"} err="failed to get container status \"ed62c43833074ee952d4e0b4293f92062fe5d53e53a9c0c2e72d422f3c66059b\": rpc error: code = NotFound desc = could not find container \"ed62c43833074ee952d4e0b4293f92062fe5d53e53a9c0c2e72d422f3c66059b\": container with ID starting with ed62c43833074ee952d4e0b4293f92062fe5d53e53a9c0c2e72d422f3c66059b not found: ID does not exist" Oct 09 14:58:41 crc kubenswrapper[4762]: I1009 14:58:41.902583 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8455bb8bd7-nc52k"] Oct 09 14:58:41 crc kubenswrapper[4762]: I1009 14:58:41.939747 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 09 14:58:41 crc kubenswrapper[4762]: I1009 14:58:41.965963 4762 scope.go:117] "RemoveContainer" containerID="c75ef9853f5fe1f6bce3930681cb05899e816bea18fa88b6c554eafd348de735" Oct 09 14:58:41 crc kubenswrapper[4762]: E1009 14:58:41.966257 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 14:58:42 crc kubenswrapper[4762]: I1009 14:58:42.037221 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 09 14:58:42 crc kubenswrapper[4762]: W1009 14:58:42.043917 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0e79db9a_ff45_4f0e_ba46_517a0caa6ba7.slice/crio-5712d5211a9a5ff901875bac2902f05e8c2dcedaff49339357e26d844f14d631 WatchSource:0}: Error finding container 5712d5211a9a5ff901875bac2902f05e8c2dcedaff49339357e26d844f14d631: Status 404 returned error can't find the container with id 5712d5211a9a5ff901875bac2902f05e8c2dcedaff49339357e26d844f14d631 Oct 09 14:58:42 crc kubenswrapper[4762]: I1009 14:58:42.329347 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 09 14:58:42 crc kubenswrapper[4762]: I1009 14:58:42.538316 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"0e79db9a-ff45-4f0e-ba46-517a0caa6ba7","Type":"ContainerStarted","Data":"5712d5211a9a5ff901875bac2902f05e8c2dcedaff49339357e26d844f14d631"} Oct 09 14:58:42 crc kubenswrapper[4762]: I1009 14:58:42.539266 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"34f7541e-028e-454c-9af5-1e8da71ddfc5","Type":"ContainerStarted","Data":"74a6a63ae86522d505463d653f75cf0ef8fa1bcb476590f3d17828913738a119"} Oct 09 14:58:42 crc kubenswrapper[4762]: I1009 14:58:42.540969 4762 generic.go:334] "Generic (PLEG): 
container finished" podID="537e6301-d98a-4829-b0e0-7d2b4dd95d6c" containerID="1b2393445ed6c4392bc1c84ab2db85d421b2d0118ccee1f08c96b74c53d745da" exitCode=0 Oct 09 14:58:42 crc kubenswrapper[4762]: I1009 14:58:42.541004 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8455bb8bd7-nc52k" event={"ID":"537e6301-d98a-4829-b0e0-7d2b4dd95d6c","Type":"ContainerDied","Data":"1b2393445ed6c4392bc1c84ab2db85d421b2d0118ccee1f08c96b74c53d745da"} Oct 09 14:58:42 crc kubenswrapper[4762]: I1009 14:58:42.541022 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8455bb8bd7-nc52k" event={"ID":"537e6301-d98a-4829-b0e0-7d2b4dd95d6c","Type":"ContainerStarted","Data":"44335e817904bc3789a5386e9dd7670b16128afcf353bdf2cc02d92069701c9a"} Oct 09 14:58:42 crc kubenswrapper[4762]: I1009 14:58:42.982910 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc1c3fa6-6bbc-4990-8679-567f609f7652" path="/var/lib/kubelet/pods/bc1c3fa6-6bbc-4990-8679-567f609f7652/volumes" Oct 09 14:58:43 crc kubenswrapper[4762]: I1009 14:58:43.552027 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"34f7541e-028e-454c-9af5-1e8da71ddfc5","Type":"ContainerStarted","Data":"1168268962ca9789532a6f2bd951b8fa59c2f79e35c99f1591d606323c7b0897"} Oct 09 14:58:43 crc kubenswrapper[4762]: I1009 14:58:43.552420 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"34f7541e-028e-454c-9af5-1e8da71ddfc5","Type":"ContainerStarted","Data":"0cc6cdef17b57ffd720e2a9e061816535e7f8d7509cee92c8d287beb3cbabe32"} Oct 09 14:58:43 crc kubenswrapper[4762]: I1009 14:58:43.552407 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="34f7541e-028e-454c-9af5-1e8da71ddfc5" containerName="glance-httpd" containerID="cri-o://1168268962ca9789532a6f2bd951b8fa59c2f79e35c99f1591d606323c7b0897" gracePeriod=30 Oct 09 14:58:43 crc kubenswrapper[4762]: I1009 14:58:43.552161 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="34f7541e-028e-454c-9af5-1e8da71ddfc5" containerName="glance-log" containerID="cri-o://0cc6cdef17b57ffd720e2a9e061816535e7f8d7509cee92c8d287beb3cbabe32" gracePeriod=30 Oct 09 14:58:43 crc kubenswrapper[4762]: I1009 14:58:43.558343 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8455bb8bd7-nc52k" event={"ID":"537e6301-d98a-4829-b0e0-7d2b4dd95d6c","Type":"ContainerStarted","Data":"5fc2587c187ed65683ae06f2ceb6b1405128364dbbac66c86648ffb4b05ecd57"} Oct 09 14:58:43 crc kubenswrapper[4762]: I1009 14:58:43.558429 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-8455bb8bd7-nc52k" Oct 09 14:58:43 crc kubenswrapper[4762]: I1009 14:58:43.561954 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"0e79db9a-ff45-4f0e-ba46-517a0caa6ba7","Type":"ContainerStarted","Data":"02b80b144664e3de0592816e589be5d4128ac9ef58facd9f22dbe768f74b744a"} Oct 09 14:58:43 crc kubenswrapper[4762]: I1009 14:58:43.562001 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"0e79db9a-ff45-4f0e-ba46-517a0caa6ba7","Type":"ContainerStarted","Data":"9d054963b9c07c65a31b51756d3a1d6710e87db664f00fda505d64e1da8bccc4"} Oct 09 14:58:43 crc kubenswrapper[4762]: I1009 
14:58:43.578150 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=3.578130583 podStartE2EDuration="3.578130583s" podCreationTimestamp="2025-10-09 14:58:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 14:58:43.575397492 +0000 UTC m=+5599.349188531" watchObservedRunningTime="2025-10-09 14:58:43.578130583 +0000 UTC m=+5599.351921622" Oct 09 14:58:43 crc kubenswrapper[4762]: I1009 14:58:43.597641 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-8455bb8bd7-nc52k" podStartSLOduration=3.597616161 podStartE2EDuration="3.597616161s" podCreationTimestamp="2025-10-09 14:58:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 14:58:43.595017853 +0000 UTC m=+5599.368808892" watchObservedRunningTime="2025-10-09 14:58:43.597616161 +0000 UTC m=+5599.371407200" Oct 09 14:58:43 crc kubenswrapper[4762]: I1009 14:58:43.613451 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=2.613432453 podStartE2EDuration="2.613432453s" podCreationTimestamp="2025-10-09 14:58:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 14:58:43.610429605 +0000 UTC m=+5599.384220644" watchObservedRunningTime="2025-10-09 14:58:43.613432453 +0000 UTC m=+5599.387223482" Oct 09 14:58:44 crc kubenswrapper[4762]: I1009 14:58:44.197894 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 09 14:58:44 crc kubenswrapper[4762]: I1009 14:58:44.359430 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pbt6q\" (UniqueName: \"kubernetes.io/projected/34f7541e-028e-454c-9af5-1e8da71ddfc5-kube-api-access-pbt6q\") pod \"34f7541e-028e-454c-9af5-1e8da71ddfc5\" (UID: \"34f7541e-028e-454c-9af5-1e8da71ddfc5\") " Oct 09 14:58:44 crc kubenswrapper[4762]: I1009 14:58:44.359731 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/34f7541e-028e-454c-9af5-1e8da71ddfc5-ceph\") pod \"34f7541e-028e-454c-9af5-1e8da71ddfc5\" (UID: \"34f7541e-028e-454c-9af5-1e8da71ddfc5\") " Oct 09 14:58:44 crc kubenswrapper[4762]: I1009 14:58:44.359844 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/34f7541e-028e-454c-9af5-1e8da71ddfc5-combined-ca-bundle\") pod \"34f7541e-028e-454c-9af5-1e8da71ddfc5\" (UID: \"34f7541e-028e-454c-9af5-1e8da71ddfc5\") " Oct 09 14:58:44 crc kubenswrapper[4762]: I1009 14:58:44.359964 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/34f7541e-028e-454c-9af5-1e8da71ddfc5-scripts\") pod \"34f7541e-028e-454c-9af5-1e8da71ddfc5\" (UID: \"34f7541e-028e-454c-9af5-1e8da71ddfc5\") " Oct 09 14:58:44 crc kubenswrapper[4762]: I1009 14:58:44.360006 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/34f7541e-028e-454c-9af5-1e8da71ddfc5-httpd-run\") pod \"34f7541e-028e-454c-9af5-1e8da71ddfc5\" (UID: \"34f7541e-028e-454c-9af5-1e8da71ddfc5\") " Oct 09 14:58:44 crc kubenswrapper[4762]: I1009 14:58:44.360032 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/34f7541e-028e-454c-9af5-1e8da71ddfc5-logs\") pod \"34f7541e-028e-454c-9af5-1e8da71ddfc5\" (UID: \"34f7541e-028e-454c-9af5-1e8da71ddfc5\") " Oct 09 14:58:44 crc kubenswrapper[4762]: I1009 14:58:44.360059 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/34f7541e-028e-454c-9af5-1e8da71ddfc5-config-data\") pod \"34f7541e-028e-454c-9af5-1e8da71ddfc5\" (UID: \"34f7541e-028e-454c-9af5-1e8da71ddfc5\") " Oct 09 14:58:44 crc kubenswrapper[4762]: I1009 14:58:44.360941 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/34f7541e-028e-454c-9af5-1e8da71ddfc5-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "34f7541e-028e-454c-9af5-1e8da71ddfc5" (UID: "34f7541e-028e-454c-9af5-1e8da71ddfc5"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 14:58:44 crc kubenswrapper[4762]: I1009 14:58:44.361006 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/34f7541e-028e-454c-9af5-1e8da71ddfc5-logs" (OuterVolumeSpecName: "logs") pod "34f7541e-028e-454c-9af5-1e8da71ddfc5" (UID: "34f7541e-028e-454c-9af5-1e8da71ddfc5"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 14:58:44 crc kubenswrapper[4762]: I1009 14:58:44.369877 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/34f7541e-028e-454c-9af5-1e8da71ddfc5-ceph" (OuterVolumeSpecName: "ceph") pod "34f7541e-028e-454c-9af5-1e8da71ddfc5" (UID: "34f7541e-028e-454c-9af5-1e8da71ddfc5"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 14:58:44 crc kubenswrapper[4762]: I1009 14:58:44.378010 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/34f7541e-028e-454c-9af5-1e8da71ddfc5-kube-api-access-pbt6q" (OuterVolumeSpecName: "kube-api-access-pbt6q") pod "34f7541e-028e-454c-9af5-1e8da71ddfc5" (UID: "34f7541e-028e-454c-9af5-1e8da71ddfc5"). InnerVolumeSpecName "kube-api-access-pbt6q". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 14:58:44 crc kubenswrapper[4762]: I1009 14:58:44.379084 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/34f7541e-028e-454c-9af5-1e8da71ddfc5-scripts" (OuterVolumeSpecName: "scripts") pod "34f7541e-028e-454c-9af5-1e8da71ddfc5" (UID: "34f7541e-028e-454c-9af5-1e8da71ddfc5"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 14:58:44 crc kubenswrapper[4762]: I1009 14:58:44.396168 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/34f7541e-028e-454c-9af5-1e8da71ddfc5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "34f7541e-028e-454c-9af5-1e8da71ddfc5" (UID: "34f7541e-028e-454c-9af5-1e8da71ddfc5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 14:58:44 crc kubenswrapper[4762]: I1009 14:58:44.424406 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/34f7541e-028e-454c-9af5-1e8da71ddfc5-config-data" (OuterVolumeSpecName: "config-data") pod "34f7541e-028e-454c-9af5-1e8da71ddfc5" (UID: "34f7541e-028e-454c-9af5-1e8da71ddfc5"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 14:58:44 crc kubenswrapper[4762]: I1009 14:58:44.461971 4762 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/34f7541e-028e-454c-9af5-1e8da71ddfc5-scripts\") on node \"crc\" DevicePath \"\"" Oct 09 14:58:44 crc kubenswrapper[4762]: I1009 14:58:44.462004 4762 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/34f7541e-028e-454c-9af5-1e8da71ddfc5-httpd-run\") on node \"crc\" DevicePath \"\"" Oct 09 14:58:44 crc kubenswrapper[4762]: I1009 14:58:44.462016 4762 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/34f7541e-028e-454c-9af5-1e8da71ddfc5-logs\") on node \"crc\" DevicePath \"\"" Oct 09 14:58:44 crc kubenswrapper[4762]: I1009 14:58:44.462027 4762 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/34f7541e-028e-454c-9af5-1e8da71ddfc5-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 14:58:44 crc kubenswrapper[4762]: I1009 14:58:44.462036 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pbt6q\" (UniqueName: \"kubernetes.io/projected/34f7541e-028e-454c-9af5-1e8da71ddfc5-kube-api-access-pbt6q\") on node \"crc\" DevicePath \"\"" Oct 09 14:58:44 crc kubenswrapper[4762]: I1009 14:58:44.462046 4762 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/34f7541e-028e-454c-9af5-1e8da71ddfc5-ceph\") on node \"crc\" DevicePath \"\"" Oct 09 14:58:44 crc kubenswrapper[4762]: I1009 14:58:44.462054 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/34f7541e-028e-454c-9af5-1e8da71ddfc5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 14:58:44 crc kubenswrapper[4762]: I1009 14:58:44.572999 4762 generic.go:334] "Generic (PLEG): container finished" podID="34f7541e-028e-454c-9af5-1e8da71ddfc5" containerID="1168268962ca9789532a6f2bd951b8fa59c2f79e35c99f1591d606323c7b0897" exitCode=0 Oct 09 14:58:44 crc kubenswrapper[4762]: I1009 14:58:44.573038 4762 generic.go:334] "Generic (PLEG): container finished" podID="34f7541e-028e-454c-9af5-1e8da71ddfc5" containerID="0cc6cdef17b57ffd720e2a9e061816535e7f8d7509cee92c8d287beb3cbabe32" exitCode=143 Oct 09 14:58:44 crc kubenswrapper[4762]: I1009 14:58:44.574067 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 09 14:58:44 crc kubenswrapper[4762]: I1009 14:58:44.575959 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"34f7541e-028e-454c-9af5-1e8da71ddfc5","Type":"ContainerDied","Data":"1168268962ca9789532a6f2bd951b8fa59c2f79e35c99f1591d606323c7b0897"} Oct 09 14:58:44 crc kubenswrapper[4762]: I1009 14:58:44.576042 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"34f7541e-028e-454c-9af5-1e8da71ddfc5","Type":"ContainerDied","Data":"0cc6cdef17b57ffd720e2a9e061816535e7f8d7509cee92c8d287beb3cbabe32"} Oct 09 14:58:44 crc kubenswrapper[4762]: I1009 14:58:44.576062 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"34f7541e-028e-454c-9af5-1e8da71ddfc5","Type":"ContainerDied","Data":"74a6a63ae86522d505463d653f75cf0ef8fa1bcb476590f3d17828913738a119"} Oct 09 14:58:44 crc kubenswrapper[4762]: I1009 14:58:44.576093 4762 scope.go:117] "RemoveContainer" containerID="1168268962ca9789532a6f2bd951b8fa59c2f79e35c99f1591d606323c7b0897" Oct 09 14:58:44 crc kubenswrapper[4762]: I1009 14:58:44.600820 4762 scope.go:117] "RemoveContainer" containerID="0cc6cdef17b57ffd720e2a9e061816535e7f8d7509cee92c8d287beb3cbabe32" Oct 09 14:58:44 crc kubenswrapper[4762]: I1009 14:58:44.612873 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 09 14:58:44 crc kubenswrapper[4762]: I1009 14:58:44.620458 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 09 14:58:44 crc kubenswrapper[4762]: I1009 14:58:44.635368 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Oct 09 14:58:44 crc kubenswrapper[4762]: E1009 14:58:44.635761 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="34f7541e-028e-454c-9af5-1e8da71ddfc5" containerName="glance-log" Oct 09 14:58:44 crc kubenswrapper[4762]: I1009 14:58:44.635778 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="34f7541e-028e-454c-9af5-1e8da71ddfc5" containerName="glance-log" Oct 09 14:58:44 crc kubenswrapper[4762]: E1009 14:58:44.635805 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="34f7541e-028e-454c-9af5-1e8da71ddfc5" containerName="glance-httpd" Oct 09 14:58:44 crc kubenswrapper[4762]: I1009 14:58:44.635812 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="34f7541e-028e-454c-9af5-1e8da71ddfc5" containerName="glance-httpd" Oct 09 14:58:44 crc kubenswrapper[4762]: I1009 14:58:44.635977 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="34f7541e-028e-454c-9af5-1e8da71ddfc5" containerName="glance-log" Oct 09 14:58:44 crc kubenswrapper[4762]: I1009 14:58:44.635999 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="34f7541e-028e-454c-9af5-1e8da71ddfc5" containerName="glance-httpd" Oct 09 14:58:44 crc kubenswrapper[4762]: I1009 14:58:44.636947 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 09 14:58:44 crc kubenswrapper[4762]: I1009 14:58:44.639252 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Oct 09 14:58:44 crc kubenswrapper[4762]: I1009 14:58:44.651015 4762 scope.go:117] "RemoveContainer" containerID="1168268962ca9789532a6f2bd951b8fa59c2f79e35c99f1591d606323c7b0897" Oct 09 14:58:44 crc kubenswrapper[4762]: E1009 14:58:44.652162 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1168268962ca9789532a6f2bd951b8fa59c2f79e35c99f1591d606323c7b0897\": container with ID starting with 1168268962ca9789532a6f2bd951b8fa59c2f79e35c99f1591d606323c7b0897 not found: ID does not exist" containerID="1168268962ca9789532a6f2bd951b8fa59c2f79e35c99f1591d606323c7b0897" Oct 09 14:58:44 crc kubenswrapper[4762]: I1009 14:58:44.652207 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1168268962ca9789532a6f2bd951b8fa59c2f79e35c99f1591d606323c7b0897"} err="failed to get container status \"1168268962ca9789532a6f2bd951b8fa59c2f79e35c99f1591d606323c7b0897\": rpc error: code = NotFound desc = could not find container \"1168268962ca9789532a6f2bd951b8fa59c2f79e35c99f1591d606323c7b0897\": container with ID starting with 1168268962ca9789532a6f2bd951b8fa59c2f79e35c99f1591d606323c7b0897 not found: ID does not exist" Oct 09 14:58:44 crc kubenswrapper[4762]: I1009 14:58:44.652237 4762 scope.go:117] "RemoveContainer" containerID="0cc6cdef17b57ffd720e2a9e061816535e7f8d7509cee92c8d287beb3cbabe32" Oct 09 14:58:44 crc kubenswrapper[4762]: E1009 14:58:44.653477 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0cc6cdef17b57ffd720e2a9e061816535e7f8d7509cee92c8d287beb3cbabe32\": container with ID starting with 0cc6cdef17b57ffd720e2a9e061816535e7f8d7509cee92c8d287beb3cbabe32 not found: ID does not exist" containerID="0cc6cdef17b57ffd720e2a9e061816535e7f8d7509cee92c8d287beb3cbabe32" Oct 09 14:58:44 crc kubenswrapper[4762]: I1009 14:58:44.653507 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0cc6cdef17b57ffd720e2a9e061816535e7f8d7509cee92c8d287beb3cbabe32"} err="failed to get container status \"0cc6cdef17b57ffd720e2a9e061816535e7f8d7509cee92c8d287beb3cbabe32\": rpc error: code = NotFound desc = could not find container \"0cc6cdef17b57ffd720e2a9e061816535e7f8d7509cee92c8d287beb3cbabe32\": container with ID starting with 0cc6cdef17b57ffd720e2a9e061816535e7f8d7509cee92c8d287beb3cbabe32 not found: ID does not exist" Oct 09 14:58:44 crc kubenswrapper[4762]: I1009 14:58:44.653523 4762 scope.go:117] "RemoveContainer" containerID="1168268962ca9789532a6f2bd951b8fa59c2f79e35c99f1591d606323c7b0897" Oct 09 14:58:44 crc kubenswrapper[4762]: I1009 14:58:44.653973 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1168268962ca9789532a6f2bd951b8fa59c2f79e35c99f1591d606323c7b0897"} err="failed to get container status \"1168268962ca9789532a6f2bd951b8fa59c2f79e35c99f1591d606323c7b0897\": rpc error: code = NotFound desc = could not find container \"1168268962ca9789532a6f2bd951b8fa59c2f79e35c99f1591d606323c7b0897\": container with ID starting with 1168268962ca9789532a6f2bd951b8fa59c2f79e35c99f1591d606323c7b0897 not found: ID does not exist" Oct 09 14:58:44 crc kubenswrapper[4762]: I1009 
14:58:44.654026 4762 scope.go:117] "RemoveContainer" containerID="0cc6cdef17b57ffd720e2a9e061816535e7f8d7509cee92c8d287beb3cbabe32" Oct 09 14:58:44 crc kubenswrapper[4762]: I1009 14:58:44.654615 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0cc6cdef17b57ffd720e2a9e061816535e7f8d7509cee92c8d287beb3cbabe32"} err="failed to get container status \"0cc6cdef17b57ffd720e2a9e061816535e7f8d7509cee92c8d287beb3cbabe32\": rpc error: code = NotFound desc = could not find container \"0cc6cdef17b57ffd720e2a9e061816535e7f8d7509cee92c8d287beb3cbabe32\": container with ID starting with 0cc6cdef17b57ffd720e2a9e061816535e7f8d7509cee92c8d287beb3cbabe32 not found: ID does not exist" Oct 09 14:58:44 crc kubenswrapper[4762]: I1009 14:58:44.659827 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 09 14:58:44 crc kubenswrapper[4762]: I1009 14:58:44.733510 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 09 14:58:44 crc kubenswrapper[4762]: I1009 14:58:44.767978 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/15358f7d-4847-4efc-b8e9-828a992f5bce-config-data\") pod \"glance-default-external-api-0\" (UID: \"15358f7d-4847-4efc-b8e9-828a992f5bce\") " pod="openstack/glance-default-external-api-0" Oct 09 14:58:44 crc kubenswrapper[4762]: I1009 14:58:44.768249 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/15358f7d-4847-4efc-b8e9-828a992f5bce-scripts\") pod \"glance-default-external-api-0\" (UID: \"15358f7d-4847-4efc-b8e9-828a992f5bce\") " pod="openstack/glance-default-external-api-0" Oct 09 14:58:44 crc kubenswrapper[4762]: I1009 14:58:44.768288 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b9r9s\" (UniqueName: \"kubernetes.io/projected/15358f7d-4847-4efc-b8e9-828a992f5bce-kube-api-access-b9r9s\") pod \"glance-default-external-api-0\" (UID: \"15358f7d-4847-4efc-b8e9-828a992f5bce\") " pod="openstack/glance-default-external-api-0" Oct 09 14:58:44 crc kubenswrapper[4762]: I1009 14:58:44.768328 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/15358f7d-4847-4efc-b8e9-828a992f5bce-logs\") pod \"glance-default-external-api-0\" (UID: \"15358f7d-4847-4efc-b8e9-828a992f5bce\") " pod="openstack/glance-default-external-api-0" Oct 09 14:58:44 crc kubenswrapper[4762]: I1009 14:58:44.768440 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/15358f7d-4847-4efc-b8e9-828a992f5bce-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"15358f7d-4847-4efc-b8e9-828a992f5bce\") " pod="openstack/glance-default-external-api-0" Oct 09 14:58:44 crc kubenswrapper[4762]: I1009 14:58:44.768472 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/15358f7d-4847-4efc-b8e9-828a992f5bce-ceph\") pod \"glance-default-external-api-0\" (UID: \"15358f7d-4847-4efc-b8e9-828a992f5bce\") " pod="openstack/glance-default-external-api-0" Oct 09 14:58:44 crc kubenswrapper[4762]: I1009 14:58:44.768498 4762 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15358f7d-4847-4efc-b8e9-828a992f5bce-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"15358f7d-4847-4efc-b8e9-828a992f5bce\") " pod="openstack/glance-default-external-api-0" Oct 09 14:58:44 crc kubenswrapper[4762]: I1009 14:58:44.870556 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/15358f7d-4847-4efc-b8e9-828a992f5bce-logs\") pod \"glance-default-external-api-0\" (UID: \"15358f7d-4847-4efc-b8e9-828a992f5bce\") " pod="openstack/glance-default-external-api-0" Oct 09 14:58:44 crc kubenswrapper[4762]: I1009 14:58:44.870886 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/15358f7d-4847-4efc-b8e9-828a992f5bce-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"15358f7d-4847-4efc-b8e9-828a992f5bce\") " pod="openstack/glance-default-external-api-0" Oct 09 14:58:44 crc kubenswrapper[4762]: I1009 14:58:44.870999 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/15358f7d-4847-4efc-b8e9-828a992f5bce-ceph\") pod \"glance-default-external-api-0\" (UID: \"15358f7d-4847-4efc-b8e9-828a992f5bce\") " pod="openstack/glance-default-external-api-0" Oct 09 14:58:44 crc kubenswrapper[4762]: I1009 14:58:44.871105 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15358f7d-4847-4efc-b8e9-828a992f5bce-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"15358f7d-4847-4efc-b8e9-828a992f5bce\") " pod="openstack/glance-default-external-api-0" Oct 09 14:58:44 crc kubenswrapper[4762]: I1009 14:58:44.871254 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/15358f7d-4847-4efc-b8e9-828a992f5bce-config-data\") pod \"glance-default-external-api-0\" (UID: \"15358f7d-4847-4efc-b8e9-828a992f5bce\") " pod="openstack/glance-default-external-api-0" Oct 09 14:58:44 crc kubenswrapper[4762]: I1009 14:58:44.871366 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/15358f7d-4847-4efc-b8e9-828a992f5bce-scripts\") pod \"glance-default-external-api-0\" (UID: \"15358f7d-4847-4efc-b8e9-828a992f5bce\") " pod="openstack/glance-default-external-api-0" Oct 09 14:58:44 crc kubenswrapper[4762]: I1009 14:58:44.871443 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b9r9s\" (UniqueName: \"kubernetes.io/projected/15358f7d-4847-4efc-b8e9-828a992f5bce-kube-api-access-b9r9s\") pod \"glance-default-external-api-0\" (UID: \"15358f7d-4847-4efc-b8e9-828a992f5bce\") " pod="openstack/glance-default-external-api-0" Oct 09 14:58:44 crc kubenswrapper[4762]: I1009 14:58:44.871130 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/15358f7d-4847-4efc-b8e9-828a992f5bce-logs\") pod \"glance-default-external-api-0\" (UID: \"15358f7d-4847-4efc-b8e9-828a992f5bce\") " pod="openstack/glance-default-external-api-0" Oct 09 14:58:44 crc kubenswrapper[4762]: I1009 14:58:44.871301 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: 
\"kubernetes.io/empty-dir/15358f7d-4847-4efc-b8e9-828a992f5bce-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"15358f7d-4847-4efc-b8e9-828a992f5bce\") " pod="openstack/glance-default-external-api-0" Oct 09 14:58:44 crc kubenswrapper[4762]: I1009 14:58:44.875976 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/15358f7d-4847-4efc-b8e9-828a992f5bce-scripts\") pod \"glance-default-external-api-0\" (UID: \"15358f7d-4847-4efc-b8e9-828a992f5bce\") " pod="openstack/glance-default-external-api-0" Oct 09 14:58:44 crc kubenswrapper[4762]: I1009 14:58:44.876464 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/15358f7d-4847-4efc-b8e9-828a992f5bce-ceph\") pod \"glance-default-external-api-0\" (UID: \"15358f7d-4847-4efc-b8e9-828a992f5bce\") " pod="openstack/glance-default-external-api-0" Oct 09 14:58:44 crc kubenswrapper[4762]: I1009 14:58:44.877468 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/15358f7d-4847-4efc-b8e9-828a992f5bce-config-data\") pod \"glance-default-external-api-0\" (UID: \"15358f7d-4847-4efc-b8e9-828a992f5bce\") " pod="openstack/glance-default-external-api-0" Oct 09 14:58:44 crc kubenswrapper[4762]: I1009 14:58:44.878179 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15358f7d-4847-4efc-b8e9-828a992f5bce-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"15358f7d-4847-4efc-b8e9-828a992f5bce\") " pod="openstack/glance-default-external-api-0" Oct 09 14:58:44 crc kubenswrapper[4762]: I1009 14:58:44.892372 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b9r9s\" (UniqueName: \"kubernetes.io/projected/15358f7d-4847-4efc-b8e9-828a992f5bce-kube-api-access-b9r9s\") pod \"glance-default-external-api-0\" (UID: \"15358f7d-4847-4efc-b8e9-828a992f5bce\") " pod="openstack/glance-default-external-api-0" Oct 09 14:58:44 crc kubenswrapper[4762]: I1009 14:58:44.959238 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 09 14:58:44 crc kubenswrapper[4762]: I1009 14:58:44.982699 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="34f7541e-028e-454c-9af5-1e8da71ddfc5" path="/var/lib/kubelet/pods/34f7541e-028e-454c-9af5-1e8da71ddfc5/volumes" Oct 09 14:58:45 crc kubenswrapper[4762]: I1009 14:58:45.489904 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 09 14:58:45 crc kubenswrapper[4762]: I1009 14:58:45.586658 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"15358f7d-4847-4efc-b8e9-828a992f5bce","Type":"ContainerStarted","Data":"630e22431f1bb5ef1287dccc79fe96763f1cfb290504dc3b0d9de83a0f2bc6ae"} Oct 09 14:58:45 crc kubenswrapper[4762]: I1009 14:58:45.586823 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="0e79db9a-ff45-4f0e-ba46-517a0caa6ba7" containerName="glance-log" containerID="cri-o://9d054963b9c07c65a31b51756d3a1d6710e87db664f00fda505d64e1da8bccc4" gracePeriod=30 Oct 09 14:58:45 crc kubenswrapper[4762]: I1009 14:58:45.586899 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="0e79db9a-ff45-4f0e-ba46-517a0caa6ba7" containerName="glance-httpd" containerID="cri-o://02b80b144664e3de0592816e589be5d4128ac9ef58facd9f22dbe768f74b744a" gracePeriod=30 Oct 09 14:58:46 crc kubenswrapper[4762]: I1009 14:58:46.188882 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 09 14:58:46 crc kubenswrapper[4762]: I1009 14:58:46.304099 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0e79db9a-ff45-4f0e-ba46-517a0caa6ba7-logs\") pod \"0e79db9a-ff45-4f0e-ba46-517a0caa6ba7\" (UID: \"0e79db9a-ff45-4f0e-ba46-517a0caa6ba7\") " Oct 09 14:58:46 crc kubenswrapper[4762]: I1009 14:58:46.304162 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0e79db9a-ff45-4f0e-ba46-517a0caa6ba7-httpd-run\") pod \"0e79db9a-ff45-4f0e-ba46-517a0caa6ba7\" (UID: \"0e79db9a-ff45-4f0e-ba46-517a0caa6ba7\") " Oct 09 14:58:46 crc kubenswrapper[4762]: I1009 14:58:46.304217 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dc979\" (UniqueName: \"kubernetes.io/projected/0e79db9a-ff45-4f0e-ba46-517a0caa6ba7-kube-api-access-dc979\") pod \"0e79db9a-ff45-4f0e-ba46-517a0caa6ba7\" (UID: \"0e79db9a-ff45-4f0e-ba46-517a0caa6ba7\") " Oct 09 14:58:46 crc kubenswrapper[4762]: I1009 14:58:46.304325 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/0e79db9a-ff45-4f0e-ba46-517a0caa6ba7-ceph\") pod \"0e79db9a-ff45-4f0e-ba46-517a0caa6ba7\" (UID: \"0e79db9a-ff45-4f0e-ba46-517a0caa6ba7\") " Oct 09 14:58:46 crc kubenswrapper[4762]: I1009 14:58:46.304415 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0e79db9a-ff45-4f0e-ba46-517a0caa6ba7-config-data\") pod \"0e79db9a-ff45-4f0e-ba46-517a0caa6ba7\" (UID: \"0e79db9a-ff45-4f0e-ba46-517a0caa6ba7\") " Oct 09 14:58:46 crc kubenswrapper[4762]: I1009 14:58:46.304437 4762 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0e79db9a-ff45-4f0e-ba46-517a0caa6ba7-scripts\") pod \"0e79db9a-ff45-4f0e-ba46-517a0caa6ba7\" (UID: \"0e79db9a-ff45-4f0e-ba46-517a0caa6ba7\") " Oct 09 14:58:46 crc kubenswrapper[4762]: I1009 14:58:46.304478 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0e79db9a-ff45-4f0e-ba46-517a0caa6ba7-combined-ca-bundle\") pod \"0e79db9a-ff45-4f0e-ba46-517a0caa6ba7\" (UID: \"0e79db9a-ff45-4f0e-ba46-517a0caa6ba7\") " Oct 09 14:58:46 crc kubenswrapper[4762]: I1009 14:58:46.305241 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0e79db9a-ff45-4f0e-ba46-517a0caa6ba7-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "0e79db9a-ff45-4f0e-ba46-517a0caa6ba7" (UID: "0e79db9a-ff45-4f0e-ba46-517a0caa6ba7"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 14:58:46 crc kubenswrapper[4762]: I1009 14:58:46.305224 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0e79db9a-ff45-4f0e-ba46-517a0caa6ba7-logs" (OuterVolumeSpecName: "logs") pod "0e79db9a-ff45-4f0e-ba46-517a0caa6ba7" (UID: "0e79db9a-ff45-4f0e-ba46-517a0caa6ba7"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 14:58:46 crc kubenswrapper[4762]: I1009 14:58:46.306730 4762 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0e79db9a-ff45-4f0e-ba46-517a0caa6ba7-logs\") on node \"crc\" DevicePath \"\"" Oct 09 14:58:46 crc kubenswrapper[4762]: I1009 14:58:46.306763 4762 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0e79db9a-ff45-4f0e-ba46-517a0caa6ba7-httpd-run\") on node \"crc\" DevicePath \"\"" Oct 09 14:58:46 crc kubenswrapper[4762]: I1009 14:58:46.309397 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0e79db9a-ff45-4f0e-ba46-517a0caa6ba7-kube-api-access-dc979" (OuterVolumeSpecName: "kube-api-access-dc979") pod "0e79db9a-ff45-4f0e-ba46-517a0caa6ba7" (UID: "0e79db9a-ff45-4f0e-ba46-517a0caa6ba7"). InnerVolumeSpecName "kube-api-access-dc979". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 14:58:46 crc kubenswrapper[4762]: I1009 14:58:46.309551 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0e79db9a-ff45-4f0e-ba46-517a0caa6ba7-scripts" (OuterVolumeSpecName: "scripts") pod "0e79db9a-ff45-4f0e-ba46-517a0caa6ba7" (UID: "0e79db9a-ff45-4f0e-ba46-517a0caa6ba7"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 14:58:46 crc kubenswrapper[4762]: I1009 14:58:46.309666 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0e79db9a-ff45-4f0e-ba46-517a0caa6ba7-ceph" (OuterVolumeSpecName: "ceph") pod "0e79db9a-ff45-4f0e-ba46-517a0caa6ba7" (UID: "0e79db9a-ff45-4f0e-ba46-517a0caa6ba7"). InnerVolumeSpecName "ceph". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 14:58:46 crc kubenswrapper[4762]: I1009 14:58:46.331205 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0e79db9a-ff45-4f0e-ba46-517a0caa6ba7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0e79db9a-ff45-4f0e-ba46-517a0caa6ba7" (UID: "0e79db9a-ff45-4f0e-ba46-517a0caa6ba7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 14:58:46 crc kubenswrapper[4762]: I1009 14:58:46.348375 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0e79db9a-ff45-4f0e-ba46-517a0caa6ba7-config-data" (OuterVolumeSpecName: "config-data") pod "0e79db9a-ff45-4f0e-ba46-517a0caa6ba7" (UID: "0e79db9a-ff45-4f0e-ba46-517a0caa6ba7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 14:58:46 crc kubenswrapper[4762]: I1009 14:58:46.408089 4762 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0e79db9a-ff45-4f0e-ba46-517a0caa6ba7-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 14:58:46 crc kubenswrapper[4762]: I1009 14:58:46.408256 4762 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0e79db9a-ff45-4f0e-ba46-517a0caa6ba7-scripts\") on node \"crc\" DevicePath \"\"" Oct 09 14:58:46 crc kubenswrapper[4762]: I1009 14:58:46.408276 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0e79db9a-ff45-4f0e-ba46-517a0caa6ba7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 14:58:46 crc kubenswrapper[4762]: I1009 14:58:46.408294 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dc979\" (UniqueName: \"kubernetes.io/projected/0e79db9a-ff45-4f0e-ba46-517a0caa6ba7-kube-api-access-dc979\") on node \"crc\" DevicePath \"\"" Oct 09 14:58:46 crc kubenswrapper[4762]: I1009 14:58:46.408310 4762 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/0e79db9a-ff45-4f0e-ba46-517a0caa6ba7-ceph\") on node \"crc\" DevicePath \"\"" Oct 09 14:58:46 crc kubenswrapper[4762]: I1009 14:58:46.597330 4762 generic.go:334] "Generic (PLEG): container finished" podID="0e79db9a-ff45-4f0e-ba46-517a0caa6ba7" containerID="02b80b144664e3de0592816e589be5d4128ac9ef58facd9f22dbe768f74b744a" exitCode=0 Oct 09 14:58:46 crc kubenswrapper[4762]: I1009 14:58:46.597622 4762 generic.go:334] "Generic (PLEG): container finished" podID="0e79db9a-ff45-4f0e-ba46-517a0caa6ba7" containerID="9d054963b9c07c65a31b51756d3a1d6710e87db664f00fda505d64e1da8bccc4" exitCode=143 Oct 09 14:58:46 crc kubenswrapper[4762]: I1009 14:58:46.597394 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 09 14:58:46 crc kubenswrapper[4762]: I1009 14:58:46.597404 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"0e79db9a-ff45-4f0e-ba46-517a0caa6ba7","Type":"ContainerDied","Data":"02b80b144664e3de0592816e589be5d4128ac9ef58facd9f22dbe768f74b744a"} Oct 09 14:58:46 crc kubenswrapper[4762]: I1009 14:58:46.597789 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"0e79db9a-ff45-4f0e-ba46-517a0caa6ba7","Type":"ContainerDied","Data":"9d054963b9c07c65a31b51756d3a1d6710e87db664f00fda505d64e1da8bccc4"} Oct 09 14:58:46 crc kubenswrapper[4762]: I1009 14:58:46.597809 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"0e79db9a-ff45-4f0e-ba46-517a0caa6ba7","Type":"ContainerDied","Data":"5712d5211a9a5ff901875bac2902f05e8c2dcedaff49339357e26d844f14d631"} Oct 09 14:58:46 crc kubenswrapper[4762]: I1009 14:58:46.597831 4762 scope.go:117] "RemoveContainer" containerID="02b80b144664e3de0592816e589be5d4128ac9ef58facd9f22dbe768f74b744a" Oct 09 14:58:46 crc kubenswrapper[4762]: I1009 14:58:46.602319 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"15358f7d-4847-4efc-b8e9-828a992f5bce","Type":"ContainerStarted","Data":"70304c6746eee214fd29bb7fa8f5a1bf48fff3539a97e9aa1c580bed630a4889"} Oct 09 14:58:46 crc kubenswrapper[4762]: I1009 14:58:46.602361 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"15358f7d-4847-4efc-b8e9-828a992f5bce","Type":"ContainerStarted","Data":"62f47e82a0f87475769613f0d9ed3cf973df67d747a5a1ad8e91672e853f070b"} Oct 09 14:58:46 crc kubenswrapper[4762]: I1009 14:58:46.627941 4762 scope.go:117] "RemoveContainer" containerID="9d054963b9c07c65a31b51756d3a1d6710e87db664f00fda505d64e1da8bccc4" Oct 09 14:58:46 crc kubenswrapper[4762]: I1009 14:58:46.631597 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=2.631575503 podStartE2EDuration="2.631575503s" podCreationTimestamp="2025-10-09 14:58:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 14:58:46.618158073 +0000 UTC m=+5602.391949102" watchObservedRunningTime="2025-10-09 14:58:46.631575503 +0000 UTC m=+5602.405366542" Oct 09 14:58:46 crc kubenswrapper[4762]: I1009 14:58:46.650520 4762 scope.go:117] "RemoveContainer" containerID="02b80b144664e3de0592816e589be5d4128ac9ef58facd9f22dbe768f74b744a" Oct 09 14:58:46 crc kubenswrapper[4762]: E1009 14:58:46.652007 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"02b80b144664e3de0592816e589be5d4128ac9ef58facd9f22dbe768f74b744a\": container with ID starting with 02b80b144664e3de0592816e589be5d4128ac9ef58facd9f22dbe768f74b744a not found: ID does not exist" containerID="02b80b144664e3de0592816e589be5d4128ac9ef58facd9f22dbe768f74b744a" Oct 09 14:58:46 crc kubenswrapper[4762]: I1009 14:58:46.652059 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"02b80b144664e3de0592816e589be5d4128ac9ef58facd9f22dbe768f74b744a"} err="failed to get container status \"02b80b144664e3de0592816e589be5d4128ac9ef58facd9f22dbe768f74b744a\": 
rpc error: code = NotFound desc = could not find container \"02b80b144664e3de0592816e589be5d4128ac9ef58facd9f22dbe768f74b744a\": container with ID starting with 02b80b144664e3de0592816e589be5d4128ac9ef58facd9f22dbe768f74b744a not found: ID does not exist" Oct 09 14:58:46 crc kubenswrapper[4762]: I1009 14:58:46.652091 4762 scope.go:117] "RemoveContainer" containerID="9d054963b9c07c65a31b51756d3a1d6710e87db664f00fda505d64e1da8bccc4" Oct 09 14:58:46 crc kubenswrapper[4762]: E1009 14:58:46.653200 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9d054963b9c07c65a31b51756d3a1d6710e87db664f00fda505d64e1da8bccc4\": container with ID starting with 9d054963b9c07c65a31b51756d3a1d6710e87db664f00fda505d64e1da8bccc4 not found: ID does not exist" containerID="9d054963b9c07c65a31b51756d3a1d6710e87db664f00fda505d64e1da8bccc4" Oct 09 14:58:46 crc kubenswrapper[4762]: I1009 14:58:46.653234 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9d054963b9c07c65a31b51756d3a1d6710e87db664f00fda505d64e1da8bccc4"} err="failed to get container status \"9d054963b9c07c65a31b51756d3a1d6710e87db664f00fda505d64e1da8bccc4\": rpc error: code = NotFound desc = could not find container \"9d054963b9c07c65a31b51756d3a1d6710e87db664f00fda505d64e1da8bccc4\": container with ID starting with 9d054963b9c07c65a31b51756d3a1d6710e87db664f00fda505d64e1da8bccc4 not found: ID does not exist" Oct 09 14:58:46 crc kubenswrapper[4762]: I1009 14:58:46.653256 4762 scope.go:117] "RemoveContainer" containerID="02b80b144664e3de0592816e589be5d4128ac9ef58facd9f22dbe768f74b744a" Oct 09 14:58:46 crc kubenswrapper[4762]: I1009 14:58:46.653503 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"02b80b144664e3de0592816e589be5d4128ac9ef58facd9f22dbe768f74b744a"} err="failed to get container status \"02b80b144664e3de0592816e589be5d4128ac9ef58facd9f22dbe768f74b744a\": rpc error: code = NotFound desc = could not find container \"02b80b144664e3de0592816e589be5d4128ac9ef58facd9f22dbe768f74b744a\": container with ID starting with 02b80b144664e3de0592816e589be5d4128ac9ef58facd9f22dbe768f74b744a not found: ID does not exist" Oct 09 14:58:46 crc kubenswrapper[4762]: I1009 14:58:46.653527 4762 scope.go:117] "RemoveContainer" containerID="9d054963b9c07c65a31b51756d3a1d6710e87db664f00fda505d64e1da8bccc4" Oct 09 14:58:46 crc kubenswrapper[4762]: I1009 14:58:46.653815 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9d054963b9c07c65a31b51756d3a1d6710e87db664f00fda505d64e1da8bccc4"} err="failed to get container status \"9d054963b9c07c65a31b51756d3a1d6710e87db664f00fda505d64e1da8bccc4\": rpc error: code = NotFound desc = could not find container \"9d054963b9c07c65a31b51756d3a1d6710e87db664f00fda505d64e1da8bccc4\": container with ID starting with 9d054963b9c07c65a31b51756d3a1d6710e87db664f00fda505d64e1da8bccc4 not found: ID does not exist" Oct 09 14:58:46 crc kubenswrapper[4762]: I1009 14:58:46.661908 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 09 14:58:46 crc kubenswrapper[4762]: I1009 14:58:46.679780 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 09 14:58:46 crc kubenswrapper[4762]: I1009 14:58:46.682920 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 09 14:58:46 crc 
kubenswrapper[4762]: E1009 14:58:46.683395 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e79db9a-ff45-4f0e-ba46-517a0caa6ba7" containerName="glance-httpd" Oct 09 14:58:46 crc kubenswrapper[4762]: I1009 14:58:46.683414 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="0e79db9a-ff45-4f0e-ba46-517a0caa6ba7" containerName="glance-httpd" Oct 09 14:58:46 crc kubenswrapper[4762]: E1009 14:58:46.683429 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e79db9a-ff45-4f0e-ba46-517a0caa6ba7" containerName="glance-log" Oct 09 14:58:46 crc kubenswrapper[4762]: I1009 14:58:46.683436 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="0e79db9a-ff45-4f0e-ba46-517a0caa6ba7" containerName="glance-log" Oct 09 14:58:46 crc kubenswrapper[4762]: I1009 14:58:46.683832 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="0e79db9a-ff45-4f0e-ba46-517a0caa6ba7" containerName="glance-log" Oct 09 14:58:46 crc kubenswrapper[4762]: I1009 14:58:46.683861 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="0e79db9a-ff45-4f0e-ba46-517a0caa6ba7" containerName="glance-httpd" Oct 09 14:58:46 crc kubenswrapper[4762]: I1009 14:58:46.685100 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 09 14:58:46 crc kubenswrapper[4762]: I1009 14:58:46.689190 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Oct 09 14:58:46 crc kubenswrapper[4762]: I1009 14:58:46.690548 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 09 14:58:46 crc kubenswrapper[4762]: I1009 14:58:46.818891 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/4103bdae-6031-44fc-b303-057c8e736522-ceph\") pod \"glance-default-internal-api-0\" (UID: \"4103bdae-6031-44fc-b303-057c8e736522\") " pod="openstack/glance-default-internal-api-0" Oct 09 14:58:46 crc kubenswrapper[4762]: I1009 14:58:46.818998 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4103bdae-6031-44fc-b303-057c8e736522-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"4103bdae-6031-44fc-b303-057c8e736522\") " pod="openstack/glance-default-internal-api-0" Oct 09 14:58:46 crc kubenswrapper[4762]: I1009 14:58:46.819039 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qrpmf\" (UniqueName: \"kubernetes.io/projected/4103bdae-6031-44fc-b303-057c8e736522-kube-api-access-qrpmf\") pod \"glance-default-internal-api-0\" (UID: \"4103bdae-6031-44fc-b303-057c8e736522\") " pod="openstack/glance-default-internal-api-0" Oct 09 14:58:46 crc kubenswrapper[4762]: I1009 14:58:46.819073 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4103bdae-6031-44fc-b303-057c8e736522-logs\") pod \"glance-default-internal-api-0\" (UID: \"4103bdae-6031-44fc-b303-057c8e736522\") " pod="openstack/glance-default-internal-api-0" Oct 09 14:58:46 crc kubenswrapper[4762]: I1009 14:58:46.819089 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/4103bdae-6031-44fc-b303-057c8e736522-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"4103bdae-6031-44fc-b303-057c8e736522\") " pod="openstack/glance-default-internal-api-0" Oct 09 14:58:46 crc kubenswrapper[4762]: I1009 14:58:46.819115 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4103bdae-6031-44fc-b303-057c8e736522-scripts\") pod \"glance-default-internal-api-0\" (UID: \"4103bdae-6031-44fc-b303-057c8e736522\") " pod="openstack/glance-default-internal-api-0" Oct 09 14:58:46 crc kubenswrapper[4762]: I1009 14:58:46.819154 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4103bdae-6031-44fc-b303-057c8e736522-config-data\") pod \"glance-default-internal-api-0\" (UID: \"4103bdae-6031-44fc-b303-057c8e736522\") " pod="openstack/glance-default-internal-api-0" Oct 09 14:58:46 crc kubenswrapper[4762]: I1009 14:58:46.920712 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/4103bdae-6031-44fc-b303-057c8e736522-ceph\") pod \"glance-default-internal-api-0\" (UID: \"4103bdae-6031-44fc-b303-057c8e736522\") " pod="openstack/glance-default-internal-api-0" Oct 09 14:58:46 crc kubenswrapper[4762]: I1009 14:58:46.920792 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4103bdae-6031-44fc-b303-057c8e736522-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"4103bdae-6031-44fc-b303-057c8e736522\") " pod="openstack/glance-default-internal-api-0" Oct 09 14:58:46 crc kubenswrapper[4762]: I1009 14:58:46.920826 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qrpmf\" (UniqueName: \"kubernetes.io/projected/4103bdae-6031-44fc-b303-057c8e736522-kube-api-access-qrpmf\") pod \"glance-default-internal-api-0\" (UID: \"4103bdae-6031-44fc-b303-057c8e736522\") " pod="openstack/glance-default-internal-api-0" Oct 09 14:58:46 crc kubenswrapper[4762]: I1009 14:58:46.920849 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4103bdae-6031-44fc-b303-057c8e736522-logs\") pod \"glance-default-internal-api-0\" (UID: \"4103bdae-6031-44fc-b303-057c8e736522\") " pod="openstack/glance-default-internal-api-0" Oct 09 14:58:46 crc kubenswrapper[4762]: I1009 14:58:46.920865 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4103bdae-6031-44fc-b303-057c8e736522-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"4103bdae-6031-44fc-b303-057c8e736522\") " pod="openstack/glance-default-internal-api-0" Oct 09 14:58:46 crc kubenswrapper[4762]: I1009 14:58:46.920881 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4103bdae-6031-44fc-b303-057c8e736522-scripts\") pod \"glance-default-internal-api-0\" (UID: \"4103bdae-6031-44fc-b303-057c8e736522\") " pod="openstack/glance-default-internal-api-0" Oct 09 14:58:46 crc kubenswrapper[4762]: I1009 14:58:46.920907 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/4103bdae-6031-44fc-b303-057c8e736522-config-data\") pod \"glance-default-internal-api-0\" (UID: \"4103bdae-6031-44fc-b303-057c8e736522\") " pod="openstack/glance-default-internal-api-0" Oct 09 14:58:46 crc kubenswrapper[4762]: I1009 14:58:46.921483 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4103bdae-6031-44fc-b303-057c8e736522-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"4103bdae-6031-44fc-b303-057c8e736522\") " pod="openstack/glance-default-internal-api-0" Oct 09 14:58:46 crc kubenswrapper[4762]: I1009 14:58:46.921592 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4103bdae-6031-44fc-b303-057c8e736522-logs\") pod \"glance-default-internal-api-0\" (UID: \"4103bdae-6031-44fc-b303-057c8e736522\") " pod="openstack/glance-default-internal-api-0" Oct 09 14:58:46 crc kubenswrapper[4762]: I1009 14:58:46.925387 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4103bdae-6031-44fc-b303-057c8e736522-scripts\") pod \"glance-default-internal-api-0\" (UID: \"4103bdae-6031-44fc-b303-057c8e736522\") " pod="openstack/glance-default-internal-api-0" Oct 09 14:58:46 crc kubenswrapper[4762]: I1009 14:58:46.925537 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/4103bdae-6031-44fc-b303-057c8e736522-ceph\") pod \"glance-default-internal-api-0\" (UID: \"4103bdae-6031-44fc-b303-057c8e736522\") " pod="openstack/glance-default-internal-api-0" Oct 09 14:58:46 crc kubenswrapper[4762]: I1009 14:58:46.926408 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4103bdae-6031-44fc-b303-057c8e736522-config-data\") pod \"glance-default-internal-api-0\" (UID: \"4103bdae-6031-44fc-b303-057c8e736522\") " pod="openstack/glance-default-internal-api-0" Oct 09 14:58:46 crc kubenswrapper[4762]: I1009 14:58:46.936603 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4103bdae-6031-44fc-b303-057c8e736522-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"4103bdae-6031-44fc-b303-057c8e736522\") " pod="openstack/glance-default-internal-api-0" Oct 09 14:58:46 crc kubenswrapper[4762]: I1009 14:58:46.943625 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qrpmf\" (UniqueName: \"kubernetes.io/projected/4103bdae-6031-44fc-b303-057c8e736522-kube-api-access-qrpmf\") pod \"glance-default-internal-api-0\" (UID: \"4103bdae-6031-44fc-b303-057c8e736522\") " pod="openstack/glance-default-internal-api-0" Oct 09 14:58:46 crc kubenswrapper[4762]: I1009 14:58:46.975791 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0e79db9a-ff45-4f0e-ba46-517a0caa6ba7" path="/var/lib/kubelet/pods/0e79db9a-ff45-4f0e-ba46-517a0caa6ba7/volumes" Oct 09 14:58:47 crc kubenswrapper[4762]: I1009 14:58:47.016937 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 09 14:58:47 crc kubenswrapper[4762]: I1009 14:58:47.312148 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 09 14:58:47 crc kubenswrapper[4762]: W1009 14:58:47.314396 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4103bdae_6031_44fc_b303_057c8e736522.slice/crio-0e72b5e512ca5f269ce3796ac185a3a591f3863481fabc85fb5256946784c7a1 WatchSource:0}: Error finding container 0e72b5e512ca5f269ce3796ac185a3a591f3863481fabc85fb5256946784c7a1: Status 404 returned error can't find the container with id 0e72b5e512ca5f269ce3796ac185a3a591f3863481fabc85fb5256946784c7a1 Oct 09 14:58:47 crc kubenswrapper[4762]: I1009 14:58:47.612993 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"4103bdae-6031-44fc-b303-057c8e736522","Type":"ContainerStarted","Data":"0e72b5e512ca5f269ce3796ac185a3a591f3863481fabc85fb5256946784c7a1"} Oct 09 14:58:48 crc kubenswrapper[4762]: I1009 14:58:48.623875 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"4103bdae-6031-44fc-b303-057c8e736522","Type":"ContainerStarted","Data":"fd1c0ea3a8cb306001de4697dbca9a41f4d87c20a678b5e27ebe877b47b21d44"} Oct 09 14:58:48 crc kubenswrapper[4762]: I1009 14:58:48.625286 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"4103bdae-6031-44fc-b303-057c8e736522","Type":"ContainerStarted","Data":"80c961758dba56cc8b42c9001e9c413045ee23035c3a73f19e76a8b6d9acdfe9"} Oct 09 14:58:48 crc kubenswrapper[4762]: I1009 14:58:48.642110 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=2.642092152 podStartE2EDuration="2.642092152s" podCreationTimestamp="2025-10-09 14:58:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 14:58:48.640326415 +0000 UTC m=+5604.414117454" watchObservedRunningTime="2025-10-09 14:58:48.642092152 +0000 UTC m=+5604.415883191" Oct 09 14:58:51 crc kubenswrapper[4762]: I1009 14:58:51.316060 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-8455bb8bd7-nc52k" Oct 09 14:58:51 crc kubenswrapper[4762]: I1009 14:58:51.381754 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6fd4468769-4ghqc"] Oct 09 14:58:51 crc kubenswrapper[4762]: I1009 14:58:51.382055 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6fd4468769-4ghqc" podUID="8dbb926d-7b4f-46f7-801b-5c02b1c6becd" containerName="dnsmasq-dns" containerID="cri-o://f86933fff62c2da56ad3b5bc0f39ea605a80029b09cbd911eaaec1cd8ffbe60b" gracePeriod=10 Oct 09 14:58:51 crc kubenswrapper[4762]: I1009 14:58:51.652493 4762 generic.go:334] "Generic (PLEG): container finished" podID="8dbb926d-7b4f-46f7-801b-5c02b1c6becd" containerID="f86933fff62c2da56ad3b5bc0f39ea605a80029b09cbd911eaaec1cd8ffbe60b" exitCode=0 Oct 09 14:58:51 crc kubenswrapper[4762]: I1009 14:58:51.652595 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6fd4468769-4ghqc" 
event={"ID":"8dbb926d-7b4f-46f7-801b-5c02b1c6becd","Type":"ContainerDied","Data":"f86933fff62c2da56ad3b5bc0f39ea605a80029b09cbd911eaaec1cd8ffbe60b"} Oct 09 14:58:51 crc kubenswrapper[4762]: I1009 14:58:51.849038 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6fd4468769-4ghqc" Oct 09 14:58:52 crc kubenswrapper[4762]: I1009 14:58:52.029664 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8dbb926d-7b4f-46f7-801b-5c02b1c6becd-config\") pod \"8dbb926d-7b4f-46f7-801b-5c02b1c6becd\" (UID: \"8dbb926d-7b4f-46f7-801b-5c02b1c6becd\") " Oct 09 14:58:52 crc kubenswrapper[4762]: I1009 14:58:52.029772 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-25hnz\" (UniqueName: \"kubernetes.io/projected/8dbb926d-7b4f-46f7-801b-5c02b1c6becd-kube-api-access-25hnz\") pod \"8dbb926d-7b4f-46f7-801b-5c02b1c6becd\" (UID: \"8dbb926d-7b4f-46f7-801b-5c02b1c6becd\") " Oct 09 14:58:52 crc kubenswrapper[4762]: I1009 14:58:52.029887 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8dbb926d-7b4f-46f7-801b-5c02b1c6becd-ovsdbserver-sb\") pod \"8dbb926d-7b4f-46f7-801b-5c02b1c6becd\" (UID: \"8dbb926d-7b4f-46f7-801b-5c02b1c6becd\") " Oct 09 14:58:52 crc kubenswrapper[4762]: I1009 14:58:52.029944 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8dbb926d-7b4f-46f7-801b-5c02b1c6becd-ovsdbserver-nb\") pod \"8dbb926d-7b4f-46f7-801b-5c02b1c6becd\" (UID: \"8dbb926d-7b4f-46f7-801b-5c02b1c6becd\") " Oct 09 14:58:52 crc kubenswrapper[4762]: I1009 14:58:52.030016 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8dbb926d-7b4f-46f7-801b-5c02b1c6becd-dns-svc\") pod \"8dbb926d-7b4f-46f7-801b-5c02b1c6becd\" (UID: \"8dbb926d-7b4f-46f7-801b-5c02b1c6becd\") " Oct 09 14:58:52 crc kubenswrapper[4762]: I1009 14:58:52.041784 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8dbb926d-7b4f-46f7-801b-5c02b1c6becd-kube-api-access-25hnz" (OuterVolumeSpecName: "kube-api-access-25hnz") pod "8dbb926d-7b4f-46f7-801b-5c02b1c6becd" (UID: "8dbb926d-7b4f-46f7-801b-5c02b1c6becd"). InnerVolumeSpecName "kube-api-access-25hnz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 14:58:52 crc kubenswrapper[4762]: I1009 14:58:52.076372 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8dbb926d-7b4f-46f7-801b-5c02b1c6becd-config" (OuterVolumeSpecName: "config") pod "8dbb926d-7b4f-46f7-801b-5c02b1c6becd" (UID: "8dbb926d-7b4f-46f7-801b-5c02b1c6becd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 14:58:52 crc kubenswrapper[4762]: I1009 14:58:52.077119 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8dbb926d-7b4f-46f7-801b-5c02b1c6becd-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "8dbb926d-7b4f-46f7-801b-5c02b1c6becd" (UID: "8dbb926d-7b4f-46f7-801b-5c02b1c6becd"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 14:58:52 crc kubenswrapper[4762]: I1009 14:58:52.077527 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8dbb926d-7b4f-46f7-801b-5c02b1c6becd-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "8dbb926d-7b4f-46f7-801b-5c02b1c6becd" (UID: "8dbb926d-7b4f-46f7-801b-5c02b1c6becd"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 14:58:52 crc kubenswrapper[4762]: I1009 14:58:52.091324 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8dbb926d-7b4f-46f7-801b-5c02b1c6becd-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "8dbb926d-7b4f-46f7-801b-5c02b1c6becd" (UID: "8dbb926d-7b4f-46f7-801b-5c02b1c6becd"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 14:58:52 crc kubenswrapper[4762]: I1009 14:58:52.133807 4762 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8dbb926d-7b4f-46f7-801b-5c02b1c6becd-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 09 14:58:52 crc kubenswrapper[4762]: I1009 14:58:52.134029 4762 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8dbb926d-7b4f-46f7-801b-5c02b1c6becd-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 09 14:58:52 crc kubenswrapper[4762]: I1009 14:58:52.134137 4762 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8dbb926d-7b4f-46f7-801b-5c02b1c6becd-config\") on node \"crc\" DevicePath \"\"" Oct 09 14:58:52 crc kubenswrapper[4762]: I1009 14:58:52.134549 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-25hnz\" (UniqueName: \"kubernetes.io/projected/8dbb926d-7b4f-46f7-801b-5c02b1c6becd-kube-api-access-25hnz\") on node \"crc\" DevicePath \"\"" Oct 09 14:58:52 crc kubenswrapper[4762]: I1009 14:58:52.134661 4762 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8dbb926d-7b4f-46f7-801b-5c02b1c6becd-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 09 14:58:52 crc kubenswrapper[4762]: I1009 14:58:52.667176 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6fd4468769-4ghqc" event={"ID":"8dbb926d-7b4f-46f7-801b-5c02b1c6becd","Type":"ContainerDied","Data":"21f76590ad653e6ac455d36ddae3749ef3122d8596e258992e01b1c8f75ade6f"} Oct 09 14:58:52 crc kubenswrapper[4762]: I1009 14:58:52.667241 4762 scope.go:117] "RemoveContainer" containerID="f86933fff62c2da56ad3b5bc0f39ea605a80029b09cbd911eaaec1cd8ffbe60b" Oct 09 14:58:52 crc kubenswrapper[4762]: I1009 14:58:52.667278 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6fd4468769-4ghqc" Oct 09 14:58:52 crc kubenswrapper[4762]: I1009 14:58:52.688079 4762 scope.go:117] "RemoveContainer" containerID="b39268e6efa97c31a30e20380f1a13b38b7fe6169103769591f3e5c125c9bf12" Oct 09 14:58:52 crc kubenswrapper[4762]: I1009 14:58:52.704539 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6fd4468769-4ghqc"] Oct 09 14:58:52 crc kubenswrapper[4762]: I1009 14:58:52.720485 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6fd4468769-4ghqc"] Oct 09 14:58:52 crc kubenswrapper[4762]: I1009 14:58:52.974822 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8dbb926d-7b4f-46f7-801b-5c02b1c6becd" path="/var/lib/kubelet/pods/8dbb926d-7b4f-46f7-801b-5c02b1c6becd/volumes" Oct 09 14:58:53 crc kubenswrapper[4762]: I1009 14:58:53.965301 4762 scope.go:117] "RemoveContainer" containerID="c75ef9853f5fe1f6bce3930681cb05899e816bea18fa88b6c554eafd348de735" Oct 09 14:58:54 crc kubenswrapper[4762]: I1009 14:58:54.692359 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" event={"ID":"366049a3-acf6-488c-9f93-4557528d6d14","Type":"ContainerStarted","Data":"1a0181075c60fb6cad88970ff9a288267c74d0e4b2ca24f09d7ac8986dc7d5ce"} Oct 09 14:58:54 crc kubenswrapper[4762]: I1009 14:58:54.959785 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Oct 09 14:58:54 crc kubenswrapper[4762]: I1009 14:58:54.959859 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Oct 09 14:58:54 crc kubenswrapper[4762]: I1009 14:58:54.992434 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Oct 09 14:58:55 crc kubenswrapper[4762]: I1009 14:58:55.012495 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Oct 09 14:58:55 crc kubenswrapper[4762]: I1009 14:58:55.705608 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Oct 09 14:58:55 crc kubenswrapper[4762]: I1009 14:58:55.705950 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Oct 09 14:58:57 crc kubenswrapper[4762]: I1009 14:58:57.018046 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Oct 09 14:58:57 crc kubenswrapper[4762]: I1009 14:58:57.018376 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Oct 09 14:58:57 crc kubenswrapper[4762]: I1009 14:58:57.052672 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Oct 09 14:58:57 crc kubenswrapper[4762]: I1009 14:58:57.074673 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Oct 09 14:58:57 crc kubenswrapper[4762]: I1009 14:58:57.728304 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Oct 09 14:58:57 crc kubenswrapper[4762]: I1009 14:58:57.728364 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Oct 09 14:58:57 crc 
kubenswrapper[4762]: I1009 14:58:57.879322 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Oct 09 14:58:57 crc kubenswrapper[4762]: I1009 14:58:57.879460 4762 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 09 14:58:57 crc kubenswrapper[4762]: I1009 14:58:57.881102 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Oct 09 14:58:59 crc kubenswrapper[4762]: I1009 14:58:59.699001 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Oct 09 14:58:59 crc kubenswrapper[4762]: I1009 14:58:59.719666 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Oct 09 14:59:06 crc kubenswrapper[4762]: I1009 14:59:06.003745 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-bbvxh"] Oct 09 14:59:06 crc kubenswrapper[4762]: E1009 14:59:06.004497 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8dbb926d-7b4f-46f7-801b-5c02b1c6becd" containerName="init" Oct 09 14:59:06 crc kubenswrapper[4762]: I1009 14:59:06.004509 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="8dbb926d-7b4f-46f7-801b-5c02b1c6becd" containerName="init" Oct 09 14:59:06 crc kubenswrapper[4762]: E1009 14:59:06.004534 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8dbb926d-7b4f-46f7-801b-5c02b1c6becd" containerName="dnsmasq-dns" Oct 09 14:59:06 crc kubenswrapper[4762]: I1009 14:59:06.004540 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="8dbb926d-7b4f-46f7-801b-5c02b1c6becd" containerName="dnsmasq-dns" Oct 09 14:59:06 crc kubenswrapper[4762]: I1009 14:59:06.004797 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="8dbb926d-7b4f-46f7-801b-5c02b1c6becd" containerName="dnsmasq-dns" Oct 09 14:59:06 crc kubenswrapper[4762]: I1009 14:59:06.005385 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-bbvxh" Oct 09 14:59:06 crc kubenswrapper[4762]: I1009 14:59:06.022477 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-bbvxh"] Oct 09 14:59:06 crc kubenswrapper[4762]: I1009 14:59:06.023929 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v4n7d\" (UniqueName: \"kubernetes.io/projected/e86ddeaa-60ac-49e4-8ca4-39985b6689ed-kube-api-access-v4n7d\") pod \"placement-db-create-bbvxh\" (UID: \"e86ddeaa-60ac-49e4-8ca4-39985b6689ed\") " pod="openstack/placement-db-create-bbvxh" Oct 09 14:59:06 crc kubenswrapper[4762]: I1009 14:59:06.125888 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v4n7d\" (UniqueName: \"kubernetes.io/projected/e86ddeaa-60ac-49e4-8ca4-39985b6689ed-kube-api-access-v4n7d\") pod \"placement-db-create-bbvxh\" (UID: \"e86ddeaa-60ac-49e4-8ca4-39985b6689ed\") " pod="openstack/placement-db-create-bbvxh" Oct 09 14:59:06 crc kubenswrapper[4762]: I1009 14:59:06.149083 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v4n7d\" (UniqueName: \"kubernetes.io/projected/e86ddeaa-60ac-49e4-8ca4-39985b6689ed-kube-api-access-v4n7d\") pod \"placement-db-create-bbvxh\" (UID: \"e86ddeaa-60ac-49e4-8ca4-39985b6689ed\") " pod="openstack/placement-db-create-bbvxh" Oct 09 14:59:06 crc kubenswrapper[4762]: I1009 14:59:06.329285 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-bbvxh" Oct 09 14:59:06 crc kubenswrapper[4762]: I1009 14:59:06.774671 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-bbvxh"] Oct 09 14:59:06 crc kubenswrapper[4762]: I1009 14:59:06.817837 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-bbvxh" event={"ID":"e86ddeaa-60ac-49e4-8ca4-39985b6689ed","Type":"ContainerStarted","Data":"4419155118bd6852874c0ef778d3922fe2898d371915936d963f65eaff28afd7"} Oct 09 14:59:07 crc kubenswrapper[4762]: I1009 14:59:07.829387 4762 generic.go:334] "Generic (PLEG): container finished" podID="e86ddeaa-60ac-49e4-8ca4-39985b6689ed" containerID="89770725c7eb395315bb62ef60eb314a9245b790ff0d1a72a970bf6e598fd56e" exitCode=0 Oct 09 14:59:07 crc kubenswrapper[4762]: I1009 14:59:07.829470 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-bbvxh" event={"ID":"e86ddeaa-60ac-49e4-8ca4-39985b6689ed","Type":"ContainerDied","Data":"89770725c7eb395315bb62ef60eb314a9245b790ff0d1a72a970bf6e598fd56e"} Oct 09 14:59:09 crc kubenswrapper[4762]: I1009 14:59:09.124544 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-bbvxh" Oct 09 14:59:09 crc kubenswrapper[4762]: I1009 14:59:09.276053 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v4n7d\" (UniqueName: \"kubernetes.io/projected/e86ddeaa-60ac-49e4-8ca4-39985b6689ed-kube-api-access-v4n7d\") pod \"e86ddeaa-60ac-49e4-8ca4-39985b6689ed\" (UID: \"e86ddeaa-60ac-49e4-8ca4-39985b6689ed\") " Oct 09 14:59:09 crc kubenswrapper[4762]: I1009 14:59:09.285133 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e86ddeaa-60ac-49e4-8ca4-39985b6689ed-kube-api-access-v4n7d" (OuterVolumeSpecName: "kube-api-access-v4n7d") pod "e86ddeaa-60ac-49e4-8ca4-39985b6689ed" (UID: "e86ddeaa-60ac-49e4-8ca4-39985b6689ed"). InnerVolumeSpecName "kube-api-access-v4n7d". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 14:59:09 crc kubenswrapper[4762]: I1009 14:59:09.377851 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v4n7d\" (UniqueName: \"kubernetes.io/projected/e86ddeaa-60ac-49e4-8ca4-39985b6689ed-kube-api-access-v4n7d\") on node \"crc\" DevicePath \"\"" Oct 09 14:59:09 crc kubenswrapper[4762]: I1009 14:59:09.849692 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-bbvxh" event={"ID":"e86ddeaa-60ac-49e4-8ca4-39985b6689ed","Type":"ContainerDied","Data":"4419155118bd6852874c0ef778d3922fe2898d371915936d963f65eaff28afd7"} Oct 09 14:59:09 crc kubenswrapper[4762]: I1009 14:59:09.849740 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4419155118bd6852874c0ef778d3922fe2898d371915936d963f65eaff28afd7" Oct 09 14:59:09 crc kubenswrapper[4762]: I1009 14:59:09.849753 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-bbvxh" Oct 09 14:59:16 crc kubenswrapper[4762]: I1009 14:59:16.047326 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-a3a2-account-create-hsvrr"] Oct 09 14:59:16 crc kubenswrapper[4762]: E1009 14:59:16.048383 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e86ddeaa-60ac-49e4-8ca4-39985b6689ed" containerName="mariadb-database-create" Oct 09 14:59:16 crc kubenswrapper[4762]: I1009 14:59:16.048399 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="e86ddeaa-60ac-49e4-8ca4-39985b6689ed" containerName="mariadb-database-create" Oct 09 14:59:16 crc kubenswrapper[4762]: I1009 14:59:16.048654 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="e86ddeaa-60ac-49e4-8ca4-39985b6689ed" containerName="mariadb-database-create" Oct 09 14:59:16 crc kubenswrapper[4762]: I1009 14:59:16.049422 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-a3a2-account-create-hsvrr" Oct 09 14:59:16 crc kubenswrapper[4762]: I1009 14:59:16.055569 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-a3a2-account-create-hsvrr"] Oct 09 14:59:16 crc kubenswrapper[4762]: I1009 14:59:16.066354 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret" Oct 09 14:59:16 crc kubenswrapper[4762]: I1009 14:59:16.203672 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k6s7v\" (UniqueName: \"kubernetes.io/projected/aa9f5502-22d1-4a17-bf73-b1514be38584-kube-api-access-k6s7v\") pod \"placement-a3a2-account-create-hsvrr\" (UID: \"aa9f5502-22d1-4a17-bf73-b1514be38584\") " pod="openstack/placement-a3a2-account-create-hsvrr" Oct 09 14:59:16 crc kubenswrapper[4762]: I1009 14:59:16.305545 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k6s7v\" (UniqueName: \"kubernetes.io/projected/aa9f5502-22d1-4a17-bf73-b1514be38584-kube-api-access-k6s7v\") pod \"placement-a3a2-account-create-hsvrr\" (UID: \"aa9f5502-22d1-4a17-bf73-b1514be38584\") " pod="openstack/placement-a3a2-account-create-hsvrr" Oct 09 14:59:16 crc kubenswrapper[4762]: I1009 14:59:16.335349 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k6s7v\" (UniqueName: \"kubernetes.io/projected/aa9f5502-22d1-4a17-bf73-b1514be38584-kube-api-access-k6s7v\") pod \"placement-a3a2-account-create-hsvrr\" (UID: \"aa9f5502-22d1-4a17-bf73-b1514be38584\") " pod="openstack/placement-a3a2-account-create-hsvrr" Oct 09 14:59:16 crc kubenswrapper[4762]: I1009 14:59:16.376524 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-a3a2-account-create-hsvrr" Oct 09 14:59:16 crc kubenswrapper[4762]: W1009 14:59:16.834697 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podaa9f5502_22d1_4a17_bf73_b1514be38584.slice/crio-2c8854ad21ba989fdec1a4b8bcd543abd6f295999a201686b08ce4ad5bc2ab97 WatchSource:0}: Error finding container 2c8854ad21ba989fdec1a4b8bcd543abd6f295999a201686b08ce4ad5bc2ab97: Status 404 returned error can't find the container with id 2c8854ad21ba989fdec1a4b8bcd543abd6f295999a201686b08ce4ad5bc2ab97 Oct 09 14:59:16 crc kubenswrapper[4762]: I1009 14:59:16.837199 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-a3a2-account-create-hsvrr"] Oct 09 14:59:16 crc kubenswrapper[4762]: I1009 14:59:16.912985 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-a3a2-account-create-hsvrr" event={"ID":"aa9f5502-22d1-4a17-bf73-b1514be38584","Type":"ContainerStarted","Data":"2c8854ad21ba989fdec1a4b8bcd543abd6f295999a201686b08ce4ad5bc2ab97"} Oct 09 14:59:17 crc kubenswrapper[4762]: I1009 14:59:17.928613 4762 generic.go:334] "Generic (PLEG): container finished" podID="aa9f5502-22d1-4a17-bf73-b1514be38584" containerID="ea6ed944f07582bbe022cce81110914fdb47c15a632919a2a48e4a6bd9487343" exitCode=0 Oct 09 14:59:17 crc kubenswrapper[4762]: I1009 14:59:17.929078 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-a3a2-account-create-hsvrr" event={"ID":"aa9f5502-22d1-4a17-bf73-b1514be38584","Type":"ContainerDied","Data":"ea6ed944f07582bbe022cce81110914fdb47c15a632919a2a48e4a6bd9487343"} Oct 09 14:59:19 crc kubenswrapper[4762]: I1009 14:59:19.307790 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-a3a2-account-create-hsvrr" Oct 09 14:59:19 crc kubenswrapper[4762]: I1009 14:59:19.465245 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k6s7v\" (UniqueName: \"kubernetes.io/projected/aa9f5502-22d1-4a17-bf73-b1514be38584-kube-api-access-k6s7v\") pod \"aa9f5502-22d1-4a17-bf73-b1514be38584\" (UID: \"aa9f5502-22d1-4a17-bf73-b1514be38584\") " Oct 09 14:59:19 crc kubenswrapper[4762]: I1009 14:59:19.470979 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aa9f5502-22d1-4a17-bf73-b1514be38584-kube-api-access-k6s7v" (OuterVolumeSpecName: "kube-api-access-k6s7v") pod "aa9f5502-22d1-4a17-bf73-b1514be38584" (UID: "aa9f5502-22d1-4a17-bf73-b1514be38584"). InnerVolumeSpecName "kube-api-access-k6s7v". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 14:59:19 crc kubenswrapper[4762]: I1009 14:59:19.568233 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k6s7v\" (UniqueName: \"kubernetes.io/projected/aa9f5502-22d1-4a17-bf73-b1514be38584-kube-api-access-k6s7v\") on node \"crc\" DevicePath \"\"" Oct 09 14:59:19 crc kubenswrapper[4762]: I1009 14:59:19.950580 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-a3a2-account-create-hsvrr" event={"ID":"aa9f5502-22d1-4a17-bf73-b1514be38584","Type":"ContainerDied","Data":"2c8854ad21ba989fdec1a4b8bcd543abd6f295999a201686b08ce4ad5bc2ab97"} Oct 09 14:59:19 crc kubenswrapper[4762]: I1009 14:59:19.950628 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2c8854ad21ba989fdec1a4b8bcd543abd6f295999a201686b08ce4ad5bc2ab97" Oct 09 14:59:19 crc kubenswrapper[4762]: I1009 14:59:19.950661 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-a3a2-account-create-hsvrr" Oct 09 14:59:21 crc kubenswrapper[4762]: I1009 14:59:21.234728 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-c66sk"] Oct 09 14:59:21 crc kubenswrapper[4762]: E1009 14:59:21.235506 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa9f5502-22d1-4a17-bf73-b1514be38584" containerName="mariadb-account-create" Oct 09 14:59:21 crc kubenswrapper[4762]: I1009 14:59:21.235522 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa9f5502-22d1-4a17-bf73-b1514be38584" containerName="mariadb-account-create" Oct 09 14:59:21 crc kubenswrapper[4762]: I1009 14:59:21.235780 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="aa9f5502-22d1-4a17-bf73-b1514be38584" containerName="mariadb-account-create" Oct 09 14:59:21 crc kubenswrapper[4762]: I1009 14:59:21.236573 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-c66sk" Oct 09 14:59:21 crc kubenswrapper[4762]: I1009 14:59:21.239030 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Oct 09 14:59:21 crc kubenswrapper[4762]: I1009 14:59:21.239300 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Oct 09 14:59:21 crc kubenswrapper[4762]: I1009 14:59:21.240663 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-wbzwn" Oct 09 14:59:21 crc kubenswrapper[4762]: I1009 14:59:21.253139 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-c66sk"] Oct 09 14:59:21 crc kubenswrapper[4762]: I1009 14:59:21.272803 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-649fc75c7c-h4tvb"] Oct 09 14:59:21 crc kubenswrapper[4762]: I1009 14:59:21.277007 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-649fc75c7c-h4tvb" Oct 09 14:59:21 crc kubenswrapper[4762]: I1009 14:59:21.316207 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-649fc75c7c-h4tvb"] Oct 09 14:59:21 crc kubenswrapper[4762]: I1009 14:59:21.399150 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/91f63920-a550-4f27-b780-f213efb1ca79-scripts\") pod \"placement-db-sync-c66sk\" (UID: \"91f63920-a550-4f27-b780-f213efb1ca79\") " pod="openstack/placement-db-sync-c66sk" Oct 09 14:59:21 crc kubenswrapper[4762]: I1009 14:59:21.399231 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b7391852-4a4f-440c-aaf1-5d02555553e7-config\") pod \"dnsmasq-dns-649fc75c7c-h4tvb\" (UID: \"b7391852-4a4f-440c-aaf1-5d02555553e7\") " pod="openstack/dnsmasq-dns-649fc75c7c-h4tvb" Oct 09 14:59:21 crc kubenswrapper[4762]: I1009 14:59:21.399291 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b7391852-4a4f-440c-aaf1-5d02555553e7-ovsdbserver-sb\") pod \"dnsmasq-dns-649fc75c7c-h4tvb\" (UID: \"b7391852-4a4f-440c-aaf1-5d02555553e7\") " pod="openstack/dnsmasq-dns-649fc75c7c-h4tvb" Oct 09 14:59:21 crc kubenswrapper[4762]: I1009 14:59:21.399319 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b7391852-4a4f-440c-aaf1-5d02555553e7-dns-svc\") pod \"dnsmasq-dns-649fc75c7c-h4tvb\" (UID: \"b7391852-4a4f-440c-aaf1-5d02555553e7\") " pod="openstack/dnsmasq-dns-649fc75c7c-h4tvb" Oct 09 14:59:21 crc kubenswrapper[4762]: I1009 14:59:21.399345 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/91f63920-a550-4f27-b780-f213efb1ca79-config-data\") pod \"placement-db-sync-c66sk\" (UID: \"91f63920-a550-4f27-b780-f213efb1ca79\") " pod="openstack/placement-db-sync-c66sk" Oct 09 14:59:21 crc kubenswrapper[4762]: I1009 14:59:21.399375 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zkmfr\" (UniqueName: \"kubernetes.io/projected/b7391852-4a4f-440c-aaf1-5d02555553e7-kube-api-access-zkmfr\") pod \"dnsmasq-dns-649fc75c7c-h4tvb\" (UID: \"b7391852-4a4f-440c-aaf1-5d02555553e7\") " pod="openstack/dnsmasq-dns-649fc75c7c-h4tvb" Oct 09 14:59:21 crc kubenswrapper[4762]: I1009 14:59:21.399396 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c9sk5\" (UniqueName: \"kubernetes.io/projected/91f63920-a550-4f27-b780-f213efb1ca79-kube-api-access-c9sk5\") pod \"placement-db-sync-c66sk\" (UID: \"91f63920-a550-4f27-b780-f213efb1ca79\") " pod="openstack/placement-db-sync-c66sk" Oct 09 14:59:21 crc kubenswrapper[4762]: I1009 14:59:21.399434 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/91f63920-a550-4f27-b780-f213efb1ca79-logs\") pod \"placement-db-sync-c66sk\" (UID: \"91f63920-a550-4f27-b780-f213efb1ca79\") " pod="openstack/placement-db-sync-c66sk" Oct 09 14:59:21 crc kubenswrapper[4762]: I1009 14:59:21.399479 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b7391852-4a4f-440c-aaf1-5d02555553e7-ovsdbserver-nb\") pod \"dnsmasq-dns-649fc75c7c-h4tvb\" (UID: \"b7391852-4a4f-440c-aaf1-5d02555553e7\") " pod="openstack/dnsmasq-dns-649fc75c7c-h4tvb" Oct 09 14:59:21 crc kubenswrapper[4762]: I1009 14:59:21.399514 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91f63920-a550-4f27-b780-f213efb1ca79-combined-ca-bundle\") pod \"placement-db-sync-c66sk\" (UID: \"91f63920-a550-4f27-b780-f213efb1ca79\") " pod="openstack/placement-db-sync-c66sk" Oct 09 14:59:21 crc kubenswrapper[4762]: I1009 14:59:21.501437 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b7391852-4a4f-440c-aaf1-5d02555553e7-ovsdbserver-sb\") pod \"dnsmasq-dns-649fc75c7c-h4tvb\" (UID: \"b7391852-4a4f-440c-aaf1-5d02555553e7\") " pod="openstack/dnsmasq-dns-649fc75c7c-h4tvb" Oct 09 14:59:21 crc kubenswrapper[4762]: I1009 14:59:21.501590 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b7391852-4a4f-440c-aaf1-5d02555553e7-dns-svc\") pod \"dnsmasq-dns-649fc75c7c-h4tvb\" (UID: \"b7391852-4a4f-440c-aaf1-5d02555553e7\") " pod="openstack/dnsmasq-dns-649fc75c7c-h4tvb" Oct 09 14:59:21 crc kubenswrapper[4762]: I1009 14:59:21.501685 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/91f63920-a550-4f27-b780-f213efb1ca79-config-data\") pod \"placement-db-sync-c66sk\" (UID: \"91f63920-a550-4f27-b780-f213efb1ca79\") " pod="openstack/placement-db-sync-c66sk" Oct 09 14:59:21 crc kubenswrapper[4762]: I1009 14:59:21.501746 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zkmfr\" (UniqueName: \"kubernetes.io/projected/b7391852-4a4f-440c-aaf1-5d02555553e7-kube-api-access-zkmfr\") pod \"dnsmasq-dns-649fc75c7c-h4tvb\" (UID: \"b7391852-4a4f-440c-aaf1-5d02555553e7\") " pod="openstack/dnsmasq-dns-649fc75c7c-h4tvb" Oct 09 14:59:21 crc kubenswrapper[4762]: I1009 14:59:21.501794 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c9sk5\" (UniqueName: \"kubernetes.io/projected/91f63920-a550-4f27-b780-f213efb1ca79-kube-api-access-c9sk5\") pod \"placement-db-sync-c66sk\" (UID: \"91f63920-a550-4f27-b780-f213efb1ca79\") " pod="openstack/placement-db-sync-c66sk" Oct 09 14:59:21 crc kubenswrapper[4762]: I1009 14:59:21.501875 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/91f63920-a550-4f27-b780-f213efb1ca79-logs\") pod \"placement-db-sync-c66sk\" (UID: \"91f63920-a550-4f27-b780-f213efb1ca79\") " pod="openstack/placement-db-sync-c66sk" Oct 09 14:59:21 crc kubenswrapper[4762]: I1009 14:59:21.501965 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b7391852-4a4f-440c-aaf1-5d02555553e7-ovsdbserver-nb\") pod \"dnsmasq-dns-649fc75c7c-h4tvb\" (UID: \"b7391852-4a4f-440c-aaf1-5d02555553e7\") " pod="openstack/dnsmasq-dns-649fc75c7c-h4tvb" Oct 09 14:59:21 crc kubenswrapper[4762]: I1009 14:59:21.502040 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/91f63920-a550-4f27-b780-f213efb1ca79-combined-ca-bundle\") pod \"placement-db-sync-c66sk\" (UID: \"91f63920-a550-4f27-b780-f213efb1ca79\") " pod="openstack/placement-db-sync-c66sk" Oct 09 14:59:21 crc kubenswrapper[4762]: I1009 14:59:21.502173 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/91f63920-a550-4f27-b780-f213efb1ca79-scripts\") pod \"placement-db-sync-c66sk\" (UID: \"91f63920-a550-4f27-b780-f213efb1ca79\") " pod="openstack/placement-db-sync-c66sk" Oct 09 14:59:21 crc kubenswrapper[4762]: I1009 14:59:21.502269 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b7391852-4a4f-440c-aaf1-5d02555553e7-config\") pod \"dnsmasq-dns-649fc75c7c-h4tvb\" (UID: \"b7391852-4a4f-440c-aaf1-5d02555553e7\") " pod="openstack/dnsmasq-dns-649fc75c7c-h4tvb" Oct 09 14:59:21 crc kubenswrapper[4762]: I1009 14:59:21.502869 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b7391852-4a4f-440c-aaf1-5d02555553e7-dns-svc\") pod \"dnsmasq-dns-649fc75c7c-h4tvb\" (UID: \"b7391852-4a4f-440c-aaf1-5d02555553e7\") " pod="openstack/dnsmasq-dns-649fc75c7c-h4tvb" Oct 09 14:59:21 crc kubenswrapper[4762]: I1009 14:59:21.503841 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b7391852-4a4f-440c-aaf1-5d02555553e7-ovsdbserver-sb\") pod \"dnsmasq-dns-649fc75c7c-h4tvb\" (UID: \"b7391852-4a4f-440c-aaf1-5d02555553e7\") " pod="openstack/dnsmasq-dns-649fc75c7c-h4tvb" Oct 09 14:59:21 crc kubenswrapper[4762]: I1009 14:59:21.503859 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/91f63920-a550-4f27-b780-f213efb1ca79-logs\") pod \"placement-db-sync-c66sk\" (UID: \"91f63920-a550-4f27-b780-f213efb1ca79\") " pod="openstack/placement-db-sync-c66sk" Oct 09 14:59:21 crc kubenswrapper[4762]: I1009 14:59:21.504746 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b7391852-4a4f-440c-aaf1-5d02555553e7-config\") pod \"dnsmasq-dns-649fc75c7c-h4tvb\" (UID: \"b7391852-4a4f-440c-aaf1-5d02555553e7\") " pod="openstack/dnsmasq-dns-649fc75c7c-h4tvb" Oct 09 14:59:21 crc kubenswrapper[4762]: I1009 14:59:21.504801 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b7391852-4a4f-440c-aaf1-5d02555553e7-ovsdbserver-nb\") pod \"dnsmasq-dns-649fc75c7c-h4tvb\" (UID: \"b7391852-4a4f-440c-aaf1-5d02555553e7\") " pod="openstack/dnsmasq-dns-649fc75c7c-h4tvb" Oct 09 14:59:21 crc kubenswrapper[4762]: I1009 14:59:21.508100 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/91f63920-a550-4f27-b780-f213efb1ca79-scripts\") pod \"placement-db-sync-c66sk\" (UID: \"91f63920-a550-4f27-b780-f213efb1ca79\") " pod="openstack/placement-db-sync-c66sk" Oct 09 14:59:21 crc kubenswrapper[4762]: I1009 14:59:21.508265 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91f63920-a550-4f27-b780-f213efb1ca79-combined-ca-bundle\") pod \"placement-db-sync-c66sk\" (UID: \"91f63920-a550-4f27-b780-f213efb1ca79\") " pod="openstack/placement-db-sync-c66sk" Oct 09 14:59:21 crc kubenswrapper[4762]: 
Oct 09 14:59:21 crc kubenswrapper[4762]: I1009 14:59:21.532138 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c9sk5\" (UniqueName: \"kubernetes.io/projected/91f63920-a550-4f27-b780-f213efb1ca79-kube-api-access-c9sk5\") pod \"placement-db-sync-c66sk\" (UID: \"91f63920-a550-4f27-b780-f213efb1ca79\") " pod="openstack/placement-db-sync-c66sk"
Oct 09 14:59:21 crc kubenswrapper[4762]: I1009 14:59:21.533108 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zkmfr\" (UniqueName: \"kubernetes.io/projected/b7391852-4a4f-440c-aaf1-5d02555553e7-kube-api-access-zkmfr\") pod \"dnsmasq-dns-649fc75c7c-h4tvb\" (UID: \"b7391852-4a4f-440c-aaf1-5d02555553e7\") " pod="openstack/dnsmasq-dns-649fc75c7c-h4tvb"
Oct 09 14:59:21 crc kubenswrapper[4762]: I1009 14:59:21.567027 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-c66sk"
Oct 09 14:59:21 crc kubenswrapper[4762]: I1009 14:59:21.610991 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-649fc75c7c-h4tvb"
Oct 09 14:59:22 crc kubenswrapper[4762]: I1009 14:59:22.055206 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-c66sk"]
Oct 09 14:59:22 crc kubenswrapper[4762]: I1009 14:59:22.130233 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-649fc75c7c-h4tvb"]
Oct 09 14:59:22 crc kubenswrapper[4762]: W1009 14:59:22.135700 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb7391852_4a4f_440c_aaf1_5d02555553e7.slice/crio-5ed54814350838f3f03d39c0e962bd82a1035749544c08a91339cce47ade3603 WatchSource:0}: Error finding container 5ed54814350838f3f03d39c0e962bd82a1035749544c08a91339cce47ade3603: Status 404 returned error can't find the container with id 5ed54814350838f3f03d39c0e962bd82a1035749544c08a91339cce47ade3603
Oct 09 14:59:22 crc kubenswrapper[4762]: I1009 14:59:22.978514 4762 generic.go:334] "Generic (PLEG): container finished" podID="b7391852-4a4f-440c-aaf1-5d02555553e7" containerID="f5b24c1b65e8d9af0d93034d889f53147ca425fb02097f58a58dcabab651be40" exitCode=0
Oct 09 14:59:22 crc kubenswrapper[4762]: I1009 14:59:22.978658 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-649fc75c7c-h4tvb" event={"ID":"b7391852-4a4f-440c-aaf1-5d02555553e7","Type":"ContainerDied","Data":"f5b24c1b65e8d9af0d93034d889f53147ca425fb02097f58a58dcabab651be40"}
Oct 09 14:59:22 crc kubenswrapper[4762]: I1009 14:59:22.978874 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-649fc75c7c-h4tvb" event={"ID":"b7391852-4a4f-440c-aaf1-5d02555553e7","Type":"ContainerStarted","Data":"5ed54814350838f3f03d39c0e962bd82a1035749544c08a91339cce47ade3603"}
Oct 09 14:59:22 crc kubenswrapper[4762]: I1009 14:59:22.981557 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-c66sk" event={"ID":"91f63920-a550-4f27-b780-f213efb1ca79","Type":"ContainerStarted","Data":"fb98d1fd0e827f298ba45d81a9c2b4bdc163710db54f80ba5f293757ffcad614"}
Oct 09 14:59:22 crc kubenswrapper[4762]: I1009 14:59:22.981601 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-c66sk" event={"ID":"91f63920-a550-4f27-b780-f213efb1ca79","Type":"ContainerStarted","Data":"059d5a2cb3d88223e80ca02154d6b4a753b8356971f8aa799ea29998c3a6d067"}
Oct 09 14:59:23 crc kubenswrapper[4762]: I1009 14:59:23.041598 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-c66sk" podStartSLOduration=2.041570448 podStartE2EDuration="2.041570448s" podCreationTimestamp="2025-10-09 14:59:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 14:59:23.031970428 +0000 UTC m=+5638.805761487" watchObservedRunningTime="2025-10-09 14:59:23.041570448 +0000 UTC m=+5638.815361487"
Oct 09 14:59:23 crc kubenswrapper[4762]: I1009 14:59:23.990921 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-649fc75c7c-h4tvb" event={"ID":"b7391852-4a4f-440c-aaf1-5d02555553e7","Type":"ContainerStarted","Data":"0c34795c2d3d01d1a824bbd907f9eca6b86263ee424d11a0dcb85a372e66f36d"}
Oct 09 14:59:23 crc kubenswrapper[4762]: I1009 14:59:23.991343 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-649fc75c7c-h4tvb"
Oct 09 14:59:23 crc kubenswrapper[4762]: I1009 14:59:23.992822 4762 generic.go:334] "Generic (PLEG): container finished" podID="91f63920-a550-4f27-b780-f213efb1ca79" containerID="fb98d1fd0e827f298ba45d81a9c2b4bdc163710db54f80ba5f293757ffcad614" exitCode=0
Oct 09 14:59:23 crc kubenswrapper[4762]: I1009 14:59:23.992890 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-c66sk" event={"ID":"91f63920-a550-4f27-b780-f213efb1ca79","Type":"ContainerDied","Data":"fb98d1fd0e827f298ba45d81a9c2b4bdc163710db54f80ba5f293757ffcad614"}
Oct 09 14:59:24 crc kubenswrapper[4762]: I1009 14:59:24.018555 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-649fc75c7c-h4tvb" podStartSLOduration=3.01853401 podStartE2EDuration="3.01853401s" podCreationTimestamp="2025-10-09 14:59:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 14:59:24.01275727 +0000 UTC m=+5639.786548339" watchObservedRunningTime="2025-10-09 14:59:24.01853401 +0000 UTC m=+5639.792325059"
Oct 09 14:59:25 crc kubenswrapper[4762]: I1009 14:59:25.365474 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-c66sk"
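The pod_startup_latency_tracker entries above encode simple arithmetic: podStartSLOduration is watchObservedRunningTime minus podCreationTimestamp, and the pull timestamps stay at the zero value because no image pull was needed. A sketch with the placement-db-sync-c66sk numbers copied from the entry above (timestamps truncated to microseconds, which Python's datetime can hold):

    # startup_slo.py - reproduce the podStartSLOduration arithmetic from this log
    from datetime import datetime, timezone

    created  = datetime(2025, 10, 9, 14, 59, 21, tzinfo=timezone.utc)
    observed = datetime(2025, 10, 9, 14, 59, 23, 41570, tzinfo=timezone.utc)  # 14:59:23.041570448
    print((observed - created).total_seconds())  # 2.04157, matching the logged 2.041570448s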
Need to start a new one" pod="openstack/placement-db-sync-c66sk" Oct 09 14:59:25 crc kubenswrapper[4762]: I1009 14:59:25.483483 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c9sk5\" (UniqueName: \"kubernetes.io/projected/91f63920-a550-4f27-b780-f213efb1ca79-kube-api-access-c9sk5\") pod \"91f63920-a550-4f27-b780-f213efb1ca79\" (UID: \"91f63920-a550-4f27-b780-f213efb1ca79\") " Oct 09 14:59:25 crc kubenswrapper[4762]: I1009 14:59:25.483562 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/91f63920-a550-4f27-b780-f213efb1ca79-scripts\") pod \"91f63920-a550-4f27-b780-f213efb1ca79\" (UID: \"91f63920-a550-4f27-b780-f213efb1ca79\") " Oct 09 14:59:25 crc kubenswrapper[4762]: I1009 14:59:25.483623 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91f63920-a550-4f27-b780-f213efb1ca79-combined-ca-bundle\") pod \"91f63920-a550-4f27-b780-f213efb1ca79\" (UID: \"91f63920-a550-4f27-b780-f213efb1ca79\") " Oct 09 14:59:25 crc kubenswrapper[4762]: I1009 14:59:25.483682 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/91f63920-a550-4f27-b780-f213efb1ca79-config-data\") pod \"91f63920-a550-4f27-b780-f213efb1ca79\" (UID: \"91f63920-a550-4f27-b780-f213efb1ca79\") " Oct 09 14:59:25 crc kubenswrapper[4762]: I1009 14:59:25.483859 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/91f63920-a550-4f27-b780-f213efb1ca79-logs\") pod \"91f63920-a550-4f27-b780-f213efb1ca79\" (UID: \"91f63920-a550-4f27-b780-f213efb1ca79\") " Oct 09 14:59:25 crc kubenswrapper[4762]: I1009 14:59:25.484837 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/91f63920-a550-4f27-b780-f213efb1ca79-logs" (OuterVolumeSpecName: "logs") pod "91f63920-a550-4f27-b780-f213efb1ca79" (UID: "91f63920-a550-4f27-b780-f213efb1ca79"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 14:59:25 crc kubenswrapper[4762]: I1009 14:59:25.489675 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/91f63920-a550-4f27-b780-f213efb1ca79-kube-api-access-c9sk5" (OuterVolumeSpecName: "kube-api-access-c9sk5") pod "91f63920-a550-4f27-b780-f213efb1ca79" (UID: "91f63920-a550-4f27-b780-f213efb1ca79"). InnerVolumeSpecName "kube-api-access-c9sk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 14:59:25 crc kubenswrapper[4762]: I1009 14:59:25.491885 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/91f63920-a550-4f27-b780-f213efb1ca79-scripts" (OuterVolumeSpecName: "scripts") pod "91f63920-a550-4f27-b780-f213efb1ca79" (UID: "91f63920-a550-4f27-b780-f213efb1ca79"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 14:59:25 crc kubenswrapper[4762]: I1009 14:59:25.514205 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/91f63920-a550-4f27-b780-f213efb1ca79-config-data" (OuterVolumeSpecName: "config-data") pod "91f63920-a550-4f27-b780-f213efb1ca79" (UID: "91f63920-a550-4f27-b780-f213efb1ca79"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 14:59:25 crc kubenswrapper[4762]: I1009 14:59:25.533982 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/91f63920-a550-4f27-b780-f213efb1ca79-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "91f63920-a550-4f27-b780-f213efb1ca79" (UID: "91f63920-a550-4f27-b780-f213efb1ca79"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 14:59:25 crc kubenswrapper[4762]: I1009 14:59:25.585906 4762 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/91f63920-a550-4f27-b780-f213efb1ca79-scripts\") on node \"crc\" DevicePath \"\"" Oct 09 14:59:25 crc kubenswrapper[4762]: I1009 14:59:25.586092 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91f63920-a550-4f27-b780-f213efb1ca79-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 14:59:25 crc kubenswrapper[4762]: I1009 14:59:25.586223 4762 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/91f63920-a550-4f27-b780-f213efb1ca79-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 14:59:25 crc kubenswrapper[4762]: I1009 14:59:25.586377 4762 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/91f63920-a550-4f27-b780-f213efb1ca79-logs\") on node \"crc\" DevicePath \"\"" Oct 09 14:59:25 crc kubenswrapper[4762]: I1009 14:59:25.586479 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c9sk5\" (UniqueName: \"kubernetes.io/projected/91f63920-a550-4f27-b780-f213efb1ca79-kube-api-access-c9sk5\") on node \"crc\" DevicePath \"\"" Oct 09 14:59:26 crc kubenswrapper[4762]: I1009 14:59:26.014765 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-c66sk" event={"ID":"91f63920-a550-4f27-b780-f213efb1ca79","Type":"ContainerDied","Data":"059d5a2cb3d88223e80ca02154d6b4a753b8356971f8aa799ea29998c3a6d067"} Oct 09 14:59:26 crc kubenswrapper[4762]: I1009 14:59:26.014966 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="059d5a2cb3d88223e80ca02154d6b4a753b8356971f8aa799ea29998c3a6d067" Oct 09 14:59:26 crc kubenswrapper[4762]: I1009 14:59:26.014841 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-c66sk" Oct 09 14:59:26 crc kubenswrapper[4762]: I1009 14:59:26.479600 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-6795fbd6fb-6m2sc"] Oct 09 14:59:26 crc kubenswrapper[4762]: E1009 14:59:26.480541 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="91f63920-a550-4f27-b780-f213efb1ca79" containerName="placement-db-sync" Oct 09 14:59:26 crc kubenswrapper[4762]: I1009 14:59:26.480560 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="91f63920-a550-4f27-b780-f213efb1ca79" containerName="placement-db-sync" Oct 09 14:59:26 crc kubenswrapper[4762]: I1009 14:59:26.480960 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="91f63920-a550-4f27-b780-f213efb1ca79" containerName="placement-db-sync" Oct 09 14:59:26 crc kubenswrapper[4762]: I1009 14:59:26.482900 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-6795fbd6fb-6m2sc" Oct 09 14:59:26 crc kubenswrapper[4762]: I1009 14:59:26.490252 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Oct 09 14:59:26 crc kubenswrapper[4762]: I1009 14:59:26.490255 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Oct 09 14:59:26 crc kubenswrapper[4762]: I1009 14:59:26.492184 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-wbzwn" Oct 09 14:59:26 crc kubenswrapper[4762]: I1009 14:59:26.495350 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-6795fbd6fb-6m2sc"] Oct 09 14:59:26 crc kubenswrapper[4762]: I1009 14:59:26.504284 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bzvsx\" (UniqueName: \"kubernetes.io/projected/ea337cfa-2396-4905-8a6d-3fe0997bbda0-kube-api-access-bzvsx\") pod \"placement-6795fbd6fb-6m2sc\" (UID: \"ea337cfa-2396-4905-8a6d-3fe0997bbda0\") " pod="openstack/placement-6795fbd6fb-6m2sc" Oct 09 14:59:26 crc kubenswrapper[4762]: I1009 14:59:26.504410 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea337cfa-2396-4905-8a6d-3fe0997bbda0-combined-ca-bundle\") pod \"placement-6795fbd6fb-6m2sc\" (UID: \"ea337cfa-2396-4905-8a6d-3fe0997bbda0\") " pod="openstack/placement-6795fbd6fb-6m2sc" Oct 09 14:59:26 crc kubenswrapper[4762]: I1009 14:59:26.504446 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ea337cfa-2396-4905-8a6d-3fe0997bbda0-config-data\") pod \"placement-6795fbd6fb-6m2sc\" (UID: \"ea337cfa-2396-4905-8a6d-3fe0997bbda0\") " pod="openstack/placement-6795fbd6fb-6m2sc" Oct 09 14:59:26 crc kubenswrapper[4762]: I1009 14:59:26.504548 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ea337cfa-2396-4905-8a6d-3fe0997bbda0-scripts\") pod \"placement-6795fbd6fb-6m2sc\" (UID: \"ea337cfa-2396-4905-8a6d-3fe0997bbda0\") " pod="openstack/placement-6795fbd6fb-6m2sc" Oct 09 14:59:26 crc kubenswrapper[4762]: I1009 14:59:26.504734 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ea337cfa-2396-4905-8a6d-3fe0997bbda0-logs\") pod \"placement-6795fbd6fb-6m2sc\" (UID: \"ea337cfa-2396-4905-8a6d-3fe0997bbda0\") " pod="openstack/placement-6795fbd6fb-6m2sc" Oct 09 14:59:26 crc kubenswrapper[4762]: I1009 14:59:26.606856 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bzvsx\" (UniqueName: \"kubernetes.io/projected/ea337cfa-2396-4905-8a6d-3fe0997bbda0-kube-api-access-bzvsx\") pod \"placement-6795fbd6fb-6m2sc\" (UID: \"ea337cfa-2396-4905-8a6d-3fe0997bbda0\") " pod="openstack/placement-6795fbd6fb-6m2sc" Oct 09 14:59:26 crc kubenswrapper[4762]: I1009 14:59:26.607002 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea337cfa-2396-4905-8a6d-3fe0997bbda0-combined-ca-bundle\") pod \"placement-6795fbd6fb-6m2sc\" (UID: \"ea337cfa-2396-4905-8a6d-3fe0997bbda0\") " pod="openstack/placement-6795fbd6fb-6m2sc" Oct 09 14:59:26 crc 
kubenswrapper[4762]: I1009 14:59:26.607034 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ea337cfa-2396-4905-8a6d-3fe0997bbda0-config-data\") pod \"placement-6795fbd6fb-6m2sc\" (UID: \"ea337cfa-2396-4905-8a6d-3fe0997bbda0\") " pod="openstack/placement-6795fbd6fb-6m2sc" Oct 09 14:59:26 crc kubenswrapper[4762]: I1009 14:59:26.607093 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ea337cfa-2396-4905-8a6d-3fe0997bbda0-scripts\") pod \"placement-6795fbd6fb-6m2sc\" (UID: \"ea337cfa-2396-4905-8a6d-3fe0997bbda0\") " pod="openstack/placement-6795fbd6fb-6m2sc" Oct 09 14:59:26 crc kubenswrapper[4762]: I1009 14:59:26.607185 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ea337cfa-2396-4905-8a6d-3fe0997bbda0-logs\") pod \"placement-6795fbd6fb-6m2sc\" (UID: \"ea337cfa-2396-4905-8a6d-3fe0997bbda0\") " pod="openstack/placement-6795fbd6fb-6m2sc" Oct 09 14:59:26 crc kubenswrapper[4762]: I1009 14:59:26.607770 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ea337cfa-2396-4905-8a6d-3fe0997bbda0-logs\") pod \"placement-6795fbd6fb-6m2sc\" (UID: \"ea337cfa-2396-4905-8a6d-3fe0997bbda0\") " pod="openstack/placement-6795fbd6fb-6m2sc" Oct 09 14:59:26 crc kubenswrapper[4762]: I1009 14:59:26.611344 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ea337cfa-2396-4905-8a6d-3fe0997bbda0-scripts\") pod \"placement-6795fbd6fb-6m2sc\" (UID: \"ea337cfa-2396-4905-8a6d-3fe0997bbda0\") " pod="openstack/placement-6795fbd6fb-6m2sc" Oct 09 14:59:26 crc kubenswrapper[4762]: I1009 14:59:26.611570 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea337cfa-2396-4905-8a6d-3fe0997bbda0-combined-ca-bundle\") pod \"placement-6795fbd6fb-6m2sc\" (UID: \"ea337cfa-2396-4905-8a6d-3fe0997bbda0\") " pod="openstack/placement-6795fbd6fb-6m2sc" Oct 09 14:59:26 crc kubenswrapper[4762]: I1009 14:59:26.611861 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ea337cfa-2396-4905-8a6d-3fe0997bbda0-config-data\") pod \"placement-6795fbd6fb-6m2sc\" (UID: \"ea337cfa-2396-4905-8a6d-3fe0997bbda0\") " pod="openstack/placement-6795fbd6fb-6m2sc" Oct 09 14:59:26 crc kubenswrapper[4762]: I1009 14:59:26.626969 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bzvsx\" (UniqueName: \"kubernetes.io/projected/ea337cfa-2396-4905-8a6d-3fe0997bbda0-kube-api-access-bzvsx\") pod \"placement-6795fbd6fb-6m2sc\" (UID: \"ea337cfa-2396-4905-8a6d-3fe0997bbda0\") " pod="openstack/placement-6795fbd6fb-6m2sc" Oct 09 14:59:26 crc kubenswrapper[4762]: I1009 14:59:26.810614 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-6795fbd6fb-6m2sc" Oct 09 14:59:27 crc kubenswrapper[4762]: I1009 14:59:27.261995 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-6795fbd6fb-6m2sc"] Oct 09 14:59:28 crc kubenswrapper[4762]: I1009 14:59:28.037912 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-6795fbd6fb-6m2sc" event={"ID":"ea337cfa-2396-4905-8a6d-3fe0997bbda0","Type":"ContainerStarted","Data":"e77b22400a74b619550644679b066a8eb81b7b3bb176b92dfea489f29270e869"} Oct 09 14:59:28 crc kubenswrapper[4762]: I1009 14:59:28.038483 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-6795fbd6fb-6m2sc" event={"ID":"ea337cfa-2396-4905-8a6d-3fe0997bbda0","Type":"ContainerStarted","Data":"3974cbf8402a44c8f9579a74652517a866cecdbaec3b76f0feb42c2784fece96"} Oct 09 14:59:28 crc kubenswrapper[4762]: I1009 14:59:28.038521 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-6795fbd6fb-6m2sc" event={"ID":"ea337cfa-2396-4905-8a6d-3fe0997bbda0","Type":"ContainerStarted","Data":"f1749ba4e873e3ea56d1ba698423e83f347e3f99b8fb08fbf4d3204354426f31"} Oct 09 14:59:28 crc kubenswrapper[4762]: I1009 14:59:28.038560 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-6795fbd6fb-6m2sc" Oct 09 14:59:28 crc kubenswrapper[4762]: I1009 14:59:28.038572 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-6795fbd6fb-6m2sc" Oct 09 14:59:28 crc kubenswrapper[4762]: I1009 14:59:28.065878 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-6795fbd6fb-6m2sc" podStartSLOduration=2.065853343 podStartE2EDuration="2.065853343s" podCreationTimestamp="2025-10-09 14:59:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 14:59:28.05616928 +0000 UTC m=+5643.829960319" watchObservedRunningTime="2025-10-09 14:59:28.065853343 +0000 UTC m=+5643.839644382" Oct 09 14:59:31 crc kubenswrapper[4762]: I1009 14:59:31.613932 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-649fc75c7c-h4tvb" Oct 09 14:59:31 crc kubenswrapper[4762]: I1009 14:59:31.683260 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8455bb8bd7-nc52k"] Oct 09 14:59:31 crc kubenswrapper[4762]: I1009 14:59:31.683537 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-8455bb8bd7-nc52k" podUID="537e6301-d98a-4829-b0e0-7d2b4dd95d6c" containerName="dnsmasq-dns" containerID="cri-o://5fc2587c187ed65683ae06f2ceb6b1405128364dbbac66c86648ffb4b05ecd57" gracePeriod=10 Oct 09 14:59:32 crc kubenswrapper[4762]: I1009 14:59:32.078062 4762 generic.go:334] "Generic (PLEG): container finished" podID="537e6301-d98a-4829-b0e0-7d2b4dd95d6c" containerID="5fc2587c187ed65683ae06f2ceb6b1405128364dbbac66c86648ffb4b05ecd57" exitCode=0 Oct 09 14:59:32 crc kubenswrapper[4762]: I1009 14:59:32.078234 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8455bb8bd7-nc52k" event={"ID":"537e6301-d98a-4829-b0e0-7d2b4dd95d6c","Type":"ContainerDied","Data":"5fc2587c187ed65683ae06f2ceb6b1405128364dbbac66c86648ffb4b05ecd57"} Oct 09 14:59:32 crc kubenswrapper[4762]: I1009 14:59:32.189061 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8455bb8bd7-nc52k" Oct 09 14:59:32 crc kubenswrapper[4762]: I1009 14:59:32.313784 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/537e6301-d98a-4829-b0e0-7d2b4dd95d6c-dns-svc\") pod \"537e6301-d98a-4829-b0e0-7d2b4dd95d6c\" (UID: \"537e6301-d98a-4829-b0e0-7d2b4dd95d6c\") " Oct 09 14:59:32 crc kubenswrapper[4762]: I1009 14:59:32.313851 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/537e6301-d98a-4829-b0e0-7d2b4dd95d6c-ovsdbserver-nb\") pod \"537e6301-d98a-4829-b0e0-7d2b4dd95d6c\" (UID: \"537e6301-d98a-4829-b0e0-7d2b4dd95d6c\") " Oct 09 14:59:32 crc kubenswrapper[4762]: I1009 14:59:32.313885 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/537e6301-d98a-4829-b0e0-7d2b4dd95d6c-config\") pod \"537e6301-d98a-4829-b0e0-7d2b4dd95d6c\" (UID: \"537e6301-d98a-4829-b0e0-7d2b4dd95d6c\") " Oct 09 14:59:32 crc kubenswrapper[4762]: I1009 14:59:32.313938 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/537e6301-d98a-4829-b0e0-7d2b4dd95d6c-ovsdbserver-sb\") pod \"537e6301-d98a-4829-b0e0-7d2b4dd95d6c\" (UID: \"537e6301-d98a-4829-b0e0-7d2b4dd95d6c\") " Oct 09 14:59:32 crc kubenswrapper[4762]: I1009 14:59:32.314101 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vbjpv\" (UniqueName: \"kubernetes.io/projected/537e6301-d98a-4829-b0e0-7d2b4dd95d6c-kube-api-access-vbjpv\") pod \"537e6301-d98a-4829-b0e0-7d2b4dd95d6c\" (UID: \"537e6301-d98a-4829-b0e0-7d2b4dd95d6c\") " Oct 09 14:59:32 crc kubenswrapper[4762]: I1009 14:59:32.321884 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/537e6301-d98a-4829-b0e0-7d2b4dd95d6c-kube-api-access-vbjpv" (OuterVolumeSpecName: "kube-api-access-vbjpv") pod "537e6301-d98a-4829-b0e0-7d2b4dd95d6c" (UID: "537e6301-d98a-4829-b0e0-7d2b4dd95d6c"). InnerVolumeSpecName "kube-api-access-vbjpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 14:59:32 crc kubenswrapper[4762]: I1009 14:59:32.359292 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/537e6301-d98a-4829-b0e0-7d2b4dd95d6c-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "537e6301-d98a-4829-b0e0-7d2b4dd95d6c" (UID: "537e6301-d98a-4829-b0e0-7d2b4dd95d6c"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 14:59:32 crc kubenswrapper[4762]: I1009 14:59:32.360550 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/537e6301-d98a-4829-b0e0-7d2b4dd95d6c-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "537e6301-d98a-4829-b0e0-7d2b4dd95d6c" (UID: "537e6301-d98a-4829-b0e0-7d2b4dd95d6c"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 14:59:32 crc kubenswrapper[4762]: I1009 14:59:32.365665 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/537e6301-d98a-4829-b0e0-7d2b4dd95d6c-config" (OuterVolumeSpecName: "config") pod "537e6301-d98a-4829-b0e0-7d2b4dd95d6c" (UID: "537e6301-d98a-4829-b0e0-7d2b4dd95d6c"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 14:59:32 crc kubenswrapper[4762]: I1009 14:59:32.373893 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/537e6301-d98a-4829-b0e0-7d2b4dd95d6c-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "537e6301-d98a-4829-b0e0-7d2b4dd95d6c" (UID: "537e6301-d98a-4829-b0e0-7d2b4dd95d6c"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 14:59:32 crc kubenswrapper[4762]: I1009 14:59:32.416295 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vbjpv\" (UniqueName: \"kubernetes.io/projected/537e6301-d98a-4829-b0e0-7d2b4dd95d6c-kube-api-access-vbjpv\") on node \"crc\" DevicePath \"\"" Oct 09 14:59:32 crc kubenswrapper[4762]: I1009 14:59:32.416335 4762 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/537e6301-d98a-4829-b0e0-7d2b4dd95d6c-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 09 14:59:32 crc kubenswrapper[4762]: I1009 14:59:32.416344 4762 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/537e6301-d98a-4829-b0e0-7d2b4dd95d6c-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 09 14:59:32 crc kubenswrapper[4762]: I1009 14:59:32.416352 4762 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/537e6301-d98a-4829-b0e0-7d2b4dd95d6c-config\") on node \"crc\" DevicePath \"\"" Oct 09 14:59:32 crc kubenswrapper[4762]: I1009 14:59:32.416361 4762 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/537e6301-d98a-4829-b0e0-7d2b4dd95d6c-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 09 14:59:33 crc kubenswrapper[4762]: I1009 14:59:33.088628 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8455bb8bd7-nc52k" event={"ID":"537e6301-d98a-4829-b0e0-7d2b4dd95d6c","Type":"ContainerDied","Data":"44335e817904bc3789a5386e9dd7670b16128afcf353bdf2cc02d92069701c9a"} Oct 09 14:59:33 crc kubenswrapper[4762]: I1009 14:59:33.088907 4762 scope.go:117] "RemoveContainer" containerID="5fc2587c187ed65683ae06f2ceb6b1405128364dbbac66c86648ffb4b05ecd57" Oct 09 14:59:33 crc kubenswrapper[4762]: I1009 14:59:33.088737 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8455bb8bd7-nc52k" Oct 09 14:59:33 crc kubenswrapper[4762]: I1009 14:59:33.115051 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8455bb8bd7-nc52k"] Oct 09 14:59:33 crc kubenswrapper[4762]: I1009 14:59:33.116293 4762 scope.go:117] "RemoveContainer" containerID="1b2393445ed6c4392bc1c84ab2db85d421b2d0118ccee1f08c96b74c53d745da" Oct 09 14:59:33 crc kubenswrapper[4762]: I1009 14:59:33.123093 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-8455bb8bd7-nc52k"] Oct 09 14:59:34 crc kubenswrapper[4762]: I1009 14:59:34.981397 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="537e6301-d98a-4829-b0e0-7d2b4dd95d6c" path="/var/lib/kubelet/pods/537e6301-d98a-4829-b0e0-7d2b4dd95d6c/volumes" Oct 09 14:59:57 crc kubenswrapper[4762]: I1009 14:59:57.826416 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-6795fbd6fb-6m2sc" Oct 09 14:59:57 crc kubenswrapper[4762]: I1009 14:59:57.829943 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-6795fbd6fb-6m2sc" Oct 09 15:00:00 crc kubenswrapper[4762]: I1009 15:00:00.133705 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29333700-jl2hr"] Oct 09 15:00:00 crc kubenswrapper[4762]: E1009 15:00:00.136619 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="537e6301-d98a-4829-b0e0-7d2b4dd95d6c" containerName="init" Oct 09 15:00:00 crc kubenswrapper[4762]: I1009 15:00:00.136889 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="537e6301-d98a-4829-b0e0-7d2b4dd95d6c" containerName="init" Oct 09 15:00:00 crc kubenswrapper[4762]: E1009 15:00:00.137119 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="537e6301-d98a-4829-b0e0-7d2b4dd95d6c" containerName="dnsmasq-dns" Oct 09 15:00:00 crc kubenswrapper[4762]: I1009 15:00:00.137258 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="537e6301-d98a-4829-b0e0-7d2b4dd95d6c" containerName="dnsmasq-dns" Oct 09 15:00:00 crc kubenswrapper[4762]: I1009 15:00:00.137782 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="537e6301-d98a-4829-b0e0-7d2b4dd95d6c" containerName="dnsmasq-dns" Oct 09 15:00:00 crc kubenswrapper[4762]: I1009 15:00:00.139040 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29333700-jl2hr" Oct 09 15:00:00 crc kubenswrapper[4762]: I1009 15:00:00.144095 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29333700-jl2hr"] Oct 09 15:00:00 crc kubenswrapper[4762]: I1009 15:00:00.145421 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 09 15:00:00 crc kubenswrapper[4762]: I1009 15:00:00.145657 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 09 15:00:00 crc kubenswrapper[4762]: I1009 15:00:00.264969 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h7hn4\" (UniqueName: \"kubernetes.io/projected/ac9d3991-fdff-41b8-8f2c-7ee68463518b-kube-api-access-h7hn4\") pod \"collect-profiles-29333700-jl2hr\" (UID: \"ac9d3991-fdff-41b8-8f2c-7ee68463518b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333700-jl2hr" Oct 09 15:00:00 crc kubenswrapper[4762]: I1009 15:00:00.265317 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ac9d3991-fdff-41b8-8f2c-7ee68463518b-secret-volume\") pod \"collect-profiles-29333700-jl2hr\" (UID: \"ac9d3991-fdff-41b8-8f2c-7ee68463518b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333700-jl2hr" Oct 09 15:00:00 crc kubenswrapper[4762]: I1009 15:00:00.265442 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ac9d3991-fdff-41b8-8f2c-7ee68463518b-config-volume\") pod \"collect-profiles-29333700-jl2hr\" (UID: \"ac9d3991-fdff-41b8-8f2c-7ee68463518b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333700-jl2hr" Oct 09 15:00:00 crc kubenswrapper[4762]: I1009 15:00:00.367266 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ac9d3991-fdff-41b8-8f2c-7ee68463518b-secret-volume\") pod \"collect-profiles-29333700-jl2hr\" (UID: \"ac9d3991-fdff-41b8-8f2c-7ee68463518b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333700-jl2hr" Oct 09 15:00:00 crc kubenswrapper[4762]: I1009 15:00:00.367609 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ac9d3991-fdff-41b8-8f2c-7ee68463518b-config-volume\") pod \"collect-profiles-29333700-jl2hr\" (UID: \"ac9d3991-fdff-41b8-8f2c-7ee68463518b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333700-jl2hr" Oct 09 15:00:00 crc kubenswrapper[4762]: I1009 15:00:00.367896 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h7hn4\" (UniqueName: \"kubernetes.io/projected/ac9d3991-fdff-41b8-8f2c-7ee68463518b-kube-api-access-h7hn4\") pod \"collect-profiles-29333700-jl2hr\" (UID: \"ac9d3991-fdff-41b8-8f2c-7ee68463518b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333700-jl2hr" Oct 09 15:00:00 crc kubenswrapper[4762]: I1009 15:00:00.368591 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ac9d3991-fdff-41b8-8f2c-7ee68463518b-config-volume\") pod 
\"collect-profiles-29333700-jl2hr\" (UID: \"ac9d3991-fdff-41b8-8f2c-7ee68463518b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333700-jl2hr" Oct 09 15:00:00 crc kubenswrapper[4762]: I1009 15:00:00.376891 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ac9d3991-fdff-41b8-8f2c-7ee68463518b-secret-volume\") pod \"collect-profiles-29333700-jl2hr\" (UID: \"ac9d3991-fdff-41b8-8f2c-7ee68463518b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333700-jl2hr" Oct 09 15:00:00 crc kubenswrapper[4762]: I1009 15:00:00.389108 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h7hn4\" (UniqueName: \"kubernetes.io/projected/ac9d3991-fdff-41b8-8f2c-7ee68463518b-kube-api-access-h7hn4\") pod \"collect-profiles-29333700-jl2hr\" (UID: \"ac9d3991-fdff-41b8-8f2c-7ee68463518b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333700-jl2hr" Oct 09 15:00:00 crc kubenswrapper[4762]: I1009 15:00:00.474905 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29333700-jl2hr" Oct 09 15:00:00 crc kubenswrapper[4762]: I1009 15:00:00.939024 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29333700-jl2hr"] Oct 09 15:00:01 crc kubenswrapper[4762]: I1009 15:00:01.361135 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29333700-jl2hr" event={"ID":"ac9d3991-fdff-41b8-8f2c-7ee68463518b","Type":"ContainerStarted","Data":"9e20acdabed285a9eeb1dc1611543c123515b746fbfedfef4d2fa0f7c216f8dd"} Oct 09 15:00:01 crc kubenswrapper[4762]: I1009 15:00:01.361480 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29333700-jl2hr" event={"ID":"ac9d3991-fdff-41b8-8f2c-7ee68463518b","Type":"ContainerStarted","Data":"0ec7f40d0ddf291cbe0f67a20b82644809c51e566c0e820129340032f291fa0e"} Oct 09 15:00:01 crc kubenswrapper[4762]: I1009 15:00:01.391843 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29333700-jl2hr" podStartSLOduration=1.391822989 podStartE2EDuration="1.391822989s" podCreationTimestamp="2025-10-09 15:00:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 15:00:01.386220874 +0000 UTC m=+5677.160011913" watchObservedRunningTime="2025-10-09 15:00:01.391822989 +0000 UTC m=+5677.165614028" Oct 09 15:00:02 crc kubenswrapper[4762]: I1009 15:00:02.371408 4762 generic.go:334] "Generic (PLEG): container finished" podID="ac9d3991-fdff-41b8-8f2c-7ee68463518b" containerID="9e20acdabed285a9eeb1dc1611543c123515b746fbfedfef4d2fa0f7c216f8dd" exitCode=0 Oct 09 15:00:02 crc kubenswrapper[4762]: I1009 15:00:02.371464 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29333700-jl2hr" event={"ID":"ac9d3991-fdff-41b8-8f2c-7ee68463518b","Type":"ContainerDied","Data":"9e20acdabed285a9eeb1dc1611543c123515b746fbfedfef4d2fa0f7c216f8dd"} Oct 09 15:00:04 crc kubenswrapper[4762]: I1009 15:00:04.040257 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29333700-jl2hr" Oct 09 15:00:04 crc kubenswrapper[4762]: I1009 15:00:04.146029 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ac9d3991-fdff-41b8-8f2c-7ee68463518b-config-volume\") pod \"ac9d3991-fdff-41b8-8f2c-7ee68463518b\" (UID: \"ac9d3991-fdff-41b8-8f2c-7ee68463518b\") " Oct 09 15:00:04 crc kubenswrapper[4762]: I1009 15:00:04.146312 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ac9d3991-fdff-41b8-8f2c-7ee68463518b-secret-volume\") pod \"ac9d3991-fdff-41b8-8f2c-7ee68463518b\" (UID: \"ac9d3991-fdff-41b8-8f2c-7ee68463518b\") " Oct 09 15:00:04 crc kubenswrapper[4762]: I1009 15:00:04.146462 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h7hn4\" (UniqueName: \"kubernetes.io/projected/ac9d3991-fdff-41b8-8f2c-7ee68463518b-kube-api-access-h7hn4\") pod \"ac9d3991-fdff-41b8-8f2c-7ee68463518b\" (UID: \"ac9d3991-fdff-41b8-8f2c-7ee68463518b\") " Oct 09 15:00:04 crc kubenswrapper[4762]: I1009 15:00:04.148995 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ac9d3991-fdff-41b8-8f2c-7ee68463518b-config-volume" (OuterVolumeSpecName: "config-volume") pod "ac9d3991-fdff-41b8-8f2c-7ee68463518b" (UID: "ac9d3991-fdff-41b8-8f2c-7ee68463518b"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 15:00:04 crc kubenswrapper[4762]: I1009 15:00:04.156049 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ac9d3991-fdff-41b8-8f2c-7ee68463518b-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "ac9d3991-fdff-41b8-8f2c-7ee68463518b" (UID: "ac9d3991-fdff-41b8-8f2c-7ee68463518b"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:00:04 crc kubenswrapper[4762]: I1009 15:00:04.156422 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ac9d3991-fdff-41b8-8f2c-7ee68463518b-kube-api-access-h7hn4" (OuterVolumeSpecName: "kube-api-access-h7hn4") pod "ac9d3991-fdff-41b8-8f2c-7ee68463518b" (UID: "ac9d3991-fdff-41b8-8f2c-7ee68463518b"). InnerVolumeSpecName "kube-api-access-h7hn4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 15:00:04 crc kubenswrapper[4762]: I1009 15:00:04.249363 4762 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ac9d3991-fdff-41b8-8f2c-7ee68463518b-config-volume\") on node \"crc\" DevicePath \"\"" Oct 09 15:00:04 crc kubenswrapper[4762]: I1009 15:00:04.249419 4762 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ac9d3991-fdff-41b8-8f2c-7ee68463518b-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 09 15:00:04 crc kubenswrapper[4762]: I1009 15:00:04.249438 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h7hn4\" (UniqueName: \"kubernetes.io/projected/ac9d3991-fdff-41b8-8f2c-7ee68463518b-kube-api-access-h7hn4\") on node \"crc\" DevicePath \"\"" Oct 09 15:00:04 crc kubenswrapper[4762]: I1009 15:00:04.387413 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29333700-jl2hr" event={"ID":"ac9d3991-fdff-41b8-8f2c-7ee68463518b","Type":"ContainerDied","Data":"0ec7f40d0ddf291cbe0f67a20b82644809c51e566c0e820129340032f291fa0e"} Oct 09 15:00:04 crc kubenswrapper[4762]: I1009 15:00:04.387662 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0ec7f40d0ddf291cbe0f67a20b82644809c51e566c0e820129340032f291fa0e" Oct 09 15:00:04 crc kubenswrapper[4762]: I1009 15:00:04.387543 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29333700-jl2hr" Oct 09 15:00:04 crc kubenswrapper[4762]: I1009 15:00:04.466854 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29333655-rpr8k"] Oct 09 15:00:04 crc kubenswrapper[4762]: I1009 15:00:04.474717 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29333655-rpr8k"] Oct 09 15:00:04 crc kubenswrapper[4762]: I1009 15:00:04.976235 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0bf72a90-6a69-4588-beea-759828586d14" path="/var/lib/kubelet/pods/0bf72a90-6a69-4588-beea-759828586d14/volumes" Oct 09 15:00:21 crc kubenswrapper[4762]: I1009 15:00:21.413032 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-jxttd"] Oct 09 15:00:21 crc kubenswrapper[4762]: E1009 15:00:21.422851 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac9d3991-fdff-41b8-8f2c-7ee68463518b" containerName="collect-profiles" Oct 09 15:00:21 crc kubenswrapper[4762]: I1009 15:00:21.422957 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac9d3991-fdff-41b8-8f2c-7ee68463518b" containerName="collect-profiles" Oct 09 15:00:21 crc kubenswrapper[4762]: I1009 15:00:21.423259 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="ac9d3991-fdff-41b8-8f2c-7ee68463518b" containerName="collect-profiles" Oct 09 15:00:21 crc kubenswrapper[4762]: I1009 15:00:21.424154 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-jxttd" Oct 09 15:00:21 crc kubenswrapper[4762]: I1009 15:00:21.442058 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-jxttd"] Oct 09 15:00:21 crc kubenswrapper[4762]: I1009 15:00:21.505041 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-bwsbz"] Oct 09 15:00:21 crc kubenswrapper[4762]: I1009 15:00:21.506681 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-bwsbz" Oct 09 15:00:21 crc kubenswrapper[4762]: I1009 15:00:21.513093 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-bwsbz"] Oct 09 15:00:21 crc kubenswrapper[4762]: I1009 15:00:21.579790 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-74dqq\" (UniqueName: \"kubernetes.io/projected/f1c03653-3bb2-4dd8-bdfa-2800ea5be520-kube-api-access-74dqq\") pod \"nova-api-db-create-jxttd\" (UID: \"f1c03653-3bb2-4dd8-bdfa-2800ea5be520\") " pod="openstack/nova-api-db-create-jxttd" Oct 09 15:00:21 crc kubenswrapper[4762]: I1009 15:00:21.613355 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-pqqvf"] Oct 09 15:00:21 crc kubenswrapper[4762]: I1009 15:00:21.624333 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-pqqvf"] Oct 09 15:00:21 crc kubenswrapper[4762]: I1009 15:00:21.624450 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-pqqvf" Oct 09 15:00:21 crc kubenswrapper[4762]: I1009 15:00:21.682170 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-74dqq\" (UniqueName: \"kubernetes.io/projected/f1c03653-3bb2-4dd8-bdfa-2800ea5be520-kube-api-access-74dqq\") pod \"nova-api-db-create-jxttd\" (UID: \"f1c03653-3bb2-4dd8-bdfa-2800ea5be520\") " pod="openstack/nova-api-db-create-jxttd" Oct 09 15:00:21 crc kubenswrapper[4762]: I1009 15:00:21.682350 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rszjj\" (UniqueName: \"kubernetes.io/projected/1c0662f2-1074-482c-98b7-7bfe6d91961f-kube-api-access-rszjj\") pod \"nova-cell0-db-create-bwsbz\" (UID: \"1c0662f2-1074-482c-98b7-7bfe6d91961f\") " pod="openstack/nova-cell0-db-create-bwsbz" Oct 09 15:00:21 crc kubenswrapper[4762]: I1009 15:00:21.707812 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-74dqq\" (UniqueName: \"kubernetes.io/projected/f1c03653-3bb2-4dd8-bdfa-2800ea5be520-kube-api-access-74dqq\") pod \"nova-api-db-create-jxttd\" (UID: \"f1c03653-3bb2-4dd8-bdfa-2800ea5be520\") " pod="openstack/nova-api-db-create-jxttd" Oct 09 15:00:21 crc kubenswrapper[4762]: I1009 15:00:21.743783 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-jxttd" Oct 09 15:00:21 crc kubenswrapper[4762]: I1009 15:00:21.783594 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fhlw4\" (UniqueName: \"kubernetes.io/projected/27b71b3c-8972-4eb3-9bba-b917c7c51d4f-kube-api-access-fhlw4\") pod \"nova-cell1-db-create-pqqvf\" (UID: \"27b71b3c-8972-4eb3-9bba-b917c7c51d4f\") " pod="openstack/nova-cell1-db-create-pqqvf" Oct 09 15:00:21 crc kubenswrapper[4762]: I1009 15:00:21.783899 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rszjj\" (UniqueName: \"kubernetes.io/projected/1c0662f2-1074-482c-98b7-7bfe6d91961f-kube-api-access-rszjj\") pod \"nova-cell0-db-create-bwsbz\" (UID: \"1c0662f2-1074-482c-98b7-7bfe6d91961f\") " pod="openstack/nova-cell0-db-create-bwsbz" Oct 09 15:00:21 crc kubenswrapper[4762]: I1009 15:00:21.799188 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rszjj\" (UniqueName: \"kubernetes.io/projected/1c0662f2-1074-482c-98b7-7bfe6d91961f-kube-api-access-rszjj\") pod \"nova-cell0-db-create-bwsbz\" (UID: \"1c0662f2-1074-482c-98b7-7bfe6d91961f\") " pod="openstack/nova-cell0-db-create-bwsbz" Oct 09 15:00:21 crc kubenswrapper[4762]: I1009 15:00:21.825973 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-bwsbz" Oct 09 15:00:21 crc kubenswrapper[4762]: I1009 15:00:21.885544 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fhlw4\" (UniqueName: \"kubernetes.io/projected/27b71b3c-8972-4eb3-9bba-b917c7c51d4f-kube-api-access-fhlw4\") pod \"nova-cell1-db-create-pqqvf\" (UID: \"27b71b3c-8972-4eb3-9bba-b917c7c51d4f\") " pod="openstack/nova-cell1-db-create-pqqvf" Oct 09 15:00:21 crc kubenswrapper[4762]: I1009 15:00:21.909550 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fhlw4\" (UniqueName: \"kubernetes.io/projected/27b71b3c-8972-4eb3-9bba-b917c7c51d4f-kube-api-access-fhlw4\") pod \"nova-cell1-db-create-pqqvf\" (UID: \"27b71b3c-8972-4eb3-9bba-b917c7c51d4f\") " pod="openstack/nova-cell1-db-create-pqqvf" Oct 09 15:00:21 crc kubenswrapper[4762]: I1009 15:00:21.939212 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-pqqvf" Oct 09 15:00:22 crc kubenswrapper[4762]: I1009 15:00:22.164752 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-bwsbz"] Oct 09 15:00:22 crc kubenswrapper[4762]: I1009 15:00:22.238855 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-jxttd"] Oct 09 15:00:22 crc kubenswrapper[4762]: I1009 15:00:22.467093 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-pqqvf"] Oct 09 15:00:22 crc kubenswrapper[4762]: I1009 15:00:22.536419 4762 generic.go:334] "Generic (PLEG): container finished" podID="f1c03653-3bb2-4dd8-bdfa-2800ea5be520" containerID="0af15b739bea7fbf1278e62dd74199e5d1d5d6f6de0139edae7d29d154b367bb" exitCode=0 Oct 09 15:00:22 crc kubenswrapper[4762]: I1009 15:00:22.536908 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-jxttd" event={"ID":"f1c03653-3bb2-4dd8-bdfa-2800ea5be520","Type":"ContainerDied","Data":"0af15b739bea7fbf1278e62dd74199e5d1d5d6f6de0139edae7d29d154b367bb"} Oct 09 15:00:22 crc kubenswrapper[4762]: I1009 15:00:22.536942 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-jxttd" event={"ID":"f1c03653-3bb2-4dd8-bdfa-2800ea5be520","Type":"ContainerStarted","Data":"08e30cae99d6abc338d077b06864a2b5e1dc1af650c558134fecb47a3c670949"} Oct 09 15:00:22 crc kubenswrapper[4762]: I1009 15:00:22.538772 4762 generic.go:334] "Generic (PLEG): container finished" podID="1c0662f2-1074-482c-98b7-7bfe6d91961f" containerID="57bb9f86bf01e6a04c9dba87a6c7b5752afc5d0ab4dc43fd94d8f3b71c315bb0" exitCode=0 Oct 09 15:00:22 crc kubenswrapper[4762]: I1009 15:00:22.538845 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-bwsbz" event={"ID":"1c0662f2-1074-482c-98b7-7bfe6d91961f","Type":"ContainerDied","Data":"57bb9f86bf01e6a04c9dba87a6c7b5752afc5d0ab4dc43fd94d8f3b71c315bb0"} Oct 09 15:00:22 crc kubenswrapper[4762]: I1009 15:00:22.538869 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-bwsbz" event={"ID":"1c0662f2-1074-482c-98b7-7bfe6d91961f","Type":"ContainerStarted","Data":"b3aa30b163c1ae22d59babbb9a1b6c3d32aeda5141da9485ff52b9dd3bf6422e"} Oct 09 15:00:22 crc kubenswrapper[4762]: I1009 15:00:22.540147 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-pqqvf" event={"ID":"27b71b3c-8972-4eb3-9bba-b917c7c51d4f","Type":"ContainerStarted","Data":"c7869a27cba5571c33afb599b4fafb509455552105ca49ba9d2833913688a355"} Oct 09 15:00:23 crc kubenswrapper[4762]: I1009 15:00:23.551423 4762 generic.go:334] "Generic (PLEG): container finished" podID="27b71b3c-8972-4eb3-9bba-b917c7c51d4f" containerID="4dad820a499fe65d39497bd860c7110995e2ba06529baa8471443227aa357cc7" exitCode=0 Oct 09 15:00:23 crc kubenswrapper[4762]: I1009 15:00:23.551542 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-pqqvf" event={"ID":"27b71b3c-8972-4eb3-9bba-b917c7c51d4f","Type":"ContainerDied","Data":"4dad820a499fe65d39497bd860c7110995e2ba06529baa8471443227aa357cc7"} Oct 09 15:00:23 crc kubenswrapper[4762]: I1009 15:00:23.893112 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-jxttd" Oct 09 15:00:23 crc kubenswrapper[4762]: I1009 15:00:23.897665 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-bwsbz" Oct 09 15:00:23 crc kubenswrapper[4762]: I1009 15:00:23.956099 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rszjj\" (UniqueName: \"kubernetes.io/projected/1c0662f2-1074-482c-98b7-7bfe6d91961f-kube-api-access-rszjj\") pod \"1c0662f2-1074-482c-98b7-7bfe6d91961f\" (UID: \"1c0662f2-1074-482c-98b7-7bfe6d91961f\") " Oct 09 15:00:23 crc kubenswrapper[4762]: I1009 15:00:23.956207 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-74dqq\" (UniqueName: \"kubernetes.io/projected/f1c03653-3bb2-4dd8-bdfa-2800ea5be520-kube-api-access-74dqq\") pod \"f1c03653-3bb2-4dd8-bdfa-2800ea5be520\" (UID: \"f1c03653-3bb2-4dd8-bdfa-2800ea5be520\") " Oct 09 15:00:23 crc kubenswrapper[4762]: I1009 15:00:23.962985 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1c0662f2-1074-482c-98b7-7bfe6d91961f-kube-api-access-rszjj" (OuterVolumeSpecName: "kube-api-access-rszjj") pod "1c0662f2-1074-482c-98b7-7bfe6d91961f" (UID: "1c0662f2-1074-482c-98b7-7bfe6d91961f"). InnerVolumeSpecName "kube-api-access-rszjj". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 15:00:23 crc kubenswrapper[4762]: I1009 15:00:23.963984 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f1c03653-3bb2-4dd8-bdfa-2800ea5be520-kube-api-access-74dqq" (OuterVolumeSpecName: "kube-api-access-74dqq") pod "f1c03653-3bb2-4dd8-bdfa-2800ea5be520" (UID: "f1c03653-3bb2-4dd8-bdfa-2800ea5be520"). InnerVolumeSpecName "kube-api-access-74dqq". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 15:00:24 crc kubenswrapper[4762]: I1009 15:00:24.059082 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rszjj\" (UniqueName: \"kubernetes.io/projected/1c0662f2-1074-482c-98b7-7bfe6d91961f-kube-api-access-rszjj\") on node \"crc\" DevicePath \"\"" Oct 09 15:00:24 crc kubenswrapper[4762]: I1009 15:00:24.059803 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-74dqq\" (UniqueName: \"kubernetes.io/projected/f1c03653-3bb2-4dd8-bdfa-2800ea5be520-kube-api-access-74dqq\") on node \"crc\" DevicePath \"\"" Oct 09 15:00:24 crc kubenswrapper[4762]: I1009 15:00:24.562052 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-jxttd" event={"ID":"f1c03653-3bb2-4dd8-bdfa-2800ea5be520","Type":"ContainerDied","Data":"08e30cae99d6abc338d077b06864a2b5e1dc1af650c558134fecb47a3c670949"} Oct 09 15:00:24 crc kubenswrapper[4762]: I1009 15:00:24.562079 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-jxttd" Oct 09 15:00:24 crc kubenswrapper[4762]: I1009 15:00:24.562100 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="08e30cae99d6abc338d077b06864a2b5e1dc1af650c558134fecb47a3c670949" Oct 09 15:00:24 crc kubenswrapper[4762]: I1009 15:00:24.577123 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-bwsbz" Oct 09 15:00:24 crc kubenswrapper[4762]: I1009 15:00:24.577196 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-bwsbz" event={"ID":"1c0662f2-1074-482c-98b7-7bfe6d91961f","Type":"ContainerDied","Data":"b3aa30b163c1ae22d59babbb9a1b6c3d32aeda5141da9485ff52b9dd3bf6422e"} Oct 09 15:00:24 crc kubenswrapper[4762]: I1009 15:00:24.577224 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b3aa30b163c1ae22d59babbb9a1b6c3d32aeda5141da9485ff52b9dd3bf6422e" Oct 09 15:00:24 crc kubenswrapper[4762]: I1009 15:00:24.936275 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-pqqvf" Oct 09 15:00:24 crc kubenswrapper[4762]: I1009 15:00:24.972462 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fhlw4\" (UniqueName: \"kubernetes.io/projected/27b71b3c-8972-4eb3-9bba-b917c7c51d4f-kube-api-access-fhlw4\") pod \"27b71b3c-8972-4eb3-9bba-b917c7c51d4f\" (UID: \"27b71b3c-8972-4eb3-9bba-b917c7c51d4f\") " Oct 09 15:00:25 crc kubenswrapper[4762]: I1009 15:00:25.010151 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/27b71b3c-8972-4eb3-9bba-b917c7c51d4f-kube-api-access-fhlw4" (OuterVolumeSpecName: "kube-api-access-fhlw4") pod "27b71b3c-8972-4eb3-9bba-b917c7c51d4f" (UID: "27b71b3c-8972-4eb3-9bba-b917c7c51d4f"). InnerVolumeSpecName "kube-api-access-fhlw4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 15:00:25 crc kubenswrapper[4762]: I1009 15:00:25.074667 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fhlw4\" (UniqueName: \"kubernetes.io/projected/27b71b3c-8972-4eb3-9bba-b917c7c51d4f-kube-api-access-fhlw4\") on node \"crc\" DevicePath \"\"" Oct 09 15:00:25 crc kubenswrapper[4762]: I1009 15:00:25.587746 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-pqqvf" event={"ID":"27b71b3c-8972-4eb3-9bba-b917c7c51d4f","Type":"ContainerDied","Data":"c7869a27cba5571c33afb599b4fafb509455552105ca49ba9d2833913688a355"} Oct 09 15:00:25 crc kubenswrapper[4762]: I1009 15:00:25.587797 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c7869a27cba5571c33afb599b4fafb509455552105ca49ba9d2833913688a355" Oct 09 15:00:25 crc kubenswrapper[4762]: I1009 15:00:25.587851 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-pqqvf" Oct 09 15:00:31 crc kubenswrapper[4762]: I1009 15:00:31.636330 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-bf89-account-create-xx7sk"] Oct 09 15:00:31 crc kubenswrapper[4762]: E1009 15:00:31.637299 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f1c03653-3bb2-4dd8-bdfa-2800ea5be520" containerName="mariadb-database-create" Oct 09 15:00:31 crc kubenswrapper[4762]: I1009 15:00:31.637318 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="f1c03653-3bb2-4dd8-bdfa-2800ea5be520" containerName="mariadb-database-create" Oct 09 15:00:31 crc kubenswrapper[4762]: E1009 15:00:31.637347 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27b71b3c-8972-4eb3-9bba-b917c7c51d4f" containerName="mariadb-database-create" Oct 09 15:00:31 crc kubenswrapper[4762]: I1009 15:00:31.637358 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="27b71b3c-8972-4eb3-9bba-b917c7c51d4f" containerName="mariadb-database-create" Oct 09 15:00:31 crc kubenswrapper[4762]: E1009 15:00:31.637374 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c0662f2-1074-482c-98b7-7bfe6d91961f" containerName="mariadb-database-create" Oct 09 15:00:31 crc kubenswrapper[4762]: I1009 15:00:31.637387 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c0662f2-1074-482c-98b7-7bfe6d91961f" containerName="mariadb-database-create" Oct 09 15:00:31 crc kubenswrapper[4762]: I1009 15:00:31.637611 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="f1c03653-3bb2-4dd8-bdfa-2800ea5be520" containerName="mariadb-database-create" Oct 09 15:00:31 crc kubenswrapper[4762]: I1009 15:00:31.637665 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="1c0662f2-1074-482c-98b7-7bfe6d91961f" containerName="mariadb-database-create" Oct 09 15:00:31 crc kubenswrapper[4762]: I1009 15:00:31.637677 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="27b71b3c-8972-4eb3-9bba-b917c7c51d4f" containerName="mariadb-database-create" Oct 09 15:00:31 crc kubenswrapper[4762]: I1009 15:00:31.638424 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-bf89-account-create-xx7sk" Oct 09 15:00:31 crc kubenswrapper[4762]: I1009 15:00:31.640805 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Oct 09 15:00:31 crc kubenswrapper[4762]: I1009 15:00:31.647715 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-bf89-account-create-xx7sk"] Oct 09 15:00:31 crc kubenswrapper[4762]: I1009 15:00:31.796876 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9x9mc\" (UniqueName: \"kubernetes.io/projected/891a5bab-9068-4d0d-8d35-4892e7d6d48a-kube-api-access-9x9mc\") pod \"nova-api-bf89-account-create-xx7sk\" (UID: \"891a5bab-9068-4d0d-8d35-4892e7d6d48a\") " pod="openstack/nova-api-bf89-account-create-xx7sk" Oct 09 15:00:31 crc kubenswrapper[4762]: I1009 15:00:31.831464 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-93aa-account-create-pfzbf"] Oct 09 15:00:31 crc kubenswrapper[4762]: I1009 15:00:31.835458 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-93aa-account-create-pfzbf" Oct 09 15:00:31 crc kubenswrapper[4762]: I1009 15:00:31.837666 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret" Oct 09 15:00:31 crc kubenswrapper[4762]: I1009 15:00:31.848194 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-93aa-account-create-pfzbf"] Oct 09 15:00:31 crc kubenswrapper[4762]: I1009 15:00:31.898677 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9x9mc\" (UniqueName: \"kubernetes.io/projected/891a5bab-9068-4d0d-8d35-4892e7d6d48a-kube-api-access-9x9mc\") pod \"nova-api-bf89-account-create-xx7sk\" (UID: \"891a5bab-9068-4d0d-8d35-4892e7d6d48a\") " pod="openstack/nova-api-bf89-account-create-xx7sk" Oct 09 15:00:31 crc kubenswrapper[4762]: I1009 15:00:31.917566 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9x9mc\" (UniqueName: \"kubernetes.io/projected/891a5bab-9068-4d0d-8d35-4892e7d6d48a-kube-api-access-9x9mc\") pod \"nova-api-bf89-account-create-xx7sk\" (UID: \"891a5bab-9068-4d0d-8d35-4892e7d6d48a\") " pod="openstack/nova-api-bf89-account-create-xx7sk" Oct 09 15:00:31 crc kubenswrapper[4762]: I1009 15:00:31.961022 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-bf89-account-create-xx7sk" Oct 09 15:00:32 crc kubenswrapper[4762]: I1009 15:00:32.001272 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g85ln\" (UniqueName: \"kubernetes.io/projected/4cc4bbe7-eeac-43a8-91f3-5549b4429ac9-kube-api-access-g85ln\") pod \"nova-cell0-93aa-account-create-pfzbf\" (UID: \"4cc4bbe7-eeac-43a8-91f3-5549b4429ac9\") " pod="openstack/nova-cell0-93aa-account-create-pfzbf" Oct 09 15:00:32 crc kubenswrapper[4762]: I1009 15:00:32.043711 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-9a0b-account-create-4fpch"] Oct 09 15:00:32 crc kubenswrapper[4762]: I1009 15:00:32.045696 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-9a0b-account-create-4fpch" Oct 09 15:00:32 crc kubenswrapper[4762]: I1009 15:00:32.049258 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret" Oct 09 15:00:32 crc kubenswrapper[4762]: I1009 15:00:32.051833 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-9a0b-account-create-4fpch"] Oct 09 15:00:32 crc kubenswrapper[4762]: I1009 15:00:32.103949 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g85ln\" (UniqueName: \"kubernetes.io/projected/4cc4bbe7-eeac-43a8-91f3-5549b4429ac9-kube-api-access-g85ln\") pod \"nova-cell0-93aa-account-create-pfzbf\" (UID: \"4cc4bbe7-eeac-43a8-91f3-5549b4429ac9\") " pod="openstack/nova-cell0-93aa-account-create-pfzbf" Oct 09 15:00:32 crc kubenswrapper[4762]: I1009 15:00:32.124470 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g85ln\" (UniqueName: \"kubernetes.io/projected/4cc4bbe7-eeac-43a8-91f3-5549b4429ac9-kube-api-access-g85ln\") pod \"nova-cell0-93aa-account-create-pfzbf\" (UID: \"4cc4bbe7-eeac-43a8-91f3-5549b4429ac9\") " pod="openstack/nova-cell0-93aa-account-create-pfzbf" Oct 09 15:00:32 crc kubenswrapper[4762]: I1009 15:00:32.156391 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-93aa-account-create-pfzbf" Oct 09 15:00:32 crc kubenswrapper[4762]: I1009 15:00:32.206619 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8lz68\" (UniqueName: \"kubernetes.io/projected/300c4244-27ca-4d76-b193-06b7fe2b551e-kube-api-access-8lz68\") pod \"nova-cell1-9a0b-account-create-4fpch\" (UID: \"300c4244-27ca-4d76-b193-06b7fe2b551e\") " pod="openstack/nova-cell1-9a0b-account-create-4fpch" Oct 09 15:00:32 crc kubenswrapper[4762]: I1009 15:00:32.308877 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8lz68\" (UniqueName: \"kubernetes.io/projected/300c4244-27ca-4d76-b193-06b7fe2b551e-kube-api-access-8lz68\") pod \"nova-cell1-9a0b-account-create-4fpch\" (UID: \"300c4244-27ca-4d76-b193-06b7fe2b551e\") " pod="openstack/nova-cell1-9a0b-account-create-4fpch" Oct 09 15:00:32 crc kubenswrapper[4762]: I1009 15:00:32.329349 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8lz68\" (UniqueName: \"kubernetes.io/projected/300c4244-27ca-4d76-b193-06b7fe2b551e-kube-api-access-8lz68\") pod \"nova-cell1-9a0b-account-create-4fpch\" (UID: \"300c4244-27ca-4d76-b193-06b7fe2b551e\") " pod="openstack/nova-cell1-9a0b-account-create-4fpch" Oct 09 15:00:32 crc kubenswrapper[4762]: I1009 15:00:32.389068 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-9a0b-account-create-4fpch" Oct 09 15:00:32 crc kubenswrapper[4762]: I1009 15:00:32.390942 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-bf89-account-create-xx7sk"] Oct 09 15:00:32 crc kubenswrapper[4762]: I1009 15:00:32.513827 4762 scope.go:117] "RemoveContainer" containerID="8a7e64f2a56179a0c804eec034b1b6d3bb99d69afc72701cbde82f446d02f993" Oct 09 15:00:32 crc kubenswrapper[4762]: I1009 15:00:32.541236 4762 scope.go:117] "RemoveContainer" containerID="efb3e8262647e061b5c096751e3d1c2ac620fc86b42be91d3719f7b0b106f1c6" Oct 09 15:00:32 crc kubenswrapper[4762]: I1009 15:00:32.583873 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-93aa-account-create-pfzbf"] Oct 09 15:00:32 crc kubenswrapper[4762]: W1009 15:00:32.595953 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4cc4bbe7_eeac_43a8_91f3_5549b4429ac9.slice/crio-5e486d1fc0844d9ca5e0987b3c70292a8a554579165b9720c09437d87022a185 WatchSource:0}: Error finding container 5e486d1fc0844d9ca5e0987b3c70292a8a554579165b9720c09437d87022a185: Status 404 returned error can't find the container with id 5e486d1fc0844d9ca5e0987b3c70292a8a554579165b9720c09437d87022a185 Oct 09 15:00:32 crc kubenswrapper[4762]: I1009 15:00:32.668312 4762 generic.go:334] "Generic (PLEG): container finished" podID="891a5bab-9068-4d0d-8d35-4892e7d6d48a" containerID="92e92be78fea06552eb6b46a13415dfcb98451a1b057a30429a01f52f0b2915c" exitCode=0 Oct 09 15:00:32 crc kubenswrapper[4762]: I1009 15:00:32.668590 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-bf89-account-create-xx7sk" event={"ID":"891a5bab-9068-4d0d-8d35-4892e7d6d48a","Type":"ContainerDied","Data":"92e92be78fea06552eb6b46a13415dfcb98451a1b057a30429a01f52f0b2915c"} Oct 09 15:00:32 crc kubenswrapper[4762]: I1009 15:00:32.668853 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-bf89-account-create-xx7sk" 
event={"ID":"891a5bab-9068-4d0d-8d35-4892e7d6d48a","Type":"ContainerStarted","Data":"858ee4648c388eee2f56d353b55a91d87f38dd119534d0804e6597f953aa75d1"} Oct 09 15:00:32 crc kubenswrapper[4762]: I1009 15:00:32.671194 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-93aa-account-create-pfzbf" event={"ID":"4cc4bbe7-eeac-43a8-91f3-5549b4429ac9","Type":"ContainerStarted","Data":"5e486d1fc0844d9ca5e0987b3c70292a8a554579165b9720c09437d87022a185"} Oct 09 15:00:32 crc kubenswrapper[4762]: I1009 15:00:32.844865 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-9a0b-account-create-4fpch"] Oct 09 15:00:33 crc kubenswrapper[4762]: I1009 15:00:33.684970 4762 generic.go:334] "Generic (PLEG): container finished" podID="300c4244-27ca-4d76-b193-06b7fe2b551e" containerID="af17c606dfd5e635986fc171df74b00b82ed945d433aa6ab1252781b3db646ec" exitCode=0 Oct 09 15:00:33 crc kubenswrapper[4762]: I1009 15:00:33.685015 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-9a0b-account-create-4fpch" event={"ID":"300c4244-27ca-4d76-b193-06b7fe2b551e","Type":"ContainerDied","Data":"af17c606dfd5e635986fc171df74b00b82ed945d433aa6ab1252781b3db646ec"} Oct 09 15:00:33 crc kubenswrapper[4762]: I1009 15:00:33.685051 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-9a0b-account-create-4fpch" event={"ID":"300c4244-27ca-4d76-b193-06b7fe2b551e","Type":"ContainerStarted","Data":"ef59f1fc1a69afee10a57298d6b1a588dd3deb2d190945232272707635ad9180"} Oct 09 15:00:33 crc kubenswrapper[4762]: I1009 15:00:33.688603 4762 generic.go:334] "Generic (PLEG): container finished" podID="4cc4bbe7-eeac-43a8-91f3-5549b4429ac9" containerID="3a79cb904b5e0ed0cae14c8432257ce531fe9a31c281860d08e2d41291e2d99f" exitCode=0 Oct 09 15:00:33 crc kubenswrapper[4762]: I1009 15:00:33.688688 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-93aa-account-create-pfzbf" event={"ID":"4cc4bbe7-eeac-43a8-91f3-5549b4429ac9","Type":"ContainerDied","Data":"3a79cb904b5e0ed0cae14c8432257ce531fe9a31c281860d08e2d41291e2d99f"} Oct 09 15:00:34 crc kubenswrapper[4762]: I1009 15:00:34.073278 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-bf89-account-create-xx7sk" Oct 09 15:00:34 crc kubenswrapper[4762]: I1009 15:00:34.246958 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9x9mc\" (UniqueName: \"kubernetes.io/projected/891a5bab-9068-4d0d-8d35-4892e7d6d48a-kube-api-access-9x9mc\") pod \"891a5bab-9068-4d0d-8d35-4892e7d6d48a\" (UID: \"891a5bab-9068-4d0d-8d35-4892e7d6d48a\") " Oct 09 15:00:34 crc kubenswrapper[4762]: I1009 15:00:34.255050 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/891a5bab-9068-4d0d-8d35-4892e7d6d48a-kube-api-access-9x9mc" (OuterVolumeSpecName: "kube-api-access-9x9mc") pod "891a5bab-9068-4d0d-8d35-4892e7d6d48a" (UID: "891a5bab-9068-4d0d-8d35-4892e7d6d48a"). InnerVolumeSpecName "kube-api-access-9x9mc". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 15:00:34 crc kubenswrapper[4762]: I1009 15:00:34.351943 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9x9mc\" (UniqueName: \"kubernetes.io/projected/891a5bab-9068-4d0d-8d35-4892e7d6d48a-kube-api-access-9x9mc\") on node \"crc\" DevicePath \"\"" Oct 09 15:00:34 crc kubenswrapper[4762]: I1009 15:00:34.701765 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-bf89-account-create-xx7sk" Oct 09 15:00:34 crc kubenswrapper[4762]: I1009 15:00:34.701610 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-bf89-account-create-xx7sk" event={"ID":"891a5bab-9068-4d0d-8d35-4892e7d6d48a","Type":"ContainerDied","Data":"858ee4648c388eee2f56d353b55a91d87f38dd119534d0804e6597f953aa75d1"} Oct 09 15:00:34 crc kubenswrapper[4762]: I1009 15:00:34.703383 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="858ee4648c388eee2f56d353b55a91d87f38dd119534d0804e6597f953aa75d1" Oct 09 15:00:35 crc kubenswrapper[4762]: I1009 15:00:35.129478 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-9a0b-account-create-4fpch" Oct 09 15:00:35 crc kubenswrapper[4762]: I1009 15:00:35.137930 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-93aa-account-create-pfzbf" Oct 09 15:00:35 crc kubenswrapper[4762]: I1009 15:00:35.172955 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g85ln\" (UniqueName: \"kubernetes.io/projected/4cc4bbe7-eeac-43a8-91f3-5549b4429ac9-kube-api-access-g85ln\") pod \"4cc4bbe7-eeac-43a8-91f3-5549b4429ac9\" (UID: \"4cc4bbe7-eeac-43a8-91f3-5549b4429ac9\") " Oct 09 15:00:35 crc kubenswrapper[4762]: I1009 15:00:35.173358 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8lz68\" (UniqueName: \"kubernetes.io/projected/300c4244-27ca-4d76-b193-06b7fe2b551e-kube-api-access-8lz68\") pod \"300c4244-27ca-4d76-b193-06b7fe2b551e\" (UID: \"300c4244-27ca-4d76-b193-06b7fe2b551e\") " Oct 09 15:00:35 crc kubenswrapper[4762]: I1009 15:00:35.179844 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4cc4bbe7-eeac-43a8-91f3-5549b4429ac9-kube-api-access-g85ln" (OuterVolumeSpecName: "kube-api-access-g85ln") pod "4cc4bbe7-eeac-43a8-91f3-5549b4429ac9" (UID: "4cc4bbe7-eeac-43a8-91f3-5549b4429ac9"). InnerVolumeSpecName "kube-api-access-g85ln". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 15:00:35 crc kubenswrapper[4762]: I1009 15:00:35.179915 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/300c4244-27ca-4d76-b193-06b7fe2b551e-kube-api-access-8lz68" (OuterVolumeSpecName: "kube-api-access-8lz68") pod "300c4244-27ca-4d76-b193-06b7fe2b551e" (UID: "300c4244-27ca-4d76-b193-06b7fe2b551e"). InnerVolumeSpecName "kube-api-access-8lz68". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 15:00:35 crc kubenswrapper[4762]: I1009 15:00:35.275304 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8lz68\" (UniqueName: \"kubernetes.io/projected/300c4244-27ca-4d76-b193-06b7fe2b551e-kube-api-access-8lz68\") on node \"crc\" DevicePath \"\"" Oct 09 15:00:35 crc kubenswrapper[4762]: I1009 15:00:35.275345 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g85ln\" (UniqueName: \"kubernetes.io/projected/4cc4bbe7-eeac-43a8-91f3-5549b4429ac9-kube-api-access-g85ln\") on node \"crc\" DevicePath \"\"" Oct 09 15:00:35 crc kubenswrapper[4762]: I1009 15:00:35.709752 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-93aa-account-create-pfzbf" Oct 09 15:00:35 crc kubenswrapper[4762]: I1009 15:00:35.709745 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-93aa-account-create-pfzbf" event={"ID":"4cc4bbe7-eeac-43a8-91f3-5549b4429ac9","Type":"ContainerDied","Data":"5e486d1fc0844d9ca5e0987b3c70292a8a554579165b9720c09437d87022a185"} Oct 09 15:00:35 crc kubenswrapper[4762]: I1009 15:00:35.709965 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5e486d1fc0844d9ca5e0987b3c70292a8a554579165b9720c09437d87022a185" Oct 09 15:00:35 crc kubenswrapper[4762]: I1009 15:00:35.711257 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-9a0b-account-create-4fpch" event={"ID":"300c4244-27ca-4d76-b193-06b7fe2b551e","Type":"ContainerDied","Data":"ef59f1fc1a69afee10a57298d6b1a588dd3deb2d190945232272707635ad9180"} Oct 09 15:00:35 crc kubenswrapper[4762]: I1009 15:00:35.711283 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ef59f1fc1a69afee10a57298d6b1a588dd3deb2d190945232272707635ad9180" Oct 09 15:00:35 crc kubenswrapper[4762]: I1009 15:00:35.711312 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-9a0b-account-create-4fpch" Oct 09 15:00:36 crc kubenswrapper[4762]: I1009 15:00:36.944988 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-b62n4"] Oct 09 15:00:36 crc kubenswrapper[4762]: E1009 15:00:36.945668 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="300c4244-27ca-4d76-b193-06b7fe2b551e" containerName="mariadb-account-create" Oct 09 15:00:36 crc kubenswrapper[4762]: I1009 15:00:36.945680 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="300c4244-27ca-4d76-b193-06b7fe2b551e" containerName="mariadb-account-create" Oct 09 15:00:36 crc kubenswrapper[4762]: E1009 15:00:36.945692 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4cc4bbe7-eeac-43a8-91f3-5549b4429ac9" containerName="mariadb-account-create" Oct 09 15:00:36 crc kubenswrapper[4762]: I1009 15:00:36.945699 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="4cc4bbe7-eeac-43a8-91f3-5549b4429ac9" containerName="mariadb-account-create" Oct 09 15:00:36 crc kubenswrapper[4762]: E1009 15:00:36.945731 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="891a5bab-9068-4d0d-8d35-4892e7d6d48a" containerName="mariadb-account-create" Oct 09 15:00:36 crc kubenswrapper[4762]: I1009 15:00:36.945738 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="891a5bab-9068-4d0d-8d35-4892e7d6d48a" containerName="mariadb-account-create" Oct 09 15:00:36 crc kubenswrapper[4762]: I1009 15:00:36.945898 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="4cc4bbe7-eeac-43a8-91f3-5549b4429ac9" containerName="mariadb-account-create" Oct 09 15:00:36 crc kubenswrapper[4762]: I1009 15:00:36.945921 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="300c4244-27ca-4d76-b193-06b7fe2b551e" containerName="mariadb-account-create" Oct 09 15:00:36 crc kubenswrapper[4762]: I1009 15:00:36.945930 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="891a5bab-9068-4d0d-8d35-4892e7d6d48a" containerName="mariadb-account-create" Oct 09 15:00:36 crc kubenswrapper[4762]: I1009 15:00:36.946702 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-b62n4" Oct 09 15:00:36 crc kubenswrapper[4762]: I1009 15:00:36.948740 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts" Oct 09 15:00:36 crc kubenswrapper[4762]: I1009 15:00:36.949297 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-zr59l" Oct 09 15:00:36 crc kubenswrapper[4762]: I1009 15:00:36.949817 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Oct 09 15:00:36 crc kubenswrapper[4762]: I1009 15:00:36.959070 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-b62n4"] Oct 09 15:00:37 crc kubenswrapper[4762]: I1009 15:00:37.110454 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/72b788ee-f766-41de-82e6-95d527473f2d-config-data\") pod \"nova-cell0-conductor-db-sync-b62n4\" (UID: \"72b788ee-f766-41de-82e6-95d527473f2d\") " pod="openstack/nova-cell0-conductor-db-sync-b62n4" Oct 09 15:00:37 crc kubenswrapper[4762]: I1009 15:00:37.110534 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/72b788ee-f766-41de-82e6-95d527473f2d-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-b62n4\" (UID: \"72b788ee-f766-41de-82e6-95d527473f2d\") " pod="openstack/nova-cell0-conductor-db-sync-b62n4" Oct 09 15:00:37 crc kubenswrapper[4762]: I1009 15:00:37.110798 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wsggr\" (UniqueName: \"kubernetes.io/projected/72b788ee-f766-41de-82e6-95d527473f2d-kube-api-access-wsggr\") pod \"nova-cell0-conductor-db-sync-b62n4\" (UID: \"72b788ee-f766-41de-82e6-95d527473f2d\") " pod="openstack/nova-cell0-conductor-db-sync-b62n4" Oct 09 15:00:37 crc kubenswrapper[4762]: I1009 15:00:37.110926 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/72b788ee-f766-41de-82e6-95d527473f2d-scripts\") pod \"nova-cell0-conductor-db-sync-b62n4\" (UID: \"72b788ee-f766-41de-82e6-95d527473f2d\") " pod="openstack/nova-cell0-conductor-db-sync-b62n4" Oct 09 15:00:37 crc kubenswrapper[4762]: I1009 15:00:37.211928 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/72b788ee-f766-41de-82e6-95d527473f2d-scripts\") pod \"nova-cell0-conductor-db-sync-b62n4\" (UID: \"72b788ee-f766-41de-82e6-95d527473f2d\") " pod="openstack/nova-cell0-conductor-db-sync-b62n4" Oct 09 15:00:37 crc kubenswrapper[4762]: I1009 15:00:37.212020 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/72b788ee-f766-41de-82e6-95d527473f2d-config-data\") pod \"nova-cell0-conductor-db-sync-b62n4\" (UID: \"72b788ee-f766-41de-82e6-95d527473f2d\") " pod="openstack/nova-cell0-conductor-db-sync-b62n4" Oct 09 15:00:37 crc kubenswrapper[4762]: I1009 15:00:37.212060 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/72b788ee-f766-41de-82e6-95d527473f2d-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-b62n4\" (UID: 
\"72b788ee-f766-41de-82e6-95d527473f2d\") " pod="openstack/nova-cell0-conductor-db-sync-b62n4" Oct 09 15:00:37 crc kubenswrapper[4762]: I1009 15:00:37.212103 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wsggr\" (UniqueName: \"kubernetes.io/projected/72b788ee-f766-41de-82e6-95d527473f2d-kube-api-access-wsggr\") pod \"nova-cell0-conductor-db-sync-b62n4\" (UID: \"72b788ee-f766-41de-82e6-95d527473f2d\") " pod="openstack/nova-cell0-conductor-db-sync-b62n4" Oct 09 15:00:37 crc kubenswrapper[4762]: I1009 15:00:37.216192 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/72b788ee-f766-41de-82e6-95d527473f2d-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-b62n4\" (UID: \"72b788ee-f766-41de-82e6-95d527473f2d\") " pod="openstack/nova-cell0-conductor-db-sync-b62n4" Oct 09 15:00:37 crc kubenswrapper[4762]: I1009 15:00:37.217114 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/72b788ee-f766-41de-82e6-95d527473f2d-config-data\") pod \"nova-cell0-conductor-db-sync-b62n4\" (UID: \"72b788ee-f766-41de-82e6-95d527473f2d\") " pod="openstack/nova-cell0-conductor-db-sync-b62n4" Oct 09 15:00:37 crc kubenswrapper[4762]: I1009 15:00:37.218452 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/72b788ee-f766-41de-82e6-95d527473f2d-scripts\") pod \"nova-cell0-conductor-db-sync-b62n4\" (UID: \"72b788ee-f766-41de-82e6-95d527473f2d\") " pod="openstack/nova-cell0-conductor-db-sync-b62n4" Oct 09 15:00:37 crc kubenswrapper[4762]: I1009 15:00:37.227996 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wsggr\" (UniqueName: \"kubernetes.io/projected/72b788ee-f766-41de-82e6-95d527473f2d-kube-api-access-wsggr\") pod \"nova-cell0-conductor-db-sync-b62n4\" (UID: \"72b788ee-f766-41de-82e6-95d527473f2d\") " pod="openstack/nova-cell0-conductor-db-sync-b62n4" Oct 09 15:00:37 crc kubenswrapper[4762]: I1009 15:00:37.274217 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-b62n4" Oct 09 15:00:37 crc kubenswrapper[4762]: I1009 15:00:37.725529 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-b62n4"] Oct 09 15:00:37 crc kubenswrapper[4762]: W1009 15:00:37.729291 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod72b788ee_f766_41de_82e6_95d527473f2d.slice/crio-188d033e1ab3241145137b3943b07f0937f638d6e18fa6c9f251db17c551f1a9 WatchSource:0}: Error finding container 188d033e1ab3241145137b3943b07f0937f638d6e18fa6c9f251db17c551f1a9: Status 404 returned error can't find the container with id 188d033e1ab3241145137b3943b07f0937f638d6e18fa6c9f251db17c551f1a9 Oct 09 15:00:38 crc kubenswrapper[4762]: I1009 15:00:38.740090 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-b62n4" event={"ID":"72b788ee-f766-41de-82e6-95d527473f2d","Type":"ContainerStarted","Data":"948ad3f235cf70edcf61eebcfc21a1726ff6620bb4364fa31c4ca44714c931fe"} Oct 09 15:00:38 crc kubenswrapper[4762]: I1009 15:00:38.740495 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-b62n4" event={"ID":"72b788ee-f766-41de-82e6-95d527473f2d","Type":"ContainerStarted","Data":"188d033e1ab3241145137b3943b07f0937f638d6e18fa6c9f251db17c551f1a9"} Oct 09 15:00:38 crc kubenswrapper[4762]: I1009 15:00:38.763406 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-b62n4" podStartSLOduration=2.763387514 podStartE2EDuration="2.763387514s" podCreationTimestamp="2025-10-09 15:00:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 15:00:38.760990932 +0000 UTC m=+5714.534781991" watchObservedRunningTime="2025-10-09 15:00:38.763387514 +0000 UTC m=+5714.537178573" Oct 09 15:00:43 crc kubenswrapper[4762]: I1009 15:00:43.792548 4762 generic.go:334] "Generic (PLEG): container finished" podID="72b788ee-f766-41de-82e6-95d527473f2d" containerID="948ad3f235cf70edcf61eebcfc21a1726ff6620bb4364fa31c4ca44714c931fe" exitCode=0 Oct 09 15:00:43 crc kubenswrapper[4762]: I1009 15:00:43.792701 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-b62n4" event={"ID":"72b788ee-f766-41de-82e6-95d527473f2d","Type":"ContainerDied","Data":"948ad3f235cf70edcf61eebcfc21a1726ff6620bb4364fa31c4ca44714c931fe"} Oct 09 15:00:45 crc kubenswrapper[4762]: I1009 15:00:45.122735 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-b62n4"
Oct 09 15:00:45 crc kubenswrapper[4762]: I1009 15:00:45.187181 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wsggr\" (UniqueName: \"kubernetes.io/projected/72b788ee-f766-41de-82e6-95d527473f2d-kube-api-access-wsggr\") pod \"72b788ee-f766-41de-82e6-95d527473f2d\" (UID: \"72b788ee-f766-41de-82e6-95d527473f2d\") "
Oct 09 15:00:45 crc kubenswrapper[4762]: I1009 15:00:45.187344 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/72b788ee-f766-41de-82e6-95d527473f2d-scripts\") pod \"72b788ee-f766-41de-82e6-95d527473f2d\" (UID: \"72b788ee-f766-41de-82e6-95d527473f2d\") "
Oct 09 15:00:45 crc kubenswrapper[4762]: I1009 15:00:45.187374 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/72b788ee-f766-41de-82e6-95d527473f2d-combined-ca-bundle\") pod \"72b788ee-f766-41de-82e6-95d527473f2d\" (UID: \"72b788ee-f766-41de-82e6-95d527473f2d\") "
Oct 09 15:00:45 crc kubenswrapper[4762]: I1009 15:00:45.187724 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/72b788ee-f766-41de-82e6-95d527473f2d-config-data\") pod \"72b788ee-f766-41de-82e6-95d527473f2d\" (UID: \"72b788ee-f766-41de-82e6-95d527473f2d\") "
Oct 09 15:00:45 crc kubenswrapper[4762]: I1009 15:00:45.194543 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/72b788ee-f766-41de-82e6-95d527473f2d-scripts" (OuterVolumeSpecName: "scripts") pod "72b788ee-f766-41de-82e6-95d527473f2d" (UID: "72b788ee-f766-41de-82e6-95d527473f2d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 09 15:00:45 crc kubenswrapper[4762]: I1009 15:00:45.195923 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/72b788ee-f766-41de-82e6-95d527473f2d-kube-api-access-wsggr" (OuterVolumeSpecName: "kube-api-access-wsggr") pod "72b788ee-f766-41de-82e6-95d527473f2d" (UID: "72b788ee-f766-41de-82e6-95d527473f2d"). InnerVolumeSpecName "kube-api-access-wsggr". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 09 15:00:45 crc kubenswrapper[4762]: I1009 15:00:45.219332 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/72b788ee-f766-41de-82e6-95d527473f2d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "72b788ee-f766-41de-82e6-95d527473f2d" (UID: "72b788ee-f766-41de-82e6-95d527473f2d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 09 15:00:45 crc kubenswrapper[4762]: I1009 15:00:45.223229 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/72b788ee-f766-41de-82e6-95d527473f2d-config-data" (OuterVolumeSpecName: "config-data") pod "72b788ee-f766-41de-82e6-95d527473f2d" (UID: "72b788ee-f766-41de-82e6-95d527473f2d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 09 15:00:45 crc kubenswrapper[4762]: I1009 15:00:45.289713 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wsggr\" (UniqueName: \"kubernetes.io/projected/72b788ee-f766-41de-82e6-95d527473f2d-kube-api-access-wsggr\") on node \"crc\" DevicePath \"\""
Oct 09 15:00:45 crc kubenswrapper[4762]: I1009 15:00:45.289802 4762 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/72b788ee-f766-41de-82e6-95d527473f2d-scripts\") on node \"crc\" DevicePath \"\""
Oct 09 15:00:45 crc kubenswrapper[4762]: I1009 15:00:45.289815 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/72b788ee-f766-41de-82e6-95d527473f2d-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 09 15:00:45 crc kubenswrapper[4762]: I1009 15:00:45.289824 4762 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/72b788ee-f766-41de-82e6-95d527473f2d-config-data\") on node \"crc\" DevicePath \"\""
Oct 09 15:00:45 crc kubenswrapper[4762]: I1009 15:00:45.812560 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-b62n4" event={"ID":"72b788ee-f766-41de-82e6-95d527473f2d","Type":"ContainerDied","Data":"188d033e1ab3241145137b3943b07f0937f638d6e18fa6c9f251db17c551f1a9"}
Oct 09 15:00:45 crc kubenswrapper[4762]: I1009 15:00:45.812605 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="188d033e1ab3241145137b3943b07f0937f638d6e18fa6c9f251db17c551f1a9"
Oct 09 15:00:45 crc kubenswrapper[4762]: I1009 15:00:45.812689 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-b62n4"
Oct 09 15:00:45 crc kubenswrapper[4762]: I1009 15:00:45.898690 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"]
Oct 09 15:00:45 crc kubenswrapper[4762]: E1009 15:00:45.899081 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="72b788ee-f766-41de-82e6-95d527473f2d" containerName="nova-cell0-conductor-db-sync"
Oct 09 15:00:45 crc kubenswrapper[4762]: I1009 15:00:45.899102 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="72b788ee-f766-41de-82e6-95d527473f2d" containerName="nova-cell0-conductor-db-sync"
Oct 09 15:00:45 crc kubenswrapper[4762]: I1009 15:00:45.899292 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="72b788ee-f766-41de-82e6-95d527473f2d" containerName="nova-cell0-conductor-db-sync"
Oct 09 15:00:45 crc kubenswrapper[4762]: I1009 15:00:45.899957 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0"
Oct 09 15:00:45 crc kubenswrapper[4762]: I1009 15:00:45.904623 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data"
Oct 09 15:00:45 crc kubenswrapper[4762]: I1009 15:00:45.904697 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-zr59l"
Oct 09 15:00:45 crc kubenswrapper[4762]: I1009 15:00:45.911567 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"]
Oct 09 15:00:46 crc kubenswrapper[4762]: I1009 15:00:46.003258 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f7a17d87-345b-4451-8d40-71321e9134bc-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"f7a17d87-345b-4451-8d40-71321e9134bc\") " pod="openstack/nova-cell0-conductor-0"
Oct 09 15:00:46 crc kubenswrapper[4762]: I1009 15:00:46.003328 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ljwbd\" (UniqueName: \"kubernetes.io/projected/f7a17d87-345b-4451-8d40-71321e9134bc-kube-api-access-ljwbd\") pod \"nova-cell0-conductor-0\" (UID: \"f7a17d87-345b-4451-8d40-71321e9134bc\") " pod="openstack/nova-cell0-conductor-0"
Oct 09 15:00:46 crc kubenswrapper[4762]: I1009 15:00:46.003365 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7a17d87-345b-4451-8d40-71321e9134bc-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"f7a17d87-345b-4451-8d40-71321e9134bc\") " pod="openstack/nova-cell0-conductor-0"
Oct 09 15:00:46 crc kubenswrapper[4762]: I1009 15:00:46.105276 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f7a17d87-345b-4451-8d40-71321e9134bc-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"f7a17d87-345b-4451-8d40-71321e9134bc\") " pod="openstack/nova-cell0-conductor-0"
Oct 09 15:00:46 crc kubenswrapper[4762]: I1009 15:00:46.105334 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ljwbd\" (UniqueName: \"kubernetes.io/projected/f7a17d87-345b-4451-8d40-71321e9134bc-kube-api-access-ljwbd\") pod \"nova-cell0-conductor-0\" (UID: \"f7a17d87-345b-4451-8d40-71321e9134bc\") " pod="openstack/nova-cell0-conductor-0"
Oct 09 15:00:46 crc kubenswrapper[4762]: I1009 15:00:46.105376 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7a17d87-345b-4451-8d40-71321e9134bc-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"f7a17d87-345b-4451-8d40-71321e9134bc\") " pod="openstack/nova-cell0-conductor-0"
Oct 09 15:00:46 crc kubenswrapper[4762]: I1009 15:00:46.111379 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7a17d87-345b-4451-8d40-71321e9134bc-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"f7a17d87-345b-4451-8d40-71321e9134bc\") " pod="openstack/nova-cell0-conductor-0"
Oct 09 15:00:46 crc kubenswrapper[4762]: I1009 15:00:46.112029 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f7a17d87-345b-4451-8d40-71321e9134bc-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"f7a17d87-345b-4451-8d40-71321e9134bc\") " pod="openstack/nova-cell0-conductor-0"
Oct 09 15:00:46 crc kubenswrapper[4762]: I1009 15:00:46.125463 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ljwbd\" (UniqueName: \"kubernetes.io/projected/f7a17d87-345b-4451-8d40-71321e9134bc-kube-api-access-ljwbd\") pod \"nova-cell0-conductor-0\" (UID: \"f7a17d87-345b-4451-8d40-71321e9134bc\") " pod="openstack/nova-cell0-conductor-0"
Oct 09 15:00:46 crc kubenswrapper[4762]: I1009 15:00:46.225955 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0"
Oct 09 15:00:46 crc kubenswrapper[4762]: I1009 15:00:46.670715 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"]
Oct 09 15:00:46 crc kubenswrapper[4762]: W1009 15:00:46.678818 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf7a17d87_345b_4451_8d40_71321e9134bc.slice/crio-9d71b93cf083919683c698276c68ba295f3bef4804a7e87e401b11da56e87826 WatchSource:0}: Error finding container 9d71b93cf083919683c698276c68ba295f3bef4804a7e87e401b11da56e87826: Status 404 returned error can't find the container with id 9d71b93cf083919683c698276c68ba295f3bef4804a7e87e401b11da56e87826
Oct 09 15:00:46 crc kubenswrapper[4762]: I1009 15:00:46.824679 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"f7a17d87-345b-4451-8d40-71321e9134bc","Type":"ContainerStarted","Data":"9d71b93cf083919683c698276c68ba295f3bef4804a7e87e401b11da56e87826"}
Oct 09 15:00:47 crc kubenswrapper[4762]: I1009 15:00:47.834215 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"f7a17d87-345b-4451-8d40-71321e9134bc","Type":"ContainerStarted","Data":"24fb88ee4db89227d1bba89ac38d3996ae7a6626c0d970e743ad5632790faff4"}
Oct 09 15:00:47 crc kubenswrapper[4762]: I1009 15:00:47.836732 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0"
Oct 09 15:00:47 crc kubenswrapper[4762]: I1009 15:00:47.853062 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.853044916 podStartE2EDuration="2.853044916s" podCreationTimestamp="2025-10-09 15:00:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 15:00:47.851837144 +0000 UTC m=+5723.625628173" watchObservedRunningTime="2025-10-09 15:00:47.853044916 +0000 UTC m=+5723.626835955"
Oct 09 15:00:51 crc kubenswrapper[4762]: I1009 15:00:51.249188 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0"
Oct 09 15:00:51 crc kubenswrapper[4762]: I1009 15:00:51.734330 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-xvkwg"]
Oct 09 15:00:51 crc kubenswrapper[4762]: I1009 15:00:51.735621 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-xvkwg"
Oct 09 15:00:51 crc kubenswrapper[4762]: I1009 15:00:51.740797 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts"
Oct 09 15:00:51 crc kubenswrapper[4762]: I1009 15:00:51.741819 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data"
Oct 09 15:00:51 crc kubenswrapper[4762]: I1009 15:00:51.752235 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-xvkwg"]
Oct 09 15:00:51 crc kubenswrapper[4762]: I1009 15:00:51.812718 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ms9vg\" (UniqueName: \"kubernetes.io/projected/facc7c9e-6fec-46d2-946a-14be514a7699-kube-api-access-ms9vg\") pod \"nova-cell0-cell-mapping-xvkwg\" (UID: \"facc7c9e-6fec-46d2-946a-14be514a7699\") " pod="openstack/nova-cell0-cell-mapping-xvkwg"
Oct 09 15:00:51 crc kubenswrapper[4762]: I1009 15:00:51.812818 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/facc7c9e-6fec-46d2-946a-14be514a7699-scripts\") pod \"nova-cell0-cell-mapping-xvkwg\" (UID: \"facc7c9e-6fec-46d2-946a-14be514a7699\") " pod="openstack/nova-cell0-cell-mapping-xvkwg"
Oct 09 15:00:51 crc kubenswrapper[4762]: I1009 15:00:51.813055 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/facc7c9e-6fec-46d2-946a-14be514a7699-config-data\") pod \"nova-cell0-cell-mapping-xvkwg\" (UID: \"facc7c9e-6fec-46d2-946a-14be514a7699\") " pod="openstack/nova-cell0-cell-mapping-xvkwg"
Oct 09 15:00:51 crc kubenswrapper[4762]: I1009 15:00:51.813118 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/facc7c9e-6fec-46d2-946a-14be514a7699-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-xvkwg\" (UID: \"facc7c9e-6fec-46d2-946a-14be514a7699\") " pod="openstack/nova-cell0-cell-mapping-xvkwg"
Oct 09 15:00:51 crc kubenswrapper[4762]: I1009 15:00:51.907075 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"]
Oct 09 15:00:51 crc kubenswrapper[4762]: I1009 15:00:51.908581 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Oct 09 15:00:51 crc kubenswrapper[4762]: I1009 15:00:51.914619 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/facc7c9e-6fec-46d2-946a-14be514a7699-scripts\") pod \"nova-cell0-cell-mapping-xvkwg\" (UID: \"facc7c9e-6fec-46d2-946a-14be514a7699\") " pod="openstack/nova-cell0-cell-mapping-xvkwg"
Oct 09 15:00:51 crc kubenswrapper[4762]: I1009 15:00:51.914789 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/facc7c9e-6fec-46d2-946a-14be514a7699-config-data\") pod \"nova-cell0-cell-mapping-xvkwg\" (UID: \"facc7c9e-6fec-46d2-946a-14be514a7699\") " pod="openstack/nova-cell0-cell-mapping-xvkwg"
Oct 09 15:00:51 crc kubenswrapper[4762]: I1009 15:00:51.914818 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/facc7c9e-6fec-46d2-946a-14be514a7699-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-xvkwg\" (UID: \"facc7c9e-6fec-46d2-946a-14be514a7699\") " pod="openstack/nova-cell0-cell-mapping-xvkwg"
Oct 09 15:00:51 crc kubenswrapper[4762]: I1009 15:00:51.914844 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ms9vg\" (UniqueName: \"kubernetes.io/projected/facc7c9e-6fec-46d2-946a-14be514a7699-kube-api-access-ms9vg\") pod \"nova-cell0-cell-mapping-xvkwg\" (UID: \"facc7c9e-6fec-46d2-946a-14be514a7699\") " pod="openstack/nova-cell0-cell-mapping-xvkwg"
Oct 09 15:00:51 crc kubenswrapper[4762]: I1009 15:00:51.919518 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data"
Oct 09 15:00:51 crc kubenswrapper[4762]: I1009 15:00:51.922618 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/facc7c9e-6fec-46d2-946a-14be514a7699-scripts\") pod \"nova-cell0-cell-mapping-xvkwg\" (UID: \"facc7c9e-6fec-46d2-946a-14be514a7699\") " pod="openstack/nova-cell0-cell-mapping-xvkwg"
Oct 09 15:00:51 crc kubenswrapper[4762]: I1009 15:00:51.931478 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/facc7c9e-6fec-46d2-946a-14be514a7699-config-data\") pod \"nova-cell0-cell-mapping-xvkwg\" (UID: \"facc7c9e-6fec-46d2-946a-14be514a7699\") " pod="openstack/nova-cell0-cell-mapping-xvkwg"
Oct 09 15:00:51 crc kubenswrapper[4762]: I1009 15:00:51.941144 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/facc7c9e-6fec-46d2-946a-14be514a7699-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-xvkwg\" (UID: \"facc7c9e-6fec-46d2-946a-14be514a7699\") " pod="openstack/nova-cell0-cell-mapping-xvkwg"
Oct 09 15:00:51 crc kubenswrapper[4762]: I1009 15:00:51.948160 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ms9vg\" (UniqueName: \"kubernetes.io/projected/facc7c9e-6fec-46d2-946a-14be514a7699-kube-api-access-ms9vg\") pod \"nova-cell0-cell-mapping-xvkwg\" (UID: \"facc7c9e-6fec-46d2-946a-14be514a7699\") " pod="openstack/nova-cell0-cell-mapping-xvkwg"
Oct 09 15:00:51 crc kubenswrapper[4762]: I1009 15:00:51.958467 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"]
Oct 09 15:00:51 crc kubenswrapper[4762]: I1009 15:00:51.968834 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Oct 09 15:00:51 crc kubenswrapper[4762]: I1009 15:00:51.975005 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data"
Oct 09 15:00:51 crc kubenswrapper[4762]: I1009 15:00:51.988792 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Oct 09 15:00:52 crc kubenswrapper[4762]: I1009 15:00:52.017018 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hd6ws\" (UniqueName: \"kubernetes.io/projected/5c5aa839-5f42-42ca-9fae-8ed580491bd7-kube-api-access-hd6ws\") pod \"nova-api-0\" (UID: \"5c5aa839-5f42-42ca-9fae-8ed580491bd7\") " pod="openstack/nova-api-0"
Oct 09 15:00:52 crc kubenswrapper[4762]: I1009 15:00:52.017168 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c5aa839-5f42-42ca-9fae-8ed580491bd7-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"5c5aa839-5f42-42ca-9fae-8ed580491bd7\") " pod="openstack/nova-api-0"
Oct 09 15:00:52 crc kubenswrapper[4762]: I1009 15:00:52.017206 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5c5aa839-5f42-42ca-9fae-8ed580491bd7-logs\") pod \"nova-api-0\" (UID: \"5c5aa839-5f42-42ca-9fae-8ed580491bd7\") " pod="openstack/nova-api-0"
Oct 09 15:00:52 crc kubenswrapper[4762]: I1009 15:00:52.017233 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c5aa839-5f42-42ca-9fae-8ed580491bd7-config-data\") pod \"nova-api-0\" (UID: \"5c5aa839-5f42-42ca-9fae-8ed580491bd7\") " pod="openstack/nova-api-0"
Oct 09 15:00:52 crc kubenswrapper[4762]: I1009 15:00:52.062246 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-xvkwg"
Oct 09 15:00:52 crc kubenswrapper[4762]: I1009 15:00:52.073754 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Oct 09 15:00:52 crc kubenswrapper[4762]: I1009 15:00:52.106107 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Oct 09 15:00:52 crc kubenswrapper[4762]: I1009 15:00:52.107544 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
Oct 09 15:00:52 crc kubenswrapper[4762]: I1009 15:00:52.111435 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data"
Oct 09 15:00:52 crc kubenswrapper[4762]: I1009 15:00:52.124294 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hd6ws\" (UniqueName: \"kubernetes.io/projected/5c5aa839-5f42-42ca-9fae-8ed580491bd7-kube-api-access-hd6ws\") pod \"nova-api-0\" (UID: \"5c5aa839-5f42-42ca-9fae-8ed580491bd7\") " pod="openstack/nova-api-0"
Oct 09 15:00:52 crc kubenswrapper[4762]: I1009 15:00:52.124455 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e974ec3-d1c0-49ae-a8c4-645673064d61-config-data\") pod \"nova-scheduler-0\" (UID: \"4e974ec3-d1c0-49ae-a8c4-645673064d61\") " pod="openstack/nova-scheduler-0"
Oct 09 15:00:52 crc kubenswrapper[4762]: I1009 15:00:52.124503 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c5aa839-5f42-42ca-9fae-8ed580491bd7-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"5c5aa839-5f42-42ca-9fae-8ed580491bd7\") " pod="openstack/nova-api-0"
Oct 09 15:00:52 crc kubenswrapper[4762]: I1009 15:00:52.124543 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e974ec3-d1c0-49ae-a8c4-645673064d61-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"4e974ec3-d1c0-49ae-a8c4-645673064d61\") " pod="openstack/nova-scheduler-0"
Oct 09 15:00:52 crc kubenswrapper[4762]: I1009 15:00:52.124571 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5c5aa839-5f42-42ca-9fae-8ed580491bd7-logs\") pod \"nova-api-0\" (UID: \"5c5aa839-5f42-42ca-9fae-8ed580491bd7\") " pod="openstack/nova-api-0"
Oct 09 15:00:52 crc kubenswrapper[4762]: I1009 15:00:52.124599 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qxmzz\" (UniqueName: \"kubernetes.io/projected/4e974ec3-d1c0-49ae-a8c4-645673064d61-kube-api-access-qxmzz\") pod \"nova-scheduler-0\" (UID: \"4e974ec3-d1c0-49ae-a8c4-645673064d61\") " pod="openstack/nova-scheduler-0"
Oct 09 15:00:52 crc kubenswrapper[4762]: I1009 15:00:52.124758 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c5aa839-5f42-42ca-9fae-8ed580491bd7-config-data\") pod \"nova-api-0\" (UID: \"5c5aa839-5f42-42ca-9fae-8ed580491bd7\") " pod="openstack/nova-api-0"
Oct 09 15:00:52 crc kubenswrapper[4762]: I1009 15:00:52.125977 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5c5aa839-5f42-42ca-9fae-8ed580491bd7-logs\") pod \"nova-api-0\" (UID: \"5c5aa839-5f42-42ca-9fae-8ed580491bd7\") " pod="openstack/nova-api-0"
Oct 09 15:00:52 crc kubenswrapper[4762]: I1009 15:00:52.132781 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c5aa839-5f42-42ca-9fae-8ed580491bd7-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"5c5aa839-5f42-42ca-9fae-8ed580491bd7\") " pod="openstack/nova-api-0"
Oct 09 15:00:52 crc kubenswrapper[4762]: I1009 15:00:52.138503 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c5aa839-5f42-42ca-9fae-8ed580491bd7-config-data\") pod \"nova-api-0\" (UID: \"5c5aa839-5f42-42ca-9fae-8ed580491bd7\") " pod="openstack/nova-api-0"
Oct 09 15:00:52 crc kubenswrapper[4762]: I1009 15:00:52.153802 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"]
Oct 09 15:00:52 crc kubenswrapper[4762]: I1009 15:00:52.155446 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Oct 09 15:00:52 crc kubenswrapper[4762]: I1009 15:00:52.159316 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data"
Oct 09 15:00:52 crc kubenswrapper[4762]: I1009 15:00:52.181850 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Oct 09 15:00:52 crc kubenswrapper[4762]: I1009 15:00:52.183333 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hd6ws\" (UniqueName: \"kubernetes.io/projected/5c5aa839-5f42-42ca-9fae-8ed580491bd7-kube-api-access-hd6ws\") pod \"nova-api-0\" (UID: \"5c5aa839-5f42-42ca-9fae-8ed580491bd7\") " pod="openstack/nova-api-0"
Oct 09 15:00:52 crc kubenswrapper[4762]: I1009 15:00:52.210623 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Oct 09 15:00:52 crc kubenswrapper[4762]: I1009 15:00:52.226333 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d3784a5e-8e7b-43d8-b6db-0023141b1a1b-logs\") pod \"nova-metadata-0\" (UID: \"d3784a5e-8e7b-43d8-b6db-0023141b1a1b\") " pod="openstack/nova-metadata-0"
Oct 09 15:00:52 crc kubenswrapper[4762]: I1009 15:00:52.226396 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d3784a5e-8e7b-43d8-b6db-0023141b1a1b-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"d3784a5e-8e7b-43d8-b6db-0023141b1a1b\") " pod="openstack/nova-metadata-0"
Oct 09 15:00:52 crc kubenswrapper[4762]: I1009 15:00:52.226425 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d3784a5e-8e7b-43d8-b6db-0023141b1a1b-config-data\") pod \"nova-metadata-0\" (UID: \"d3784a5e-8e7b-43d8-b6db-0023141b1a1b\") " pod="openstack/nova-metadata-0"
Oct 09 15:00:52 crc kubenswrapper[4762]: I1009 15:00:52.226521 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zjxst\" (UniqueName: \"kubernetes.io/projected/d3784a5e-8e7b-43d8-b6db-0023141b1a1b-kube-api-access-zjxst\") pod \"nova-metadata-0\" (UID: \"d3784a5e-8e7b-43d8-b6db-0023141b1a1b\") " pod="openstack/nova-metadata-0"
Oct 09 15:00:52 crc kubenswrapper[4762]: I1009 15:00:52.226550 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b7bxs\" (UniqueName: \"kubernetes.io/projected/96a2eb96-11dc-4ef6-9dec-fcd753eafea4-kube-api-access-b7bxs\") pod \"nova-cell1-novncproxy-0\" (UID: \"96a2eb96-11dc-4ef6-9dec-fcd753eafea4\") " pod="openstack/nova-cell1-novncproxy-0"
Oct 09 15:00:52 crc kubenswrapper[4762]: I1009 15:00:52.226586 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e974ec3-d1c0-49ae-a8c4-645673064d61-config-data\") pod \"nova-scheduler-0\" (UID: \"4e974ec3-d1c0-49ae-a8c4-645673064d61\") " pod="openstack/nova-scheduler-0"
Oct 09 15:00:52 crc kubenswrapper[4762]: I1009 15:00:52.226951 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e974ec3-d1c0-49ae-a8c4-645673064d61-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"4e974ec3-d1c0-49ae-a8c4-645673064d61\") " pod="openstack/nova-scheduler-0"
Oct 09 15:00:52 crc kubenswrapper[4762]: I1009 15:00:52.227043 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qxmzz\" (UniqueName: \"kubernetes.io/projected/4e974ec3-d1c0-49ae-a8c4-645673064d61-kube-api-access-qxmzz\") pod \"nova-scheduler-0\" (UID: \"4e974ec3-d1c0-49ae-a8c4-645673064d61\") " pod="openstack/nova-scheduler-0"
Oct 09 15:00:52 crc kubenswrapper[4762]: I1009 15:00:52.227657 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96a2eb96-11dc-4ef6-9dec-fcd753eafea4-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"96a2eb96-11dc-4ef6-9dec-fcd753eafea4\") " pod="openstack/nova-cell1-novncproxy-0"
Oct 09 15:00:52 crc kubenswrapper[4762]: I1009 15:00:52.227830 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/96a2eb96-11dc-4ef6-9dec-fcd753eafea4-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"96a2eb96-11dc-4ef6-9dec-fcd753eafea4\") " pod="openstack/nova-cell1-novncproxy-0"
Oct 09 15:00:52 crc kubenswrapper[4762]: I1009 15:00:52.230750 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e974ec3-d1c0-49ae-a8c4-645673064d61-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"4e974ec3-d1c0-49ae-a8c4-645673064d61\") " pod="openstack/nova-scheduler-0"
Oct 09 15:00:52 crc kubenswrapper[4762]: I1009 15:00:52.236198 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e974ec3-d1c0-49ae-a8c4-645673064d61-config-data\") pod \"nova-scheduler-0\" (UID: \"4e974ec3-d1c0-49ae-a8c4-645673064d61\") " pod="openstack/nova-scheduler-0"
Oct 09 15:00:52 crc kubenswrapper[4762]: I1009 15:00:52.244327 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qxmzz\" (UniqueName: \"kubernetes.io/projected/4e974ec3-d1c0-49ae-a8c4-645673064d61-kube-api-access-qxmzz\") pod \"nova-scheduler-0\" (UID: \"4e974ec3-d1c0-49ae-a8c4-645673064d61\") " pod="openstack/nova-scheduler-0"
Oct 09 15:00:52 crc kubenswrapper[4762]: I1009 15:00:52.254858 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7c4b585c7-nmh5n"]
Oct 09 15:00:52 crc kubenswrapper[4762]: I1009 15:00:52.256707 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7c4b585c7-nmh5n"
Oct 09 15:00:52 crc kubenswrapper[4762]: I1009 15:00:52.268233 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7c4b585c7-nmh5n"]
Oct 09 15:00:52 crc kubenswrapper[4762]: I1009 15:00:52.330355 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zjxst\" (UniqueName: \"kubernetes.io/projected/d3784a5e-8e7b-43d8-b6db-0023141b1a1b-kube-api-access-zjxst\") pod \"nova-metadata-0\" (UID: \"d3784a5e-8e7b-43d8-b6db-0023141b1a1b\") " pod="openstack/nova-metadata-0"
Oct 09 15:00:52 crc kubenswrapper[4762]: I1009 15:00:52.330405 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b7bxs\" (UniqueName: \"kubernetes.io/projected/96a2eb96-11dc-4ef6-9dec-fcd753eafea4-kube-api-access-b7bxs\") pod \"nova-cell1-novncproxy-0\" (UID: \"96a2eb96-11dc-4ef6-9dec-fcd753eafea4\") " pod="openstack/nova-cell1-novncproxy-0"
Oct 09 15:00:52 crc kubenswrapper[4762]: I1009 15:00:52.330480 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5afc5cff-4cae-4190-ae19-7b53163e3824-dns-svc\") pod \"dnsmasq-dns-7c4b585c7-nmh5n\" (UID: \"5afc5cff-4cae-4190-ae19-7b53163e3824\") " pod="openstack/dnsmasq-dns-7c4b585c7-nmh5n"
Oct 09 15:00:52 crc kubenswrapper[4762]: I1009 15:00:52.330513 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5afc5cff-4cae-4190-ae19-7b53163e3824-ovsdbserver-nb\") pod \"dnsmasq-dns-7c4b585c7-nmh5n\" (UID: \"5afc5cff-4cae-4190-ae19-7b53163e3824\") " pod="openstack/dnsmasq-dns-7c4b585c7-nmh5n"
Oct 09 15:00:52 crc kubenswrapper[4762]: I1009 15:00:52.330582 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96a2eb96-11dc-4ef6-9dec-fcd753eafea4-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"96a2eb96-11dc-4ef6-9dec-fcd753eafea4\") " pod="openstack/nova-cell1-novncproxy-0"
Oct 09 15:00:52 crc kubenswrapper[4762]: I1009 15:00:52.330632 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5afc5cff-4cae-4190-ae19-7b53163e3824-ovsdbserver-sb\") pod \"dnsmasq-dns-7c4b585c7-nmh5n\" (UID: \"5afc5cff-4cae-4190-ae19-7b53163e3824\") " pod="openstack/dnsmasq-dns-7c4b585c7-nmh5n"
Oct 09 15:00:52 crc kubenswrapper[4762]: I1009 15:00:52.330720 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xt7nl\" (UniqueName: \"kubernetes.io/projected/5afc5cff-4cae-4190-ae19-7b53163e3824-kube-api-access-xt7nl\") pod \"dnsmasq-dns-7c4b585c7-nmh5n\" (UID: \"5afc5cff-4cae-4190-ae19-7b53163e3824\") " pod="openstack/dnsmasq-dns-7c4b585c7-nmh5n"
Oct 09 15:00:52 crc kubenswrapper[4762]: I1009 15:00:52.330944 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/96a2eb96-11dc-4ef6-9dec-fcd753eafea4-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"96a2eb96-11dc-4ef6-9dec-fcd753eafea4\") " pod="openstack/nova-cell1-novncproxy-0"
Oct 09 15:00:52 crc kubenswrapper[4762]: I1009 15:00:52.331015 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d3784a5e-8e7b-43d8-b6db-0023141b1a1b-logs\") pod \"nova-metadata-0\" (UID: \"d3784a5e-8e7b-43d8-b6db-0023141b1a1b\") " pod="openstack/nova-metadata-0"
Oct 09 15:00:52 crc kubenswrapper[4762]: I1009 15:00:52.331067 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d3784a5e-8e7b-43d8-b6db-0023141b1a1b-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"d3784a5e-8e7b-43d8-b6db-0023141b1a1b\") " pod="openstack/nova-metadata-0"
Oct 09 15:00:52 crc kubenswrapper[4762]: I1009 15:00:52.331091 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d3784a5e-8e7b-43d8-b6db-0023141b1a1b-config-data\") pod \"nova-metadata-0\" (UID: \"d3784a5e-8e7b-43d8-b6db-0023141b1a1b\") " pod="openstack/nova-metadata-0"
Oct 09 15:00:52 crc kubenswrapper[4762]: I1009 15:00:52.331118 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5afc5cff-4cae-4190-ae19-7b53163e3824-config\") pod \"dnsmasq-dns-7c4b585c7-nmh5n\" (UID: \"5afc5cff-4cae-4190-ae19-7b53163e3824\") " pod="openstack/dnsmasq-dns-7c4b585c7-nmh5n"
Oct 09 15:00:52 crc kubenswrapper[4762]: I1009 15:00:52.331596 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d3784a5e-8e7b-43d8-b6db-0023141b1a1b-logs\") pod \"nova-metadata-0\" (UID: \"d3784a5e-8e7b-43d8-b6db-0023141b1a1b\") " pod="openstack/nova-metadata-0"
Oct 09 15:00:52 crc kubenswrapper[4762]: I1009 15:00:52.335874 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d3784a5e-8e7b-43d8-b6db-0023141b1a1b-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"d3784a5e-8e7b-43d8-b6db-0023141b1a1b\") " pod="openstack/nova-metadata-0"
Oct 09 15:00:52 crc kubenswrapper[4762]: I1009 15:00:52.336701 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d3784a5e-8e7b-43d8-b6db-0023141b1a1b-config-data\") pod \"nova-metadata-0\" (UID: \"d3784a5e-8e7b-43d8-b6db-0023141b1a1b\") " pod="openstack/nova-metadata-0"
Oct 09 15:00:52 crc kubenswrapper[4762]: I1009 15:00:52.337439 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/96a2eb96-11dc-4ef6-9dec-fcd753eafea4-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"96a2eb96-11dc-4ef6-9dec-fcd753eafea4\") " pod="openstack/nova-cell1-novncproxy-0"
Oct 09 15:00:52 crc kubenswrapper[4762]: I1009 15:00:52.353365 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96a2eb96-11dc-4ef6-9dec-fcd753eafea4-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"96a2eb96-11dc-4ef6-9dec-fcd753eafea4\") " pod="openstack/nova-cell1-novncproxy-0"
Oct 09 15:00:52 crc kubenswrapper[4762]: I1009 15:00:52.356508 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b7bxs\" (UniqueName: \"kubernetes.io/projected/96a2eb96-11dc-4ef6-9dec-fcd753eafea4-kube-api-access-b7bxs\") pod \"nova-cell1-novncproxy-0\" (UID: \"96a2eb96-11dc-4ef6-9dec-fcd753eafea4\") " pod="openstack/nova-cell1-novncproxy-0"
Oct 09 15:00:52 crc kubenswrapper[4762]: I1009 15:00:52.358367 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zjxst\" (UniqueName: \"kubernetes.io/projected/d3784a5e-8e7b-43d8-b6db-0023141b1a1b-kube-api-access-zjxst\") pod \"nova-metadata-0\" (UID: \"d3784a5e-8e7b-43d8-b6db-0023141b1a1b\") " pod="openstack/nova-metadata-0"
Oct 09 15:00:52 crc kubenswrapper[4762]: I1009 15:00:52.363433 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Oct 09 15:00:52 crc kubenswrapper[4762]: I1009 15:00:52.401661 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Oct 09 15:00:52 crc kubenswrapper[4762]: I1009 15:00:52.432910 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5afc5cff-4cae-4190-ae19-7b53163e3824-config\") pod \"dnsmasq-dns-7c4b585c7-nmh5n\" (UID: \"5afc5cff-4cae-4190-ae19-7b53163e3824\") " pod="openstack/dnsmasq-dns-7c4b585c7-nmh5n"
Oct 09 15:00:52 crc kubenswrapper[4762]: I1009 15:00:52.433402 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5afc5cff-4cae-4190-ae19-7b53163e3824-dns-svc\") pod \"dnsmasq-dns-7c4b585c7-nmh5n\" (UID: \"5afc5cff-4cae-4190-ae19-7b53163e3824\") " pod="openstack/dnsmasq-dns-7c4b585c7-nmh5n"
Oct 09 15:00:52 crc kubenswrapper[4762]: I1009 15:00:52.433428 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5afc5cff-4cae-4190-ae19-7b53163e3824-ovsdbserver-nb\") pod \"dnsmasq-dns-7c4b585c7-nmh5n\" (UID: \"5afc5cff-4cae-4190-ae19-7b53163e3824\") " pod="openstack/dnsmasq-dns-7c4b585c7-nmh5n"
Oct 09 15:00:52 crc kubenswrapper[4762]: I1009 15:00:52.433507 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5afc5cff-4cae-4190-ae19-7b53163e3824-ovsdbserver-sb\") pod \"dnsmasq-dns-7c4b585c7-nmh5n\" (UID: \"5afc5cff-4cae-4190-ae19-7b53163e3824\") " pod="openstack/dnsmasq-dns-7c4b585c7-nmh5n"
Oct 09 15:00:52 crc kubenswrapper[4762]: I1009 15:00:52.433524 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xt7nl\" (UniqueName: \"kubernetes.io/projected/5afc5cff-4cae-4190-ae19-7b53163e3824-kube-api-access-xt7nl\") pod \"dnsmasq-dns-7c4b585c7-nmh5n\" (UID: \"5afc5cff-4cae-4190-ae19-7b53163e3824\") " pod="openstack/dnsmasq-dns-7c4b585c7-nmh5n"
Oct 09 15:00:52 crc kubenswrapper[4762]: I1009 15:00:52.434599 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5afc5cff-4cae-4190-ae19-7b53163e3824-dns-svc\") pod \"dnsmasq-dns-7c4b585c7-nmh5n\" (UID: \"5afc5cff-4cae-4190-ae19-7b53163e3824\") " pod="openstack/dnsmasq-dns-7c4b585c7-nmh5n"
Oct 09 15:00:52 crc kubenswrapper[4762]: I1009 15:00:52.434914 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5afc5cff-4cae-4190-ae19-7b53163e3824-ovsdbserver-nb\") pod \"dnsmasq-dns-7c4b585c7-nmh5n\" (UID: \"5afc5cff-4cae-4190-ae19-7b53163e3824\") " pod="openstack/dnsmasq-dns-7c4b585c7-nmh5n"
Oct 09 15:00:52 crc kubenswrapper[4762]: I1009 15:00:52.435927 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5afc5cff-4cae-4190-ae19-7b53163e3824-ovsdbserver-sb\") pod \"dnsmasq-dns-7c4b585c7-nmh5n\" (UID: \"5afc5cff-4cae-4190-ae19-7b53163e3824\") " pod="openstack/dnsmasq-dns-7c4b585c7-nmh5n"
Oct 09 15:00:52 crc kubenswrapper[4762]: I1009 15:00:52.436065 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5afc5cff-4cae-4190-ae19-7b53163e3824-config\") pod \"dnsmasq-dns-7c4b585c7-nmh5n\" (UID: \"5afc5cff-4cae-4190-ae19-7b53163e3824\") " pod="openstack/dnsmasq-dns-7c4b585c7-nmh5n"
Oct 09 15:00:52 crc kubenswrapper[4762]: I1009 15:00:52.458013 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xt7nl\" (UniqueName: \"kubernetes.io/projected/5afc5cff-4cae-4190-ae19-7b53163e3824-kube-api-access-xt7nl\") pod \"dnsmasq-dns-7c4b585c7-nmh5n\" (UID: \"5afc5cff-4cae-4190-ae19-7b53163e3824\") " pod="openstack/dnsmasq-dns-7c4b585c7-nmh5n"
Oct 09 15:00:52 crc kubenswrapper[4762]: I1009 15:00:52.613790 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
Oct 09 15:00:52 crc kubenswrapper[4762]: I1009 15:00:52.621194 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-xvkwg"]
Oct 09 15:00:52 crc kubenswrapper[4762]: I1009 15:00:52.621616 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Oct 09 15:00:52 crc kubenswrapper[4762]: I1009 15:00:52.654281 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7c4b585c7-nmh5n"
Oct 09 15:00:52 crc kubenswrapper[4762]: I1009 15:00:52.852008 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Oct 09 15:00:52 crc kubenswrapper[4762]: I1009 15:00:52.903228 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-xvkwg" event={"ID":"facc7c9e-6fec-46d2-946a-14be514a7699","Type":"ContainerStarted","Data":"0e3e7243e80ccf07f4bde1ebffbea8ce3eaa298ca67979230a4494c22197e09f"}
Oct 09 15:00:52 crc kubenswrapper[4762]: I1009 15:00:52.909490 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"5c5aa839-5f42-42ca-9fae-8ed580491bd7","Type":"ContainerStarted","Data":"12cde0e587515c2ec07e8aa27f46793140392d3820e705b1e207a69298beae1d"}
Oct 09 15:00:52 crc kubenswrapper[4762]: I1009 15:00:52.963668 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Oct 09 15:00:53 crc kubenswrapper[4762]: I1009 15:00:53.018389 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-dlwwg"]
Oct 09 15:00:53 crc kubenswrapper[4762]: I1009 15:00:53.034111 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-dlwwg"
Oct 09 15:00:53 crc kubenswrapper[4762]: I1009 15:00:53.037317 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data"
Oct 09 15:00:53 crc kubenswrapper[4762]: I1009 15:00:53.037709 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts"
Oct 09 15:00:53 crc kubenswrapper[4762]: I1009 15:00:53.039716 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-dlwwg"]
Oct 09 15:00:53 crc kubenswrapper[4762]: I1009 15:00:53.149308 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/38c6c050-8822-475a-b959-3ea5306a1d21-scripts\") pod \"nova-cell1-conductor-db-sync-dlwwg\" (UID: \"38c6c050-8822-475a-b959-3ea5306a1d21\") " pod="openstack/nova-cell1-conductor-db-sync-dlwwg"
Oct 09 15:00:53 crc kubenswrapper[4762]: I1009 15:00:53.149736 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/38c6c050-8822-475a-b959-3ea5306a1d21-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-dlwwg\" (UID: \"38c6c050-8822-475a-b959-3ea5306a1d21\") " pod="openstack/nova-cell1-conductor-db-sync-dlwwg"
Oct 09 15:00:53 crc kubenswrapper[4762]: I1009 15:00:53.149768 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/38c6c050-8822-475a-b959-3ea5306a1d21-config-data\") pod \"nova-cell1-conductor-db-sync-dlwwg\" (UID: \"38c6c050-8822-475a-b959-3ea5306a1d21\") " pod="openstack/nova-cell1-conductor-db-sync-dlwwg"
Oct 09 15:00:53 crc kubenswrapper[4762]: I1009 15:00:53.149796 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q9w6z\" (UniqueName: \"kubernetes.io/projected/38c6c050-8822-475a-b959-3ea5306a1d21-kube-api-access-q9w6z\") pod \"nova-cell1-conductor-db-sync-dlwwg\" (UID: \"38c6c050-8822-475a-b959-3ea5306a1d21\") " pod="openstack/nova-cell1-conductor-db-sync-dlwwg"
Oct 09 15:00:53 crc kubenswrapper[4762]: I1009 15:00:53.202043 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Oct 09 15:00:53 crc kubenswrapper[4762]: W1009 15:00:53.210259 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd3784a5e_8e7b_43d8_b6db_0023141b1a1b.slice/crio-c71cb63f82312937d4d3220d83ffa9ac8cf2cf55bab052bd5b95e372bd910d32 WatchSource:0}: Error finding container c71cb63f82312937d4d3220d83ffa9ac8cf2cf55bab052bd5b95e372bd910d32: Status 404 returned error can't find the container with id c71cb63f82312937d4d3220d83ffa9ac8cf2cf55bab052bd5b95e372bd910d32
Oct 09 15:00:53 crc kubenswrapper[4762]: I1009 15:00:53.217548 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Oct 09 15:00:53 crc kubenswrapper[4762]: I1009 15:00:53.252840 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/38c6c050-8822-475a-b959-3ea5306a1d21-config-data\") pod \"nova-cell1-conductor-db-sync-dlwwg\" (UID: \"38c6c050-8822-475a-b959-3ea5306a1d21\") " pod="openstack/nova-cell1-conductor-db-sync-dlwwg"
Oct 09 15:00:53 crc kubenswrapper[4762]: I1009 15:00:53.252891 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q9w6z\" (UniqueName: \"kubernetes.io/projected/38c6c050-8822-475a-b959-3ea5306a1d21-kube-api-access-q9w6z\") pod \"nova-cell1-conductor-db-sync-dlwwg\" (UID: \"38c6c050-8822-475a-b959-3ea5306a1d21\") " pod="openstack/nova-cell1-conductor-db-sync-dlwwg"
Oct 09 15:00:53 crc kubenswrapper[4762]: I1009 15:00:53.253007 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/38c6c050-8822-475a-b959-3ea5306a1d21-scripts\") pod \"nova-cell1-conductor-db-sync-dlwwg\" (UID: \"38c6c050-8822-475a-b959-3ea5306a1d21\") " pod="openstack/nova-cell1-conductor-db-sync-dlwwg"
Oct 09 15:00:53 crc kubenswrapper[4762]: I1009 15:00:53.253050 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/38c6c050-8822-475a-b959-3ea5306a1d21-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-dlwwg\" (UID: \"38c6c050-8822-475a-b959-3ea5306a1d21\") " pod="openstack/nova-cell1-conductor-db-sync-dlwwg"
Oct 09 15:00:53 crc kubenswrapper[4762]: I1009 15:00:53.258335 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/38c6c050-8822-475a-b959-3ea5306a1d21-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-dlwwg\" (UID: \"38c6c050-8822-475a-b959-3ea5306a1d21\") " pod="openstack/nova-cell1-conductor-db-sync-dlwwg"
Oct 09 15:00:53 crc kubenswrapper[4762]: I1009 15:00:53.260809 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/38c6c050-8822-475a-b959-3ea5306a1d21-scripts\") pod \"nova-cell1-conductor-db-sync-dlwwg\" (UID: \"38c6c050-8822-475a-b959-3ea5306a1d21\") " pod="openstack/nova-cell1-conductor-db-sync-dlwwg"
Oct 09 15:00:53 crc kubenswrapper[4762]: I1009 15:00:53.269069 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/38c6c050-8822-475a-b959-3ea5306a1d21-config-data\") pod \"nova-cell1-conductor-db-sync-dlwwg\" (UID: \"38c6c050-8822-475a-b959-3ea5306a1d21\") " pod="openstack/nova-cell1-conductor-db-sync-dlwwg"
Oct 09 15:00:53 crc kubenswrapper[4762]: I1009 15:00:53.270386 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q9w6z\" (UniqueName: \"kubernetes.io/projected/38c6c050-8822-475a-b959-3ea5306a1d21-kube-api-access-q9w6z\") pod \"nova-cell1-conductor-db-sync-dlwwg\" (UID: \"38c6c050-8822-475a-b959-3ea5306a1d21\") " pod="openstack/nova-cell1-conductor-db-sync-dlwwg"
Oct 09 15:00:53 crc kubenswrapper[4762]: I1009 15:00:53.383875 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7c4b585c7-nmh5n"]
Oct 09 15:00:53 crc kubenswrapper[4762]: W1009 15:00:53.386586 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5afc5cff_4cae_4190_ae19_7b53163e3824.slice/crio-f2e45df5c1f73cd62a752e35855129632a0c88c28022d42fbd9a6fe4f7b95e0b WatchSource:0}: Error finding container f2e45df5c1f73cd62a752e35855129632a0c88c28022d42fbd9a6fe4f7b95e0b: Status 404 returned error can't find the container with id f2e45df5c1f73cd62a752e35855129632a0c88c28022d42fbd9a6fe4f7b95e0b
Oct 09 15:00:53 crc kubenswrapper[4762]: I1009 15:00:53.458821 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-dlwwg"
Oct 09 15:00:53 crc kubenswrapper[4762]: I1009 15:00:53.923229 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-xvkwg" event={"ID":"facc7c9e-6fec-46d2-946a-14be514a7699","Type":"ContainerStarted","Data":"5bdcd51332ca5a6e7ca72c220021aecd3622b5dafc76a33f224fca303f4cc673"}
Oct 09 15:00:53 crc kubenswrapper[4762]: I1009 15:00:53.931188 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"96a2eb96-11dc-4ef6-9dec-fcd753eafea4","Type":"ContainerStarted","Data":"a33e18d0ec895c00a6524398c2c7b401269f98a3c4cfecce451ac2d0cbfa5039"}
Oct 09 15:00:53 crc kubenswrapper[4762]: I1009 15:00:53.931237 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"96a2eb96-11dc-4ef6-9dec-fcd753eafea4","Type":"ContainerStarted","Data":"47a849ee73d6676b58cabeb62e6ca68a99ac5f0a2b73f50155545e3b30ea9341"}
Oct 09 15:00:53 crc kubenswrapper[4762]: I1009 15:00:53.948708 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-dlwwg"]
Oct 09 15:00:53 crc kubenswrapper[4762]: I1009 15:00:53.948778 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-xvkwg" podStartSLOduration=2.948763132 podStartE2EDuration="2.948763132s" podCreationTimestamp="2025-10-09 15:00:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 15:00:53.942415637 +0000 UTC m=+5729.716206676" watchObservedRunningTime="2025-10-09 15:00:53.948763132 +0000 UTC m=+5729.722554171"
Oct 09 15:00:53 crc kubenswrapper[4762]: I1009 15:00:53.955984 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"5c5aa839-5f42-42ca-9fae-8ed580491bd7","Type":"ContainerStarted","Data":"7b9c8f04af038809f23df2f105ee141e4a1032169bae03be78c7eb1203e93dda"}
Oct 09 15:00:53 crc kubenswrapper[4762]: I1009 15:00:53.956043 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"5c5aa839-5f42-42ca-9fae-8ed580491bd7","Type":"ContainerStarted","Data":"aa284dd72bdce8dd98c0408cda450df9a8ee9bcfe7aeb650380ab5c36ee043ed"}
Oct 09 15:00:53 crc kubenswrapper[4762]: I1009 15:00:53.975276 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"4e974ec3-d1c0-49ae-a8c4-645673064d61","Type":"ContainerStarted","Data":"f9fd8299b05b4f79a129a485f37f5a7df573d3283c61176edfb52f50f847202e"}
Oct 09 15:00:53 crc kubenswrapper[4762]: I1009 15:00:53.975327 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"4e974ec3-d1c0-49ae-a8c4-645673064d61","Type":"ContainerStarted","Data":"7ae6f6124f1cc4ada4cf30825852ab0ac8828e087426a2647802e6ab5f5f27c7"}
Oct 09 15:00:53 crc kubenswrapper[4762]: I1009 15:00:53.979901 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.979879892 podStartE2EDuration="2.979879892s" podCreationTimestamp="2025-10-09 15:00:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 15:00:53.963706662 +0000 UTC m=+5729.737497721" watchObservedRunningTime="2025-10-09 15:00:53.979879892 +0000 UTC m=+5729.753670931"
Oct 09 15:00:53 crc kubenswrapper[4762]: I1009 15:00:53.985128 4762 generic.go:334] "Generic (PLEG): container finished" podID="5afc5cff-4cae-4190-ae19-7b53163e3824" containerID="0191add82cf7774005aa261d004e69809edb71882a4814d6e3f21ce13afa00e8" exitCode=0
Oct 09 15:00:53 crc kubenswrapper[4762]: I1009 15:00:53.985232 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7c4b585c7-nmh5n" event={"ID":"5afc5cff-4cae-4190-ae19-7b53163e3824","Type":"ContainerDied","Data":"0191add82cf7774005aa261d004e69809edb71882a4814d6e3f21ce13afa00e8"}
Oct 09 15:00:53 crc kubenswrapper[4762]: I1009 15:00:53.985265 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7c4b585c7-nmh5n" event={"ID":"5afc5cff-4cae-4190-ae19-7b53163e3824","Type":"ContainerStarted","Data":"f2e45df5c1f73cd62a752e35855129632a0c88c28022d42fbd9a6fe4f7b95e0b"}
Oct 09 15:00:54 crc kubenswrapper[4762]: I1009 15:00:54.000067 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d3784a5e-8e7b-43d8-b6db-0023141b1a1b","Type":"ContainerStarted","Data":"958fe7aa4b8f84332c69f6599680142210c8059d84b6778a0de19e53f1f3c4f8"}
Oct 09 15:00:54 crc kubenswrapper[4762]: I1009 15:00:54.000104 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d3784a5e-8e7b-43d8-b6db-0023141b1a1b","Type":"ContainerStarted","Data":"893ceac955f289406dfcd1ce0d50f6f26ad14f100a1f35e4c1c69ad0f256ff72"}
Oct 09 15:00:54 crc kubenswrapper[4762]: I1009 15:00:54.000115 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d3784a5e-8e7b-43d8-b6db-0023141b1a1b","Type":"ContainerStarted","Data":"c71cb63f82312937d4d3220d83ffa9ac8cf2cf55bab052bd5b95e372bd910d32"}
Oct 09 15:00:54 crc kubenswrapper[4762]: I1009 15:00:54.046520 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=3.046491907 podStartE2EDuration="3.046491907s" podCreationTimestamp="2025-10-09 15:00:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 15:00:54.000019277 +0000 UTC m=+5729.773810336" watchObservedRunningTime="2025-10-09 15:00:54.046491907 +0000 UTC m=+5729.820282956"
Oct 09 15:00:54 crc kubenswrapper[4762]: I1009 15:00:54.104669 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=3.104626431 podStartE2EDuration="3.104626431s" podCreationTimestamp="2025-10-09 15:00:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 15:00:54.029104704 +0000 UTC m=+5729.802895743" watchObservedRunningTime="2025-10-09 15:00:54.104626431 +0000 UTC m=+5729.878417470"
Oct 09 15:00:54 crc kubenswrapper[4762]: I1009 15:00:54.208374 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=3.208347881 podStartE2EDuration="3.208347881s" podCreationTimestamp="2025-10-09 15:00:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 15:00:54.126711515 +0000 UTC m=+5729.900502554" watchObservedRunningTime="2025-10-09 15:00:54.208347881 +0000 UTC m=+5729.982138920"
Oct 09 15:00:55 crc kubenswrapper[4762]: I1009 15:00:55.011687 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-dlwwg" event={"ID":"38c6c050-8822-475a-b959-3ea5306a1d21","Type":"ContainerStarted","Data":"f29f9ad0f1d37d93ffeab4ac6759cec89c36c1a4d5d98cb8254d3eb747786fe4"}
Oct 09 15:00:55 crc kubenswrapper[4762]: I1009 15:00:55.012042 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-dlwwg" event={"ID":"38c6c050-8822-475a-b959-3ea5306a1d21","Type":"ContainerStarted","Data":"068b4b48b5c7eba231c7d7f282dce75b455dc9df56bdfc7ec5dfec894ebe8aff"}
Oct 09 15:00:55 crc kubenswrapper[4762]: I1009 15:00:55.014296 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7c4b585c7-nmh5n" event={"ID":"5afc5cff-4cae-4190-ae19-7b53163e3824","Type":"ContainerStarted","Data":"b2a7512f49583bfa03624913cb2fe31a8e8cfc8af65db7142a5c1c571d761056"}
Oct 09 15:00:55 crc kubenswrapper[4762]: I1009 15:00:55.025588 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-dlwwg" podStartSLOduration=3.025570856 podStartE2EDuration="3.025570856s" podCreationTimestamp="2025-10-09 15:00:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 15:00:55.024622962 +0000 UTC m=+5730.798414001" watchObservedRunningTime="2025-10-09 15:00:55.025570856 +0000 UTC m=+5730.799361895"
Oct 09 15:00:55 crc kubenswrapper[4762]: I1009 15:00:55.048542 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7c4b585c7-nmh5n" podStartSLOduration=3.048525314 podStartE2EDuration="3.048525314s" podCreationTimestamp="2025-10-09 15:00:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 15:00:55.042412084 +0000 UTC m=+5730.816203113" watchObservedRunningTime="2025-10-09 15:00:55.048525314 +0000 UTC m=+5730.822316353"
Oct 09 15:00:56 crc kubenswrapper[4762]: I1009 15:00:56.025644 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7c4b585c7-nmh5n"
Oct 09 15:00:57 crc kubenswrapper[4762]: I1009 15:00:57.402896 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0"
Oct 09 15:00:57 crc kubenswrapper[4762]: I1009 15:00:57.614004 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0"
Oct 09 15:00:57 crc kubenswrapper[4762]: I1009 15:00:57.621746 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Oct 09 15:00:57 crc kubenswrapper[4762]: I1009 15:00:57.621803 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Oct 09 15:00:58 crc kubenswrapper[4762]: I1009 15:00:58.088971 4762 generic.go:334] "Generic (PLEG): container finished" podID="facc7c9e-6fec-46d2-946a-14be514a7699" containerID="5bdcd51332ca5a6e7ca72c220021aecd3622b5dafc76a33f224fca303f4cc673" exitCode=0
Oct 09 15:00:58 crc kubenswrapper[4762]: I1009 15:00:58.089055 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-xvkwg" event={"ID":"facc7c9e-6fec-46d2-946a-14be514a7699","Type":"ContainerDied","Data":"5bdcd51332ca5a6e7ca72c220021aecd3622b5dafc76a33f224fca303f4cc673"}
Oct 09 15:00:58 crc kubenswrapper[4762]: I1009 15:00:58.090862 4762 generic.go:334] "Generic (PLEG): container finished" podID="38c6c050-8822-475a-b959-3ea5306a1d21" containerID="f29f9ad0f1d37d93ffeab4ac6759cec89c36c1a4d5d98cb8254d3eb747786fe4" exitCode=0
Oct 09 15:00:58 crc kubenswrapper[4762]: I1009 15:00:58.090942 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-dlwwg" event={"ID":"38c6c050-8822-475a-b959-3ea5306a1d21","Type":"ContainerDied","Data":"f29f9ad0f1d37d93ffeab4ac6759cec89c36c1a4d5d98cb8254d3eb747786fe4"}
Oct 09 15:00:59 crc kubenswrapper[4762]: I1009 15:00:59.523836 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-xvkwg"
Oct 09 15:00:59 crc kubenswrapper[4762]: I1009 15:00:59.532789 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-dlwwg"
Oct 09 15:00:59 crc kubenswrapper[4762]: I1009 15:00:59.605421 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ms9vg\" (UniqueName: \"kubernetes.io/projected/facc7c9e-6fec-46d2-946a-14be514a7699-kube-api-access-ms9vg\") pod \"facc7c9e-6fec-46d2-946a-14be514a7699\" (UID: \"facc7c9e-6fec-46d2-946a-14be514a7699\") "
Oct 09 15:00:59 crc kubenswrapper[4762]: I1009 15:00:59.605605 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/facc7c9e-6fec-46d2-946a-14be514a7699-scripts\") pod \"facc7c9e-6fec-46d2-946a-14be514a7699\" (UID: \"facc7c9e-6fec-46d2-946a-14be514a7699\") "
Oct 09 15:00:59 crc kubenswrapper[4762]: I1009 15:00:59.605663 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/38c6c050-8822-475a-b959-3ea5306a1d21-scripts\") pod \"38c6c050-8822-475a-b959-3ea5306a1d21\" (UID: \"38c6c050-8822-475a-b959-3ea5306a1d21\") "
Oct 09 15:00:59 crc kubenswrapper[4762]: I1009 15:00:59.605796 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q9w6z\" (UniqueName: \"kubernetes.io/projected/38c6c050-8822-475a-b959-3ea5306a1d21-kube-api-access-q9w6z\") pod \"38c6c050-8822-475a-b959-3ea5306a1d21\" (UID: \"38c6c050-8822-475a-b959-3ea5306a1d21\") "
Oct 09 15:00:59 crc kubenswrapper[4762]: I1009 15:00:59.605843 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/facc7c9e-6fec-46d2-946a-14be514a7699-combined-ca-bundle\") pod \"facc7c9e-6fec-46d2-946a-14be514a7699\" (UID: \"facc7c9e-6fec-46d2-946a-14be514a7699\") "
Oct 09 15:00:59 crc kubenswrapper[4762]: I1009 15:00:59.605894 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/38c6c050-8822-475a-b959-3ea5306a1d21-combined-ca-bundle\") pod \"38c6c050-8822-475a-b959-3ea5306a1d21\" (UID: \"38c6c050-8822-475a-b959-3ea5306a1d21\") "
Oct 09 15:00:59 crc kubenswrapper[4762]: I1009 15:00:59.605921 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/facc7c9e-6fec-46d2-946a-14be514a7699-config-data\") pod \"facc7c9e-6fec-46d2-946a-14be514a7699\" (UID: \"facc7c9e-6fec-46d2-946a-14be514a7699\") "
Oct 09 15:00:59 crc kubenswrapper[4762]: I1009 15:00:59.605970 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/38c6c050-8822-475a-b959-3ea5306a1d21-config-data\") pod \"38c6c050-8822-475a-b959-3ea5306a1d21\" (UID: \"38c6c050-8822-475a-b959-3ea5306a1d21\") "
Oct 09 15:00:59 crc kubenswrapper[4762]: I1009 15:00:59.611088 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/facc7c9e-6fec-46d2-946a-14be514a7699-scripts" (OuterVolumeSpecName: "scripts") pod "facc7c9e-6fec-46d2-946a-14be514a7699" (UID: "facc7c9e-6fec-46d2-946a-14be514a7699"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 09 15:00:59 crc kubenswrapper[4762]: I1009 15:00:59.611807 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/facc7c9e-6fec-46d2-946a-14be514a7699-kube-api-access-ms9vg" (OuterVolumeSpecName: "kube-api-access-ms9vg") pod "facc7c9e-6fec-46d2-946a-14be514a7699" (UID: "facc7c9e-6fec-46d2-946a-14be514a7699"). InnerVolumeSpecName "kube-api-access-ms9vg". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 09 15:00:59 crc kubenswrapper[4762]: I1009 15:00:59.612685 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/38c6c050-8822-475a-b959-3ea5306a1d21-kube-api-access-q9w6z" (OuterVolumeSpecName: "kube-api-access-q9w6z") pod "38c6c050-8822-475a-b959-3ea5306a1d21" (UID: "38c6c050-8822-475a-b959-3ea5306a1d21"). InnerVolumeSpecName "kube-api-access-q9w6z". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 09 15:00:59 crc kubenswrapper[4762]: I1009 15:00:59.613243 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/38c6c050-8822-475a-b959-3ea5306a1d21-scripts" (OuterVolumeSpecName: "scripts") pod "38c6c050-8822-475a-b959-3ea5306a1d21" (UID: "38c6c050-8822-475a-b959-3ea5306a1d21"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 09 15:00:59 crc kubenswrapper[4762]: I1009 15:00:59.633223 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/38c6c050-8822-475a-b959-3ea5306a1d21-config-data" (OuterVolumeSpecName: "config-data") pod "38c6c050-8822-475a-b959-3ea5306a1d21" (UID: "38c6c050-8822-475a-b959-3ea5306a1d21"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 09 15:00:59 crc kubenswrapper[4762]: I1009 15:00:59.641950 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/facc7c9e-6fec-46d2-946a-14be514a7699-config-data" (OuterVolumeSpecName: "config-data") pod "facc7c9e-6fec-46d2-946a-14be514a7699" (UID: "facc7c9e-6fec-46d2-946a-14be514a7699"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 09 15:00:59 crc kubenswrapper[4762]: I1009 15:00:59.642734 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/38c6c050-8822-475a-b959-3ea5306a1d21-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "38c6c050-8822-475a-b959-3ea5306a1d21" (UID: "38c6c050-8822-475a-b959-3ea5306a1d21"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 09 15:00:59 crc kubenswrapper[4762]: I1009 15:00:59.645162 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/facc7c9e-6fec-46d2-946a-14be514a7699-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "facc7c9e-6fec-46d2-946a-14be514a7699" (UID: "facc7c9e-6fec-46d2-946a-14be514a7699"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 09 15:00:59 crc kubenswrapper[4762]: I1009 15:00:59.708253 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q9w6z\" (UniqueName: \"kubernetes.io/projected/38c6c050-8822-475a-b959-3ea5306a1d21-kube-api-access-q9w6z\") on node \"crc\" DevicePath \"\""
Oct 09 15:00:59 crc kubenswrapper[4762]: I1009 15:00:59.708433 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/facc7c9e-6fec-46d2-946a-14be514a7699-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 09 15:00:59 crc kubenswrapper[4762]: I1009 15:00:59.708518 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/38c6c050-8822-475a-b959-3ea5306a1d21-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 09 15:00:59 crc kubenswrapper[4762]: I1009 15:00:59.708572 4762 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/facc7c9e-6fec-46d2-946a-14be514a7699-config-data\") on node \"crc\" DevicePath \"\""
Oct 09 15:00:59 crc kubenswrapper[4762]: I1009 15:00:59.708623 4762 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/38c6c050-8822-475a-b959-3ea5306a1d21-config-data\") on node \"crc\" DevicePath \"\""
Oct 09 15:00:59 crc kubenswrapper[4762]: I1009 15:00:59.708693 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ms9vg\" (UniqueName: \"kubernetes.io/projected/facc7c9e-6fec-46d2-946a-14be514a7699-kube-api-access-ms9vg\") on node \"crc\" DevicePath \"\""
Oct 09 15:00:59 crc kubenswrapper[4762]: I1009 15:00:59.708743 4762 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/facc7c9e-6fec-46d2-946a-14be514a7699-scripts\") on node \"crc\" DevicePath \"\""
Oct 09 15:00:59 crc kubenswrapper[4762]: I1009 15:00:59.708803 4762 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/38c6c050-8822-475a-b959-3ea5306a1d21-scripts\") on node \"crc\" DevicePath \"\""
Oct 09 15:01:00 crc kubenswrapper[4762]: I1009 15:01:00.113881 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-xvkwg" event={"ID":"facc7c9e-6fec-46d2-946a-14be514a7699","Type":"ContainerDied","Data":"0e3e7243e80ccf07f4bde1ebffbea8ce3eaa298ca67979230a4494c22197e09f"}
Oct 09 15:01:00 crc kubenswrapper[4762]: I1009 15:01:00.113925 4762 util.go:48] "No ready sandbox for pod can be found.
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-xvkwg" Oct 09 15:01:00 crc kubenswrapper[4762]: I1009 15:01:00.113932 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0e3e7243e80ccf07f4bde1ebffbea8ce3eaa298ca67979230a4494c22197e09f" Oct 09 15:01:00 crc kubenswrapper[4762]: I1009 15:01:00.115546 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-dlwwg" event={"ID":"38c6c050-8822-475a-b959-3ea5306a1d21","Type":"ContainerDied","Data":"068b4b48b5c7eba231c7d7f282dce75b455dc9df56bdfc7ec5dfec894ebe8aff"} Oct 09 15:01:00 crc kubenswrapper[4762]: I1009 15:01:00.115579 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="068b4b48b5c7eba231c7d7f282dce75b455dc9df56bdfc7ec5dfec894ebe8aff" Oct 09 15:01:00 crc kubenswrapper[4762]: I1009 15:01:00.115662 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-dlwwg" Oct 09 15:01:00 crc kubenswrapper[4762]: I1009 15:01:00.134353 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cron-29333701-x8pdl"] Oct 09 15:01:00 crc kubenswrapper[4762]: E1009 15:01:00.135108 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="38c6c050-8822-475a-b959-3ea5306a1d21" containerName="nova-cell1-conductor-db-sync" Oct 09 15:01:00 crc kubenswrapper[4762]: I1009 15:01:00.135211 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="38c6c050-8822-475a-b959-3ea5306a1d21" containerName="nova-cell1-conductor-db-sync" Oct 09 15:01:00 crc kubenswrapper[4762]: E1009 15:01:00.135295 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="facc7c9e-6fec-46d2-946a-14be514a7699" containerName="nova-manage" Oct 09 15:01:00 crc kubenswrapper[4762]: I1009 15:01:00.135363 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="facc7c9e-6fec-46d2-946a-14be514a7699" containerName="nova-manage" Oct 09 15:01:00 crc kubenswrapper[4762]: I1009 15:01:00.135791 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="38c6c050-8822-475a-b959-3ea5306a1d21" containerName="nova-cell1-conductor-db-sync" Oct 09 15:01:00 crc kubenswrapper[4762]: I1009 15:01:00.135950 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="facc7c9e-6fec-46d2-946a-14be514a7699" containerName="nova-manage" Oct 09 15:01:00 crc kubenswrapper[4762]: I1009 15:01:00.136976 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29333701-x8pdl" Oct 09 15:01:00 crc kubenswrapper[4762]: I1009 15:01:00.179963 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29333701-x8pdl"] Oct 09 15:01:00 crc kubenswrapper[4762]: I1009 15:01:00.217781 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Oct 09 15:01:00 crc kubenswrapper[4762]: I1009 15:01:00.219325 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Oct 09 15:01:00 crc kubenswrapper[4762]: I1009 15:01:00.222133 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Oct 09 15:01:00 crc kubenswrapper[4762]: I1009 15:01:00.225372 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ceaf6c8-14e1-4b7a-b4be-7cc671d642bf-config-data\") pod \"keystone-cron-29333701-x8pdl\" (UID: \"7ceaf6c8-14e1-4b7a-b4be-7cc671d642bf\") " pod="openstack/keystone-cron-29333701-x8pdl" Oct 09 15:01:00 crc kubenswrapper[4762]: I1009 15:01:00.225539 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/7ceaf6c8-14e1-4b7a-b4be-7cc671d642bf-fernet-keys\") pod \"keystone-cron-29333701-x8pdl\" (UID: \"7ceaf6c8-14e1-4b7a-b4be-7cc671d642bf\") " pod="openstack/keystone-cron-29333701-x8pdl" Oct 09 15:01:00 crc kubenswrapper[4762]: I1009 15:01:00.225604 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6wjbs\" (UniqueName: \"kubernetes.io/projected/7ceaf6c8-14e1-4b7a-b4be-7cc671d642bf-kube-api-access-6wjbs\") pod \"keystone-cron-29333701-x8pdl\" (UID: \"7ceaf6c8-14e1-4b7a-b4be-7cc671d642bf\") " pod="openstack/keystone-cron-29333701-x8pdl" Oct 09 15:01:00 crc kubenswrapper[4762]: I1009 15:01:00.225688 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ceaf6c8-14e1-4b7a-b4be-7cc671d642bf-combined-ca-bundle\") pod \"keystone-cron-29333701-x8pdl\" (UID: \"7ceaf6c8-14e1-4b7a-b4be-7cc671d642bf\") " pod="openstack/keystone-cron-29333701-x8pdl" Oct 09 15:01:00 crc kubenswrapper[4762]: I1009 15:01:00.233007 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Oct 09 15:01:00 crc kubenswrapper[4762]: I1009 15:01:00.327910 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/7ceaf6c8-14e1-4b7a-b4be-7cc671d642bf-fernet-keys\") pod \"keystone-cron-29333701-x8pdl\" (UID: \"7ceaf6c8-14e1-4b7a-b4be-7cc671d642bf\") " pod="openstack/keystone-cron-29333701-x8pdl" Oct 09 15:01:00 crc kubenswrapper[4762]: I1009 15:01:00.328347 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6wjbs\" (UniqueName: \"kubernetes.io/projected/7ceaf6c8-14e1-4b7a-b4be-7cc671d642bf-kube-api-access-6wjbs\") pod \"keystone-cron-29333701-x8pdl\" (UID: \"7ceaf6c8-14e1-4b7a-b4be-7cc671d642bf\") " pod="openstack/keystone-cron-29333701-x8pdl" Oct 09 15:01:00 crc kubenswrapper[4762]: I1009 15:01:00.328422 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6898d\" (UniqueName: \"kubernetes.io/projected/1d97958c-5c80-4412-8277-d6ab937f9d0d-kube-api-access-6898d\") pod \"nova-cell1-conductor-0\" (UID: \"1d97958c-5c80-4412-8277-d6ab937f9d0d\") " pod="openstack/nova-cell1-conductor-0" Oct 09 15:01:00 crc kubenswrapper[4762]: I1009 15:01:00.328462 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ceaf6c8-14e1-4b7a-b4be-7cc671d642bf-combined-ca-bundle\") pod \"keystone-cron-29333701-x8pdl\" (UID: 
\"7ceaf6c8-14e1-4b7a-b4be-7cc671d642bf\") " pod="openstack/keystone-cron-29333701-x8pdl" Oct 09 15:01:00 crc kubenswrapper[4762]: I1009 15:01:00.328616 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d97958c-5c80-4412-8277-d6ab937f9d0d-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"1d97958c-5c80-4412-8277-d6ab937f9d0d\") " pod="openstack/nova-cell1-conductor-0" Oct 09 15:01:00 crc kubenswrapper[4762]: I1009 15:01:00.328827 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ceaf6c8-14e1-4b7a-b4be-7cc671d642bf-config-data\") pod \"keystone-cron-29333701-x8pdl\" (UID: \"7ceaf6c8-14e1-4b7a-b4be-7cc671d642bf\") " pod="openstack/keystone-cron-29333701-x8pdl" Oct 09 15:01:00 crc kubenswrapper[4762]: I1009 15:01:00.328885 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1d97958c-5c80-4412-8277-d6ab937f9d0d-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"1d97958c-5c80-4412-8277-d6ab937f9d0d\") " pod="openstack/nova-cell1-conductor-0" Oct 09 15:01:00 crc kubenswrapper[4762]: I1009 15:01:00.333398 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ceaf6c8-14e1-4b7a-b4be-7cc671d642bf-combined-ca-bundle\") pod \"keystone-cron-29333701-x8pdl\" (UID: \"7ceaf6c8-14e1-4b7a-b4be-7cc671d642bf\") " pod="openstack/keystone-cron-29333701-x8pdl" Oct 09 15:01:00 crc kubenswrapper[4762]: I1009 15:01:00.333813 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/7ceaf6c8-14e1-4b7a-b4be-7cc671d642bf-fernet-keys\") pod \"keystone-cron-29333701-x8pdl\" (UID: \"7ceaf6c8-14e1-4b7a-b4be-7cc671d642bf\") " pod="openstack/keystone-cron-29333701-x8pdl" Oct 09 15:01:00 crc kubenswrapper[4762]: I1009 15:01:00.334023 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ceaf6c8-14e1-4b7a-b4be-7cc671d642bf-config-data\") pod \"keystone-cron-29333701-x8pdl\" (UID: \"7ceaf6c8-14e1-4b7a-b4be-7cc671d642bf\") " pod="openstack/keystone-cron-29333701-x8pdl" Oct 09 15:01:00 crc kubenswrapper[4762]: I1009 15:01:00.344984 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6wjbs\" (UniqueName: \"kubernetes.io/projected/7ceaf6c8-14e1-4b7a-b4be-7cc671d642bf-kube-api-access-6wjbs\") pod \"keystone-cron-29333701-x8pdl\" (UID: \"7ceaf6c8-14e1-4b7a-b4be-7cc671d642bf\") " pod="openstack/keystone-cron-29333701-x8pdl" Oct 09 15:01:00 crc kubenswrapper[4762]: I1009 15:01:00.378465 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 09 15:01:00 crc kubenswrapper[4762]: I1009 15:01:00.379064 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="5c5aa839-5f42-42ca-9fae-8ed580491bd7" containerName="nova-api-log" containerID="cri-o://aa284dd72bdce8dd98c0408cda450df9a8ee9bcfe7aeb650380ab5c36ee043ed" gracePeriod=30 Oct 09 15:01:00 crc kubenswrapper[4762]: I1009 15:01:00.379181 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="5c5aa839-5f42-42ca-9fae-8ed580491bd7" containerName="nova-api-api" 
containerID="cri-o://7b9c8f04af038809f23df2f105ee141e4a1032169bae03be78c7eb1203e93dda" gracePeriod=30 Oct 09 15:01:00 crc kubenswrapper[4762]: I1009 15:01:00.435326 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6898d\" (UniqueName: \"kubernetes.io/projected/1d97958c-5c80-4412-8277-d6ab937f9d0d-kube-api-access-6898d\") pod \"nova-cell1-conductor-0\" (UID: \"1d97958c-5c80-4412-8277-d6ab937f9d0d\") " pod="openstack/nova-cell1-conductor-0" Oct 09 15:01:00 crc kubenswrapper[4762]: I1009 15:01:00.435521 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d97958c-5c80-4412-8277-d6ab937f9d0d-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"1d97958c-5c80-4412-8277-d6ab937f9d0d\") " pod="openstack/nova-cell1-conductor-0" Oct 09 15:01:00 crc kubenswrapper[4762]: I1009 15:01:00.435800 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1d97958c-5c80-4412-8277-d6ab937f9d0d-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"1d97958c-5c80-4412-8277-d6ab937f9d0d\") " pod="openstack/nova-cell1-conductor-0" Oct 09 15:01:00 crc kubenswrapper[4762]: I1009 15:01:00.445678 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1d97958c-5c80-4412-8277-d6ab937f9d0d-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"1d97958c-5c80-4412-8277-d6ab937f9d0d\") " pod="openstack/nova-cell1-conductor-0" Oct 09 15:01:00 crc kubenswrapper[4762]: I1009 15:01:00.449239 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d97958c-5c80-4412-8277-d6ab937f9d0d-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"1d97958c-5c80-4412-8277-d6ab937f9d0d\") " pod="openstack/nova-cell1-conductor-0" Oct 09 15:01:00 crc kubenswrapper[4762]: I1009 15:01:00.463902 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6898d\" (UniqueName: \"kubernetes.io/projected/1d97958c-5c80-4412-8277-d6ab937f9d0d-kube-api-access-6898d\") pod \"nova-cell1-conductor-0\" (UID: \"1d97958c-5c80-4412-8277-d6ab937f9d0d\") " pod="openstack/nova-cell1-conductor-0" Oct 09 15:01:00 crc kubenswrapper[4762]: I1009 15:01:00.469905 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Oct 09 15:01:00 crc kubenswrapper[4762]: I1009 15:01:00.472459 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="4e974ec3-d1c0-49ae-a8c4-645673064d61" containerName="nova-scheduler-scheduler" containerID="cri-o://f9fd8299b05b4f79a129a485f37f5a7df573d3283c61176edfb52f50f847202e" gracePeriod=30 Oct 09 15:01:00 crc kubenswrapper[4762]: I1009 15:01:00.484873 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29333701-x8pdl" Oct 09 15:01:00 crc kubenswrapper[4762]: I1009 15:01:00.488603 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 09 15:01:00 crc kubenswrapper[4762]: I1009 15:01:00.488891 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="d3784a5e-8e7b-43d8-b6db-0023141b1a1b" containerName="nova-metadata-log" containerID="cri-o://893ceac955f289406dfcd1ce0d50f6f26ad14f100a1f35e4c1c69ad0f256ff72" gracePeriod=30 Oct 09 15:01:00 crc kubenswrapper[4762]: I1009 15:01:00.489443 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="d3784a5e-8e7b-43d8-b6db-0023141b1a1b" containerName="nova-metadata-metadata" containerID="cri-o://958fe7aa4b8f84332c69f6599680142210c8059d84b6778a0de19e53f1f3c4f8" gracePeriod=30 Oct 09 15:01:00 crc kubenswrapper[4762]: I1009 15:01:00.536983 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Oct 09 15:01:01 crc kubenswrapper[4762]: I1009 15:01:01.002471 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29333701-x8pdl"] Oct 09 15:01:01 crc kubenswrapper[4762]: I1009 15:01:01.026891 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 09 15:01:01 crc kubenswrapper[4762]: I1009 15:01:01.103858 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 09 15:01:01 crc kubenswrapper[4762]: I1009 15:01:01.134901 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29333701-x8pdl" event={"ID":"7ceaf6c8-14e1-4b7a-b4be-7cc671d642bf","Type":"ContainerStarted","Data":"34ec14764a9a760fd135327805d7eceecf549b3392b4f1f2c30832b432340d41"} Oct 09 15:01:01 crc kubenswrapper[4762]: I1009 15:01:01.150692 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5c5aa839-5f42-42ca-9fae-8ed580491bd7-logs\") pod \"5c5aa839-5f42-42ca-9fae-8ed580491bd7\" (UID: \"5c5aa839-5f42-42ca-9fae-8ed580491bd7\") " Oct 09 15:01:01 crc kubenswrapper[4762]: I1009 15:01:01.151147 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c5aa839-5f42-42ca-9fae-8ed580491bd7-config-data\") pod \"5c5aa839-5f42-42ca-9fae-8ed580491bd7\" (UID: \"5c5aa839-5f42-42ca-9fae-8ed580491bd7\") " Oct 09 15:01:01 crc kubenswrapper[4762]: I1009 15:01:01.151216 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hd6ws\" (UniqueName: \"kubernetes.io/projected/5c5aa839-5f42-42ca-9fae-8ed580491bd7-kube-api-access-hd6ws\") pod \"5c5aa839-5f42-42ca-9fae-8ed580491bd7\" (UID: \"5c5aa839-5f42-42ca-9fae-8ed580491bd7\") " Oct 09 15:01:01 crc kubenswrapper[4762]: I1009 15:01:01.151276 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c5aa839-5f42-42ca-9fae-8ed580491bd7-combined-ca-bundle\") pod \"5c5aa839-5f42-42ca-9fae-8ed580491bd7\" (UID: \"5c5aa839-5f42-42ca-9fae-8ed580491bd7\") " Oct 09 15:01:01 crc kubenswrapper[4762]: I1009 15:01:01.157123 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5c5aa839-5f42-42ca-9fae-8ed580491bd7-logs" 
(OuterVolumeSpecName: "logs") pod "5c5aa839-5f42-42ca-9fae-8ed580491bd7" (UID: "5c5aa839-5f42-42ca-9fae-8ed580491bd7"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 15:01:01 crc kubenswrapper[4762]: I1009 15:01:01.158161 4762 generic.go:334] "Generic (PLEG): container finished" podID="d3784a5e-8e7b-43d8-b6db-0023141b1a1b" containerID="958fe7aa4b8f84332c69f6599680142210c8059d84b6778a0de19e53f1f3c4f8" exitCode=0 Oct 09 15:01:01 crc kubenswrapper[4762]: I1009 15:01:01.158192 4762 generic.go:334] "Generic (PLEG): container finished" podID="d3784a5e-8e7b-43d8-b6db-0023141b1a1b" containerID="893ceac955f289406dfcd1ce0d50f6f26ad14f100a1f35e4c1c69ad0f256ff72" exitCode=143 Oct 09 15:01:01 crc kubenswrapper[4762]: I1009 15:01:01.158236 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d3784a5e-8e7b-43d8-b6db-0023141b1a1b","Type":"ContainerDied","Data":"958fe7aa4b8f84332c69f6599680142210c8059d84b6778a0de19e53f1f3c4f8"} Oct 09 15:01:01 crc kubenswrapper[4762]: I1009 15:01:01.158266 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d3784a5e-8e7b-43d8-b6db-0023141b1a1b","Type":"ContainerDied","Data":"893ceac955f289406dfcd1ce0d50f6f26ad14f100a1f35e4c1c69ad0f256ff72"} Oct 09 15:01:01 crc kubenswrapper[4762]: I1009 15:01:01.158275 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d3784a5e-8e7b-43d8-b6db-0023141b1a1b","Type":"ContainerDied","Data":"c71cb63f82312937d4d3220d83ffa9ac8cf2cf55bab052bd5b95e372bd910d32"} Oct 09 15:01:01 crc kubenswrapper[4762]: I1009 15:01:01.158290 4762 scope.go:117] "RemoveContainer" containerID="958fe7aa4b8f84332c69f6599680142210c8059d84b6778a0de19e53f1f3c4f8" Oct 09 15:01:01 crc kubenswrapper[4762]: I1009 15:01:01.158406 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 09 15:01:01 crc kubenswrapper[4762]: I1009 15:01:01.161110 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5c5aa839-5f42-42ca-9fae-8ed580491bd7-kube-api-access-hd6ws" (OuterVolumeSpecName: "kube-api-access-hd6ws") pod "5c5aa839-5f42-42ca-9fae-8ed580491bd7" (UID: "5c5aa839-5f42-42ca-9fae-8ed580491bd7"). InnerVolumeSpecName "kube-api-access-hd6ws". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 15:01:01 crc kubenswrapper[4762]: I1009 15:01:01.163714 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Oct 09 15:01:01 crc kubenswrapper[4762]: I1009 15:01:01.169003 4762 generic.go:334] "Generic (PLEG): container finished" podID="5c5aa839-5f42-42ca-9fae-8ed580491bd7" containerID="7b9c8f04af038809f23df2f105ee141e4a1032169bae03be78c7eb1203e93dda" exitCode=0 Oct 09 15:01:01 crc kubenswrapper[4762]: I1009 15:01:01.169163 4762 generic.go:334] "Generic (PLEG): container finished" podID="5c5aa839-5f42-42ca-9fae-8ed580491bd7" containerID="aa284dd72bdce8dd98c0408cda450df9a8ee9bcfe7aeb650380ab5c36ee043ed" exitCode=143 Oct 09 15:01:01 crc kubenswrapper[4762]: I1009 15:01:01.169259 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"5c5aa839-5f42-42ca-9fae-8ed580491bd7","Type":"ContainerDied","Data":"7b9c8f04af038809f23df2f105ee141e4a1032169bae03be78c7eb1203e93dda"} Oct 09 15:01:01 crc kubenswrapper[4762]: I1009 15:01:01.169359 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"5c5aa839-5f42-42ca-9fae-8ed580491bd7","Type":"ContainerDied","Data":"aa284dd72bdce8dd98c0408cda450df9a8ee9bcfe7aeb650380ab5c36ee043ed"} Oct 09 15:01:01 crc kubenswrapper[4762]: I1009 15:01:01.171249 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"5c5aa839-5f42-42ca-9fae-8ed580491bd7","Type":"ContainerDied","Data":"12cde0e587515c2ec07e8aa27f46793140392d3820e705b1e207a69298beae1d"} Oct 09 15:01:01 crc kubenswrapper[4762]: I1009 15:01:01.169384 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 09 15:01:01 crc kubenswrapper[4762]: E1009 15:01:01.181973 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5c5aa839-5f42-42ca-9fae-8ed580491bd7-config-data podName:5c5aa839-5f42-42ca-9fae-8ed580491bd7 nodeName:}" failed. No retries permitted until 2025-10-09 15:01:01.681939402 +0000 UTC m=+5737.455730441 (durationBeforeRetry 500ms). Error: error cleaning subPath mounts for volume "config-data" (UniqueName: "kubernetes.io/secret/5c5aa839-5f42-42ca-9fae-8ed580491bd7-config-data") pod "5c5aa839-5f42-42ca-9fae-8ed580491bd7" (UID: "5c5aa839-5f42-42ca-9fae-8ed580491bd7") : error deleting /var/lib/kubelet/pods/5c5aa839-5f42-42ca-9fae-8ed580491bd7/volume-subpaths: remove /var/lib/kubelet/pods/5c5aa839-5f42-42ca-9fae-8ed580491bd7/volume-subpaths: no such file or directory Oct 09 15:01:01 crc kubenswrapper[4762]: I1009 15:01:01.186563 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5c5aa839-5f42-42ca-9fae-8ed580491bd7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5c5aa839-5f42-42ca-9fae-8ed580491bd7" (UID: "5c5aa839-5f42-42ca-9fae-8ed580491bd7"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:01:01 crc kubenswrapper[4762]: I1009 15:01:01.191933 4762 scope.go:117] "RemoveContainer" containerID="893ceac955f289406dfcd1ce0d50f6f26ad14f100a1f35e4c1c69ad0f256ff72" Oct 09 15:01:01 crc kubenswrapper[4762]: I1009 15:01:01.227881 4762 scope.go:117] "RemoveContainer" containerID="958fe7aa4b8f84332c69f6599680142210c8059d84b6778a0de19e53f1f3c4f8" Oct 09 15:01:01 crc kubenswrapper[4762]: E1009 15:01:01.228475 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"958fe7aa4b8f84332c69f6599680142210c8059d84b6778a0de19e53f1f3c4f8\": container with ID starting with 958fe7aa4b8f84332c69f6599680142210c8059d84b6778a0de19e53f1f3c4f8 not found: ID does not exist" containerID="958fe7aa4b8f84332c69f6599680142210c8059d84b6778a0de19e53f1f3c4f8" Oct 09 15:01:01 crc kubenswrapper[4762]: I1009 15:01:01.228536 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"958fe7aa4b8f84332c69f6599680142210c8059d84b6778a0de19e53f1f3c4f8"} err="failed to get container status \"958fe7aa4b8f84332c69f6599680142210c8059d84b6778a0de19e53f1f3c4f8\": rpc error: code = NotFound desc = could not find container \"958fe7aa4b8f84332c69f6599680142210c8059d84b6778a0de19e53f1f3c4f8\": container with ID starting with 958fe7aa4b8f84332c69f6599680142210c8059d84b6778a0de19e53f1f3c4f8 not found: ID does not exist" Oct 09 15:01:01 crc kubenswrapper[4762]: I1009 15:01:01.228578 4762 scope.go:117] "RemoveContainer" containerID="893ceac955f289406dfcd1ce0d50f6f26ad14f100a1f35e4c1c69ad0f256ff72" Oct 09 15:01:01 crc kubenswrapper[4762]: E1009 15:01:01.228995 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"893ceac955f289406dfcd1ce0d50f6f26ad14f100a1f35e4c1c69ad0f256ff72\": container with ID starting with 893ceac955f289406dfcd1ce0d50f6f26ad14f100a1f35e4c1c69ad0f256ff72 not found: ID does not exist" containerID="893ceac955f289406dfcd1ce0d50f6f26ad14f100a1f35e4c1c69ad0f256ff72" Oct 09 15:01:01 crc kubenswrapper[4762]: I1009 15:01:01.229030 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"893ceac955f289406dfcd1ce0d50f6f26ad14f100a1f35e4c1c69ad0f256ff72"} err="failed to get container status \"893ceac955f289406dfcd1ce0d50f6f26ad14f100a1f35e4c1c69ad0f256ff72\": rpc error: code = NotFound desc = could not find container \"893ceac955f289406dfcd1ce0d50f6f26ad14f100a1f35e4c1c69ad0f256ff72\": container with ID starting with 893ceac955f289406dfcd1ce0d50f6f26ad14f100a1f35e4c1c69ad0f256ff72 not found: ID does not exist" Oct 09 15:01:01 crc kubenswrapper[4762]: I1009 15:01:01.229059 4762 scope.go:117] "RemoveContainer" containerID="958fe7aa4b8f84332c69f6599680142210c8059d84b6778a0de19e53f1f3c4f8" Oct 09 15:01:01 crc kubenswrapper[4762]: I1009 15:01:01.229422 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"958fe7aa4b8f84332c69f6599680142210c8059d84b6778a0de19e53f1f3c4f8"} err="failed to get container status \"958fe7aa4b8f84332c69f6599680142210c8059d84b6778a0de19e53f1f3c4f8\": rpc error: code = NotFound desc = could not find container \"958fe7aa4b8f84332c69f6599680142210c8059d84b6778a0de19e53f1f3c4f8\": container with ID starting with 958fe7aa4b8f84332c69f6599680142210c8059d84b6778a0de19e53f1f3c4f8 not found: ID does not exist" Oct 09 15:01:01 crc kubenswrapper[4762]: I1009 15:01:01.229458 4762 
scope.go:117] "RemoveContainer" containerID="893ceac955f289406dfcd1ce0d50f6f26ad14f100a1f35e4c1c69ad0f256ff72" Oct 09 15:01:01 crc kubenswrapper[4762]: I1009 15:01:01.229702 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"893ceac955f289406dfcd1ce0d50f6f26ad14f100a1f35e4c1c69ad0f256ff72"} err="failed to get container status \"893ceac955f289406dfcd1ce0d50f6f26ad14f100a1f35e4c1c69ad0f256ff72\": rpc error: code = NotFound desc = could not find container \"893ceac955f289406dfcd1ce0d50f6f26ad14f100a1f35e4c1c69ad0f256ff72\": container with ID starting with 893ceac955f289406dfcd1ce0d50f6f26ad14f100a1f35e4c1c69ad0f256ff72 not found: ID does not exist" Oct 09 15:01:01 crc kubenswrapper[4762]: I1009 15:01:01.229728 4762 scope.go:117] "RemoveContainer" containerID="7b9c8f04af038809f23df2f105ee141e4a1032169bae03be78c7eb1203e93dda" Oct 09 15:01:01 crc kubenswrapper[4762]: I1009 15:01:01.250575 4762 scope.go:117] "RemoveContainer" containerID="aa284dd72bdce8dd98c0408cda450df9a8ee9bcfe7aeb650380ab5c36ee043ed" Oct 09 15:01:01 crc kubenswrapper[4762]: I1009 15:01:01.252652 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zjxst\" (UniqueName: \"kubernetes.io/projected/d3784a5e-8e7b-43d8-b6db-0023141b1a1b-kube-api-access-zjxst\") pod \"d3784a5e-8e7b-43d8-b6db-0023141b1a1b\" (UID: \"d3784a5e-8e7b-43d8-b6db-0023141b1a1b\") " Oct 09 15:01:01 crc kubenswrapper[4762]: I1009 15:01:01.252742 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d3784a5e-8e7b-43d8-b6db-0023141b1a1b-config-data\") pod \"d3784a5e-8e7b-43d8-b6db-0023141b1a1b\" (UID: \"d3784a5e-8e7b-43d8-b6db-0023141b1a1b\") " Oct 09 15:01:01 crc kubenswrapper[4762]: I1009 15:01:01.252827 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d3784a5e-8e7b-43d8-b6db-0023141b1a1b-combined-ca-bundle\") pod \"d3784a5e-8e7b-43d8-b6db-0023141b1a1b\" (UID: \"d3784a5e-8e7b-43d8-b6db-0023141b1a1b\") " Oct 09 15:01:01 crc kubenswrapper[4762]: I1009 15:01:01.252856 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d3784a5e-8e7b-43d8-b6db-0023141b1a1b-logs\") pod \"d3784a5e-8e7b-43d8-b6db-0023141b1a1b\" (UID: \"d3784a5e-8e7b-43d8-b6db-0023141b1a1b\") " Oct 09 15:01:01 crc kubenswrapper[4762]: I1009 15:01:01.253377 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hd6ws\" (UniqueName: \"kubernetes.io/projected/5c5aa839-5f42-42ca-9fae-8ed580491bd7-kube-api-access-hd6ws\") on node \"crc\" DevicePath \"\"" Oct 09 15:01:01 crc kubenswrapper[4762]: I1009 15:01:01.253392 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c5aa839-5f42-42ca-9fae-8ed580491bd7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 15:01:01 crc kubenswrapper[4762]: I1009 15:01:01.253401 4762 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5c5aa839-5f42-42ca-9fae-8ed580491bd7-logs\") on node \"crc\" DevicePath \"\"" Oct 09 15:01:01 crc kubenswrapper[4762]: I1009 15:01:01.253708 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d3784a5e-8e7b-43d8-b6db-0023141b1a1b-logs" (OuterVolumeSpecName: "logs") pod "d3784a5e-8e7b-43d8-b6db-0023141b1a1b" 
(UID: "d3784a5e-8e7b-43d8-b6db-0023141b1a1b"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 15:01:01 crc kubenswrapper[4762]: I1009 15:01:01.260558 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d3784a5e-8e7b-43d8-b6db-0023141b1a1b-kube-api-access-zjxst" (OuterVolumeSpecName: "kube-api-access-zjxst") pod "d3784a5e-8e7b-43d8-b6db-0023141b1a1b" (UID: "d3784a5e-8e7b-43d8-b6db-0023141b1a1b"). InnerVolumeSpecName "kube-api-access-zjxst". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 15:01:01 crc kubenswrapper[4762]: I1009 15:01:01.270324 4762 scope.go:117] "RemoveContainer" containerID="7b9c8f04af038809f23df2f105ee141e4a1032169bae03be78c7eb1203e93dda" Oct 09 15:01:01 crc kubenswrapper[4762]: E1009 15:01:01.270897 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7b9c8f04af038809f23df2f105ee141e4a1032169bae03be78c7eb1203e93dda\": container with ID starting with 7b9c8f04af038809f23df2f105ee141e4a1032169bae03be78c7eb1203e93dda not found: ID does not exist" containerID="7b9c8f04af038809f23df2f105ee141e4a1032169bae03be78c7eb1203e93dda" Oct 09 15:01:01 crc kubenswrapper[4762]: I1009 15:01:01.270937 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7b9c8f04af038809f23df2f105ee141e4a1032169bae03be78c7eb1203e93dda"} err="failed to get container status \"7b9c8f04af038809f23df2f105ee141e4a1032169bae03be78c7eb1203e93dda\": rpc error: code = NotFound desc = could not find container \"7b9c8f04af038809f23df2f105ee141e4a1032169bae03be78c7eb1203e93dda\": container with ID starting with 7b9c8f04af038809f23df2f105ee141e4a1032169bae03be78c7eb1203e93dda not found: ID does not exist" Oct 09 15:01:01 crc kubenswrapper[4762]: I1009 15:01:01.270963 4762 scope.go:117] "RemoveContainer" containerID="aa284dd72bdce8dd98c0408cda450df9a8ee9bcfe7aeb650380ab5c36ee043ed" Oct 09 15:01:01 crc kubenswrapper[4762]: E1009 15:01:01.271299 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aa284dd72bdce8dd98c0408cda450df9a8ee9bcfe7aeb650380ab5c36ee043ed\": container with ID starting with aa284dd72bdce8dd98c0408cda450df9a8ee9bcfe7aeb650380ab5c36ee043ed not found: ID does not exist" containerID="aa284dd72bdce8dd98c0408cda450df9a8ee9bcfe7aeb650380ab5c36ee043ed" Oct 09 15:01:01 crc kubenswrapper[4762]: I1009 15:01:01.271322 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aa284dd72bdce8dd98c0408cda450df9a8ee9bcfe7aeb650380ab5c36ee043ed"} err="failed to get container status \"aa284dd72bdce8dd98c0408cda450df9a8ee9bcfe7aeb650380ab5c36ee043ed\": rpc error: code = NotFound desc = could not find container \"aa284dd72bdce8dd98c0408cda450df9a8ee9bcfe7aeb650380ab5c36ee043ed\": container with ID starting with aa284dd72bdce8dd98c0408cda450df9a8ee9bcfe7aeb650380ab5c36ee043ed not found: ID does not exist" Oct 09 15:01:01 crc kubenswrapper[4762]: I1009 15:01:01.271339 4762 scope.go:117] "RemoveContainer" containerID="7b9c8f04af038809f23df2f105ee141e4a1032169bae03be78c7eb1203e93dda" Oct 09 15:01:01 crc kubenswrapper[4762]: I1009 15:01:01.271989 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7b9c8f04af038809f23df2f105ee141e4a1032169bae03be78c7eb1203e93dda"} err="failed to get container status 
\"7b9c8f04af038809f23df2f105ee141e4a1032169bae03be78c7eb1203e93dda\": rpc error: code = NotFound desc = could not find container \"7b9c8f04af038809f23df2f105ee141e4a1032169bae03be78c7eb1203e93dda\": container with ID starting with 7b9c8f04af038809f23df2f105ee141e4a1032169bae03be78c7eb1203e93dda not found: ID does not exist" Oct 09 15:01:01 crc kubenswrapper[4762]: I1009 15:01:01.272011 4762 scope.go:117] "RemoveContainer" containerID="aa284dd72bdce8dd98c0408cda450df9a8ee9bcfe7aeb650380ab5c36ee043ed" Oct 09 15:01:01 crc kubenswrapper[4762]: I1009 15:01:01.272943 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aa284dd72bdce8dd98c0408cda450df9a8ee9bcfe7aeb650380ab5c36ee043ed"} err="failed to get container status \"aa284dd72bdce8dd98c0408cda450df9a8ee9bcfe7aeb650380ab5c36ee043ed\": rpc error: code = NotFound desc = could not find container \"aa284dd72bdce8dd98c0408cda450df9a8ee9bcfe7aeb650380ab5c36ee043ed\": container with ID starting with aa284dd72bdce8dd98c0408cda450df9a8ee9bcfe7aeb650380ab5c36ee043ed not found: ID does not exist" Oct 09 15:01:01 crc kubenswrapper[4762]: I1009 15:01:01.283247 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d3784a5e-8e7b-43d8-b6db-0023141b1a1b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d3784a5e-8e7b-43d8-b6db-0023141b1a1b" (UID: "d3784a5e-8e7b-43d8-b6db-0023141b1a1b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:01:01 crc kubenswrapper[4762]: I1009 15:01:01.287167 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d3784a5e-8e7b-43d8-b6db-0023141b1a1b-config-data" (OuterVolumeSpecName: "config-data") pod "d3784a5e-8e7b-43d8-b6db-0023141b1a1b" (UID: "d3784a5e-8e7b-43d8-b6db-0023141b1a1b"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:01:01 crc kubenswrapper[4762]: I1009 15:01:01.355253 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zjxst\" (UniqueName: \"kubernetes.io/projected/d3784a5e-8e7b-43d8-b6db-0023141b1a1b-kube-api-access-zjxst\") on node \"crc\" DevicePath \"\"" Oct 09 15:01:01 crc kubenswrapper[4762]: I1009 15:01:01.355291 4762 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d3784a5e-8e7b-43d8-b6db-0023141b1a1b-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 15:01:01 crc kubenswrapper[4762]: I1009 15:01:01.355301 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d3784a5e-8e7b-43d8-b6db-0023141b1a1b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 15:01:01 crc kubenswrapper[4762]: I1009 15:01:01.355311 4762 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d3784a5e-8e7b-43d8-b6db-0023141b1a1b-logs\") on node \"crc\" DevicePath \"\"" Oct 09 15:01:01 crc kubenswrapper[4762]: I1009 15:01:01.494151 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 09 15:01:01 crc kubenswrapper[4762]: I1009 15:01:01.511265 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Oct 09 15:01:01 crc kubenswrapper[4762]: I1009 15:01:01.520695 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Oct 09 15:01:01 crc kubenswrapper[4762]: E1009 15:01:01.521238 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c5aa839-5f42-42ca-9fae-8ed580491bd7" containerName="nova-api-api" Oct 09 15:01:01 crc kubenswrapper[4762]: I1009 15:01:01.521335 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c5aa839-5f42-42ca-9fae-8ed580491bd7" containerName="nova-api-api" Oct 09 15:01:01 crc kubenswrapper[4762]: E1009 15:01:01.521414 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c5aa839-5f42-42ca-9fae-8ed580491bd7" containerName="nova-api-log" Oct 09 15:01:01 crc kubenswrapper[4762]: I1009 15:01:01.521480 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c5aa839-5f42-42ca-9fae-8ed580491bd7" containerName="nova-api-log" Oct 09 15:01:01 crc kubenswrapper[4762]: E1009 15:01:01.521539 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d3784a5e-8e7b-43d8-b6db-0023141b1a1b" containerName="nova-metadata-log" Oct 09 15:01:01 crc kubenswrapper[4762]: I1009 15:01:01.521604 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="d3784a5e-8e7b-43d8-b6db-0023141b1a1b" containerName="nova-metadata-log" Oct 09 15:01:01 crc kubenswrapper[4762]: E1009 15:01:01.521687 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d3784a5e-8e7b-43d8-b6db-0023141b1a1b" containerName="nova-metadata-metadata" Oct 09 15:01:01 crc kubenswrapper[4762]: I1009 15:01:01.521742 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="d3784a5e-8e7b-43d8-b6db-0023141b1a1b" containerName="nova-metadata-metadata" Oct 09 15:01:01 crc kubenswrapper[4762]: I1009 15:01:01.521998 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="d3784a5e-8e7b-43d8-b6db-0023141b1a1b" containerName="nova-metadata-log" Oct 09 15:01:01 crc kubenswrapper[4762]: I1009 15:01:01.522081 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="5c5aa839-5f42-42ca-9fae-8ed580491bd7" 
containerName="nova-api-log" Oct 09 15:01:01 crc kubenswrapper[4762]: I1009 15:01:01.522139 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="5c5aa839-5f42-42ca-9fae-8ed580491bd7" containerName="nova-api-api" Oct 09 15:01:01 crc kubenswrapper[4762]: I1009 15:01:01.522202 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="d3784a5e-8e7b-43d8-b6db-0023141b1a1b" containerName="nova-metadata-metadata" Oct 09 15:01:01 crc kubenswrapper[4762]: I1009 15:01:01.523457 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 09 15:01:01 crc kubenswrapper[4762]: I1009 15:01:01.527164 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Oct 09 15:01:01 crc kubenswrapper[4762]: I1009 15:01:01.529906 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 09 15:01:01 crc kubenswrapper[4762]: I1009 15:01:01.660611 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4sz6x\" (UniqueName: \"kubernetes.io/projected/fbaacaa1-30a1-472f-bb6b-bdbc1aab34a0-kube-api-access-4sz6x\") pod \"nova-metadata-0\" (UID: \"fbaacaa1-30a1-472f-bb6b-bdbc1aab34a0\") " pod="openstack/nova-metadata-0" Oct 09 15:01:01 crc kubenswrapper[4762]: I1009 15:01:01.660680 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fbaacaa1-30a1-472f-bb6b-bdbc1aab34a0-logs\") pod \"nova-metadata-0\" (UID: \"fbaacaa1-30a1-472f-bb6b-bdbc1aab34a0\") " pod="openstack/nova-metadata-0" Oct 09 15:01:01 crc kubenswrapper[4762]: I1009 15:01:01.660755 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fbaacaa1-30a1-472f-bb6b-bdbc1aab34a0-config-data\") pod \"nova-metadata-0\" (UID: \"fbaacaa1-30a1-472f-bb6b-bdbc1aab34a0\") " pod="openstack/nova-metadata-0" Oct 09 15:01:01 crc kubenswrapper[4762]: I1009 15:01:01.660817 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbaacaa1-30a1-472f-bb6b-bdbc1aab34a0-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"fbaacaa1-30a1-472f-bb6b-bdbc1aab34a0\") " pod="openstack/nova-metadata-0" Oct 09 15:01:01 crc kubenswrapper[4762]: I1009 15:01:01.762692 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c5aa839-5f42-42ca-9fae-8ed580491bd7-config-data\") pod \"5c5aa839-5f42-42ca-9fae-8ed580491bd7\" (UID: \"5c5aa839-5f42-42ca-9fae-8ed580491bd7\") " Oct 09 15:01:01 crc kubenswrapper[4762]: I1009 15:01:01.763177 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbaacaa1-30a1-472f-bb6b-bdbc1aab34a0-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"fbaacaa1-30a1-472f-bb6b-bdbc1aab34a0\") " pod="openstack/nova-metadata-0" Oct 09 15:01:01 crc kubenswrapper[4762]: I1009 15:01:01.763330 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4sz6x\" (UniqueName: \"kubernetes.io/projected/fbaacaa1-30a1-472f-bb6b-bdbc1aab34a0-kube-api-access-4sz6x\") pod \"nova-metadata-0\" (UID: \"fbaacaa1-30a1-472f-bb6b-bdbc1aab34a0\") " pod="openstack/nova-metadata-0" Oct 09 15:01:01 
crc kubenswrapper[4762]: I1009 15:01:01.763363 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fbaacaa1-30a1-472f-bb6b-bdbc1aab34a0-logs\") pod \"nova-metadata-0\" (UID: \"fbaacaa1-30a1-472f-bb6b-bdbc1aab34a0\") " pod="openstack/nova-metadata-0" Oct 09 15:01:01 crc kubenswrapper[4762]: I1009 15:01:01.763418 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fbaacaa1-30a1-472f-bb6b-bdbc1aab34a0-config-data\") pod \"nova-metadata-0\" (UID: \"fbaacaa1-30a1-472f-bb6b-bdbc1aab34a0\") " pod="openstack/nova-metadata-0" Oct 09 15:01:01 crc kubenswrapper[4762]: I1009 15:01:01.763904 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fbaacaa1-30a1-472f-bb6b-bdbc1aab34a0-logs\") pod \"nova-metadata-0\" (UID: \"fbaacaa1-30a1-472f-bb6b-bdbc1aab34a0\") " pod="openstack/nova-metadata-0" Oct 09 15:01:01 crc kubenswrapper[4762]: I1009 15:01:01.767043 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fbaacaa1-30a1-472f-bb6b-bdbc1aab34a0-config-data\") pod \"nova-metadata-0\" (UID: \"fbaacaa1-30a1-472f-bb6b-bdbc1aab34a0\") " pod="openstack/nova-metadata-0" Oct 09 15:01:01 crc kubenswrapper[4762]: I1009 15:01:01.767208 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5c5aa839-5f42-42ca-9fae-8ed580491bd7-config-data" (OuterVolumeSpecName: "config-data") pod "5c5aa839-5f42-42ca-9fae-8ed580491bd7" (UID: "5c5aa839-5f42-42ca-9fae-8ed580491bd7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:01:01 crc kubenswrapper[4762]: I1009 15:01:01.772251 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbaacaa1-30a1-472f-bb6b-bdbc1aab34a0-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"fbaacaa1-30a1-472f-bb6b-bdbc1aab34a0\") " pod="openstack/nova-metadata-0" Oct 09 15:01:01 crc kubenswrapper[4762]: I1009 15:01:01.790408 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4sz6x\" (UniqueName: \"kubernetes.io/projected/fbaacaa1-30a1-472f-bb6b-bdbc1aab34a0-kube-api-access-4sz6x\") pod \"nova-metadata-0\" (UID: \"fbaacaa1-30a1-472f-bb6b-bdbc1aab34a0\") " pod="openstack/nova-metadata-0" Oct 09 15:01:01 crc kubenswrapper[4762]: I1009 15:01:01.856685 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 09 15:01:01 crc kubenswrapper[4762]: I1009 15:01:01.864897 4762 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c5aa839-5f42-42ca-9fae-8ed580491bd7-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 15:01:02 crc kubenswrapper[4762]: I1009 15:01:02.005046 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 09 15:01:02 crc kubenswrapper[4762]: I1009 15:01:02.016842 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Oct 09 15:01:02 crc kubenswrapper[4762]: I1009 15:01:02.029775 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Oct 09 15:01:02 crc kubenswrapper[4762]: I1009 15:01:02.044039 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 09 15:01:02 crc kubenswrapper[4762]: I1009 15:01:02.044248 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 09 15:01:02 crc kubenswrapper[4762]: I1009 15:01:02.049585 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Oct 09 15:01:02 crc kubenswrapper[4762]: I1009 15:01:02.171176 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be2ba098-febe-44ac-bb12-b3c29e9aaa8f-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"be2ba098-febe-44ac-bb12-b3c29e9aaa8f\") " pod="openstack/nova-api-0" Oct 09 15:01:02 crc kubenswrapper[4762]: I1009 15:01:02.171585 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gc74k\" (UniqueName: \"kubernetes.io/projected/be2ba098-febe-44ac-bb12-b3c29e9aaa8f-kube-api-access-gc74k\") pod \"nova-api-0\" (UID: \"be2ba098-febe-44ac-bb12-b3c29e9aaa8f\") " pod="openstack/nova-api-0" Oct 09 15:01:02 crc kubenswrapper[4762]: I1009 15:01:02.171746 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/be2ba098-febe-44ac-bb12-b3c29e9aaa8f-logs\") pod \"nova-api-0\" (UID: \"be2ba098-febe-44ac-bb12-b3c29e9aaa8f\") " pod="openstack/nova-api-0" Oct 09 15:01:02 crc kubenswrapper[4762]: I1009 15:01:02.171805 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/be2ba098-febe-44ac-bb12-b3c29e9aaa8f-config-data\") pod \"nova-api-0\" (UID: \"be2ba098-febe-44ac-bb12-b3c29e9aaa8f\") " pod="openstack/nova-api-0" Oct 09 15:01:02 crc kubenswrapper[4762]: I1009 15:01:02.186562 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29333701-x8pdl" event={"ID":"7ceaf6c8-14e1-4b7a-b4be-7cc671d642bf","Type":"ContainerStarted","Data":"387e81d1b8af94d0ff5e8b3b8370559922c262fc62e2ff12d79d1e18caad18c3"} Oct 09 15:01:02 crc kubenswrapper[4762]: I1009 15:01:02.191230 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"1d97958c-5c80-4412-8277-d6ab937f9d0d","Type":"ContainerStarted","Data":"edc931756ed71cf5993448a5246873dcfb5d1b50111573864185a409dbfa82cb"} Oct 09 15:01:02 crc kubenswrapper[4762]: I1009 15:01:02.191282 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" 
event={"ID":"1d97958c-5c80-4412-8277-d6ab937f9d0d","Type":"ContainerStarted","Data":"25dd514855a987b3e3453b3c9406de8311a0b4ba59405a8040ac5ebc6fda9354"} Oct 09 15:01:02 crc kubenswrapper[4762]: I1009 15:01:02.191363 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Oct 09 15:01:02 crc kubenswrapper[4762]: I1009 15:01:02.203894 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-cron-29333701-x8pdl" podStartSLOduration=2.203874197 podStartE2EDuration="2.203874197s" podCreationTimestamp="2025-10-09 15:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 15:01:02.202289676 +0000 UTC m=+5737.976080715" watchObservedRunningTime="2025-10-09 15:01:02.203874197 +0000 UTC m=+5737.977665236" Oct 09 15:01:02 crc kubenswrapper[4762]: I1009 15:01:02.273705 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/be2ba098-febe-44ac-bb12-b3c29e9aaa8f-logs\") pod \"nova-api-0\" (UID: \"be2ba098-febe-44ac-bb12-b3c29e9aaa8f\") " pod="openstack/nova-api-0" Oct 09 15:01:02 crc kubenswrapper[4762]: I1009 15:01:02.273877 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/be2ba098-febe-44ac-bb12-b3c29e9aaa8f-config-data\") pod \"nova-api-0\" (UID: \"be2ba098-febe-44ac-bb12-b3c29e9aaa8f\") " pod="openstack/nova-api-0" Oct 09 15:01:02 crc kubenswrapper[4762]: I1009 15:01:02.274017 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be2ba098-febe-44ac-bb12-b3c29e9aaa8f-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"be2ba098-febe-44ac-bb12-b3c29e9aaa8f\") " pod="openstack/nova-api-0" Oct 09 15:01:02 crc kubenswrapper[4762]: I1009 15:01:02.274183 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gc74k\" (UniqueName: \"kubernetes.io/projected/be2ba098-febe-44ac-bb12-b3c29e9aaa8f-kube-api-access-gc74k\") pod \"nova-api-0\" (UID: \"be2ba098-febe-44ac-bb12-b3c29e9aaa8f\") " pod="openstack/nova-api-0" Oct 09 15:01:02 crc kubenswrapper[4762]: I1009 15:01:02.276668 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/be2ba098-febe-44ac-bb12-b3c29e9aaa8f-logs\") pod \"nova-api-0\" (UID: \"be2ba098-febe-44ac-bb12-b3c29e9aaa8f\") " pod="openstack/nova-api-0" Oct 09 15:01:02 crc kubenswrapper[4762]: I1009 15:01:02.288024 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/be2ba098-febe-44ac-bb12-b3c29e9aaa8f-config-data\") pod \"nova-api-0\" (UID: \"be2ba098-febe-44ac-bb12-b3c29e9aaa8f\") " pod="openstack/nova-api-0" Oct 09 15:01:02 crc kubenswrapper[4762]: I1009 15:01:02.292182 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be2ba098-febe-44ac-bb12-b3c29e9aaa8f-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"be2ba098-febe-44ac-bb12-b3c29e9aaa8f\") " pod="openstack/nova-api-0" Oct 09 15:01:02 crc kubenswrapper[4762]: I1009 15:01:02.298205 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gc74k\" (UniqueName: 
\"kubernetes.io/projected/be2ba098-febe-44ac-bb12-b3c29e9aaa8f-kube-api-access-gc74k\") pod \"nova-api-0\" (UID: \"be2ba098-febe-44ac-bb12-b3c29e9aaa8f\") " pod="openstack/nova-api-0" Oct 09 15:01:02 crc kubenswrapper[4762]: I1009 15:01:02.341926 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.341901421 podStartE2EDuration="2.341901421s" podCreationTimestamp="2025-10-09 15:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 15:01:02.218490738 +0000 UTC m=+5737.992281777" watchObservedRunningTime="2025-10-09 15:01:02.341901421 +0000 UTC m=+5738.115692460" Oct 09 15:01:02 crc kubenswrapper[4762]: I1009 15:01:02.348518 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 09 15:01:02 crc kubenswrapper[4762]: W1009 15:01:02.351142 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfbaacaa1_30a1_472f_bb6b_bdbc1aab34a0.slice/crio-3e250a711316f592ebbb408bdd7d5604beb49667c23a80460c7c92edd0ed45f2 WatchSource:0}: Error finding container 3e250a711316f592ebbb408bdd7d5604beb49667c23a80460c7c92edd0ed45f2: Status 404 returned error can't find the container with id 3e250a711316f592ebbb408bdd7d5604beb49667c23a80460c7c92edd0ed45f2 Oct 09 15:01:02 crc kubenswrapper[4762]: I1009 15:01:02.365010 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 09 15:01:02 crc kubenswrapper[4762]: I1009 15:01:02.622576 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Oct 09 15:01:02 crc kubenswrapper[4762]: I1009 15:01:02.648025 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Oct 09 15:01:02 crc kubenswrapper[4762]: I1009 15:01:02.658004 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-7c4b585c7-nmh5n" Oct 09 15:01:02 crc kubenswrapper[4762]: I1009 15:01:02.772706 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-649fc75c7c-h4tvb"] Oct 09 15:01:02 crc kubenswrapper[4762]: I1009 15:01:02.772940 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-649fc75c7c-h4tvb" podUID="b7391852-4a4f-440c-aaf1-5d02555553e7" containerName="dnsmasq-dns" containerID="cri-o://0c34795c2d3d01d1a824bbd907f9eca6b86263ee424d11a0dcb85a372e66f36d" gracePeriod=10 Oct 09 15:01:02 crc kubenswrapper[4762]: I1009 15:01:02.860047 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 09 15:01:02 crc kubenswrapper[4762]: I1009 15:01:02.986206 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5c5aa839-5f42-42ca-9fae-8ed580491bd7" path="/var/lib/kubelet/pods/5c5aa839-5f42-42ca-9fae-8ed580491bd7/volumes" Oct 09 15:01:02 crc kubenswrapper[4762]: I1009 15:01:02.987120 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d3784a5e-8e7b-43d8-b6db-0023141b1a1b" path="/var/lib/kubelet/pods/d3784a5e-8e7b-43d8-b6db-0023141b1a1b/volumes" Oct 09 15:01:03 crc kubenswrapper[4762]: I1009 15:01:03.193249 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-649fc75c7c-h4tvb" Oct 09 15:01:03 crc kubenswrapper[4762]: I1009 15:01:03.252543 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"fbaacaa1-30a1-472f-bb6b-bdbc1aab34a0","Type":"ContainerStarted","Data":"1ba16ce4c116f6a9b3aa59288476d1a6c8cef873dfef38e75255b439c2963beb"} Oct 09 15:01:03 crc kubenswrapper[4762]: I1009 15:01:03.252610 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"fbaacaa1-30a1-472f-bb6b-bdbc1aab34a0","Type":"ContainerStarted","Data":"2b2500fdcfb8a55897f270ca78a329aedb0ae3f461d9deea44e237f0f171fb31"} Oct 09 15:01:03 crc kubenswrapper[4762]: I1009 15:01:03.252658 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"fbaacaa1-30a1-472f-bb6b-bdbc1aab34a0","Type":"ContainerStarted","Data":"3e250a711316f592ebbb408bdd7d5604beb49667c23a80460c7c92edd0ed45f2"} Oct 09 15:01:03 crc kubenswrapper[4762]: I1009 15:01:03.265935 4762 generic.go:334] "Generic (PLEG): container finished" podID="b7391852-4a4f-440c-aaf1-5d02555553e7" containerID="0c34795c2d3d01d1a824bbd907f9eca6b86263ee424d11a0dcb85a372e66f36d" exitCode=0 Oct 09 15:01:03 crc kubenswrapper[4762]: I1009 15:01:03.266012 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-649fc75c7c-h4tvb" event={"ID":"b7391852-4a4f-440c-aaf1-5d02555553e7","Type":"ContainerDied","Data":"0c34795c2d3d01d1a824bbd907f9eca6b86263ee424d11a0dcb85a372e66f36d"} Oct 09 15:01:03 crc kubenswrapper[4762]: I1009 15:01:03.266044 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-649fc75c7c-h4tvb" event={"ID":"b7391852-4a4f-440c-aaf1-5d02555553e7","Type":"ContainerDied","Data":"5ed54814350838f3f03d39c0e962bd82a1035749544c08a91339cce47ade3603"} Oct 09 15:01:03 crc kubenswrapper[4762]: I1009 15:01:03.266065 4762 scope.go:117] "RemoveContainer" containerID="0c34795c2d3d01d1a824bbd907f9eca6b86263ee424d11a0dcb85a372e66f36d" Oct 09 15:01:03 crc kubenswrapper[4762]: I1009 15:01:03.266242 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-649fc75c7c-h4tvb" Oct 09 15:01:03 crc kubenswrapper[4762]: I1009 15:01:03.283957 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.283934536 podStartE2EDuration="2.283934536s" podCreationTimestamp="2025-10-09 15:01:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 15:01:03.274443929 +0000 UTC m=+5739.048234978" watchObservedRunningTime="2025-10-09 15:01:03.283934536 +0000 UTC m=+5739.057725575" Oct 09 15:01:03 crc kubenswrapper[4762]: I1009 15:01:03.285128 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"be2ba098-febe-44ac-bb12-b3c29e9aaa8f","Type":"ContainerStarted","Data":"8d8f2594356b0eae1c6a4dc924282c18feaf2bbbf38165706edf1ef8cfc6985c"} Oct 09 15:01:03 crc kubenswrapper[4762]: I1009 15:01:03.285169 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"be2ba098-febe-44ac-bb12-b3c29e9aaa8f","Type":"ContainerStarted","Data":"8fcd37aae434a63e15513316720129d7ad7a6e63aac6d39c388d50444a0e51f4"} Oct 09 15:01:03 crc kubenswrapper[4762]: I1009 15:01:03.293796 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b7391852-4a4f-440c-aaf1-5d02555553e7-ovsdbserver-nb\") pod \"b7391852-4a4f-440c-aaf1-5d02555553e7\" (UID: \"b7391852-4a4f-440c-aaf1-5d02555553e7\") " Oct 09 15:01:03 crc kubenswrapper[4762]: I1009 15:01:03.294017 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkmfr\" (UniqueName: \"kubernetes.io/projected/b7391852-4a4f-440c-aaf1-5d02555553e7-kube-api-access-zkmfr\") pod \"b7391852-4a4f-440c-aaf1-5d02555553e7\" (UID: \"b7391852-4a4f-440c-aaf1-5d02555553e7\") " Oct 09 15:01:03 crc kubenswrapper[4762]: I1009 15:01:03.294066 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b7391852-4a4f-440c-aaf1-5d02555553e7-ovsdbserver-sb\") pod \"b7391852-4a4f-440c-aaf1-5d02555553e7\" (UID: \"b7391852-4a4f-440c-aaf1-5d02555553e7\") " Oct 09 15:01:03 crc kubenswrapper[4762]: I1009 15:01:03.294095 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b7391852-4a4f-440c-aaf1-5d02555553e7-dns-svc\") pod \"b7391852-4a4f-440c-aaf1-5d02555553e7\" (UID: \"b7391852-4a4f-440c-aaf1-5d02555553e7\") " Oct 09 15:01:03 crc kubenswrapper[4762]: I1009 15:01:03.294191 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b7391852-4a4f-440c-aaf1-5d02555553e7-config\") pod \"b7391852-4a4f-440c-aaf1-5d02555553e7\" (UID: \"b7391852-4a4f-440c-aaf1-5d02555553e7\") " Oct 09 15:01:03 crc kubenswrapper[4762]: I1009 15:01:03.296250 4762 scope.go:117] "RemoveContainer" containerID="f5b24c1b65e8d9af0d93034d889f53147ca425fb02097f58a58dcabab651be40" Oct 09 15:01:03 crc kubenswrapper[4762]: I1009 15:01:03.297302 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Oct 09 15:01:03 crc kubenswrapper[4762]: I1009 15:01:03.304837 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/projected/b7391852-4a4f-440c-aaf1-5d02555553e7-kube-api-access-zkmfr" (OuterVolumeSpecName: "kube-api-access-zkmfr") pod "b7391852-4a4f-440c-aaf1-5d02555553e7" (UID: "b7391852-4a4f-440c-aaf1-5d02555553e7"). InnerVolumeSpecName "kube-api-access-zkmfr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 15:01:03 crc kubenswrapper[4762]: I1009 15:01:03.396350 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkmfr\" (UniqueName: \"kubernetes.io/projected/b7391852-4a4f-440c-aaf1-5d02555553e7-kube-api-access-zkmfr\") on node \"crc\" DevicePath \"\"" Oct 09 15:01:03 crc kubenswrapper[4762]: I1009 15:01:03.413060 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b7391852-4a4f-440c-aaf1-5d02555553e7-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "b7391852-4a4f-440c-aaf1-5d02555553e7" (UID: "b7391852-4a4f-440c-aaf1-5d02555553e7"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 15:01:03 crc kubenswrapper[4762]: I1009 15:01:03.420025 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b7391852-4a4f-440c-aaf1-5d02555553e7-config" (OuterVolumeSpecName: "config") pod "b7391852-4a4f-440c-aaf1-5d02555553e7" (UID: "b7391852-4a4f-440c-aaf1-5d02555553e7"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 15:01:03 crc kubenswrapper[4762]: I1009 15:01:03.420651 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b7391852-4a4f-440c-aaf1-5d02555553e7-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "b7391852-4a4f-440c-aaf1-5d02555553e7" (UID: "b7391852-4a4f-440c-aaf1-5d02555553e7"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 15:01:03 crc kubenswrapper[4762]: I1009 15:01:03.429395 4762 scope.go:117] "RemoveContainer" containerID="0c34795c2d3d01d1a824bbd907f9eca6b86263ee424d11a0dcb85a372e66f36d" Oct 09 15:01:03 crc kubenswrapper[4762]: E1009 15:01:03.431798 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0c34795c2d3d01d1a824bbd907f9eca6b86263ee424d11a0dcb85a372e66f36d\": container with ID starting with 0c34795c2d3d01d1a824bbd907f9eca6b86263ee424d11a0dcb85a372e66f36d not found: ID does not exist" containerID="0c34795c2d3d01d1a824bbd907f9eca6b86263ee424d11a0dcb85a372e66f36d" Oct 09 15:01:03 crc kubenswrapper[4762]: I1009 15:01:03.431848 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0c34795c2d3d01d1a824bbd907f9eca6b86263ee424d11a0dcb85a372e66f36d"} err="failed to get container status \"0c34795c2d3d01d1a824bbd907f9eca6b86263ee424d11a0dcb85a372e66f36d\": rpc error: code = NotFound desc = could not find container \"0c34795c2d3d01d1a824bbd907f9eca6b86263ee424d11a0dcb85a372e66f36d\": container with ID starting with 0c34795c2d3d01d1a824bbd907f9eca6b86263ee424d11a0dcb85a372e66f36d not found: ID does not exist" Oct 09 15:01:03 crc kubenswrapper[4762]: I1009 15:01:03.431903 4762 scope.go:117] "RemoveContainer" containerID="f5b24c1b65e8d9af0d93034d889f53147ca425fb02097f58a58dcabab651be40" Oct 09 15:01:03 crc kubenswrapper[4762]: E1009 15:01:03.432340 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f5b24c1b65e8d9af0d93034d889f53147ca425fb02097f58a58dcabab651be40\": container with ID starting with f5b24c1b65e8d9af0d93034d889f53147ca425fb02097f58a58dcabab651be40 not found: ID does not exist" containerID="f5b24c1b65e8d9af0d93034d889f53147ca425fb02097f58a58dcabab651be40" Oct 09 15:01:03 crc kubenswrapper[4762]: I1009 15:01:03.432370 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f5b24c1b65e8d9af0d93034d889f53147ca425fb02097f58a58dcabab651be40"} err="failed to get container status \"f5b24c1b65e8d9af0d93034d889f53147ca425fb02097f58a58dcabab651be40\": rpc error: code = NotFound desc = could not find container \"f5b24c1b65e8d9af0d93034d889f53147ca425fb02097f58a58dcabab651be40\": container with ID starting with f5b24c1b65e8d9af0d93034d889f53147ca425fb02097f58a58dcabab651be40 not found: ID does not exist" Oct 09 15:01:03 crc kubenswrapper[4762]: I1009 15:01:03.447625 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b7391852-4a4f-440c-aaf1-5d02555553e7-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "b7391852-4a4f-440c-aaf1-5d02555553e7" (UID: "b7391852-4a4f-440c-aaf1-5d02555553e7"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 15:01:03 crc kubenswrapper[4762]: I1009 15:01:03.498471 4762 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b7391852-4a4f-440c-aaf1-5d02555553e7-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 09 15:01:03 crc kubenswrapper[4762]: I1009 15:01:03.498506 4762 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b7391852-4a4f-440c-aaf1-5d02555553e7-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 09 15:01:03 crc kubenswrapper[4762]: I1009 15:01:03.498516 4762 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b7391852-4a4f-440c-aaf1-5d02555553e7-config\") on node \"crc\" DevicePath \"\"" Oct 09 15:01:03 crc kubenswrapper[4762]: I1009 15:01:03.498527 4762 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b7391852-4a4f-440c-aaf1-5d02555553e7-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 09 15:01:03 crc kubenswrapper[4762]: I1009 15:01:03.611334 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-649fc75c7c-h4tvb"] Oct 09 15:01:03 crc kubenswrapper[4762]: I1009 15:01:03.620723 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-649fc75c7c-h4tvb"] Oct 09 15:01:04 crc kubenswrapper[4762]: I1009 15:01:04.295276 4762 generic.go:334] "Generic (PLEG): container finished" podID="7ceaf6c8-14e1-4b7a-b4be-7cc671d642bf" containerID="387e81d1b8af94d0ff5e8b3b8370559922c262fc62e2ff12d79d1e18caad18c3" exitCode=0 Oct 09 15:01:04 crc kubenswrapper[4762]: I1009 15:01:04.295353 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29333701-x8pdl" event={"ID":"7ceaf6c8-14e1-4b7a-b4be-7cc671d642bf","Type":"ContainerDied","Data":"387e81d1b8af94d0ff5e8b3b8370559922c262fc62e2ff12d79d1e18caad18c3"} Oct 09 15:01:04 crc kubenswrapper[4762]: I1009 15:01:04.297173 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"be2ba098-febe-44ac-bb12-b3c29e9aaa8f","Type":"ContainerStarted","Data":"b7e68933a9a9c53c9232e10d05da4ecc384188d9d36cfd9762cef05b39400dc2"} Oct 09 15:01:04 crc kubenswrapper[4762]: I1009 15:01:04.337315 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.337292069 podStartE2EDuration="2.337292069s" podCreationTimestamp="2025-10-09 15:01:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 15:01:04.330654596 +0000 UTC m=+5740.104445635" watchObservedRunningTime="2025-10-09 15:01:04.337292069 +0000 UTC m=+5740.111083108" Oct 09 15:01:04 crc kubenswrapper[4762]: I1009 15:01:04.975929 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b7391852-4a4f-440c-aaf1-5d02555553e7" path="/var/lib/kubelet/pods/b7391852-4a4f-440c-aaf1-5d02555553e7/volumes" Oct 09 15:01:05 crc kubenswrapper[4762]: I1009 15:01:05.272985 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Oct 09 15:01:05 crc kubenswrapper[4762]: I1009 15:01:05.309577 4762 generic.go:334] "Generic (PLEG): container finished" podID="4e974ec3-d1c0-49ae-a8c4-645673064d61" containerID="f9fd8299b05b4f79a129a485f37f5a7df573d3283c61176edfb52f50f847202e" exitCode=0 Oct 09 15:01:05 crc kubenswrapper[4762]: I1009 15:01:05.309830 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 09 15:01:05 crc kubenswrapper[4762]: I1009 15:01:05.310720 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"4e974ec3-d1c0-49ae-a8c4-645673064d61","Type":"ContainerDied","Data":"f9fd8299b05b4f79a129a485f37f5a7df573d3283c61176edfb52f50f847202e"} Oct 09 15:01:05 crc kubenswrapper[4762]: I1009 15:01:05.310761 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"4e974ec3-d1c0-49ae-a8c4-645673064d61","Type":"ContainerDied","Data":"7ae6f6124f1cc4ada4cf30825852ab0ac8828e087426a2647802e6ab5f5f27c7"} Oct 09 15:01:05 crc kubenswrapper[4762]: I1009 15:01:05.310778 4762 scope.go:117] "RemoveContainer" containerID="f9fd8299b05b4f79a129a485f37f5a7df573d3283c61176edfb52f50f847202e" Oct 09 15:01:05 crc kubenswrapper[4762]: I1009 15:01:05.333493 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e974ec3-d1c0-49ae-a8c4-645673064d61-combined-ca-bundle\") pod \"4e974ec3-d1c0-49ae-a8c4-645673064d61\" (UID: \"4e974ec3-d1c0-49ae-a8c4-645673064d61\") " Oct 09 15:01:05 crc kubenswrapper[4762]: I1009 15:01:05.333715 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qxmzz\" (UniqueName: \"kubernetes.io/projected/4e974ec3-d1c0-49ae-a8c4-645673064d61-kube-api-access-qxmzz\") pod \"4e974ec3-d1c0-49ae-a8c4-645673064d61\" (UID: \"4e974ec3-d1c0-49ae-a8c4-645673064d61\") " Oct 09 15:01:05 crc kubenswrapper[4762]: I1009 15:01:05.333768 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e974ec3-d1c0-49ae-a8c4-645673064d61-config-data\") pod \"4e974ec3-d1c0-49ae-a8c4-645673064d61\" (UID: \"4e974ec3-d1c0-49ae-a8c4-645673064d61\") " Oct 09 15:01:05 crc kubenswrapper[4762]: I1009 15:01:05.337884 4762 scope.go:117] "RemoveContainer" containerID="f9fd8299b05b4f79a129a485f37f5a7df573d3283c61176edfb52f50f847202e" Oct 09 15:01:05 crc kubenswrapper[4762]: E1009 15:01:05.338468 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f9fd8299b05b4f79a129a485f37f5a7df573d3283c61176edfb52f50f847202e\": container with ID starting with f9fd8299b05b4f79a129a485f37f5a7df573d3283c61176edfb52f50f847202e not found: ID does not exist" containerID="f9fd8299b05b4f79a129a485f37f5a7df573d3283c61176edfb52f50f847202e" Oct 09 15:01:05 crc kubenswrapper[4762]: I1009 15:01:05.338509 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f9fd8299b05b4f79a129a485f37f5a7df573d3283c61176edfb52f50f847202e"} err="failed to get container status \"f9fd8299b05b4f79a129a485f37f5a7df573d3283c61176edfb52f50f847202e\": rpc error: code = NotFound desc = could not find container \"f9fd8299b05b4f79a129a485f37f5a7df573d3283c61176edfb52f50f847202e\": container with ID starting with f9fd8299b05b4f79a129a485f37f5a7df573d3283c61176edfb52f50f847202e not 
found: ID does not exist" Oct 09 15:01:05 crc kubenswrapper[4762]: I1009 15:01:05.339940 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4e974ec3-d1c0-49ae-a8c4-645673064d61-kube-api-access-qxmzz" (OuterVolumeSpecName: "kube-api-access-qxmzz") pod "4e974ec3-d1c0-49ae-a8c4-645673064d61" (UID: "4e974ec3-d1c0-49ae-a8c4-645673064d61"). InnerVolumeSpecName "kube-api-access-qxmzz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 15:01:05 crc kubenswrapper[4762]: I1009 15:01:05.359187 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e974ec3-d1c0-49ae-a8c4-645673064d61-config-data" (OuterVolumeSpecName: "config-data") pod "4e974ec3-d1c0-49ae-a8c4-645673064d61" (UID: "4e974ec3-d1c0-49ae-a8c4-645673064d61"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:01:05 crc kubenswrapper[4762]: I1009 15:01:05.362979 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e974ec3-d1c0-49ae-a8c4-645673064d61-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4e974ec3-d1c0-49ae-a8c4-645673064d61" (UID: "4e974ec3-d1c0-49ae-a8c4-645673064d61"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:01:05 crc kubenswrapper[4762]: I1009 15:01:05.436418 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e974ec3-d1c0-49ae-a8c4-645673064d61-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 15:01:05 crc kubenswrapper[4762]: I1009 15:01:05.436461 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qxmzz\" (UniqueName: \"kubernetes.io/projected/4e974ec3-d1c0-49ae-a8c4-645673064d61-kube-api-access-qxmzz\") on node \"crc\" DevicePath \"\"" Oct 09 15:01:05 crc kubenswrapper[4762]: I1009 15:01:05.436472 4762 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e974ec3-d1c0-49ae-a8c4-645673064d61-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 15:01:05 crc kubenswrapper[4762]: I1009 15:01:05.553924 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29333701-x8pdl" Oct 09 15:01:05 crc kubenswrapper[4762]: I1009 15:01:05.642508 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6wjbs\" (UniqueName: \"kubernetes.io/projected/7ceaf6c8-14e1-4b7a-b4be-7cc671d642bf-kube-api-access-6wjbs\") pod \"7ceaf6c8-14e1-4b7a-b4be-7cc671d642bf\" (UID: \"7ceaf6c8-14e1-4b7a-b4be-7cc671d642bf\") " Oct 09 15:01:05 crc kubenswrapper[4762]: I1009 15:01:05.642625 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ceaf6c8-14e1-4b7a-b4be-7cc671d642bf-config-data\") pod \"7ceaf6c8-14e1-4b7a-b4be-7cc671d642bf\" (UID: \"7ceaf6c8-14e1-4b7a-b4be-7cc671d642bf\") " Oct 09 15:01:05 crc kubenswrapper[4762]: I1009 15:01:05.642712 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ceaf6c8-14e1-4b7a-b4be-7cc671d642bf-combined-ca-bundle\") pod \"7ceaf6c8-14e1-4b7a-b4be-7cc671d642bf\" (UID: \"7ceaf6c8-14e1-4b7a-b4be-7cc671d642bf\") " Oct 09 15:01:05 crc kubenswrapper[4762]: I1009 15:01:05.642856 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/7ceaf6c8-14e1-4b7a-b4be-7cc671d642bf-fernet-keys\") pod \"7ceaf6c8-14e1-4b7a-b4be-7cc671d642bf\" (UID: \"7ceaf6c8-14e1-4b7a-b4be-7cc671d642bf\") " Oct 09 15:01:05 crc kubenswrapper[4762]: I1009 15:01:05.648143 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7ceaf6c8-14e1-4b7a-b4be-7cc671d642bf-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "7ceaf6c8-14e1-4b7a-b4be-7cc671d642bf" (UID: "7ceaf6c8-14e1-4b7a-b4be-7cc671d642bf"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:01:05 crc kubenswrapper[4762]: I1009 15:01:05.650023 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7ceaf6c8-14e1-4b7a-b4be-7cc671d642bf-kube-api-access-6wjbs" (OuterVolumeSpecName: "kube-api-access-6wjbs") pod "7ceaf6c8-14e1-4b7a-b4be-7cc671d642bf" (UID: "7ceaf6c8-14e1-4b7a-b4be-7cc671d642bf"). InnerVolumeSpecName "kube-api-access-6wjbs". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 15:01:05 crc kubenswrapper[4762]: I1009 15:01:05.666514 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Oct 09 15:01:05 crc kubenswrapper[4762]: I1009 15:01:05.678069 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Oct 09 15:01:05 crc kubenswrapper[4762]: I1009 15:01:05.678704 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7ceaf6c8-14e1-4b7a-b4be-7cc671d642bf-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7ceaf6c8-14e1-4b7a-b4be-7cc671d642bf" (UID: "7ceaf6c8-14e1-4b7a-b4be-7cc671d642bf"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:01:05 crc kubenswrapper[4762]: I1009 15:01:05.690720 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Oct 09 15:01:05 crc kubenswrapper[4762]: E1009 15:01:05.691222 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7ceaf6c8-14e1-4b7a-b4be-7cc671d642bf" containerName="keystone-cron" Oct 09 15:01:05 crc kubenswrapper[4762]: I1009 15:01:05.691246 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="7ceaf6c8-14e1-4b7a-b4be-7cc671d642bf" containerName="keystone-cron" Oct 09 15:01:05 crc kubenswrapper[4762]: E1009 15:01:05.691272 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b7391852-4a4f-440c-aaf1-5d02555553e7" containerName="dnsmasq-dns" Oct 09 15:01:05 crc kubenswrapper[4762]: I1009 15:01:05.691278 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="b7391852-4a4f-440c-aaf1-5d02555553e7" containerName="dnsmasq-dns" Oct 09 15:01:05 crc kubenswrapper[4762]: E1009 15:01:05.691303 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b7391852-4a4f-440c-aaf1-5d02555553e7" containerName="init" Oct 09 15:01:05 crc kubenswrapper[4762]: I1009 15:01:05.691310 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="b7391852-4a4f-440c-aaf1-5d02555553e7" containerName="init" Oct 09 15:01:05 crc kubenswrapper[4762]: E1009 15:01:05.691317 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e974ec3-d1c0-49ae-a8c4-645673064d61" containerName="nova-scheduler-scheduler" Oct 09 15:01:05 crc kubenswrapper[4762]: I1009 15:01:05.691324 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e974ec3-d1c0-49ae-a8c4-645673064d61" containerName="nova-scheduler-scheduler" Oct 09 15:01:05 crc kubenswrapper[4762]: I1009 15:01:05.691506 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e974ec3-d1c0-49ae-a8c4-645673064d61" containerName="nova-scheduler-scheduler" Oct 09 15:01:05 crc kubenswrapper[4762]: I1009 15:01:05.691520 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="b7391852-4a4f-440c-aaf1-5d02555553e7" containerName="dnsmasq-dns" Oct 09 15:01:05 crc kubenswrapper[4762]: I1009 15:01:05.691531 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="7ceaf6c8-14e1-4b7a-b4be-7cc671d642bf" containerName="keystone-cron" Oct 09 15:01:05 crc kubenswrapper[4762]: I1009 15:01:05.692268 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 09 15:01:05 crc kubenswrapper[4762]: I1009 15:01:05.695529 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Oct 09 15:01:05 crc kubenswrapper[4762]: I1009 15:01:05.699835 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 09 15:01:05 crc kubenswrapper[4762]: I1009 15:01:05.712294 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7ceaf6c8-14e1-4b7a-b4be-7cc671d642bf-config-data" (OuterVolumeSpecName: "config-data") pod "7ceaf6c8-14e1-4b7a-b4be-7cc671d642bf" (UID: "7ceaf6c8-14e1-4b7a-b4be-7cc671d642bf"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:01:05 crc kubenswrapper[4762]: I1009 15:01:05.749933 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f0692db6-e35e-436d-81d6-f587c0ad34dd-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"f0692db6-e35e-436d-81d6-f587c0ad34dd\") " pod="openstack/nova-scheduler-0" Oct 09 15:01:05 crc kubenswrapper[4762]: I1009 15:01:05.749999 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f0692db6-e35e-436d-81d6-f587c0ad34dd-config-data\") pod \"nova-scheduler-0\" (UID: \"f0692db6-e35e-436d-81d6-f587c0ad34dd\") " pod="openstack/nova-scheduler-0" Oct 09 15:01:05 crc kubenswrapper[4762]: I1009 15:01:05.750059 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tjqqq\" (UniqueName: \"kubernetes.io/projected/f0692db6-e35e-436d-81d6-f587c0ad34dd-kube-api-access-tjqqq\") pod \"nova-scheduler-0\" (UID: \"f0692db6-e35e-436d-81d6-f587c0ad34dd\") " pod="openstack/nova-scheduler-0" Oct 09 15:01:05 crc kubenswrapper[4762]: I1009 15:01:05.750232 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6wjbs\" (UniqueName: \"kubernetes.io/projected/7ceaf6c8-14e1-4b7a-b4be-7cc671d642bf-kube-api-access-6wjbs\") on node \"crc\" DevicePath \"\"" Oct 09 15:01:05 crc kubenswrapper[4762]: I1009 15:01:05.750391 4762 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ceaf6c8-14e1-4b7a-b4be-7cc671d642bf-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 15:01:05 crc kubenswrapper[4762]: I1009 15:01:05.750413 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ceaf6c8-14e1-4b7a-b4be-7cc671d642bf-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 15:01:05 crc kubenswrapper[4762]: I1009 15:01:05.750425 4762 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/7ceaf6c8-14e1-4b7a-b4be-7cc671d642bf-fernet-keys\") on node \"crc\" DevicePath \"\"" Oct 09 15:01:05 crc kubenswrapper[4762]: I1009 15:01:05.853124 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f0692db6-e35e-436d-81d6-f587c0ad34dd-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"f0692db6-e35e-436d-81d6-f587c0ad34dd\") " pod="openstack/nova-scheduler-0" Oct 09 15:01:05 crc kubenswrapper[4762]: I1009 15:01:05.853205 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f0692db6-e35e-436d-81d6-f587c0ad34dd-config-data\") pod \"nova-scheduler-0\" (UID: \"f0692db6-e35e-436d-81d6-f587c0ad34dd\") " pod="openstack/nova-scheduler-0" Oct 09 15:01:05 crc kubenswrapper[4762]: I1009 15:01:05.853313 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tjqqq\" (UniqueName: \"kubernetes.io/projected/f0692db6-e35e-436d-81d6-f587c0ad34dd-kube-api-access-tjqqq\") pod \"nova-scheduler-0\" (UID: \"f0692db6-e35e-436d-81d6-f587c0ad34dd\") " pod="openstack/nova-scheduler-0" Oct 09 15:01:05 crc kubenswrapper[4762]: I1009 15:01:05.857527 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" 
(UniqueName: \"kubernetes.io/secret/f0692db6-e35e-436d-81d6-f587c0ad34dd-config-data\") pod \"nova-scheduler-0\" (UID: \"f0692db6-e35e-436d-81d6-f587c0ad34dd\") " pod="openstack/nova-scheduler-0" Oct 09 15:01:05 crc kubenswrapper[4762]: I1009 15:01:05.860213 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f0692db6-e35e-436d-81d6-f587c0ad34dd-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"f0692db6-e35e-436d-81d6-f587c0ad34dd\") " pod="openstack/nova-scheduler-0" Oct 09 15:01:05 crc kubenswrapper[4762]: I1009 15:01:05.872134 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tjqqq\" (UniqueName: \"kubernetes.io/projected/f0692db6-e35e-436d-81d6-f587c0ad34dd-kube-api-access-tjqqq\") pod \"nova-scheduler-0\" (UID: \"f0692db6-e35e-436d-81d6-f587c0ad34dd\") " pod="openstack/nova-scheduler-0" Oct 09 15:01:06 crc kubenswrapper[4762]: I1009 15:01:06.013444 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 09 15:01:06 crc kubenswrapper[4762]: I1009 15:01:06.321247 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29333701-x8pdl" event={"ID":"7ceaf6c8-14e1-4b7a-b4be-7cc671d642bf","Type":"ContainerDied","Data":"34ec14764a9a760fd135327805d7eceecf549b3392b4f1f2c30832b432340d41"} Oct 09 15:01:06 crc kubenswrapper[4762]: I1009 15:01:06.321575 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="34ec14764a9a760fd135327805d7eceecf549b3392b4f1f2c30832b432340d41" Oct 09 15:01:06 crc kubenswrapper[4762]: I1009 15:01:06.321367 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29333701-x8pdl" Oct 09 15:01:06 crc kubenswrapper[4762]: I1009 15:01:06.488059 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 09 15:01:06 crc kubenswrapper[4762]: I1009 15:01:06.857521 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 09 15:01:06 crc kubenswrapper[4762]: I1009 15:01:06.857589 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 09 15:01:06 crc kubenswrapper[4762]: I1009 15:01:06.976083 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4e974ec3-d1c0-49ae-a8c4-645673064d61" path="/var/lib/kubelet/pods/4e974ec3-d1c0-49ae-a8c4-645673064d61/volumes" Oct 09 15:01:07 crc kubenswrapper[4762]: I1009 15:01:07.331414 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"f0692db6-e35e-436d-81d6-f587c0ad34dd","Type":"ContainerStarted","Data":"8521e36928271b846e94877cd41a762d87a0a2b673107c707d297dbbad2b3429"} Oct 09 15:01:07 crc kubenswrapper[4762]: I1009 15:01:07.331735 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"f0692db6-e35e-436d-81d6-f587c0ad34dd","Type":"ContainerStarted","Data":"f3086ace1f57edb488b5e03369f95db5cefd10d7795d531013c7aaffac4fd637"} Oct 09 15:01:07 crc kubenswrapper[4762]: I1009 15:01:07.355370 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.355352902 podStartE2EDuration="2.355352902s" podCreationTimestamp="2025-10-09 15:01:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2025-10-09 15:01:07.347244511 +0000 UTC m=+5743.121035550" watchObservedRunningTime="2025-10-09 15:01:07.355352902 +0000 UTC m=+5743.129143941" Oct 09 15:01:10 crc kubenswrapper[4762]: I1009 15:01:10.562389 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Oct 09 15:01:11 crc kubenswrapper[4762]: I1009 15:01:11.014617 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Oct 09 15:01:11 crc kubenswrapper[4762]: I1009 15:01:11.144798 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-gzfmt"] Oct 09 15:01:11 crc kubenswrapper[4762]: I1009 15:01:11.147166 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-gzfmt" Oct 09 15:01:11 crc kubenswrapper[4762]: I1009 15:01:11.150127 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Oct 09 15:01:11 crc kubenswrapper[4762]: I1009 15:01:11.150536 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Oct 09 15:01:11 crc kubenswrapper[4762]: I1009 15:01:11.157976 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-gzfmt"] Oct 09 15:01:11 crc kubenswrapper[4762]: I1009 15:01:11.262765 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b2422116-f72c-42ae-83b2-084763842592-scripts\") pod \"nova-cell1-cell-mapping-gzfmt\" (UID: \"b2422116-f72c-42ae-83b2-084763842592\") " pod="openstack/nova-cell1-cell-mapping-gzfmt" Oct 09 15:01:11 crc kubenswrapper[4762]: I1009 15:01:11.262873 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b2422116-f72c-42ae-83b2-084763842592-config-data\") pod \"nova-cell1-cell-mapping-gzfmt\" (UID: \"b2422116-f72c-42ae-83b2-084763842592\") " pod="openstack/nova-cell1-cell-mapping-gzfmt" Oct 09 15:01:11 crc kubenswrapper[4762]: I1009 15:01:11.262974 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d9xxv\" (UniqueName: \"kubernetes.io/projected/b2422116-f72c-42ae-83b2-084763842592-kube-api-access-d9xxv\") pod \"nova-cell1-cell-mapping-gzfmt\" (UID: \"b2422116-f72c-42ae-83b2-084763842592\") " pod="openstack/nova-cell1-cell-mapping-gzfmt" Oct 09 15:01:11 crc kubenswrapper[4762]: I1009 15:01:11.263013 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b2422116-f72c-42ae-83b2-084763842592-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-gzfmt\" (UID: \"b2422116-f72c-42ae-83b2-084763842592\") " pod="openstack/nova-cell1-cell-mapping-gzfmt" Oct 09 15:01:11 crc kubenswrapper[4762]: I1009 15:01:11.364159 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d9xxv\" (UniqueName: \"kubernetes.io/projected/b2422116-f72c-42ae-83b2-084763842592-kube-api-access-d9xxv\") pod \"nova-cell1-cell-mapping-gzfmt\" (UID: \"b2422116-f72c-42ae-83b2-084763842592\") " pod="openstack/nova-cell1-cell-mapping-gzfmt" Oct 09 15:01:11 crc kubenswrapper[4762]: I1009 15:01:11.364221 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b2422116-f72c-42ae-83b2-084763842592-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-gzfmt\" (UID: \"b2422116-f72c-42ae-83b2-084763842592\") " pod="openstack/nova-cell1-cell-mapping-gzfmt" Oct 09 15:01:11 crc kubenswrapper[4762]: I1009 15:01:11.364249 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b2422116-f72c-42ae-83b2-084763842592-scripts\") pod \"nova-cell1-cell-mapping-gzfmt\" (UID: \"b2422116-f72c-42ae-83b2-084763842592\") " pod="openstack/nova-cell1-cell-mapping-gzfmt" Oct 09 15:01:11 crc kubenswrapper[4762]: I1009 15:01:11.364314 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b2422116-f72c-42ae-83b2-084763842592-config-data\") pod \"nova-cell1-cell-mapping-gzfmt\" (UID: \"b2422116-f72c-42ae-83b2-084763842592\") " pod="openstack/nova-cell1-cell-mapping-gzfmt" Oct 09 15:01:11 crc kubenswrapper[4762]: I1009 15:01:11.371072 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b2422116-f72c-42ae-83b2-084763842592-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-gzfmt\" (UID: \"b2422116-f72c-42ae-83b2-084763842592\") " pod="openstack/nova-cell1-cell-mapping-gzfmt" Oct 09 15:01:11 crc kubenswrapper[4762]: I1009 15:01:11.371143 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b2422116-f72c-42ae-83b2-084763842592-config-data\") pod \"nova-cell1-cell-mapping-gzfmt\" (UID: \"b2422116-f72c-42ae-83b2-084763842592\") " pod="openstack/nova-cell1-cell-mapping-gzfmt" Oct 09 15:01:11 crc kubenswrapper[4762]: I1009 15:01:11.372086 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b2422116-f72c-42ae-83b2-084763842592-scripts\") pod \"nova-cell1-cell-mapping-gzfmt\" (UID: \"b2422116-f72c-42ae-83b2-084763842592\") " pod="openstack/nova-cell1-cell-mapping-gzfmt" Oct 09 15:01:11 crc kubenswrapper[4762]: I1009 15:01:11.381986 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d9xxv\" (UniqueName: \"kubernetes.io/projected/b2422116-f72c-42ae-83b2-084763842592-kube-api-access-d9xxv\") pod \"nova-cell1-cell-mapping-gzfmt\" (UID: \"b2422116-f72c-42ae-83b2-084763842592\") " pod="openstack/nova-cell1-cell-mapping-gzfmt" Oct 09 15:01:11 crc kubenswrapper[4762]: I1009 15:01:11.487785 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-gzfmt" Oct 09 15:01:11 crc kubenswrapper[4762]: I1009 15:01:11.857261 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Oct 09 15:01:11 crc kubenswrapper[4762]: I1009 15:01:11.857398 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Oct 09 15:01:11 crc kubenswrapper[4762]: I1009 15:01:11.925891 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-gzfmt"] Oct 09 15:01:11 crc kubenswrapper[4762]: I1009 15:01:11.969594 4762 patch_prober.go:28] interesting pod/machine-config-daemon-5v6hv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 15:01:11 crc kubenswrapper[4762]: I1009 15:01:11.969916 4762 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 15:01:12 crc kubenswrapper[4762]: I1009 15:01:12.367953 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 09 15:01:12 crc kubenswrapper[4762]: I1009 15:01:12.369612 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 09 15:01:12 crc kubenswrapper[4762]: I1009 15:01:12.386002 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-gzfmt" event={"ID":"b2422116-f72c-42ae-83b2-084763842592","Type":"ContainerStarted","Data":"2978feb96559cfc45dc0e290c0d2243b5ba1cd4945be26e3c592f451540755e5"} Oct 09 15:01:12 crc kubenswrapper[4762]: I1009 15:01:12.386403 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-gzfmt" event={"ID":"b2422116-f72c-42ae-83b2-084763842592","Type":"ContainerStarted","Data":"5b174c67bd231b5a2cf56bdd97b1df4bed7c31e881cd426ef097f0f200ddcd79"} Oct 09 15:01:12 crc kubenswrapper[4762]: I1009 15:01:12.939902 4762 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="fbaacaa1-30a1-472f-bb6b-bdbc1aab34a0" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"http://10.217.1.72:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 09 15:01:12 crc kubenswrapper[4762]: I1009 15:01:12.939930 4762 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="fbaacaa1-30a1-472f-bb6b-bdbc1aab34a0" containerName="nova-metadata-log" probeResult="failure" output="Get \"http://10.217.1.72:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 09 15:01:13 crc kubenswrapper[4762]: I1009 15:01:13.407970 4762 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="be2ba098-febe-44ac-bb12-b3c29e9aaa8f" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.1.73:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 09 15:01:13 crc kubenswrapper[4762]: I1009 15:01:13.450857 4762 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" 
podUID="be2ba098-febe-44ac-bb12-b3c29e9aaa8f" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.1.73:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 09 15:01:16 crc kubenswrapper[4762]: I1009 15:01:16.014244 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Oct 09 15:01:16 crc kubenswrapper[4762]: I1009 15:01:16.042718 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Oct 09 15:01:16 crc kubenswrapper[4762]: I1009 15:01:16.060122 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-gzfmt" podStartSLOduration=5.060100313 podStartE2EDuration="5.060100313s" podCreationTimestamp="2025-10-09 15:01:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 15:01:12.417934082 +0000 UTC m=+5748.191725121" watchObservedRunningTime="2025-10-09 15:01:16.060100313 +0000 UTC m=+5751.833891362" Oct 09 15:01:16 crc kubenswrapper[4762]: I1009 15:01:16.471455 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Oct 09 15:01:17 crc kubenswrapper[4762]: I1009 15:01:17.458478 4762 generic.go:334] "Generic (PLEG): container finished" podID="b2422116-f72c-42ae-83b2-084763842592" containerID="2978feb96559cfc45dc0e290c0d2243b5ba1cd4945be26e3c592f451540755e5" exitCode=0 Oct 09 15:01:17 crc kubenswrapper[4762]: I1009 15:01:17.458560 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-gzfmt" event={"ID":"b2422116-f72c-42ae-83b2-084763842592","Type":"ContainerDied","Data":"2978feb96559cfc45dc0e290c0d2243b5ba1cd4945be26e3c592f451540755e5"} Oct 09 15:01:18 crc kubenswrapper[4762]: I1009 15:01:18.789155 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-gzfmt" Oct 09 15:01:18 crc kubenswrapper[4762]: I1009 15:01:18.934753 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b2422116-f72c-42ae-83b2-084763842592-config-data\") pod \"b2422116-f72c-42ae-83b2-084763842592\" (UID: \"b2422116-f72c-42ae-83b2-084763842592\") " Oct 09 15:01:18 crc kubenswrapper[4762]: I1009 15:01:18.934844 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b2422116-f72c-42ae-83b2-084763842592-scripts\") pod \"b2422116-f72c-42ae-83b2-084763842592\" (UID: \"b2422116-f72c-42ae-83b2-084763842592\") " Oct 09 15:01:18 crc kubenswrapper[4762]: I1009 15:01:18.934930 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d9xxv\" (UniqueName: \"kubernetes.io/projected/b2422116-f72c-42ae-83b2-084763842592-kube-api-access-d9xxv\") pod \"b2422116-f72c-42ae-83b2-084763842592\" (UID: \"b2422116-f72c-42ae-83b2-084763842592\") " Oct 09 15:01:18 crc kubenswrapper[4762]: I1009 15:01:18.935035 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b2422116-f72c-42ae-83b2-084763842592-combined-ca-bundle\") pod \"b2422116-f72c-42ae-83b2-084763842592\" (UID: \"b2422116-f72c-42ae-83b2-084763842592\") " Oct 09 15:01:18 crc kubenswrapper[4762]: I1009 15:01:18.940818 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b2422116-f72c-42ae-83b2-084763842592-scripts" (OuterVolumeSpecName: "scripts") pod "b2422116-f72c-42ae-83b2-084763842592" (UID: "b2422116-f72c-42ae-83b2-084763842592"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:01:18 crc kubenswrapper[4762]: I1009 15:01:18.942475 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b2422116-f72c-42ae-83b2-084763842592-kube-api-access-d9xxv" (OuterVolumeSpecName: "kube-api-access-d9xxv") pod "b2422116-f72c-42ae-83b2-084763842592" (UID: "b2422116-f72c-42ae-83b2-084763842592"). InnerVolumeSpecName "kube-api-access-d9xxv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 15:01:18 crc kubenswrapper[4762]: I1009 15:01:18.964296 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b2422116-f72c-42ae-83b2-084763842592-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b2422116-f72c-42ae-83b2-084763842592" (UID: "b2422116-f72c-42ae-83b2-084763842592"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:01:18 crc kubenswrapper[4762]: I1009 15:01:18.965976 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b2422116-f72c-42ae-83b2-084763842592-config-data" (OuterVolumeSpecName: "config-data") pod "b2422116-f72c-42ae-83b2-084763842592" (UID: "b2422116-f72c-42ae-83b2-084763842592"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:01:19 crc kubenswrapper[4762]: I1009 15:01:19.036941 4762 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b2422116-f72c-42ae-83b2-084763842592-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 15:01:19 crc kubenswrapper[4762]: I1009 15:01:19.036983 4762 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b2422116-f72c-42ae-83b2-084763842592-scripts\") on node \"crc\" DevicePath \"\"" Oct 09 15:01:19 crc kubenswrapper[4762]: I1009 15:01:19.036996 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d9xxv\" (UniqueName: \"kubernetes.io/projected/b2422116-f72c-42ae-83b2-084763842592-kube-api-access-d9xxv\") on node \"crc\" DevicePath \"\"" Oct 09 15:01:19 crc kubenswrapper[4762]: I1009 15:01:19.037008 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b2422116-f72c-42ae-83b2-084763842592-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 15:01:19 crc kubenswrapper[4762]: I1009 15:01:19.481066 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-gzfmt" event={"ID":"b2422116-f72c-42ae-83b2-084763842592","Type":"ContainerDied","Data":"5b174c67bd231b5a2cf56bdd97b1df4bed7c31e881cd426ef097f0f200ddcd79"} Oct 09 15:01:19 crc kubenswrapper[4762]: I1009 15:01:19.481120 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5b174c67bd231b5a2cf56bdd97b1df4bed7c31e881cd426ef097f0f200ddcd79" Oct 09 15:01:19 crc kubenswrapper[4762]: I1009 15:01:19.481126 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-gzfmt" Oct 09 15:01:19 crc kubenswrapper[4762]: I1009 15:01:19.667725 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 09 15:01:19 crc kubenswrapper[4762]: I1009 15:01:19.668267 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="be2ba098-febe-44ac-bb12-b3c29e9aaa8f" containerName="nova-api-log" containerID="cri-o://8d8f2594356b0eae1c6a4dc924282c18feaf2bbbf38165706edf1ef8cfc6985c" gracePeriod=30 Oct 09 15:01:19 crc kubenswrapper[4762]: I1009 15:01:19.668405 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="be2ba098-febe-44ac-bb12-b3c29e9aaa8f" containerName="nova-api-api" containerID="cri-o://b7e68933a9a9c53c9232e10d05da4ecc384188d9d36cfd9762cef05b39400dc2" gracePeriod=30 Oct 09 15:01:19 crc kubenswrapper[4762]: I1009 15:01:19.754248 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Oct 09 15:01:19 crc kubenswrapper[4762]: I1009 15:01:19.754490 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="f0692db6-e35e-436d-81d6-f587c0ad34dd" containerName="nova-scheduler-scheduler" containerID="cri-o://8521e36928271b846e94877cd41a762d87a0a2b673107c707d297dbbad2b3429" gracePeriod=30 Oct 09 15:01:19 crc kubenswrapper[4762]: I1009 15:01:19.771089 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 09 15:01:19 crc kubenswrapper[4762]: I1009 15:01:19.771736 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="fbaacaa1-30a1-472f-bb6b-bdbc1aab34a0" 
containerName="nova-metadata-metadata" containerID="cri-o://1ba16ce4c116f6a9b3aa59288476d1a6c8cef873dfef38e75255b439c2963beb" gracePeriod=30 Oct 09 15:01:19 crc kubenswrapper[4762]: I1009 15:01:19.772305 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="fbaacaa1-30a1-472f-bb6b-bdbc1aab34a0" containerName="nova-metadata-log" containerID="cri-o://2b2500fdcfb8a55897f270ca78a329aedb0ae3f461d9deea44e237f0f171fb31" gracePeriod=30 Oct 09 15:01:20 crc kubenswrapper[4762]: I1009 15:01:20.492229 4762 generic.go:334] "Generic (PLEG): container finished" podID="be2ba098-febe-44ac-bb12-b3c29e9aaa8f" containerID="8d8f2594356b0eae1c6a4dc924282c18feaf2bbbf38165706edf1ef8cfc6985c" exitCode=143 Oct 09 15:01:20 crc kubenswrapper[4762]: I1009 15:01:20.492321 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"be2ba098-febe-44ac-bb12-b3c29e9aaa8f","Type":"ContainerDied","Data":"8d8f2594356b0eae1c6a4dc924282c18feaf2bbbf38165706edf1ef8cfc6985c"} Oct 09 15:01:20 crc kubenswrapper[4762]: I1009 15:01:20.495697 4762 generic.go:334] "Generic (PLEG): container finished" podID="fbaacaa1-30a1-472f-bb6b-bdbc1aab34a0" containerID="2b2500fdcfb8a55897f270ca78a329aedb0ae3f461d9deea44e237f0f171fb31" exitCode=143 Oct 09 15:01:20 crc kubenswrapper[4762]: I1009 15:01:20.495747 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"fbaacaa1-30a1-472f-bb6b-bdbc1aab34a0","Type":"ContainerDied","Data":"2b2500fdcfb8a55897f270ca78a329aedb0ae3f461d9deea44e237f0f171fb31"} Oct 09 15:01:21 crc kubenswrapper[4762]: E1009 15:01:21.015915 4762 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="8521e36928271b846e94877cd41a762d87a0a2b673107c707d297dbbad2b3429" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Oct 09 15:01:21 crc kubenswrapper[4762]: E1009 15:01:21.017682 4762 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="8521e36928271b846e94877cd41a762d87a0a2b673107c707d297dbbad2b3429" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Oct 09 15:01:21 crc kubenswrapper[4762]: E1009 15:01:21.020080 4762 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="8521e36928271b846e94877cd41a762d87a0a2b673107c707d297dbbad2b3429" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Oct 09 15:01:21 crc kubenswrapper[4762]: E1009 15:01:21.020120 4762 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="f0692db6-e35e-436d-81d6-f587c0ad34dd" containerName="nova-scheduler-scheduler" Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.301678 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.311357 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 09 15:01:23 crc kubenswrapper[4762]: E1009 15:01:23.335538 4762 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfbaacaa1_30a1_472f_bb6b_bdbc1aab34a0.slice/crio-1ba16ce4c116f6a9b3aa59288476d1a6c8cef873dfef38e75255b439c2963beb.scope\": RecentStats: unable to find data in memory cache]" Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.432180 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f0692db6-e35e-436d-81d6-f587c0ad34dd-config-data\") pod \"f0692db6-e35e-436d-81d6-f587c0ad34dd\" (UID: \"f0692db6-e35e-436d-81d6-f587c0ad34dd\") " Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.432227 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/be2ba098-febe-44ac-bb12-b3c29e9aaa8f-logs\") pod \"be2ba098-febe-44ac-bb12-b3c29e9aaa8f\" (UID: \"be2ba098-febe-44ac-bb12-b3c29e9aaa8f\") " Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.432296 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tjqqq\" (UniqueName: \"kubernetes.io/projected/f0692db6-e35e-436d-81d6-f587c0ad34dd-kube-api-access-tjqqq\") pod \"f0692db6-e35e-436d-81d6-f587c0ad34dd\" (UID: \"f0692db6-e35e-436d-81d6-f587c0ad34dd\") " Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.432344 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/be2ba098-febe-44ac-bb12-b3c29e9aaa8f-config-data\") pod \"be2ba098-febe-44ac-bb12-b3c29e9aaa8f\" (UID: \"be2ba098-febe-44ac-bb12-b3c29e9aaa8f\") " Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.432445 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be2ba098-febe-44ac-bb12-b3c29e9aaa8f-combined-ca-bundle\") pod \"be2ba098-febe-44ac-bb12-b3c29e9aaa8f\" (UID: \"be2ba098-febe-44ac-bb12-b3c29e9aaa8f\") " Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.432477 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gc74k\" (UniqueName: \"kubernetes.io/projected/be2ba098-febe-44ac-bb12-b3c29e9aaa8f-kube-api-access-gc74k\") pod \"be2ba098-febe-44ac-bb12-b3c29e9aaa8f\" (UID: \"be2ba098-febe-44ac-bb12-b3c29e9aaa8f\") " Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.432525 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f0692db6-e35e-436d-81d6-f587c0ad34dd-combined-ca-bundle\") pod \"f0692db6-e35e-436d-81d6-f587c0ad34dd\" (UID: \"f0692db6-e35e-436d-81d6-f587c0ad34dd\") " Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.435784 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/be2ba098-febe-44ac-bb12-b3c29e9aaa8f-logs" (OuterVolumeSpecName: "logs") pod "be2ba098-febe-44ac-bb12-b3c29e9aaa8f" (UID: "be2ba098-febe-44ac-bb12-b3c29e9aaa8f"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.439825 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f0692db6-e35e-436d-81d6-f587c0ad34dd-kube-api-access-tjqqq" (OuterVolumeSpecName: "kube-api-access-tjqqq") pod "f0692db6-e35e-436d-81d6-f587c0ad34dd" (UID: "f0692db6-e35e-436d-81d6-f587c0ad34dd"). InnerVolumeSpecName "kube-api-access-tjqqq". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.440298 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/be2ba098-febe-44ac-bb12-b3c29e9aaa8f-kube-api-access-gc74k" (OuterVolumeSpecName: "kube-api-access-gc74k") pod "be2ba098-febe-44ac-bb12-b3c29e9aaa8f" (UID: "be2ba098-febe-44ac-bb12-b3c29e9aaa8f"). InnerVolumeSpecName "kube-api-access-gc74k". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.448758 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.464338 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/be2ba098-febe-44ac-bb12-b3c29e9aaa8f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "be2ba098-febe-44ac-bb12-b3c29e9aaa8f" (UID: "be2ba098-febe-44ac-bb12-b3c29e9aaa8f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.465777 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f0692db6-e35e-436d-81d6-f587c0ad34dd-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f0692db6-e35e-436d-81d6-f587c0ad34dd" (UID: "f0692db6-e35e-436d-81d6-f587c0ad34dd"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.479884 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/be2ba098-febe-44ac-bb12-b3c29e9aaa8f-config-data" (OuterVolumeSpecName: "config-data") pod "be2ba098-febe-44ac-bb12-b3c29e9aaa8f" (UID: "be2ba098-febe-44ac-bb12-b3c29e9aaa8f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.483837 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f0692db6-e35e-436d-81d6-f587c0ad34dd-config-data" (OuterVolumeSpecName: "config-data") pod "f0692db6-e35e-436d-81d6-f587c0ad34dd" (UID: "f0692db6-e35e-436d-81d6-f587c0ad34dd"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.520474 4762 generic.go:334] "Generic (PLEG): container finished" podID="be2ba098-febe-44ac-bb12-b3c29e9aaa8f" containerID="b7e68933a9a9c53c9232e10d05da4ecc384188d9d36cfd9762cef05b39400dc2" exitCode=0 Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.520515 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"be2ba098-febe-44ac-bb12-b3c29e9aaa8f","Type":"ContainerDied","Data":"b7e68933a9a9c53c9232e10d05da4ecc384188d9d36cfd9762cef05b39400dc2"} Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.520557 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.520959 4762 scope.go:117] "RemoveContainer" containerID="b7e68933a9a9c53c9232e10d05da4ecc384188d9d36cfd9762cef05b39400dc2" Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.520890 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"be2ba098-febe-44ac-bb12-b3c29e9aaa8f","Type":"ContainerDied","Data":"8fcd37aae434a63e15513316720129d7ad7a6e63aac6d39c388d50444a0e51f4"} Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.522588 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.522605 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"f0692db6-e35e-436d-81d6-f587c0ad34dd","Type":"ContainerDied","Data":"8521e36928271b846e94877cd41a762d87a0a2b673107c707d297dbbad2b3429"} Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.522559 4762 generic.go:334] "Generic (PLEG): container finished" podID="f0692db6-e35e-436d-81d6-f587c0ad34dd" containerID="8521e36928271b846e94877cd41a762d87a0a2b673107c707d297dbbad2b3429" exitCode=0 Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.522761 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"f0692db6-e35e-436d-81d6-f587c0ad34dd","Type":"ContainerDied","Data":"f3086ace1f57edb488b5e03369f95db5cefd10d7795d531013c7aaffac4fd637"} Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.525453 4762 generic.go:334] "Generic (PLEG): container finished" podID="fbaacaa1-30a1-472f-bb6b-bdbc1aab34a0" containerID="1ba16ce4c116f6a9b3aa59288476d1a6c8cef873dfef38e75255b439c2963beb" exitCode=0 Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.525527 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.525536 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"fbaacaa1-30a1-472f-bb6b-bdbc1aab34a0","Type":"ContainerDied","Data":"1ba16ce4c116f6a9b3aa59288476d1a6c8cef873dfef38e75255b439c2963beb"} Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.525988 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"fbaacaa1-30a1-472f-bb6b-bdbc1aab34a0","Type":"ContainerDied","Data":"3e250a711316f592ebbb408bdd7d5604beb49667c23a80460c7c92edd0ed45f2"} Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.534660 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fbaacaa1-30a1-472f-bb6b-bdbc1aab34a0-config-data\") pod \"fbaacaa1-30a1-472f-bb6b-bdbc1aab34a0\" (UID: \"fbaacaa1-30a1-472f-bb6b-bdbc1aab34a0\") " Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.534748 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbaacaa1-30a1-472f-bb6b-bdbc1aab34a0-combined-ca-bundle\") pod \"fbaacaa1-30a1-472f-bb6b-bdbc1aab34a0\" (UID: \"fbaacaa1-30a1-472f-bb6b-bdbc1aab34a0\") " Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.534794 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4sz6x\" (UniqueName: \"kubernetes.io/projected/fbaacaa1-30a1-472f-bb6b-bdbc1aab34a0-kube-api-access-4sz6x\") pod \"fbaacaa1-30a1-472f-bb6b-bdbc1aab34a0\" (UID: \"fbaacaa1-30a1-472f-bb6b-bdbc1aab34a0\") " Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.535025 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fbaacaa1-30a1-472f-bb6b-bdbc1aab34a0-logs\") pod \"fbaacaa1-30a1-472f-bb6b-bdbc1aab34a0\" (UID: \"fbaacaa1-30a1-472f-bb6b-bdbc1aab34a0\") " Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.535505 4762 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f0692db6-e35e-436d-81d6-f587c0ad34dd-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.535539 4762 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/be2ba098-febe-44ac-bb12-b3c29e9aaa8f-logs\") on node \"crc\" DevicePath \"\"" Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.535553 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tjqqq\" (UniqueName: \"kubernetes.io/projected/f0692db6-e35e-436d-81d6-f587c0ad34dd-kube-api-access-tjqqq\") on node \"crc\" DevicePath \"\"" Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.535566 4762 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/be2ba098-febe-44ac-bb12-b3c29e9aaa8f-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.535574 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be2ba098-febe-44ac-bb12-b3c29e9aaa8f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.535583 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gc74k\" (UniqueName: 
\"kubernetes.io/projected/be2ba098-febe-44ac-bb12-b3c29e9aaa8f-kube-api-access-gc74k\") on node \"crc\" DevicePath \"\"" Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.535594 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f0692db6-e35e-436d-81d6-f587c0ad34dd-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.535649 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fbaacaa1-30a1-472f-bb6b-bdbc1aab34a0-logs" (OuterVolumeSpecName: "logs") pod "fbaacaa1-30a1-472f-bb6b-bdbc1aab34a0" (UID: "fbaacaa1-30a1-472f-bb6b-bdbc1aab34a0"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.538130 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fbaacaa1-30a1-472f-bb6b-bdbc1aab34a0-kube-api-access-4sz6x" (OuterVolumeSpecName: "kube-api-access-4sz6x") pod "fbaacaa1-30a1-472f-bb6b-bdbc1aab34a0" (UID: "fbaacaa1-30a1-472f-bb6b-bdbc1aab34a0"). InnerVolumeSpecName "kube-api-access-4sz6x". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.549966 4762 scope.go:117] "RemoveContainer" containerID="8d8f2594356b0eae1c6a4dc924282c18feaf2bbbf38165706edf1ef8cfc6985c" Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.570104 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 09 15:01:23 crc kubenswrapper[4762]: E1009 15:01:23.585017 4762 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/fbaacaa1-30a1-472f-bb6b-bdbc1aab34a0-combined-ca-bundle podName:fbaacaa1-30a1-472f-bb6b-bdbc1aab34a0 nodeName:}" failed. No retries permitted until 2025-10-09 15:01:24.084979298 +0000 UTC m=+5759.858770347 (durationBeforeRetry 500ms). Error: error cleaning subPath mounts for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/fbaacaa1-30a1-472f-bb6b-bdbc1aab34a0-combined-ca-bundle") pod "fbaacaa1-30a1-472f-bb6b-bdbc1aab34a0" (UID: "fbaacaa1-30a1-472f-bb6b-bdbc1aab34a0") : error deleting /var/lib/kubelet/pods/fbaacaa1-30a1-472f-bb6b-bdbc1aab34a0/volume-subpaths: remove /var/lib/kubelet/pods/fbaacaa1-30a1-472f-bb6b-bdbc1aab34a0/volume-subpaths: no such file or directory Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.592748 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fbaacaa1-30a1-472f-bb6b-bdbc1aab34a0-config-data" (OuterVolumeSpecName: "config-data") pod "fbaacaa1-30a1-472f-bb6b-bdbc1aab34a0" (UID: "fbaacaa1-30a1-472f-bb6b-bdbc1aab34a0"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.601997 4762 scope.go:117] "RemoveContainer" containerID="b7e68933a9a9c53c9232e10d05da4ecc384188d9d36cfd9762cef05b39400dc2" Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.602091 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Oct 09 15:01:23 crc kubenswrapper[4762]: E1009 15:01:23.602539 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b7e68933a9a9c53c9232e10d05da4ecc384188d9d36cfd9762cef05b39400dc2\": container with ID starting with b7e68933a9a9c53c9232e10d05da4ecc384188d9d36cfd9762cef05b39400dc2 not found: ID does not exist" containerID="b7e68933a9a9c53c9232e10d05da4ecc384188d9d36cfd9762cef05b39400dc2" Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.602574 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b7e68933a9a9c53c9232e10d05da4ecc384188d9d36cfd9762cef05b39400dc2"} err="failed to get container status \"b7e68933a9a9c53c9232e10d05da4ecc384188d9d36cfd9762cef05b39400dc2\": rpc error: code = NotFound desc = could not find container \"b7e68933a9a9c53c9232e10d05da4ecc384188d9d36cfd9762cef05b39400dc2\": container with ID starting with b7e68933a9a9c53c9232e10d05da4ecc384188d9d36cfd9762cef05b39400dc2 not found: ID does not exist" Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.602602 4762 scope.go:117] "RemoveContainer" containerID="8d8f2594356b0eae1c6a4dc924282c18feaf2bbbf38165706edf1ef8cfc6985c" Oct 09 15:01:23 crc kubenswrapper[4762]: E1009 15:01:23.603149 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8d8f2594356b0eae1c6a4dc924282c18feaf2bbbf38165706edf1ef8cfc6985c\": container with ID starting with 8d8f2594356b0eae1c6a4dc924282c18feaf2bbbf38165706edf1ef8cfc6985c not found: ID does not exist" containerID="8d8f2594356b0eae1c6a4dc924282c18feaf2bbbf38165706edf1ef8cfc6985c" Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.603184 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8d8f2594356b0eae1c6a4dc924282c18feaf2bbbf38165706edf1ef8cfc6985c"} err="failed to get container status \"8d8f2594356b0eae1c6a4dc924282c18feaf2bbbf38165706edf1ef8cfc6985c\": rpc error: code = NotFound desc = could not find container \"8d8f2594356b0eae1c6a4dc924282c18feaf2bbbf38165706edf1ef8cfc6985c\": container with ID starting with 8d8f2594356b0eae1c6a4dc924282c18feaf2bbbf38165706edf1ef8cfc6985c not found: ID does not exist" Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.603214 4762 scope.go:117] "RemoveContainer" containerID="8521e36928271b846e94877cd41a762d87a0a2b673107c707d297dbbad2b3429" Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.616669 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.629238 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.631979 4762 scope.go:117] "RemoveContainer" containerID="8521e36928271b846e94877cd41a762d87a0a2b673107c707d297dbbad2b3429" Oct 09 15:01:23 crc kubenswrapper[4762]: E1009 15:01:23.632428 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"8521e36928271b846e94877cd41a762d87a0a2b673107c707d297dbbad2b3429\": container with ID starting with 8521e36928271b846e94877cd41a762d87a0a2b673107c707d297dbbad2b3429 not found: ID does not exist" containerID="8521e36928271b846e94877cd41a762d87a0a2b673107c707d297dbbad2b3429" Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.632470 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8521e36928271b846e94877cd41a762d87a0a2b673107c707d297dbbad2b3429"} err="failed to get container status \"8521e36928271b846e94877cd41a762d87a0a2b673107c707d297dbbad2b3429\": rpc error: code = NotFound desc = could not find container \"8521e36928271b846e94877cd41a762d87a0a2b673107c707d297dbbad2b3429\": container with ID starting with 8521e36928271b846e94877cd41a762d87a0a2b673107c707d297dbbad2b3429 not found: ID does not exist" Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.632498 4762 scope.go:117] "RemoveContainer" containerID="1ba16ce4c116f6a9b3aa59288476d1a6c8cef873dfef38e75255b439c2963beb" Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.636365 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Oct 09 15:01:23 crc kubenswrapper[4762]: E1009 15:01:23.636955 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f0692db6-e35e-436d-81d6-f587c0ad34dd" containerName="nova-scheduler-scheduler" Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.636973 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="f0692db6-e35e-436d-81d6-f587c0ad34dd" containerName="nova-scheduler-scheduler" Oct 09 15:01:23 crc kubenswrapper[4762]: E1009 15:01:23.637001 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="be2ba098-febe-44ac-bb12-b3c29e9aaa8f" containerName="nova-api-log" Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.637008 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="be2ba098-febe-44ac-bb12-b3c29e9aaa8f" containerName="nova-api-log" Oct 09 15:01:23 crc kubenswrapper[4762]: E1009 15:01:23.637024 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="be2ba098-febe-44ac-bb12-b3c29e9aaa8f" containerName="nova-api-api" Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.637032 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="be2ba098-febe-44ac-bb12-b3c29e9aaa8f" containerName="nova-api-api" Oct 09 15:01:23 crc kubenswrapper[4762]: E1009 15:01:23.637043 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b2422116-f72c-42ae-83b2-084763842592" containerName="nova-manage" Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.637049 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="b2422116-f72c-42ae-83b2-084763842592" containerName="nova-manage" Oct 09 15:01:23 crc kubenswrapper[4762]: E1009 15:01:23.637066 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fbaacaa1-30a1-472f-bb6b-bdbc1aab34a0" containerName="nova-metadata-log" Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.637072 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="fbaacaa1-30a1-472f-bb6b-bdbc1aab34a0" containerName="nova-metadata-log" Oct 09 15:01:23 crc kubenswrapper[4762]: E1009 15:01:23.637085 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fbaacaa1-30a1-472f-bb6b-bdbc1aab34a0" containerName="nova-metadata-metadata" Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.637093 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="fbaacaa1-30a1-472f-bb6b-bdbc1aab34a0" 
containerName="nova-metadata-metadata" Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.637289 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="be2ba098-febe-44ac-bb12-b3c29e9aaa8f" containerName="nova-api-api" Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.637298 4762 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fbaacaa1-30a1-472f-bb6b-bdbc1aab34a0-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.637321 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4sz6x\" (UniqueName: \"kubernetes.io/projected/fbaacaa1-30a1-472f-bb6b-bdbc1aab34a0-kube-api-access-4sz6x\") on node \"crc\" DevicePath \"\"" Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.637305 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="be2ba098-febe-44ac-bb12-b3c29e9aaa8f" containerName="nova-api-log" Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.637333 4762 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fbaacaa1-30a1-472f-bb6b-bdbc1aab34a0-logs\") on node \"crc\" DevicePath \"\"" Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.637349 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="f0692db6-e35e-436d-81d6-f587c0ad34dd" containerName="nova-scheduler-scheduler" Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.637372 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="fbaacaa1-30a1-472f-bb6b-bdbc1aab34a0" containerName="nova-metadata-log" Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.637384 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="b2422116-f72c-42ae-83b2-084763842592" containerName="nova-manage" Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.637414 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="fbaacaa1-30a1-472f-bb6b-bdbc1aab34a0" containerName="nova-metadata-metadata" Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.638574 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.640603 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.647102 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.655498 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.656740 4762 scope.go:117] "RemoveContainer" containerID="2b2500fdcfb8a55897f270ca78a329aedb0ae3f461d9deea44e237f0f171fb31" Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.657103 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.658865 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.664796 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.680017 4762 scope.go:117] "RemoveContainer" containerID="1ba16ce4c116f6a9b3aa59288476d1a6c8cef873dfef38e75255b439c2963beb" Oct 09 15:01:23 crc kubenswrapper[4762]: E1009 15:01:23.680434 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1ba16ce4c116f6a9b3aa59288476d1a6c8cef873dfef38e75255b439c2963beb\": container with ID starting with 1ba16ce4c116f6a9b3aa59288476d1a6c8cef873dfef38e75255b439c2963beb not found: ID does not exist" containerID="1ba16ce4c116f6a9b3aa59288476d1a6c8cef873dfef38e75255b439c2963beb" Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.680463 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1ba16ce4c116f6a9b3aa59288476d1a6c8cef873dfef38e75255b439c2963beb"} err="failed to get container status \"1ba16ce4c116f6a9b3aa59288476d1a6c8cef873dfef38e75255b439c2963beb\": rpc error: code = NotFound desc = could not find container \"1ba16ce4c116f6a9b3aa59288476d1a6c8cef873dfef38e75255b439c2963beb\": container with ID starting with 1ba16ce4c116f6a9b3aa59288476d1a6c8cef873dfef38e75255b439c2963beb not found: ID does not exist" Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.680488 4762 scope.go:117] "RemoveContainer" containerID="2b2500fdcfb8a55897f270ca78a329aedb0ae3f461d9deea44e237f0f171fb31" Oct 09 15:01:23 crc kubenswrapper[4762]: E1009 15:01:23.680737 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2b2500fdcfb8a55897f270ca78a329aedb0ae3f461d9deea44e237f0f171fb31\": container with ID starting with 2b2500fdcfb8a55897f270ca78a329aedb0ae3f461d9deea44e237f0f171fb31 not found: ID does not exist" containerID="2b2500fdcfb8a55897f270ca78a329aedb0ae3f461d9deea44e237f0f171fb31" Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.680761 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2b2500fdcfb8a55897f270ca78a329aedb0ae3f461d9deea44e237f0f171fb31"} err="failed to get container status \"2b2500fdcfb8a55897f270ca78a329aedb0ae3f461d9deea44e237f0f171fb31\": rpc error: code = NotFound desc = could not find container \"2b2500fdcfb8a55897f270ca78a329aedb0ae3f461d9deea44e237f0f171fb31\": container with ID starting with 2b2500fdcfb8a55897f270ca78a329aedb0ae3f461d9deea44e237f0f171fb31 not found: ID does not exist" Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.739429 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0719eaf7-7d99-461f-87eb-a59afb72d0bf-logs\") pod \"nova-api-0\" (UID: \"0719eaf7-7d99-461f-87eb-a59afb72d0bf\") " pod="openstack/nova-api-0" Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.739513 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-77khq\" (UniqueName: \"kubernetes.io/projected/86fd2721-354b-4c22-91e5-ba277c8b49ba-kube-api-access-77khq\") pod \"nova-scheduler-0\" (UID: 
\"86fd2721-354b-4c22-91e5-ba277c8b49ba\") " pod="openstack/nova-scheduler-0" Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.739612 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/86fd2721-354b-4c22-91e5-ba277c8b49ba-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"86fd2721-354b-4c22-91e5-ba277c8b49ba\") " pod="openstack/nova-scheduler-0" Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.739699 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8w6ss\" (UniqueName: \"kubernetes.io/projected/0719eaf7-7d99-461f-87eb-a59afb72d0bf-kube-api-access-8w6ss\") pod \"nova-api-0\" (UID: \"0719eaf7-7d99-461f-87eb-a59afb72d0bf\") " pod="openstack/nova-api-0" Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.739729 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/86fd2721-354b-4c22-91e5-ba277c8b49ba-config-data\") pod \"nova-scheduler-0\" (UID: \"86fd2721-354b-4c22-91e5-ba277c8b49ba\") " pod="openstack/nova-scheduler-0" Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.739778 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0719eaf7-7d99-461f-87eb-a59afb72d0bf-config-data\") pod \"nova-api-0\" (UID: \"0719eaf7-7d99-461f-87eb-a59afb72d0bf\") " pod="openstack/nova-api-0" Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.739848 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0719eaf7-7d99-461f-87eb-a59afb72d0bf-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"0719eaf7-7d99-461f-87eb-a59afb72d0bf\") " pod="openstack/nova-api-0" Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.841952 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8w6ss\" (UniqueName: \"kubernetes.io/projected/0719eaf7-7d99-461f-87eb-a59afb72d0bf-kube-api-access-8w6ss\") pod \"nova-api-0\" (UID: \"0719eaf7-7d99-461f-87eb-a59afb72d0bf\") " pod="openstack/nova-api-0" Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.842073 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/86fd2721-354b-4c22-91e5-ba277c8b49ba-config-data\") pod \"nova-scheduler-0\" (UID: \"86fd2721-354b-4c22-91e5-ba277c8b49ba\") " pod="openstack/nova-scheduler-0" Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.842182 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0719eaf7-7d99-461f-87eb-a59afb72d0bf-config-data\") pod \"nova-api-0\" (UID: \"0719eaf7-7d99-461f-87eb-a59afb72d0bf\") " pod="openstack/nova-api-0" Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.842249 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0719eaf7-7d99-461f-87eb-a59afb72d0bf-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"0719eaf7-7d99-461f-87eb-a59afb72d0bf\") " pod="openstack/nova-api-0" Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.844109 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" 
(UniqueName: \"kubernetes.io/empty-dir/0719eaf7-7d99-461f-87eb-a59afb72d0bf-logs\") pod \"nova-api-0\" (UID: \"0719eaf7-7d99-461f-87eb-a59afb72d0bf\") " pod="openstack/nova-api-0" Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.844240 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-77khq\" (UniqueName: \"kubernetes.io/projected/86fd2721-354b-4c22-91e5-ba277c8b49ba-kube-api-access-77khq\") pod \"nova-scheduler-0\" (UID: \"86fd2721-354b-4c22-91e5-ba277c8b49ba\") " pod="openstack/nova-scheduler-0" Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.844401 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/86fd2721-354b-4c22-91e5-ba277c8b49ba-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"86fd2721-354b-4c22-91e5-ba277c8b49ba\") " pod="openstack/nova-scheduler-0" Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.846035 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0719eaf7-7d99-461f-87eb-a59afb72d0bf-logs\") pod \"nova-api-0\" (UID: \"0719eaf7-7d99-461f-87eb-a59afb72d0bf\") " pod="openstack/nova-api-0" Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.849808 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/86fd2721-354b-4c22-91e5-ba277c8b49ba-config-data\") pod \"nova-scheduler-0\" (UID: \"86fd2721-354b-4c22-91e5-ba277c8b49ba\") " pod="openstack/nova-scheduler-0" Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.849921 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/86fd2721-354b-4c22-91e5-ba277c8b49ba-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"86fd2721-354b-4c22-91e5-ba277c8b49ba\") " pod="openstack/nova-scheduler-0" Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.856988 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0719eaf7-7d99-461f-87eb-a59afb72d0bf-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"0719eaf7-7d99-461f-87eb-a59afb72d0bf\") " pod="openstack/nova-api-0" Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.857581 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0719eaf7-7d99-461f-87eb-a59afb72d0bf-config-data\") pod \"nova-api-0\" (UID: \"0719eaf7-7d99-461f-87eb-a59afb72d0bf\") " pod="openstack/nova-api-0" Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.867550 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8w6ss\" (UniqueName: \"kubernetes.io/projected/0719eaf7-7d99-461f-87eb-a59afb72d0bf-kube-api-access-8w6ss\") pod \"nova-api-0\" (UID: \"0719eaf7-7d99-461f-87eb-a59afb72d0bf\") " pod="openstack/nova-api-0" Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.868863 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-77khq\" (UniqueName: \"kubernetes.io/projected/86fd2721-354b-4c22-91e5-ba277c8b49ba-kube-api-access-77khq\") pod \"nova-scheduler-0\" (UID: \"86fd2721-354b-4c22-91e5-ba277c8b49ba\") " pod="openstack/nova-scheduler-0" Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.956018 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 09 15:01:23 crc kubenswrapper[4762]: I1009 15:01:23.978905 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 09 15:01:24 crc kubenswrapper[4762]: I1009 15:01:24.148884 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbaacaa1-30a1-472f-bb6b-bdbc1aab34a0-combined-ca-bundle\") pod \"fbaacaa1-30a1-472f-bb6b-bdbc1aab34a0\" (UID: \"fbaacaa1-30a1-472f-bb6b-bdbc1aab34a0\") " Oct 09 15:01:24 crc kubenswrapper[4762]: I1009 15:01:24.155820 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fbaacaa1-30a1-472f-bb6b-bdbc1aab34a0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fbaacaa1-30a1-472f-bb6b-bdbc1aab34a0" (UID: "fbaacaa1-30a1-472f-bb6b-bdbc1aab34a0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:01:24 crc kubenswrapper[4762]: I1009 15:01:24.251740 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbaacaa1-30a1-472f-bb6b-bdbc1aab34a0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 15:01:24 crc kubenswrapper[4762]: W1009 15:01:24.420063 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0719eaf7_7d99_461f_87eb_a59afb72d0bf.slice/crio-0689ce6b5e86d2709a723e53d8f443b87a20a909f3ac23dda58263708539fe63 WatchSource:0}: Error finding container 0689ce6b5e86d2709a723e53d8f443b87a20a909f3ac23dda58263708539fe63: Status 404 returned error can't find the container with id 0689ce6b5e86d2709a723e53d8f443b87a20a909f3ac23dda58263708539fe63 Oct 09 15:01:24 crc kubenswrapper[4762]: I1009 15:01:24.422473 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 09 15:01:24 crc kubenswrapper[4762]: I1009 15:01:24.462459 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 09 15:01:24 crc kubenswrapper[4762]: I1009 15:01:24.474716 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Oct 09 15:01:24 crc kubenswrapper[4762]: I1009 15:01:24.499555 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Oct 09 15:01:24 crc kubenswrapper[4762]: I1009 15:01:24.501733 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 09 15:01:24 crc kubenswrapper[4762]: I1009 15:01:24.504249 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Oct 09 15:01:24 crc kubenswrapper[4762]: W1009 15:01:24.504253 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod86fd2721_354b_4c22_91e5_ba277c8b49ba.slice/crio-3dc9373db0e9b12a8eeb6ed8295e591e88acae30906faf84d8f6f077465c2482 WatchSource:0}: Error finding container 3dc9373db0e9b12a8eeb6ed8295e591e88acae30906faf84d8f6f077465c2482: Status 404 returned error can't find the container with id 3dc9373db0e9b12a8eeb6ed8295e591e88acae30906faf84d8f6f077465c2482 Oct 09 15:01:24 crc kubenswrapper[4762]: I1009 15:01:24.515025 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 09 15:01:24 crc kubenswrapper[4762]: I1009 15:01:24.532920 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 09 15:01:24 crc kubenswrapper[4762]: I1009 15:01:24.548208 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"86fd2721-354b-4c22-91e5-ba277c8b49ba","Type":"ContainerStarted","Data":"3dc9373db0e9b12a8eeb6ed8295e591e88acae30906faf84d8f6f077465c2482"} Oct 09 15:01:24 crc kubenswrapper[4762]: I1009 15:01:24.556528 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"0719eaf7-7d99-461f-87eb-a59afb72d0bf","Type":"ContainerStarted","Data":"0689ce6b5e86d2709a723e53d8f443b87a20a909f3ac23dda58263708539fe63"} Oct 09 15:01:24 crc kubenswrapper[4762]: I1009 15:01:24.561229 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/916a7bbc-1cdb-4ff9-aba8-93f48e44d0bc-config-data\") pod \"nova-metadata-0\" (UID: \"916a7bbc-1cdb-4ff9-aba8-93f48e44d0bc\") " pod="openstack/nova-metadata-0" Oct 09 15:01:24 crc kubenswrapper[4762]: I1009 15:01:24.561312 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/916a7bbc-1cdb-4ff9-aba8-93f48e44d0bc-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"916a7bbc-1cdb-4ff9-aba8-93f48e44d0bc\") " pod="openstack/nova-metadata-0" Oct 09 15:01:24 crc kubenswrapper[4762]: I1009 15:01:24.561348 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/916a7bbc-1cdb-4ff9-aba8-93f48e44d0bc-logs\") pod \"nova-metadata-0\" (UID: \"916a7bbc-1cdb-4ff9-aba8-93f48e44d0bc\") " pod="openstack/nova-metadata-0" Oct 09 15:01:24 crc kubenswrapper[4762]: I1009 15:01:24.561613 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dqzxw\" (UniqueName: \"kubernetes.io/projected/916a7bbc-1cdb-4ff9-aba8-93f48e44d0bc-kube-api-access-dqzxw\") pod \"nova-metadata-0\" (UID: \"916a7bbc-1cdb-4ff9-aba8-93f48e44d0bc\") " pod="openstack/nova-metadata-0" Oct 09 15:01:24 crc kubenswrapper[4762]: I1009 15:01:24.664010 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dqzxw\" (UniqueName: \"kubernetes.io/projected/916a7bbc-1cdb-4ff9-aba8-93f48e44d0bc-kube-api-access-dqzxw\") pod \"nova-metadata-0\" (UID: \"916a7bbc-1cdb-4ff9-aba8-93f48e44d0bc\") " 
pod="openstack/nova-metadata-0" Oct 09 15:01:24 crc kubenswrapper[4762]: I1009 15:01:24.664089 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/916a7bbc-1cdb-4ff9-aba8-93f48e44d0bc-config-data\") pod \"nova-metadata-0\" (UID: \"916a7bbc-1cdb-4ff9-aba8-93f48e44d0bc\") " pod="openstack/nova-metadata-0" Oct 09 15:01:24 crc kubenswrapper[4762]: I1009 15:01:24.664125 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/916a7bbc-1cdb-4ff9-aba8-93f48e44d0bc-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"916a7bbc-1cdb-4ff9-aba8-93f48e44d0bc\") " pod="openstack/nova-metadata-0" Oct 09 15:01:24 crc kubenswrapper[4762]: I1009 15:01:24.664147 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/916a7bbc-1cdb-4ff9-aba8-93f48e44d0bc-logs\") pod \"nova-metadata-0\" (UID: \"916a7bbc-1cdb-4ff9-aba8-93f48e44d0bc\") " pod="openstack/nova-metadata-0" Oct 09 15:01:24 crc kubenswrapper[4762]: I1009 15:01:24.664662 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/916a7bbc-1cdb-4ff9-aba8-93f48e44d0bc-logs\") pod \"nova-metadata-0\" (UID: \"916a7bbc-1cdb-4ff9-aba8-93f48e44d0bc\") " pod="openstack/nova-metadata-0" Oct 09 15:01:24 crc kubenswrapper[4762]: I1009 15:01:24.667032 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/916a7bbc-1cdb-4ff9-aba8-93f48e44d0bc-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"916a7bbc-1cdb-4ff9-aba8-93f48e44d0bc\") " pod="openstack/nova-metadata-0" Oct 09 15:01:24 crc kubenswrapper[4762]: I1009 15:01:24.667773 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/916a7bbc-1cdb-4ff9-aba8-93f48e44d0bc-config-data\") pod \"nova-metadata-0\" (UID: \"916a7bbc-1cdb-4ff9-aba8-93f48e44d0bc\") " pod="openstack/nova-metadata-0" Oct 09 15:01:24 crc kubenswrapper[4762]: I1009 15:01:24.682133 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dqzxw\" (UniqueName: \"kubernetes.io/projected/916a7bbc-1cdb-4ff9-aba8-93f48e44d0bc-kube-api-access-dqzxw\") pod \"nova-metadata-0\" (UID: \"916a7bbc-1cdb-4ff9-aba8-93f48e44d0bc\") " pod="openstack/nova-metadata-0" Oct 09 15:01:24 crc kubenswrapper[4762]: I1009 15:01:24.840487 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 09 15:01:24 crc kubenswrapper[4762]: I1009 15:01:24.985772 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="be2ba098-febe-44ac-bb12-b3c29e9aaa8f" path="/var/lib/kubelet/pods/be2ba098-febe-44ac-bb12-b3c29e9aaa8f/volumes" Oct 09 15:01:24 crc kubenswrapper[4762]: I1009 15:01:24.994697 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f0692db6-e35e-436d-81d6-f587c0ad34dd" path="/var/lib/kubelet/pods/f0692db6-e35e-436d-81d6-f587c0ad34dd/volumes" Oct 09 15:01:24 crc kubenswrapper[4762]: I1009 15:01:24.999388 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fbaacaa1-30a1-472f-bb6b-bdbc1aab34a0" path="/var/lib/kubelet/pods/fbaacaa1-30a1-472f-bb6b-bdbc1aab34a0/volumes" Oct 09 15:01:25 crc kubenswrapper[4762]: I1009 15:01:25.291854 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 09 15:01:25 crc kubenswrapper[4762]: I1009 15:01:25.573827 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"916a7bbc-1cdb-4ff9-aba8-93f48e44d0bc","Type":"ContainerStarted","Data":"2aad9fa3bfe787bbaf51acb89a9c6dd57b3e820e06eda0485dc0c4fb5af34639"} Oct 09 15:01:25 crc kubenswrapper[4762]: I1009 15:01:25.574144 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"916a7bbc-1cdb-4ff9-aba8-93f48e44d0bc","Type":"ContainerStarted","Data":"96da8ce0dc1873e3502f1fec7e10583798040d5f1a433dcc8b21d17fc30d27d9"} Oct 09 15:01:25 crc kubenswrapper[4762]: I1009 15:01:25.576242 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"86fd2721-354b-4c22-91e5-ba277c8b49ba","Type":"ContainerStarted","Data":"32741d5fb11453900a4986db0b82e546861eb2f926aa83babbf3872160023319"} Oct 09 15:01:25 crc kubenswrapper[4762]: I1009 15:01:25.581090 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"0719eaf7-7d99-461f-87eb-a59afb72d0bf","Type":"ContainerStarted","Data":"47ee8af1321c47367005acb90e0e1865f92048446ca2488f57eba1e99d8b9b0f"} Oct 09 15:01:25 crc kubenswrapper[4762]: I1009 15:01:25.581132 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"0719eaf7-7d99-461f-87eb-a59afb72d0bf","Type":"ContainerStarted","Data":"237d57cdf05ac66216be6a37caf4682604833d1e9ccd043b7ebe91cbf950fb9f"} Oct 09 15:01:25 crc kubenswrapper[4762]: I1009 15:01:25.595076 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.595060668 podStartE2EDuration="2.595060668s" podCreationTimestamp="2025-10-09 15:01:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 15:01:25.59279987 +0000 UTC m=+5761.366590909" watchObservedRunningTime="2025-10-09 15:01:25.595060668 +0000 UTC m=+5761.368851707" Oct 09 15:01:25 crc kubenswrapper[4762]: I1009 15:01:25.617169 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.617148023 podStartE2EDuration="2.617148023s" podCreationTimestamp="2025-10-09 15:01:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 15:01:25.611543697 +0000 UTC m=+5761.385334756" watchObservedRunningTime="2025-10-09 15:01:25.617148023 +0000 UTC 
m=+5761.390939062" Oct 09 15:01:26 crc kubenswrapper[4762]: I1009 15:01:26.596382 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"916a7bbc-1cdb-4ff9-aba8-93f48e44d0bc","Type":"ContainerStarted","Data":"3f0ba421cd0c7d22de9fe94627781f111e4d43969f14da04dd768d3e0c454e24"} Oct 09 15:01:26 crc kubenswrapper[4762]: I1009 15:01:26.623876 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.623851842 podStartE2EDuration="2.623851842s" podCreationTimestamp="2025-10-09 15:01:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 15:01:26.618196335 +0000 UTC m=+5762.391987384" watchObservedRunningTime="2025-10-09 15:01:26.623851842 +0000 UTC m=+5762.397642881" Oct 09 15:01:28 crc kubenswrapper[4762]: I1009 15:01:28.978895 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Oct 09 15:01:29 crc kubenswrapper[4762]: I1009 15:01:29.840797 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 09 15:01:29 crc kubenswrapper[4762]: I1009 15:01:29.841151 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 09 15:01:33 crc kubenswrapper[4762]: I1009 15:01:33.956683 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 09 15:01:33 crc kubenswrapper[4762]: I1009 15:01:33.957329 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 09 15:01:33 crc kubenswrapper[4762]: I1009 15:01:33.978859 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Oct 09 15:01:34 crc kubenswrapper[4762]: I1009 15:01:34.007269 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Oct 09 15:01:34 crc kubenswrapper[4762]: I1009 15:01:34.717304 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Oct 09 15:01:34 crc kubenswrapper[4762]: I1009 15:01:34.841365 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Oct 09 15:01:34 crc kubenswrapper[4762]: I1009 15:01:34.841443 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Oct 09 15:01:35 crc kubenswrapper[4762]: I1009 15:01:35.038842 4762 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="0719eaf7-7d99-461f-87eb-a59afb72d0bf" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.1.76:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 09 15:01:35 crc kubenswrapper[4762]: I1009 15:01:35.038812 4762 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="0719eaf7-7d99-461f-87eb-a59afb72d0bf" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.1.76:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 09 15:01:35 crc kubenswrapper[4762]: I1009 15:01:35.923913 4762 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="916a7bbc-1cdb-4ff9-aba8-93f48e44d0bc" containerName="nova-metadata-metadata" probeResult="failure" output="Get 
\"http://10.217.1.78:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 09 15:01:35 crc kubenswrapper[4762]: I1009 15:01:35.923959 4762 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="916a7bbc-1cdb-4ff9-aba8-93f48e44d0bc" containerName="nova-metadata-log" probeResult="failure" output="Get \"http://10.217.1.78:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 09 15:01:41 crc kubenswrapper[4762]: I1009 15:01:41.969050 4762 patch_prober.go:28] interesting pod/machine-config-daemon-5v6hv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 15:01:41 crc kubenswrapper[4762]: I1009 15:01:41.969703 4762 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 15:01:43 crc kubenswrapper[4762]: I1009 15:01:43.960938 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Oct 09 15:01:43 crc kubenswrapper[4762]: I1009 15:01:43.961775 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Oct 09 15:01:43 crc kubenswrapper[4762]: I1009 15:01:43.966512 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Oct 09 15:01:43 crc kubenswrapper[4762]: I1009 15:01:43.968472 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Oct 09 15:01:44 crc kubenswrapper[4762]: I1009 15:01:44.765382 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Oct 09 15:01:44 crc kubenswrapper[4762]: I1009 15:01:44.770223 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Oct 09 15:01:44 crc kubenswrapper[4762]: I1009 15:01:44.847180 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Oct 09 15:01:44 crc kubenswrapper[4762]: I1009 15:01:44.851018 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Oct 09 15:01:44 crc kubenswrapper[4762]: I1009 15:01:44.854987 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Oct 09 15:01:44 crc kubenswrapper[4762]: I1009 15:01:44.985666 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-84486dbb57-bsk9z"] Oct 09 15:01:44 crc kubenswrapper[4762]: I1009 15:01:44.988472 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-84486dbb57-bsk9z" Oct 09 15:01:44 crc kubenswrapper[4762]: I1009 15:01:44.995341 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-84486dbb57-bsk9z"] Oct 09 15:01:45 crc kubenswrapper[4762]: I1009 15:01:45.092352 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f910c1bd-7092-4524-8fd2-e3612bdb268c-ovsdbserver-sb\") pod \"dnsmasq-dns-84486dbb57-bsk9z\" (UID: \"f910c1bd-7092-4524-8fd2-e3612bdb268c\") " pod="openstack/dnsmasq-dns-84486dbb57-bsk9z" Oct 09 15:01:45 crc kubenswrapper[4762]: I1009 15:01:45.092458 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f910c1bd-7092-4524-8fd2-e3612bdb268c-config\") pod \"dnsmasq-dns-84486dbb57-bsk9z\" (UID: \"f910c1bd-7092-4524-8fd2-e3612bdb268c\") " pod="openstack/dnsmasq-dns-84486dbb57-bsk9z" Oct 09 15:01:45 crc kubenswrapper[4762]: I1009 15:01:45.092561 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f910c1bd-7092-4524-8fd2-e3612bdb268c-ovsdbserver-nb\") pod \"dnsmasq-dns-84486dbb57-bsk9z\" (UID: \"f910c1bd-7092-4524-8fd2-e3612bdb268c\") " pod="openstack/dnsmasq-dns-84486dbb57-bsk9z" Oct 09 15:01:45 crc kubenswrapper[4762]: I1009 15:01:45.092584 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nbr68\" (UniqueName: \"kubernetes.io/projected/f910c1bd-7092-4524-8fd2-e3612bdb268c-kube-api-access-nbr68\") pod \"dnsmasq-dns-84486dbb57-bsk9z\" (UID: \"f910c1bd-7092-4524-8fd2-e3612bdb268c\") " pod="openstack/dnsmasq-dns-84486dbb57-bsk9z" Oct 09 15:01:45 crc kubenswrapper[4762]: I1009 15:01:45.092709 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f910c1bd-7092-4524-8fd2-e3612bdb268c-dns-svc\") pod \"dnsmasq-dns-84486dbb57-bsk9z\" (UID: \"f910c1bd-7092-4524-8fd2-e3612bdb268c\") " pod="openstack/dnsmasq-dns-84486dbb57-bsk9z" Oct 09 15:01:45 crc kubenswrapper[4762]: I1009 15:01:45.194456 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f910c1bd-7092-4524-8fd2-e3612bdb268c-ovsdbserver-nb\") pod \"dnsmasq-dns-84486dbb57-bsk9z\" (UID: \"f910c1bd-7092-4524-8fd2-e3612bdb268c\") " pod="openstack/dnsmasq-dns-84486dbb57-bsk9z" Oct 09 15:01:45 crc kubenswrapper[4762]: I1009 15:01:45.194511 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nbr68\" (UniqueName: \"kubernetes.io/projected/f910c1bd-7092-4524-8fd2-e3612bdb268c-kube-api-access-nbr68\") pod \"dnsmasq-dns-84486dbb57-bsk9z\" (UID: \"f910c1bd-7092-4524-8fd2-e3612bdb268c\") " pod="openstack/dnsmasq-dns-84486dbb57-bsk9z" Oct 09 15:01:45 crc kubenswrapper[4762]: I1009 15:01:45.194533 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f910c1bd-7092-4524-8fd2-e3612bdb268c-dns-svc\") pod \"dnsmasq-dns-84486dbb57-bsk9z\" (UID: \"f910c1bd-7092-4524-8fd2-e3612bdb268c\") " pod="openstack/dnsmasq-dns-84486dbb57-bsk9z" Oct 09 15:01:45 crc kubenswrapper[4762]: I1009 15:01:45.194597 4762 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f910c1bd-7092-4524-8fd2-e3612bdb268c-ovsdbserver-sb\") pod \"dnsmasq-dns-84486dbb57-bsk9z\" (UID: \"f910c1bd-7092-4524-8fd2-e3612bdb268c\") " pod="openstack/dnsmasq-dns-84486dbb57-bsk9z" Oct 09 15:01:45 crc kubenswrapper[4762]: I1009 15:01:45.194737 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f910c1bd-7092-4524-8fd2-e3612bdb268c-config\") pod \"dnsmasq-dns-84486dbb57-bsk9z\" (UID: \"f910c1bd-7092-4524-8fd2-e3612bdb268c\") " pod="openstack/dnsmasq-dns-84486dbb57-bsk9z" Oct 09 15:01:45 crc kubenswrapper[4762]: I1009 15:01:45.195680 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f910c1bd-7092-4524-8fd2-e3612bdb268c-dns-svc\") pod \"dnsmasq-dns-84486dbb57-bsk9z\" (UID: \"f910c1bd-7092-4524-8fd2-e3612bdb268c\") " pod="openstack/dnsmasq-dns-84486dbb57-bsk9z" Oct 09 15:01:45 crc kubenswrapper[4762]: I1009 15:01:45.195778 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f910c1bd-7092-4524-8fd2-e3612bdb268c-ovsdbserver-sb\") pod \"dnsmasq-dns-84486dbb57-bsk9z\" (UID: \"f910c1bd-7092-4524-8fd2-e3612bdb268c\") " pod="openstack/dnsmasq-dns-84486dbb57-bsk9z" Oct 09 15:01:45 crc kubenswrapper[4762]: I1009 15:01:45.195896 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f910c1bd-7092-4524-8fd2-e3612bdb268c-config\") pod \"dnsmasq-dns-84486dbb57-bsk9z\" (UID: \"f910c1bd-7092-4524-8fd2-e3612bdb268c\") " pod="openstack/dnsmasq-dns-84486dbb57-bsk9z" Oct 09 15:01:45 crc kubenswrapper[4762]: I1009 15:01:45.196451 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f910c1bd-7092-4524-8fd2-e3612bdb268c-ovsdbserver-nb\") pod \"dnsmasq-dns-84486dbb57-bsk9z\" (UID: \"f910c1bd-7092-4524-8fd2-e3612bdb268c\") " pod="openstack/dnsmasq-dns-84486dbb57-bsk9z" Oct 09 15:01:45 crc kubenswrapper[4762]: I1009 15:01:45.234439 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nbr68\" (UniqueName: \"kubernetes.io/projected/f910c1bd-7092-4524-8fd2-e3612bdb268c-kube-api-access-nbr68\") pod \"dnsmasq-dns-84486dbb57-bsk9z\" (UID: \"f910c1bd-7092-4524-8fd2-e3612bdb268c\") " pod="openstack/dnsmasq-dns-84486dbb57-bsk9z" Oct 09 15:01:45 crc kubenswrapper[4762]: I1009 15:01:45.327140 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-84486dbb57-bsk9z" Oct 09 15:01:45 crc kubenswrapper[4762]: I1009 15:01:45.780279 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Oct 09 15:01:45 crc kubenswrapper[4762]: I1009 15:01:45.830261 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-84486dbb57-bsk9z"] Oct 09 15:01:46 crc kubenswrapper[4762]: I1009 15:01:46.786445 4762 generic.go:334] "Generic (PLEG): container finished" podID="f910c1bd-7092-4524-8fd2-e3612bdb268c" containerID="eb6091a4bea7e6616ea57f3bd1c85cd2aaeba71fc29bbcd7bb42b1021350f1fa" exitCode=0 Oct 09 15:01:46 crc kubenswrapper[4762]: I1009 15:01:46.786540 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84486dbb57-bsk9z" event={"ID":"f910c1bd-7092-4524-8fd2-e3612bdb268c","Type":"ContainerDied","Data":"eb6091a4bea7e6616ea57f3bd1c85cd2aaeba71fc29bbcd7bb42b1021350f1fa"} Oct 09 15:01:46 crc kubenswrapper[4762]: I1009 15:01:46.786844 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84486dbb57-bsk9z" event={"ID":"f910c1bd-7092-4524-8fd2-e3612bdb268c","Type":"ContainerStarted","Data":"bdac00e9ebb8d93e5b95d6a5df947198821ac8669e9255e3bd2e0578cadc4559"} Oct 09 15:01:47 crc kubenswrapper[4762]: I1009 15:01:47.804283 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84486dbb57-bsk9z" event={"ID":"f910c1bd-7092-4524-8fd2-e3612bdb268c","Type":"ContainerStarted","Data":"83c5f3091b6a9df7c98181c9d09065689902f7e29bbb06a7c418769f03b49ac1"} Oct 09 15:01:47 crc kubenswrapper[4762]: I1009 15:01:47.824842 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-84486dbb57-bsk9z" podStartSLOduration=3.824819656 podStartE2EDuration="3.824819656s" podCreationTimestamp="2025-10-09 15:01:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 15:01:47.822082784 +0000 UTC m=+5783.595873843" watchObservedRunningTime="2025-10-09 15:01:47.824819656 +0000 UTC m=+5783.598610715" Oct 09 15:01:48 crc kubenswrapper[4762]: I1009 15:01:48.814697 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-84486dbb57-bsk9z" Oct 09 15:01:55 crc kubenswrapper[4762]: I1009 15:01:55.328977 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-84486dbb57-bsk9z" Oct 09 15:01:55 crc kubenswrapper[4762]: I1009 15:01:55.406230 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7c4b585c7-nmh5n"] Oct 09 15:01:55 crc kubenswrapper[4762]: I1009 15:01:55.406549 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7c4b585c7-nmh5n" podUID="5afc5cff-4cae-4190-ae19-7b53163e3824" containerName="dnsmasq-dns" containerID="cri-o://b2a7512f49583bfa03624913cb2fe31a8e8cfc8af65db7142a5c1c571d761056" gracePeriod=10 Oct 09 15:01:55 crc kubenswrapper[4762]: I1009 15:01:55.879431 4762 generic.go:334] "Generic (PLEG): container finished" podID="5afc5cff-4cae-4190-ae19-7b53163e3824" containerID="b2a7512f49583bfa03624913cb2fe31a8e8cfc8af65db7142a5c1c571d761056" exitCode=0 Oct 09 15:01:55 crc kubenswrapper[4762]: I1009 15:01:55.879826 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7c4b585c7-nmh5n" 
event={"ID":"5afc5cff-4cae-4190-ae19-7b53163e3824","Type":"ContainerDied","Data":"b2a7512f49583bfa03624913cb2fe31a8e8cfc8af65db7142a5c1c571d761056"} Oct 09 15:01:55 crc kubenswrapper[4762]: I1009 15:01:55.879856 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7c4b585c7-nmh5n" event={"ID":"5afc5cff-4cae-4190-ae19-7b53163e3824","Type":"ContainerDied","Data":"f2e45df5c1f73cd62a752e35855129632a0c88c28022d42fbd9a6fe4f7b95e0b"} Oct 09 15:01:55 crc kubenswrapper[4762]: I1009 15:01:55.879867 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f2e45df5c1f73cd62a752e35855129632a0c88c28022d42fbd9a6fe4f7b95e0b" Oct 09 15:01:55 crc kubenswrapper[4762]: I1009 15:01:55.882337 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7c4b585c7-nmh5n" Oct 09 15:01:55 crc kubenswrapper[4762]: I1009 15:01:55.902390 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5afc5cff-4cae-4190-ae19-7b53163e3824-dns-svc\") pod \"5afc5cff-4cae-4190-ae19-7b53163e3824\" (UID: \"5afc5cff-4cae-4190-ae19-7b53163e3824\") " Oct 09 15:01:55 crc kubenswrapper[4762]: I1009 15:01:55.902579 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5afc5cff-4cae-4190-ae19-7b53163e3824-ovsdbserver-sb\") pod \"5afc5cff-4cae-4190-ae19-7b53163e3824\" (UID: \"5afc5cff-4cae-4190-ae19-7b53163e3824\") " Oct 09 15:01:55 crc kubenswrapper[4762]: I1009 15:01:55.902676 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5afc5cff-4cae-4190-ae19-7b53163e3824-config\") pod \"5afc5cff-4cae-4190-ae19-7b53163e3824\" (UID: \"5afc5cff-4cae-4190-ae19-7b53163e3824\") " Oct 09 15:01:55 crc kubenswrapper[4762]: I1009 15:01:55.902718 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xt7nl\" (UniqueName: \"kubernetes.io/projected/5afc5cff-4cae-4190-ae19-7b53163e3824-kube-api-access-xt7nl\") pod \"5afc5cff-4cae-4190-ae19-7b53163e3824\" (UID: \"5afc5cff-4cae-4190-ae19-7b53163e3824\") " Oct 09 15:01:55 crc kubenswrapper[4762]: I1009 15:01:55.902827 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5afc5cff-4cae-4190-ae19-7b53163e3824-ovsdbserver-nb\") pod \"5afc5cff-4cae-4190-ae19-7b53163e3824\" (UID: \"5afc5cff-4cae-4190-ae19-7b53163e3824\") " Oct 09 15:01:55 crc kubenswrapper[4762]: I1009 15:01:55.923079 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5afc5cff-4cae-4190-ae19-7b53163e3824-kube-api-access-xt7nl" (OuterVolumeSpecName: "kube-api-access-xt7nl") pod "5afc5cff-4cae-4190-ae19-7b53163e3824" (UID: "5afc5cff-4cae-4190-ae19-7b53163e3824"). InnerVolumeSpecName "kube-api-access-xt7nl". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 15:01:55 crc kubenswrapper[4762]: I1009 15:01:55.960143 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5afc5cff-4cae-4190-ae19-7b53163e3824-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "5afc5cff-4cae-4190-ae19-7b53163e3824" (UID: "5afc5cff-4cae-4190-ae19-7b53163e3824"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 15:01:55 crc kubenswrapper[4762]: I1009 15:01:55.970789 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5afc5cff-4cae-4190-ae19-7b53163e3824-config" (OuterVolumeSpecName: "config") pod "5afc5cff-4cae-4190-ae19-7b53163e3824" (UID: "5afc5cff-4cae-4190-ae19-7b53163e3824"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 15:01:55 crc kubenswrapper[4762]: I1009 15:01:55.971467 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5afc5cff-4cae-4190-ae19-7b53163e3824-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "5afc5cff-4cae-4190-ae19-7b53163e3824" (UID: "5afc5cff-4cae-4190-ae19-7b53163e3824"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 15:01:55 crc kubenswrapper[4762]: I1009 15:01:55.985559 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5afc5cff-4cae-4190-ae19-7b53163e3824-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "5afc5cff-4cae-4190-ae19-7b53163e3824" (UID: "5afc5cff-4cae-4190-ae19-7b53163e3824"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 15:01:56 crc kubenswrapper[4762]: I1009 15:01:56.008654 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xt7nl\" (UniqueName: \"kubernetes.io/projected/5afc5cff-4cae-4190-ae19-7b53163e3824-kube-api-access-xt7nl\") on node \"crc\" DevicePath \"\"" Oct 09 15:01:56 crc kubenswrapper[4762]: I1009 15:01:56.008701 4762 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5afc5cff-4cae-4190-ae19-7b53163e3824-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 09 15:01:56 crc kubenswrapper[4762]: I1009 15:01:56.008716 4762 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5afc5cff-4cae-4190-ae19-7b53163e3824-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 09 15:01:56 crc kubenswrapper[4762]: I1009 15:01:56.008732 4762 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5afc5cff-4cae-4190-ae19-7b53163e3824-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 09 15:01:56 crc kubenswrapper[4762]: I1009 15:01:56.008745 4762 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5afc5cff-4cae-4190-ae19-7b53163e3824-config\") on node \"crc\" DevicePath \"\"" Oct 09 15:01:56 crc kubenswrapper[4762]: I1009 15:01:56.887854 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7c4b585c7-nmh5n" Oct 09 15:01:56 crc kubenswrapper[4762]: I1009 15:01:56.922272 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7c4b585c7-nmh5n"] Oct 09 15:01:56 crc kubenswrapper[4762]: I1009 15:01:56.932413 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7c4b585c7-nmh5n"] Oct 09 15:01:56 crc kubenswrapper[4762]: I1009 15:01:56.975515 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5afc5cff-4cae-4190-ae19-7b53163e3824" path="/var/lib/kubelet/pods/5afc5cff-4cae-4190-ae19-7b53163e3824/volumes" Oct 09 15:01:58 crc kubenswrapper[4762]: I1009 15:01:58.054508 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-mstc5"] Oct 09 15:01:58 crc kubenswrapper[4762]: E1009 15:01:58.054954 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5afc5cff-4cae-4190-ae19-7b53163e3824" containerName="dnsmasq-dns" Oct 09 15:01:58 crc kubenswrapper[4762]: I1009 15:01:58.054966 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="5afc5cff-4cae-4190-ae19-7b53163e3824" containerName="dnsmasq-dns" Oct 09 15:01:58 crc kubenswrapper[4762]: E1009 15:01:58.054993 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5afc5cff-4cae-4190-ae19-7b53163e3824" containerName="init" Oct 09 15:01:58 crc kubenswrapper[4762]: I1009 15:01:58.055001 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="5afc5cff-4cae-4190-ae19-7b53163e3824" containerName="init" Oct 09 15:01:58 crc kubenswrapper[4762]: I1009 15:01:58.055154 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="5afc5cff-4cae-4190-ae19-7b53163e3824" containerName="dnsmasq-dns" Oct 09 15:01:58 crc kubenswrapper[4762]: I1009 15:01:58.055890 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-mstc5" Oct 09 15:01:58 crc kubenswrapper[4762]: I1009 15:01:58.068298 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-mstc5"] Oct 09 15:01:58 crc kubenswrapper[4762]: I1009 15:01:58.148853 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hqnzd\" (UniqueName: \"kubernetes.io/projected/1b169a81-9531-49df-aa7c-3009e7e9c41a-kube-api-access-hqnzd\") pod \"cinder-db-create-mstc5\" (UID: \"1b169a81-9531-49df-aa7c-3009e7e9c41a\") " pod="openstack/cinder-db-create-mstc5" Oct 09 15:01:58 crc kubenswrapper[4762]: I1009 15:01:58.251816 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hqnzd\" (UniqueName: \"kubernetes.io/projected/1b169a81-9531-49df-aa7c-3009e7e9c41a-kube-api-access-hqnzd\") pod \"cinder-db-create-mstc5\" (UID: \"1b169a81-9531-49df-aa7c-3009e7e9c41a\") " pod="openstack/cinder-db-create-mstc5" Oct 09 15:01:58 crc kubenswrapper[4762]: I1009 15:01:58.272328 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hqnzd\" (UniqueName: \"kubernetes.io/projected/1b169a81-9531-49df-aa7c-3009e7e9c41a-kube-api-access-hqnzd\") pod \"cinder-db-create-mstc5\" (UID: \"1b169a81-9531-49df-aa7c-3009e7e9c41a\") " pod="openstack/cinder-db-create-mstc5" Oct 09 15:01:58 crc kubenswrapper[4762]: I1009 15:01:58.395623 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-mstc5" Oct 09 15:01:58 crc kubenswrapper[4762]: I1009 15:01:58.873400 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-mstc5"] Oct 09 15:01:58 crc kubenswrapper[4762]: W1009 15:01:58.873608 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1b169a81_9531_49df_aa7c_3009e7e9c41a.slice/crio-5037f718b58660d4298745f63cd4250f36dd83c0d0b5f8a4074a176af8ed7656 WatchSource:0}: Error finding container 5037f718b58660d4298745f63cd4250f36dd83c0d0b5f8a4074a176af8ed7656: Status 404 returned error can't find the container with id 5037f718b58660d4298745f63cd4250f36dd83c0d0b5f8a4074a176af8ed7656 Oct 09 15:01:58 crc kubenswrapper[4762]: I1009 15:01:58.909965 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-mstc5" event={"ID":"1b169a81-9531-49df-aa7c-3009e7e9c41a","Type":"ContainerStarted","Data":"5037f718b58660d4298745f63cd4250f36dd83c0d0b5f8a4074a176af8ed7656"} Oct 09 15:01:59 crc kubenswrapper[4762]: I1009 15:01:59.920293 4762 generic.go:334] "Generic (PLEG): container finished" podID="1b169a81-9531-49df-aa7c-3009e7e9c41a" containerID="a02f4a2b3581afc3705cd3fbff564adb1b46dd4e5b02f91768f8caf8b4b75a5f" exitCode=0 Oct 09 15:01:59 crc kubenswrapper[4762]: I1009 15:01:59.920396 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-mstc5" event={"ID":"1b169a81-9531-49df-aa7c-3009e7e9c41a","Type":"ContainerDied","Data":"a02f4a2b3581afc3705cd3fbff564adb1b46dd4e5b02f91768f8caf8b4b75a5f"} Oct 09 15:02:01 crc kubenswrapper[4762]: I1009 15:02:01.258822 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-mstc5" Oct 09 15:02:01 crc kubenswrapper[4762]: I1009 15:02:01.310413 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hqnzd\" (UniqueName: \"kubernetes.io/projected/1b169a81-9531-49df-aa7c-3009e7e9c41a-kube-api-access-hqnzd\") pod \"1b169a81-9531-49df-aa7c-3009e7e9c41a\" (UID: \"1b169a81-9531-49df-aa7c-3009e7e9c41a\") " Oct 09 15:02:01 crc kubenswrapper[4762]: I1009 15:02:01.315722 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1b169a81-9531-49df-aa7c-3009e7e9c41a-kube-api-access-hqnzd" (OuterVolumeSpecName: "kube-api-access-hqnzd") pod "1b169a81-9531-49df-aa7c-3009e7e9c41a" (UID: "1b169a81-9531-49df-aa7c-3009e7e9c41a"). InnerVolumeSpecName "kube-api-access-hqnzd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 15:02:01 crc kubenswrapper[4762]: I1009 15:02:01.413192 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hqnzd\" (UniqueName: \"kubernetes.io/projected/1b169a81-9531-49df-aa7c-3009e7e9c41a-kube-api-access-hqnzd\") on node \"crc\" DevicePath \"\"" Oct 09 15:02:01 crc kubenswrapper[4762]: I1009 15:02:01.939093 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-mstc5" event={"ID":"1b169a81-9531-49df-aa7c-3009e7e9c41a","Type":"ContainerDied","Data":"5037f718b58660d4298745f63cd4250f36dd83c0d0b5f8a4074a176af8ed7656"} Oct 09 15:02:01 crc kubenswrapper[4762]: I1009 15:02:01.939410 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5037f718b58660d4298745f63cd4250f36dd83c0d0b5f8a4074a176af8ed7656" Oct 09 15:02:01 crc kubenswrapper[4762]: I1009 15:02:01.939160 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-mstc5" Oct 09 15:02:08 crc kubenswrapper[4762]: I1009 15:02:08.153538 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-cd9d-account-create-9x98d"] Oct 09 15:02:08 crc kubenswrapper[4762]: E1009 15:02:08.154525 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1b169a81-9531-49df-aa7c-3009e7e9c41a" containerName="mariadb-database-create" Oct 09 15:02:08 crc kubenswrapper[4762]: I1009 15:02:08.154545 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="1b169a81-9531-49df-aa7c-3009e7e9c41a" containerName="mariadb-database-create" Oct 09 15:02:08 crc kubenswrapper[4762]: I1009 15:02:08.155817 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="1b169a81-9531-49df-aa7c-3009e7e9c41a" containerName="mariadb-database-create" Oct 09 15:02:08 crc kubenswrapper[4762]: I1009 15:02:08.156470 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-cd9d-account-create-9x98d" Oct 09 15:02:08 crc kubenswrapper[4762]: I1009 15:02:08.163190 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Oct 09 15:02:08 crc kubenswrapper[4762]: I1009 15:02:08.202377 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-cd9d-account-create-9x98d"] Oct 09 15:02:08 crc kubenswrapper[4762]: I1009 15:02:08.240843 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dtg7p\" (UniqueName: \"kubernetes.io/projected/dfc1d3b0-b2d3-4b87-81bb-6d9bf69df68b-kube-api-access-dtg7p\") pod \"cinder-cd9d-account-create-9x98d\" (UID: \"dfc1d3b0-b2d3-4b87-81bb-6d9bf69df68b\") " pod="openstack/cinder-cd9d-account-create-9x98d" Oct 09 15:02:08 crc kubenswrapper[4762]: I1009 15:02:08.342129 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dtg7p\" (UniqueName: \"kubernetes.io/projected/dfc1d3b0-b2d3-4b87-81bb-6d9bf69df68b-kube-api-access-dtg7p\") pod \"cinder-cd9d-account-create-9x98d\" (UID: \"dfc1d3b0-b2d3-4b87-81bb-6d9bf69df68b\") " pod="openstack/cinder-cd9d-account-create-9x98d" Oct 09 15:02:08 crc kubenswrapper[4762]: I1009 15:02:08.360087 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dtg7p\" (UniqueName: \"kubernetes.io/projected/dfc1d3b0-b2d3-4b87-81bb-6d9bf69df68b-kube-api-access-dtg7p\") pod \"cinder-cd9d-account-create-9x98d\" (UID: \"dfc1d3b0-b2d3-4b87-81bb-6d9bf69df68b\") " pod="openstack/cinder-cd9d-account-create-9x98d" Oct 09 15:02:08 crc kubenswrapper[4762]: I1009 15:02:08.486267 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-cd9d-account-create-9x98d" Oct 09 15:02:08 crc kubenswrapper[4762]: I1009 15:02:08.899214 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-cd9d-account-create-9x98d"] Oct 09 15:02:08 crc kubenswrapper[4762]: W1009 15:02:08.903750 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddfc1d3b0_b2d3_4b87_81bb_6d9bf69df68b.slice/crio-f7eb09de12f33e7e4c42901ec8e469d1da045bb627c05a55e0d2f9bd5b056c60 WatchSource:0}: Error finding container f7eb09de12f33e7e4c42901ec8e469d1da045bb627c05a55e0d2f9bd5b056c60: Status 404 returned error can't find the container with id f7eb09de12f33e7e4c42901ec8e469d1da045bb627c05a55e0d2f9bd5b056c60 Oct 09 15:02:08 crc kubenswrapper[4762]: I1009 15:02:08.996345 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-cd9d-account-create-9x98d" event={"ID":"dfc1d3b0-b2d3-4b87-81bb-6d9bf69df68b","Type":"ContainerStarted","Data":"f7eb09de12f33e7e4c42901ec8e469d1da045bb627c05a55e0d2f9bd5b056c60"} Oct 09 15:02:10 crc kubenswrapper[4762]: I1009 15:02:10.008667 4762 generic.go:334] "Generic (PLEG): container finished" podID="dfc1d3b0-b2d3-4b87-81bb-6d9bf69df68b" containerID="a6687d4271a97d4dc5c326eac5597fea018356c7be9f90cc53ce4ecf421d42ae" exitCode=0 Oct 09 15:02:10 crc kubenswrapper[4762]: I1009 15:02:10.008757 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-cd9d-account-create-9x98d" event={"ID":"dfc1d3b0-b2d3-4b87-81bb-6d9bf69df68b","Type":"ContainerDied","Data":"a6687d4271a97d4dc5c326eac5597fea018356c7be9f90cc53ce4ecf421d42ae"} Oct 09 15:02:11 crc kubenswrapper[4762]: I1009 15:02:11.364181 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-cd9d-account-create-9x98d" Oct 09 15:02:11 crc kubenswrapper[4762]: I1009 15:02:11.495690 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dtg7p\" (UniqueName: \"kubernetes.io/projected/dfc1d3b0-b2d3-4b87-81bb-6d9bf69df68b-kube-api-access-dtg7p\") pod \"dfc1d3b0-b2d3-4b87-81bb-6d9bf69df68b\" (UID: \"dfc1d3b0-b2d3-4b87-81bb-6d9bf69df68b\") " Oct 09 15:02:11 crc kubenswrapper[4762]: I1009 15:02:11.502940 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dfc1d3b0-b2d3-4b87-81bb-6d9bf69df68b-kube-api-access-dtg7p" (OuterVolumeSpecName: "kube-api-access-dtg7p") pod "dfc1d3b0-b2d3-4b87-81bb-6d9bf69df68b" (UID: "dfc1d3b0-b2d3-4b87-81bb-6d9bf69df68b"). InnerVolumeSpecName "kube-api-access-dtg7p". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 15:02:11 crc kubenswrapper[4762]: I1009 15:02:11.597963 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dtg7p\" (UniqueName: \"kubernetes.io/projected/dfc1d3b0-b2d3-4b87-81bb-6d9bf69df68b-kube-api-access-dtg7p\") on node \"crc\" DevicePath \"\"" Oct 09 15:02:11 crc kubenswrapper[4762]: I1009 15:02:11.969604 4762 patch_prober.go:28] interesting pod/machine-config-daemon-5v6hv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 15:02:11 crc kubenswrapper[4762]: I1009 15:02:11.969701 4762 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 15:02:11 crc kubenswrapper[4762]: I1009 15:02:11.969754 4762 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" Oct 09 15:02:11 crc kubenswrapper[4762]: I1009 15:02:11.970396 4762 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"1a0181075c60fb6cad88970ff9a288267c74d0e4b2ca24f09d7ac8986dc7d5ce"} pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 09 15:02:11 crc kubenswrapper[4762]: I1009 15:02:11.970470 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" containerID="cri-o://1a0181075c60fb6cad88970ff9a288267c74d0e4b2ca24f09d7ac8986dc7d5ce" gracePeriod=600 Oct 09 15:02:12 crc kubenswrapper[4762]: I1009 15:02:12.030724 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-cd9d-account-create-9x98d" event={"ID":"dfc1d3b0-b2d3-4b87-81bb-6d9bf69df68b","Type":"ContainerDied","Data":"f7eb09de12f33e7e4c42901ec8e469d1da045bb627c05a55e0d2f9bd5b056c60"} Oct 09 15:02:12 crc kubenswrapper[4762]: I1009 15:02:12.030763 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f7eb09de12f33e7e4c42901ec8e469d1da045bb627c05a55e0d2f9bd5b056c60" Oct 09 15:02:12 crc kubenswrapper[4762]: I1009 
15:02:12.030799 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-cd9d-account-create-9x98d" Oct 09 15:02:13 crc kubenswrapper[4762]: I1009 15:02:13.046062 4762 generic.go:334] "Generic (PLEG): container finished" podID="366049a3-acf6-488c-9f93-4557528d6d14" containerID="1a0181075c60fb6cad88970ff9a288267c74d0e4b2ca24f09d7ac8986dc7d5ce" exitCode=0 Oct 09 15:02:13 crc kubenswrapper[4762]: I1009 15:02:13.046133 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" event={"ID":"366049a3-acf6-488c-9f93-4557528d6d14","Type":"ContainerDied","Data":"1a0181075c60fb6cad88970ff9a288267c74d0e4b2ca24f09d7ac8986dc7d5ce"} Oct 09 15:02:13 crc kubenswrapper[4762]: I1009 15:02:13.046666 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" event={"ID":"366049a3-acf6-488c-9f93-4557528d6d14","Type":"ContainerStarted","Data":"35c7aebddc7d19d2cc79200fbf40a9f94ad48013c10b612bd476e919aed06c38"} Oct 09 15:02:13 crc kubenswrapper[4762]: I1009 15:02:13.046691 4762 scope.go:117] "RemoveContainer" containerID="c75ef9853f5fe1f6bce3930681cb05899e816bea18fa88b6c554eafd348de735" Oct 09 15:02:13 crc kubenswrapper[4762]: I1009 15:02:13.373765 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-2pr7h"] Oct 09 15:02:13 crc kubenswrapper[4762]: E1009 15:02:13.374229 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dfc1d3b0-b2d3-4b87-81bb-6d9bf69df68b" containerName="mariadb-account-create" Oct 09 15:02:13 crc kubenswrapper[4762]: I1009 15:02:13.374249 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="dfc1d3b0-b2d3-4b87-81bb-6d9bf69df68b" containerName="mariadb-account-create" Oct 09 15:02:13 crc kubenswrapper[4762]: I1009 15:02:13.374439 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="dfc1d3b0-b2d3-4b87-81bb-6d9bf69df68b" containerName="mariadb-account-create" Oct 09 15:02:13 crc kubenswrapper[4762]: I1009 15:02:13.375252 4762 util.go:30] "No sandbox for pod can be found. 
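The RemoveStaleState entries above (cpu_manager.go:410, state_mem.go:107, memory_manager.go:354) drop resource-manager assignments left behind by containers of pods that no longer exist. In essence it is a sweep over a map keyed by pod UID and container name; a simplified sketch of that idea (invented types, not the kubelet's actual state structures):

package main

import "fmt"

// removeStaleState deletes assignments whose pod UID is no longer active,
// the gist of the cpu/memory manager sweeps logged above.
func removeStaleState(assignments map[string]map[string][]int, activePods map[string]bool) {
	for podUID, containers := range assignments {
		if activePods[podUID] {
			continue
		}
		for name := range containers {
			fmt.Printf("RemoveStaleState: removing container podUID=%s containerName=%s\n", podUID, name)
			delete(containers, name)
		}
		delete(assignments, podUID)
	}
}

func main() {
	assignments := map[string]map[string][]int{
		"5afc5cff-4cae-4190-ae19-7b53163e3824": {"dnsmasq-dns": {0, 1}, "init": {2}},
	}
	removeStaleState(assignments, map[string]bool{}) // no pods active: everything is stale
}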
Need to start a new one" pod="openstack/cinder-db-sync-2pr7h" Oct 09 15:02:13 crc kubenswrapper[4762]: I1009 15:02:13.378122 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-67bvs" Oct 09 15:02:13 crc kubenswrapper[4762]: I1009 15:02:13.378157 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Oct 09 15:02:13 crc kubenswrapper[4762]: I1009 15:02:13.379945 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Oct 09 15:02:13 crc kubenswrapper[4762]: I1009 15:02:13.384327 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-2pr7h"] Oct 09 15:02:13 crc kubenswrapper[4762]: I1009 15:02:13.449114 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc9a1cfc-3f68-493e-b65e-e3eef9864e9f-combined-ca-bundle\") pod \"cinder-db-sync-2pr7h\" (UID: \"dc9a1cfc-3f68-493e-b65e-e3eef9864e9f\") " pod="openstack/cinder-db-sync-2pr7h" Oct 09 15:02:13 crc kubenswrapper[4762]: I1009 15:02:13.449191 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/dc9a1cfc-3f68-493e-b65e-e3eef9864e9f-etc-machine-id\") pod \"cinder-db-sync-2pr7h\" (UID: \"dc9a1cfc-3f68-493e-b65e-e3eef9864e9f\") " pod="openstack/cinder-db-sync-2pr7h" Oct 09 15:02:13 crc kubenswrapper[4762]: I1009 15:02:13.449254 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/dc9a1cfc-3f68-493e-b65e-e3eef9864e9f-db-sync-config-data\") pod \"cinder-db-sync-2pr7h\" (UID: \"dc9a1cfc-3f68-493e-b65e-e3eef9864e9f\") " pod="openstack/cinder-db-sync-2pr7h" Oct 09 15:02:13 crc kubenswrapper[4762]: I1009 15:02:13.449341 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tl6jf\" (UniqueName: \"kubernetes.io/projected/dc9a1cfc-3f68-493e-b65e-e3eef9864e9f-kube-api-access-tl6jf\") pod \"cinder-db-sync-2pr7h\" (UID: \"dc9a1cfc-3f68-493e-b65e-e3eef9864e9f\") " pod="openstack/cinder-db-sync-2pr7h" Oct 09 15:02:13 crc kubenswrapper[4762]: I1009 15:02:13.449399 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dc9a1cfc-3f68-493e-b65e-e3eef9864e9f-scripts\") pod \"cinder-db-sync-2pr7h\" (UID: \"dc9a1cfc-3f68-493e-b65e-e3eef9864e9f\") " pod="openstack/cinder-db-sync-2pr7h" Oct 09 15:02:13 crc kubenswrapper[4762]: I1009 15:02:13.449421 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc9a1cfc-3f68-493e-b65e-e3eef9864e9f-config-data\") pod \"cinder-db-sync-2pr7h\" (UID: \"dc9a1cfc-3f68-493e-b65e-e3eef9864e9f\") " pod="openstack/cinder-db-sync-2pr7h" Oct 09 15:02:13 crc kubenswrapper[4762]: I1009 15:02:13.550649 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc9a1cfc-3f68-493e-b65e-e3eef9864e9f-config-data\") pod \"cinder-db-sync-2pr7h\" (UID: \"dc9a1cfc-3f68-493e-b65e-e3eef9864e9f\") " pod="openstack/cinder-db-sync-2pr7h" Oct 09 15:02:13 crc kubenswrapper[4762]: I1009 15:02:13.550716 4762 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc9a1cfc-3f68-493e-b65e-e3eef9864e9f-combined-ca-bundle\") pod \"cinder-db-sync-2pr7h\" (UID: \"dc9a1cfc-3f68-493e-b65e-e3eef9864e9f\") " pod="openstack/cinder-db-sync-2pr7h" Oct 09 15:02:13 crc kubenswrapper[4762]: I1009 15:02:13.550762 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/dc9a1cfc-3f68-493e-b65e-e3eef9864e9f-etc-machine-id\") pod \"cinder-db-sync-2pr7h\" (UID: \"dc9a1cfc-3f68-493e-b65e-e3eef9864e9f\") " pod="openstack/cinder-db-sync-2pr7h" Oct 09 15:02:13 crc kubenswrapper[4762]: I1009 15:02:13.550815 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/dc9a1cfc-3f68-493e-b65e-e3eef9864e9f-db-sync-config-data\") pod \"cinder-db-sync-2pr7h\" (UID: \"dc9a1cfc-3f68-493e-b65e-e3eef9864e9f\") " pod="openstack/cinder-db-sync-2pr7h" Oct 09 15:02:13 crc kubenswrapper[4762]: I1009 15:02:13.550858 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tl6jf\" (UniqueName: \"kubernetes.io/projected/dc9a1cfc-3f68-493e-b65e-e3eef9864e9f-kube-api-access-tl6jf\") pod \"cinder-db-sync-2pr7h\" (UID: \"dc9a1cfc-3f68-493e-b65e-e3eef9864e9f\") " pod="openstack/cinder-db-sync-2pr7h" Oct 09 15:02:13 crc kubenswrapper[4762]: I1009 15:02:13.550902 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dc9a1cfc-3f68-493e-b65e-e3eef9864e9f-scripts\") pod \"cinder-db-sync-2pr7h\" (UID: \"dc9a1cfc-3f68-493e-b65e-e3eef9864e9f\") " pod="openstack/cinder-db-sync-2pr7h" Oct 09 15:02:13 crc kubenswrapper[4762]: I1009 15:02:13.551104 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/dc9a1cfc-3f68-493e-b65e-e3eef9864e9f-etc-machine-id\") pod \"cinder-db-sync-2pr7h\" (UID: \"dc9a1cfc-3f68-493e-b65e-e3eef9864e9f\") " pod="openstack/cinder-db-sync-2pr7h" Oct 09 15:02:13 crc kubenswrapper[4762]: I1009 15:02:13.564386 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/dc9a1cfc-3f68-493e-b65e-e3eef9864e9f-db-sync-config-data\") pod \"cinder-db-sync-2pr7h\" (UID: \"dc9a1cfc-3f68-493e-b65e-e3eef9864e9f\") " pod="openstack/cinder-db-sync-2pr7h" Oct 09 15:02:13 crc kubenswrapper[4762]: I1009 15:02:13.564779 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dc9a1cfc-3f68-493e-b65e-e3eef9864e9f-scripts\") pod \"cinder-db-sync-2pr7h\" (UID: \"dc9a1cfc-3f68-493e-b65e-e3eef9864e9f\") " pod="openstack/cinder-db-sync-2pr7h" Oct 09 15:02:13 crc kubenswrapper[4762]: I1009 15:02:13.565037 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc9a1cfc-3f68-493e-b65e-e3eef9864e9f-config-data\") pod \"cinder-db-sync-2pr7h\" (UID: \"dc9a1cfc-3f68-493e-b65e-e3eef9864e9f\") " pod="openstack/cinder-db-sync-2pr7h" Oct 09 15:02:13 crc kubenswrapper[4762]: I1009 15:02:13.566474 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc9a1cfc-3f68-493e-b65e-e3eef9864e9f-combined-ca-bundle\") pod \"cinder-db-sync-2pr7h\" (UID: \"dc9a1cfc-3f68-493e-b65e-e3eef9864e9f\") " 
pod="openstack/cinder-db-sync-2pr7h" Oct 09 15:02:13 crc kubenswrapper[4762]: I1009 15:02:13.568258 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tl6jf\" (UniqueName: \"kubernetes.io/projected/dc9a1cfc-3f68-493e-b65e-e3eef9864e9f-kube-api-access-tl6jf\") pod \"cinder-db-sync-2pr7h\" (UID: \"dc9a1cfc-3f68-493e-b65e-e3eef9864e9f\") " pod="openstack/cinder-db-sync-2pr7h" Oct 09 15:02:13 crc kubenswrapper[4762]: I1009 15:02:13.692944 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-2pr7h" Oct 09 15:02:13 crc kubenswrapper[4762]: I1009 15:02:13.962820 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-2pr7h"] Oct 09 15:02:13 crc kubenswrapper[4762]: W1009 15:02:13.970328 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddc9a1cfc_3f68_493e_b65e_e3eef9864e9f.slice/crio-d21bea05f2e296f92d5bf3c97aa972f22e505f7b5d4846860de63ed001d80a1c WatchSource:0}: Error finding container d21bea05f2e296f92d5bf3c97aa972f22e505f7b5d4846860de63ed001d80a1c: Status 404 returned error can't find the container with id d21bea05f2e296f92d5bf3c97aa972f22e505f7b5d4846860de63ed001d80a1c Oct 09 15:02:14 crc kubenswrapper[4762]: I1009 15:02:14.061140 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-2pr7h" event={"ID":"dc9a1cfc-3f68-493e-b65e-e3eef9864e9f","Type":"ContainerStarted","Data":"d21bea05f2e296f92d5bf3c97aa972f22e505f7b5d4846860de63ed001d80a1c"} Oct 09 15:02:15 crc kubenswrapper[4762]: I1009 15:02:15.073801 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-2pr7h" event={"ID":"dc9a1cfc-3f68-493e-b65e-e3eef9864e9f","Type":"ContainerStarted","Data":"ddbf059ec0c5cd57e49045e1c1fec082fba70e626274a6ad41e1216adeecc0e5"} Oct 09 15:02:15 crc kubenswrapper[4762]: I1009 15:02:15.095910 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-2pr7h" podStartSLOduration=2.095891536 podStartE2EDuration="2.095891536s" podCreationTimestamp="2025-10-09 15:02:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 15:02:15.087190609 +0000 UTC m=+5810.860981648" watchObservedRunningTime="2025-10-09 15:02:15.095891536 +0000 UTC m=+5810.869682575" Oct 09 15:02:18 crc kubenswrapper[4762]: I1009 15:02:18.103039 4762 generic.go:334] "Generic (PLEG): container finished" podID="dc9a1cfc-3f68-493e-b65e-e3eef9864e9f" containerID="ddbf059ec0c5cd57e49045e1c1fec082fba70e626274a6ad41e1216adeecc0e5" exitCode=0 Oct 09 15:02:18 crc kubenswrapper[4762]: I1009 15:02:18.103109 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-2pr7h" event={"ID":"dc9a1cfc-3f68-493e-b65e-e3eef9864e9f","Type":"ContainerDied","Data":"ddbf059ec0c5cd57e49045e1c1fec082fba70e626274a6ad41e1216adeecc0e5"} Oct 09 15:02:19 crc kubenswrapper[4762]: I1009 15:02:19.447056 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-2pr7h" Oct 09 15:02:19 crc kubenswrapper[4762]: I1009 15:02:19.572136 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dc9a1cfc-3f68-493e-b65e-e3eef9864e9f-scripts\") pod \"dc9a1cfc-3f68-493e-b65e-e3eef9864e9f\" (UID: \"dc9a1cfc-3f68-493e-b65e-e3eef9864e9f\") " Oct 09 15:02:19 crc kubenswrapper[4762]: I1009 15:02:19.572311 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/dc9a1cfc-3f68-493e-b65e-e3eef9864e9f-etc-machine-id\") pod \"dc9a1cfc-3f68-493e-b65e-e3eef9864e9f\" (UID: \"dc9a1cfc-3f68-493e-b65e-e3eef9864e9f\") " Oct 09 15:02:19 crc kubenswrapper[4762]: I1009 15:02:19.572425 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tl6jf\" (UniqueName: \"kubernetes.io/projected/dc9a1cfc-3f68-493e-b65e-e3eef9864e9f-kube-api-access-tl6jf\") pod \"dc9a1cfc-3f68-493e-b65e-e3eef9864e9f\" (UID: \"dc9a1cfc-3f68-493e-b65e-e3eef9864e9f\") " Oct 09 15:02:19 crc kubenswrapper[4762]: I1009 15:02:19.572477 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc9a1cfc-3f68-493e-b65e-e3eef9864e9f-combined-ca-bundle\") pod \"dc9a1cfc-3f68-493e-b65e-e3eef9864e9f\" (UID: \"dc9a1cfc-3f68-493e-b65e-e3eef9864e9f\") " Oct 09 15:02:19 crc kubenswrapper[4762]: I1009 15:02:19.572533 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc9a1cfc-3f68-493e-b65e-e3eef9864e9f-config-data\") pod \"dc9a1cfc-3f68-493e-b65e-e3eef9864e9f\" (UID: \"dc9a1cfc-3f68-493e-b65e-e3eef9864e9f\") " Oct 09 15:02:19 crc kubenswrapper[4762]: I1009 15:02:19.572582 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/dc9a1cfc-3f68-493e-b65e-e3eef9864e9f-db-sync-config-data\") pod \"dc9a1cfc-3f68-493e-b65e-e3eef9864e9f\" (UID: \"dc9a1cfc-3f68-493e-b65e-e3eef9864e9f\") " Oct 09 15:02:19 crc kubenswrapper[4762]: I1009 15:02:19.573079 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/dc9a1cfc-3f68-493e-b65e-e3eef9864e9f-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "dc9a1cfc-3f68-493e-b65e-e3eef9864e9f" (UID: "dc9a1cfc-3f68-493e-b65e-e3eef9864e9f"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 15:02:19 crc kubenswrapper[4762]: I1009 15:02:19.573446 4762 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/dc9a1cfc-3f68-493e-b65e-e3eef9864e9f-etc-machine-id\") on node \"crc\" DevicePath \"\"" Oct 09 15:02:19 crc kubenswrapper[4762]: I1009 15:02:19.577726 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dc9a1cfc-3f68-493e-b65e-e3eef9864e9f-scripts" (OuterVolumeSpecName: "scripts") pod "dc9a1cfc-3f68-493e-b65e-e3eef9864e9f" (UID: "dc9a1cfc-3f68-493e-b65e-e3eef9864e9f"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:02:19 crc kubenswrapper[4762]: I1009 15:02:19.578316 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dc9a1cfc-3f68-493e-b65e-e3eef9864e9f-kube-api-access-tl6jf" (OuterVolumeSpecName: "kube-api-access-tl6jf") pod "dc9a1cfc-3f68-493e-b65e-e3eef9864e9f" (UID: "dc9a1cfc-3f68-493e-b65e-e3eef9864e9f"). InnerVolumeSpecName "kube-api-access-tl6jf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 15:02:19 crc kubenswrapper[4762]: I1009 15:02:19.578567 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dc9a1cfc-3f68-493e-b65e-e3eef9864e9f-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "dc9a1cfc-3f68-493e-b65e-e3eef9864e9f" (UID: "dc9a1cfc-3f68-493e-b65e-e3eef9864e9f"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:02:19 crc kubenswrapper[4762]: I1009 15:02:19.599189 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dc9a1cfc-3f68-493e-b65e-e3eef9864e9f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "dc9a1cfc-3f68-493e-b65e-e3eef9864e9f" (UID: "dc9a1cfc-3f68-493e-b65e-e3eef9864e9f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:02:19 crc kubenswrapper[4762]: I1009 15:02:19.618611 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dc9a1cfc-3f68-493e-b65e-e3eef9864e9f-config-data" (OuterVolumeSpecName: "config-data") pod "dc9a1cfc-3f68-493e-b65e-e3eef9864e9f" (UID: "dc9a1cfc-3f68-493e-b65e-e3eef9864e9f"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:02:19 crc kubenswrapper[4762]: I1009 15:02:19.675053 4762 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/dc9a1cfc-3f68-493e-b65e-e3eef9864e9f-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 15:02:19 crc kubenswrapper[4762]: I1009 15:02:19.675109 4762 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dc9a1cfc-3f68-493e-b65e-e3eef9864e9f-scripts\") on node \"crc\" DevicePath \"\"" Oct 09 15:02:19 crc kubenswrapper[4762]: I1009 15:02:19.675124 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tl6jf\" (UniqueName: \"kubernetes.io/projected/dc9a1cfc-3f68-493e-b65e-e3eef9864e9f-kube-api-access-tl6jf\") on node \"crc\" DevicePath \"\"" Oct 09 15:02:19 crc kubenswrapper[4762]: I1009 15:02:19.675136 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc9a1cfc-3f68-493e-b65e-e3eef9864e9f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 15:02:19 crc kubenswrapper[4762]: I1009 15:02:19.675149 4762 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc9a1cfc-3f68-493e-b65e-e3eef9864e9f-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 15:02:20 crc kubenswrapper[4762]: I1009 15:02:20.119507 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-2pr7h" event={"ID":"dc9a1cfc-3f68-493e-b65e-e3eef9864e9f","Type":"ContainerDied","Data":"d21bea05f2e296f92d5bf3c97aa972f22e505f7b5d4846860de63ed001d80a1c"} Oct 09 15:02:20 crc kubenswrapper[4762]: I1009 15:02:20.119555 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d21bea05f2e296f92d5bf3c97aa972f22e505f7b5d4846860de63ed001d80a1c" Oct 09 15:02:20 crc kubenswrapper[4762]: I1009 15:02:20.119584 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-2pr7h" Oct 09 15:02:20 crc kubenswrapper[4762]: I1009 15:02:20.467483 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-694cc885dc-ccw4h"] Oct 09 15:02:20 crc kubenswrapper[4762]: E1009 15:02:20.467948 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc9a1cfc-3f68-493e-b65e-e3eef9864e9f" containerName="cinder-db-sync" Oct 09 15:02:20 crc kubenswrapper[4762]: I1009 15:02:20.467965 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc9a1cfc-3f68-493e-b65e-e3eef9864e9f" containerName="cinder-db-sync" Oct 09 15:02:20 crc kubenswrapper[4762]: I1009 15:02:20.468157 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="dc9a1cfc-3f68-493e-b65e-e3eef9864e9f" containerName="cinder-db-sync" Oct 09 15:02:20 crc kubenswrapper[4762]: I1009 15:02:20.469422 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-694cc885dc-ccw4h" Oct 09 15:02:20 crc kubenswrapper[4762]: I1009 15:02:20.501734 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-694cc885dc-ccw4h"] Oct 09 15:02:20 crc kubenswrapper[4762]: I1009 15:02:20.594668 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tfr42\" (UniqueName: \"kubernetes.io/projected/35e5a3da-9dde-4bae-87bb-e24ed6f5a38b-kube-api-access-tfr42\") pod \"dnsmasq-dns-694cc885dc-ccw4h\" (UID: \"35e5a3da-9dde-4bae-87bb-e24ed6f5a38b\") " pod="openstack/dnsmasq-dns-694cc885dc-ccw4h" Oct 09 15:02:20 crc kubenswrapper[4762]: I1009 15:02:20.594766 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/35e5a3da-9dde-4bae-87bb-e24ed6f5a38b-config\") pod \"dnsmasq-dns-694cc885dc-ccw4h\" (UID: \"35e5a3da-9dde-4bae-87bb-e24ed6f5a38b\") " pod="openstack/dnsmasq-dns-694cc885dc-ccw4h" Oct 09 15:02:20 crc kubenswrapper[4762]: I1009 15:02:20.594813 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/35e5a3da-9dde-4bae-87bb-e24ed6f5a38b-ovsdbserver-nb\") pod \"dnsmasq-dns-694cc885dc-ccw4h\" (UID: \"35e5a3da-9dde-4bae-87bb-e24ed6f5a38b\") " pod="openstack/dnsmasq-dns-694cc885dc-ccw4h" Oct 09 15:02:20 crc kubenswrapper[4762]: I1009 15:02:20.594849 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/35e5a3da-9dde-4bae-87bb-e24ed6f5a38b-ovsdbserver-sb\") pod \"dnsmasq-dns-694cc885dc-ccw4h\" (UID: \"35e5a3da-9dde-4bae-87bb-e24ed6f5a38b\") " pod="openstack/dnsmasq-dns-694cc885dc-ccw4h" Oct 09 15:02:20 crc kubenswrapper[4762]: I1009 15:02:20.594878 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/35e5a3da-9dde-4bae-87bb-e24ed6f5a38b-dns-svc\") pod \"dnsmasq-dns-694cc885dc-ccw4h\" (UID: \"35e5a3da-9dde-4bae-87bb-e24ed6f5a38b\") " pod="openstack/dnsmasq-dns-694cc885dc-ccw4h" Oct 09 15:02:20 crc kubenswrapper[4762]: I1009 15:02:20.696904 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/35e5a3da-9dde-4bae-87bb-e24ed6f5a38b-ovsdbserver-nb\") pod \"dnsmasq-dns-694cc885dc-ccw4h\" (UID: \"35e5a3da-9dde-4bae-87bb-e24ed6f5a38b\") " pod="openstack/dnsmasq-dns-694cc885dc-ccw4h" Oct 09 15:02:20 crc kubenswrapper[4762]: I1009 15:02:20.697293 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/35e5a3da-9dde-4bae-87bb-e24ed6f5a38b-ovsdbserver-sb\") pod \"dnsmasq-dns-694cc885dc-ccw4h\" (UID: \"35e5a3da-9dde-4bae-87bb-e24ed6f5a38b\") " pod="openstack/dnsmasq-dns-694cc885dc-ccw4h" Oct 09 15:02:20 crc kubenswrapper[4762]: I1009 15:02:20.697349 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/35e5a3da-9dde-4bae-87bb-e24ed6f5a38b-dns-svc\") pod \"dnsmasq-dns-694cc885dc-ccw4h\" (UID: \"35e5a3da-9dde-4bae-87bb-e24ed6f5a38b\") " pod="openstack/dnsmasq-dns-694cc885dc-ccw4h" Oct 09 15:02:20 crc kubenswrapper[4762]: I1009 15:02:20.697455 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-tfr42\" (UniqueName: \"kubernetes.io/projected/35e5a3da-9dde-4bae-87bb-e24ed6f5a38b-kube-api-access-tfr42\") pod \"dnsmasq-dns-694cc885dc-ccw4h\" (UID: \"35e5a3da-9dde-4bae-87bb-e24ed6f5a38b\") " pod="openstack/dnsmasq-dns-694cc885dc-ccw4h" Oct 09 15:02:20 crc kubenswrapper[4762]: I1009 15:02:20.698084 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/35e5a3da-9dde-4bae-87bb-e24ed6f5a38b-ovsdbserver-nb\") pod \"dnsmasq-dns-694cc885dc-ccw4h\" (UID: \"35e5a3da-9dde-4bae-87bb-e24ed6f5a38b\") " pod="openstack/dnsmasq-dns-694cc885dc-ccw4h" Oct 09 15:02:20 crc kubenswrapper[4762]: I1009 15:02:20.698317 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/35e5a3da-9dde-4bae-87bb-e24ed6f5a38b-ovsdbserver-sb\") pod \"dnsmasq-dns-694cc885dc-ccw4h\" (UID: \"35e5a3da-9dde-4bae-87bb-e24ed6f5a38b\") " pod="openstack/dnsmasq-dns-694cc885dc-ccw4h" Oct 09 15:02:20 crc kubenswrapper[4762]: I1009 15:02:20.698774 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/35e5a3da-9dde-4bae-87bb-e24ed6f5a38b-dns-svc\") pod \"dnsmasq-dns-694cc885dc-ccw4h\" (UID: \"35e5a3da-9dde-4bae-87bb-e24ed6f5a38b\") " pod="openstack/dnsmasq-dns-694cc885dc-ccw4h" Oct 09 15:02:20 crc kubenswrapper[4762]: I1009 15:02:20.698980 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/35e5a3da-9dde-4bae-87bb-e24ed6f5a38b-config\") pod \"dnsmasq-dns-694cc885dc-ccw4h\" (UID: \"35e5a3da-9dde-4bae-87bb-e24ed6f5a38b\") " pod="openstack/dnsmasq-dns-694cc885dc-ccw4h" Oct 09 15:02:20 crc kubenswrapper[4762]: I1009 15:02:20.699709 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/35e5a3da-9dde-4bae-87bb-e24ed6f5a38b-config\") pod \"dnsmasq-dns-694cc885dc-ccw4h\" (UID: \"35e5a3da-9dde-4bae-87bb-e24ed6f5a38b\") " pod="openstack/dnsmasq-dns-694cc885dc-ccw4h" Oct 09 15:02:20 crc kubenswrapper[4762]: I1009 15:02:20.717752 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tfr42\" (UniqueName: \"kubernetes.io/projected/35e5a3da-9dde-4bae-87bb-e24ed6f5a38b-kube-api-access-tfr42\") pod \"dnsmasq-dns-694cc885dc-ccw4h\" (UID: \"35e5a3da-9dde-4bae-87bb-e24ed6f5a38b\") " pod="openstack/dnsmasq-dns-694cc885dc-ccw4h" Oct 09 15:02:20 crc kubenswrapper[4762]: I1009 15:02:20.762622 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Oct 09 15:02:20 crc kubenswrapper[4762]: I1009 15:02:20.764942 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Oct 09 15:02:20 crc kubenswrapper[4762]: I1009 15:02:20.768268 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Oct 09 15:02:20 crc kubenswrapper[4762]: I1009 15:02:20.768843 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-67bvs" Oct 09 15:02:20 crc kubenswrapper[4762]: I1009 15:02:20.768927 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Oct 09 15:02:20 crc kubenswrapper[4762]: I1009 15:02:20.776992 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Oct 09 15:02:20 crc kubenswrapper[4762]: I1009 15:02:20.778824 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Oct 09 15:02:20 crc kubenswrapper[4762]: I1009 15:02:20.784873 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-694cc885dc-ccw4h" Oct 09 15:02:20 crc kubenswrapper[4762]: I1009 15:02:20.806107 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s4jlp\" (UniqueName: \"kubernetes.io/projected/24951274-fd59-4f3d-bf68-ce86450e245e-kube-api-access-s4jlp\") pod \"cinder-api-0\" (UID: \"24951274-fd59-4f3d-bf68-ce86450e245e\") " pod="openstack/cinder-api-0" Oct 09 15:02:20 crc kubenswrapper[4762]: I1009 15:02:20.806177 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/24951274-fd59-4f3d-bf68-ce86450e245e-scripts\") pod \"cinder-api-0\" (UID: \"24951274-fd59-4f3d-bf68-ce86450e245e\") " pod="openstack/cinder-api-0" Oct 09 15:02:20 crc kubenswrapper[4762]: I1009 15:02:20.806283 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/24951274-fd59-4f3d-bf68-ce86450e245e-config-data\") pod \"cinder-api-0\" (UID: \"24951274-fd59-4f3d-bf68-ce86450e245e\") " pod="openstack/cinder-api-0" Oct 09 15:02:20 crc kubenswrapper[4762]: I1009 15:02:20.806353 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/24951274-fd59-4f3d-bf68-ce86450e245e-config-data-custom\") pod \"cinder-api-0\" (UID: \"24951274-fd59-4f3d-bf68-ce86450e245e\") " pod="openstack/cinder-api-0" Oct 09 15:02:20 crc kubenswrapper[4762]: I1009 15:02:20.806407 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/24951274-fd59-4f3d-bf68-ce86450e245e-etc-machine-id\") pod \"cinder-api-0\" (UID: \"24951274-fd59-4f3d-bf68-ce86450e245e\") " pod="openstack/cinder-api-0" Oct 09 15:02:20 crc kubenswrapper[4762]: I1009 15:02:20.806432 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/24951274-fd59-4f3d-bf68-ce86450e245e-logs\") pod \"cinder-api-0\" (UID: \"24951274-fd59-4f3d-bf68-ce86450e245e\") " pod="openstack/cinder-api-0" Oct 09 15:02:20 crc kubenswrapper[4762]: I1009 15:02:20.806558 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/24951274-fd59-4f3d-bf68-ce86450e245e-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"24951274-fd59-4f3d-bf68-ce86450e245e\") " pod="openstack/cinder-api-0" Oct 09 15:02:20 crc kubenswrapper[4762]: I1009 15:02:20.908141 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/24951274-fd59-4f3d-bf68-ce86450e245e-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"24951274-fd59-4f3d-bf68-ce86450e245e\") " pod="openstack/cinder-api-0" Oct 09 15:02:20 crc kubenswrapper[4762]: I1009 15:02:20.908432 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s4jlp\" (UniqueName: \"kubernetes.io/projected/24951274-fd59-4f3d-bf68-ce86450e245e-kube-api-access-s4jlp\") pod \"cinder-api-0\" (UID: \"24951274-fd59-4f3d-bf68-ce86450e245e\") " pod="openstack/cinder-api-0" Oct 09 15:02:20 crc kubenswrapper[4762]: I1009 15:02:20.908454 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/24951274-fd59-4f3d-bf68-ce86450e245e-scripts\") pod \"cinder-api-0\" (UID: \"24951274-fd59-4f3d-bf68-ce86450e245e\") " pod="openstack/cinder-api-0" Oct 09 15:02:20 crc kubenswrapper[4762]: I1009 15:02:20.908513 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/24951274-fd59-4f3d-bf68-ce86450e245e-config-data\") pod \"cinder-api-0\" (UID: \"24951274-fd59-4f3d-bf68-ce86450e245e\") " pod="openstack/cinder-api-0" Oct 09 15:02:20 crc kubenswrapper[4762]: I1009 15:02:20.908552 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/24951274-fd59-4f3d-bf68-ce86450e245e-config-data-custom\") pod \"cinder-api-0\" (UID: \"24951274-fd59-4f3d-bf68-ce86450e245e\") " pod="openstack/cinder-api-0" Oct 09 15:02:20 crc kubenswrapper[4762]: I1009 15:02:20.908582 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/24951274-fd59-4f3d-bf68-ce86450e245e-etc-machine-id\") pod \"cinder-api-0\" (UID: \"24951274-fd59-4f3d-bf68-ce86450e245e\") " pod="openstack/cinder-api-0" Oct 09 15:02:20 crc kubenswrapper[4762]: I1009 15:02:20.908601 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/24951274-fd59-4f3d-bf68-ce86450e245e-logs\") pod \"cinder-api-0\" (UID: \"24951274-fd59-4f3d-bf68-ce86450e245e\") " pod="openstack/cinder-api-0" Oct 09 15:02:20 crc kubenswrapper[4762]: I1009 15:02:20.908999 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/24951274-fd59-4f3d-bf68-ce86450e245e-logs\") pod \"cinder-api-0\" (UID: \"24951274-fd59-4f3d-bf68-ce86450e245e\") " pod="openstack/cinder-api-0" Oct 09 15:02:20 crc kubenswrapper[4762]: I1009 15:02:20.909756 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/24951274-fd59-4f3d-bf68-ce86450e245e-etc-machine-id\") pod \"cinder-api-0\" (UID: \"24951274-fd59-4f3d-bf68-ce86450e245e\") " pod="openstack/cinder-api-0" Oct 09 15:02:20 crc kubenswrapper[4762]: I1009 15:02:20.913404 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/24951274-fd59-4f3d-bf68-ce86450e245e-scripts\") pod \"cinder-api-0\" (UID: \"24951274-fd59-4f3d-bf68-ce86450e245e\") " pod="openstack/cinder-api-0" Oct 09 15:02:20 crc kubenswrapper[4762]: I1009 15:02:20.914687 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/24951274-fd59-4f3d-bf68-ce86450e245e-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"24951274-fd59-4f3d-bf68-ce86450e245e\") " pod="openstack/cinder-api-0" Oct 09 15:02:20 crc kubenswrapper[4762]: I1009 15:02:20.923044 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/24951274-fd59-4f3d-bf68-ce86450e245e-config-data\") pod \"cinder-api-0\" (UID: \"24951274-fd59-4f3d-bf68-ce86450e245e\") " pod="openstack/cinder-api-0" Oct 09 15:02:20 crc kubenswrapper[4762]: I1009 15:02:20.929805 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/24951274-fd59-4f3d-bf68-ce86450e245e-config-data-custom\") pod \"cinder-api-0\" (UID: \"24951274-fd59-4f3d-bf68-ce86450e245e\") " pod="openstack/cinder-api-0" Oct 09 15:02:20 crc kubenswrapper[4762]: I1009 15:02:20.938215 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s4jlp\" (UniqueName: \"kubernetes.io/projected/24951274-fd59-4f3d-bf68-ce86450e245e-kube-api-access-s4jlp\") pod \"cinder-api-0\" (UID: \"24951274-fd59-4f3d-bf68-ce86450e245e\") " pod="openstack/cinder-api-0" Oct 09 15:02:21 crc kubenswrapper[4762]: I1009 15:02:21.109251 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Oct 09 15:02:21 crc kubenswrapper[4762]: I1009 15:02:21.114532 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-694cc885dc-ccw4h"] Oct 09 15:02:21 crc kubenswrapper[4762]: W1009 15:02:21.130808 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod35e5a3da_9dde_4bae_87bb_e24ed6f5a38b.slice/crio-9f34d706f5c2e04db7adf4281d9bcfdc042998919c2a600f7f101b33edd42199 WatchSource:0}: Error finding container 9f34d706f5c2e04db7adf4281d9bcfdc042998919c2a600f7f101b33edd42199: Status 404 returned error can't find the container with id 9f34d706f5c2e04db7adf4281d9bcfdc042998919c2a600f7f101b33edd42199 Oct 09 15:02:21 crc kubenswrapper[4762]: I1009 15:02:21.442364 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Oct 09 15:02:21 crc kubenswrapper[4762]: W1009 15:02:21.463551 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod24951274_fd59_4f3d_bf68_ce86450e245e.slice/crio-68def4d8f02e49238cae6ae7cb5f6ca8477fe581444e72310dd06a461b82f3c6 WatchSource:0}: Error finding container 68def4d8f02e49238cae6ae7cb5f6ca8477fe581444e72310dd06a461b82f3c6: Status 404 returned error can't find the container with id 68def4d8f02e49238cae6ae7cb5f6ca8477fe581444e72310dd06a461b82f3c6 Oct 09 15:02:22 crc kubenswrapper[4762]: I1009 15:02:22.153250 4762 generic.go:334] "Generic (PLEG): container finished" podID="35e5a3da-9dde-4bae-87bb-e24ed6f5a38b" containerID="fa6b4da390bfd42ec3bc81b8c0421f1afa415a262d97c43ed88f5f7e4b672e21" exitCode=0 Oct 09 15:02:22 crc kubenswrapper[4762]: I1009 15:02:22.153378 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/dnsmasq-dns-694cc885dc-ccw4h" event={"ID":"35e5a3da-9dde-4bae-87bb-e24ed6f5a38b","Type":"ContainerDied","Data":"fa6b4da390bfd42ec3bc81b8c0421f1afa415a262d97c43ed88f5f7e4b672e21"} Oct 09 15:02:22 crc kubenswrapper[4762]: I1009 15:02:22.153614 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-694cc885dc-ccw4h" event={"ID":"35e5a3da-9dde-4bae-87bb-e24ed6f5a38b","Type":"ContainerStarted","Data":"9f34d706f5c2e04db7adf4281d9bcfdc042998919c2a600f7f101b33edd42199"} Oct 09 15:02:22 crc kubenswrapper[4762]: I1009 15:02:22.157247 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"24951274-fd59-4f3d-bf68-ce86450e245e","Type":"ContainerStarted","Data":"2717941d1e60950bc731a8c9d6203cc9e2bc7873335de8476f87ce9e554301ea"} Oct 09 15:02:22 crc kubenswrapper[4762]: I1009 15:02:22.157291 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"24951274-fd59-4f3d-bf68-ce86450e245e","Type":"ContainerStarted","Data":"68def4d8f02e49238cae6ae7cb5f6ca8477fe581444e72310dd06a461b82f3c6"} Oct 09 15:02:23 crc kubenswrapper[4762]: I1009 15:02:23.174064 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-694cc885dc-ccw4h" event={"ID":"35e5a3da-9dde-4bae-87bb-e24ed6f5a38b","Type":"ContainerStarted","Data":"a9c646c9b983b8d7dfe9cbfa1ca87bace22818cbaf0198cee8be405380da0a9d"} Oct 09 15:02:23 crc kubenswrapper[4762]: I1009 15:02:23.176536 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-694cc885dc-ccw4h" Oct 09 15:02:23 crc kubenswrapper[4762]: I1009 15:02:23.180131 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"24951274-fd59-4f3d-bf68-ce86450e245e","Type":"ContainerStarted","Data":"6174ea5ec5632d883860f887d599d4d0a0eb2d5738dafd6f0378924506747e7c"} Oct 09 15:02:23 crc kubenswrapper[4762]: I1009 15:02:23.181404 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Oct 09 15:02:23 crc kubenswrapper[4762]: I1009 15:02:23.193333 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-694cc885dc-ccw4h" podStartSLOduration=3.193313856 podStartE2EDuration="3.193313856s" podCreationTimestamp="2025-10-09 15:02:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 15:02:23.191132509 +0000 UTC m=+5818.964923548" watchObservedRunningTime="2025-10-09 15:02:23.193313856 +0000 UTC m=+5818.967104895" Oct 09 15:02:23 crc kubenswrapper[4762]: I1009 15:02:23.214784 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=3.214766644 podStartE2EDuration="3.214766644s" podCreationTimestamp="2025-10-09 15:02:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 15:02:23.212754543 +0000 UTC m=+5818.986545602" watchObservedRunningTime="2025-10-09 15:02:23.214766644 +0000 UTC m=+5818.988557683" Oct 09 15:02:30 crc kubenswrapper[4762]: I1009 15:02:30.787097 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-694cc885dc-ccw4h" Oct 09 15:02:30 crc kubenswrapper[4762]: I1009 15:02:30.849518 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-84486dbb57-bsk9z"] Oct 09 15:02:30 crc 
kubenswrapper[4762]: I1009 15:02:30.849863 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-84486dbb57-bsk9z" podUID="f910c1bd-7092-4524-8fd2-e3612bdb268c" containerName="dnsmasq-dns" containerID="cri-o://83c5f3091b6a9df7c98181c9d09065689902f7e29bbb06a7c418769f03b49ac1" gracePeriod=10 Oct 09 15:02:31 crc kubenswrapper[4762]: I1009 15:02:31.257229 4762 generic.go:334] "Generic (PLEG): container finished" podID="f910c1bd-7092-4524-8fd2-e3612bdb268c" containerID="83c5f3091b6a9df7c98181c9d09065689902f7e29bbb06a7c418769f03b49ac1" exitCode=0 Oct 09 15:02:31 crc kubenswrapper[4762]: I1009 15:02:31.257277 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84486dbb57-bsk9z" event={"ID":"f910c1bd-7092-4524-8fd2-e3612bdb268c","Type":"ContainerDied","Data":"83c5f3091b6a9df7c98181c9d09065689902f7e29bbb06a7c418769f03b49ac1"} Oct 09 15:02:31 crc kubenswrapper[4762]: I1009 15:02:31.257511 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84486dbb57-bsk9z" event={"ID":"f910c1bd-7092-4524-8fd2-e3612bdb268c","Type":"ContainerDied","Data":"bdac00e9ebb8d93e5b95d6a5df947198821ac8669e9255e3bd2e0578cadc4559"} Oct 09 15:02:31 crc kubenswrapper[4762]: I1009 15:02:31.257533 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bdac00e9ebb8d93e5b95d6a5df947198821ac8669e9255e3bd2e0578cadc4559" Oct 09 15:02:31 crc kubenswrapper[4762]: I1009 15:02:31.325735 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-84486dbb57-bsk9z" Oct 09 15:02:31 crc kubenswrapper[4762]: I1009 15:02:31.426517 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f910c1bd-7092-4524-8fd2-e3612bdb268c-dns-svc\") pod \"f910c1bd-7092-4524-8fd2-e3612bdb268c\" (UID: \"f910c1bd-7092-4524-8fd2-e3612bdb268c\") " Oct 09 15:02:31 crc kubenswrapper[4762]: I1009 15:02:31.426709 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f910c1bd-7092-4524-8fd2-e3612bdb268c-ovsdbserver-sb\") pod \"f910c1bd-7092-4524-8fd2-e3612bdb268c\" (UID: \"f910c1bd-7092-4524-8fd2-e3612bdb268c\") " Oct 09 15:02:31 crc kubenswrapper[4762]: I1009 15:02:31.427449 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f910c1bd-7092-4524-8fd2-e3612bdb268c-ovsdbserver-nb\") pod \"f910c1bd-7092-4524-8fd2-e3612bdb268c\" (UID: \"f910c1bd-7092-4524-8fd2-e3612bdb268c\") " Oct 09 15:02:31 crc kubenswrapper[4762]: I1009 15:02:31.427497 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f910c1bd-7092-4524-8fd2-e3612bdb268c-config\") pod \"f910c1bd-7092-4524-8fd2-e3612bdb268c\" (UID: \"f910c1bd-7092-4524-8fd2-e3612bdb268c\") " Oct 09 15:02:31 crc kubenswrapper[4762]: I1009 15:02:31.427549 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nbr68\" (UniqueName: \"kubernetes.io/projected/f910c1bd-7092-4524-8fd2-e3612bdb268c-kube-api-access-nbr68\") pod \"f910c1bd-7092-4524-8fd2-e3612bdb268c\" (UID: \"f910c1bd-7092-4524-8fd2-e3612bdb268c\") " Oct 09 15:02:31 crc kubenswrapper[4762]: I1009 15:02:31.433165 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/projected/f910c1bd-7092-4524-8fd2-e3612bdb268c-kube-api-access-nbr68" (OuterVolumeSpecName: "kube-api-access-nbr68") pod "f910c1bd-7092-4524-8fd2-e3612bdb268c" (UID: "f910c1bd-7092-4524-8fd2-e3612bdb268c"). InnerVolumeSpecName "kube-api-access-nbr68". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 15:02:31 crc kubenswrapper[4762]: I1009 15:02:31.476919 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f910c1bd-7092-4524-8fd2-e3612bdb268c-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "f910c1bd-7092-4524-8fd2-e3612bdb268c" (UID: "f910c1bd-7092-4524-8fd2-e3612bdb268c"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 15:02:31 crc kubenswrapper[4762]: I1009 15:02:31.477892 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f910c1bd-7092-4524-8fd2-e3612bdb268c-config" (OuterVolumeSpecName: "config") pod "f910c1bd-7092-4524-8fd2-e3612bdb268c" (UID: "f910c1bd-7092-4524-8fd2-e3612bdb268c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 15:02:31 crc kubenswrapper[4762]: I1009 15:02:31.486430 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f910c1bd-7092-4524-8fd2-e3612bdb268c-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "f910c1bd-7092-4524-8fd2-e3612bdb268c" (UID: "f910c1bd-7092-4524-8fd2-e3612bdb268c"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 15:02:31 crc kubenswrapper[4762]: I1009 15:02:31.507155 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f910c1bd-7092-4524-8fd2-e3612bdb268c-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "f910c1bd-7092-4524-8fd2-e3612bdb268c" (UID: "f910c1bd-7092-4524-8fd2-e3612bdb268c"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 15:02:31 crc kubenswrapper[4762]: I1009 15:02:31.529809 4762 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f910c1bd-7092-4524-8fd2-e3612bdb268c-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 09 15:02:31 crc kubenswrapper[4762]: I1009 15:02:31.529852 4762 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f910c1bd-7092-4524-8fd2-e3612bdb268c-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 09 15:02:31 crc kubenswrapper[4762]: I1009 15:02:31.529869 4762 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f910c1bd-7092-4524-8fd2-e3612bdb268c-config\") on node \"crc\" DevicePath \"\"" Oct 09 15:02:31 crc kubenswrapper[4762]: I1009 15:02:31.529883 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nbr68\" (UniqueName: \"kubernetes.io/projected/f910c1bd-7092-4524-8fd2-e3612bdb268c-kube-api-access-nbr68\") on node \"crc\" DevicePath \"\"" Oct 09 15:02:31 crc kubenswrapper[4762]: I1009 15:02:31.529898 4762 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f910c1bd-7092-4524-8fd2-e3612bdb268c-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 09 15:02:32 crc kubenswrapper[4762]: I1009 15:02:32.264735 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-84486dbb57-bsk9z" Oct 09 15:02:32 crc kubenswrapper[4762]: I1009 15:02:32.301726 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-84486dbb57-bsk9z"] Oct 09 15:02:32 crc kubenswrapper[4762]: I1009 15:02:32.311025 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-84486dbb57-bsk9z"] Oct 09 15:02:32 crc kubenswrapper[4762]: I1009 15:02:32.666655 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Oct 09 15:02:32 crc kubenswrapper[4762]: I1009 15:02:32.666961 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell0-conductor-0" podUID="f7a17d87-345b-4451-8d40-71321e9134bc" containerName="nova-cell0-conductor-conductor" containerID="cri-o://24fb88ee4db89227d1bba89ac38d3996ae7a6626c0d970e743ad5632790faff4" gracePeriod=30 Oct 09 15:02:32 crc kubenswrapper[4762]: I1009 15:02:32.684431 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Oct 09 15:02:32 crc kubenswrapper[4762]: I1009 15:02:32.684715 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="86fd2721-354b-4c22-91e5-ba277c8b49ba" containerName="nova-scheduler-scheduler" containerID="cri-o://32741d5fb11453900a4986db0b82e546861eb2f926aa83babbf3872160023319" gracePeriod=30 Oct 09 15:02:32 crc kubenswrapper[4762]: I1009 15:02:32.697785 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 09 15:02:32 crc kubenswrapper[4762]: I1009 15:02:32.698073 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="916a7bbc-1cdb-4ff9-aba8-93f48e44d0bc" containerName="nova-metadata-log" containerID="cri-o://2aad9fa3bfe787bbaf51acb89a9c6dd57b3e820e06eda0485dc0c4fb5af34639" gracePeriod=30 Oct 09 15:02:32 crc kubenswrapper[4762]: I1009 15:02:32.698181 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="916a7bbc-1cdb-4ff9-aba8-93f48e44d0bc" containerName="nova-metadata-metadata" containerID="cri-o://3f0ba421cd0c7d22de9fe94627781f111e4d43969f14da04dd768d3e0c454e24" gracePeriod=30 Oct 09 15:02:32 crc kubenswrapper[4762]: I1009 15:02:32.713556 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 09 15:02:32 crc kubenswrapper[4762]: I1009 15:02:32.714178 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="0719eaf7-7d99-461f-87eb-a59afb72d0bf" containerName="nova-api-log" containerID="cri-o://237d57cdf05ac66216be6a37caf4682604833d1e9ccd043b7ebe91cbf950fb9f" gracePeriod=30 Oct 09 15:02:32 crc kubenswrapper[4762]: I1009 15:02:32.714227 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="0719eaf7-7d99-461f-87eb-a59afb72d0bf" containerName="nova-api-api" containerID="cri-o://47ee8af1321c47367005acb90e0e1865f92048446ca2488f57eba1e99d8b9b0f" gracePeriod=30 Oct 09 15:02:32 crc kubenswrapper[4762]: I1009 15:02:32.760702 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 09 15:02:32 crc kubenswrapper[4762]: I1009 15:02:32.760981 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="96a2eb96-11dc-4ef6-9dec-fcd753eafea4" 
containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://a33e18d0ec895c00a6524398c2c7b401269f98a3c4cfecce451ac2d0cbfa5039" gracePeriod=30 Oct 09 15:02:32 crc kubenswrapper[4762]: I1009 15:02:32.791488 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"] Oct 09 15:02:32 crc kubenswrapper[4762]: I1009 15:02:32.791830 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-conductor-0" podUID="1d97958c-5c80-4412-8277-d6ab937f9d0d" containerName="nova-cell1-conductor-conductor" containerID="cri-o://edc931756ed71cf5993448a5246873dcfb5d1b50111573864185a409dbfa82cb" gracePeriod=30 Oct 09 15:02:32 crc kubenswrapper[4762]: I1009 15:02:32.975963 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f910c1bd-7092-4524-8fd2-e3612bdb268c" path="/var/lib/kubelet/pods/f910c1bd-7092-4524-8fd2-e3612bdb268c/volumes" Oct 09 15:02:33 crc kubenswrapper[4762]: I1009 15:02:33.213229 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Oct 09 15:02:33 crc kubenswrapper[4762]: I1009 15:02:33.278452 4762 generic.go:334] "Generic (PLEG): container finished" podID="0719eaf7-7d99-461f-87eb-a59afb72d0bf" containerID="237d57cdf05ac66216be6a37caf4682604833d1e9ccd043b7ebe91cbf950fb9f" exitCode=143 Oct 09 15:02:33 crc kubenswrapper[4762]: I1009 15:02:33.278816 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"0719eaf7-7d99-461f-87eb-a59afb72d0bf","Type":"ContainerDied","Data":"237d57cdf05ac66216be6a37caf4682604833d1e9ccd043b7ebe91cbf950fb9f"} Oct 09 15:02:33 crc kubenswrapper[4762]: I1009 15:02:33.290381 4762 generic.go:334] "Generic (PLEG): container finished" podID="916a7bbc-1cdb-4ff9-aba8-93f48e44d0bc" containerID="2aad9fa3bfe787bbaf51acb89a9c6dd57b3e820e06eda0485dc0c4fb5af34639" exitCode=143 Oct 09 15:02:33 crc kubenswrapper[4762]: I1009 15:02:33.290427 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"916a7bbc-1cdb-4ff9-aba8-93f48e44d0bc","Type":"ContainerDied","Data":"2aad9fa3bfe787bbaf51acb89a9c6dd57b3e820e06eda0485dc0c4fb5af34639"} Oct 09 15:02:33 crc kubenswrapper[4762]: I1009 15:02:33.671373 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 09 15:02:33 crc kubenswrapper[4762]: I1009 15:02:33.673804 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b7bxs\" (UniqueName: \"kubernetes.io/projected/96a2eb96-11dc-4ef6-9dec-fcd753eafea4-kube-api-access-b7bxs\") pod \"96a2eb96-11dc-4ef6-9dec-fcd753eafea4\" (UID: \"96a2eb96-11dc-4ef6-9dec-fcd753eafea4\") " Oct 09 15:02:33 crc kubenswrapper[4762]: I1009 15:02:33.673961 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/96a2eb96-11dc-4ef6-9dec-fcd753eafea4-config-data\") pod \"96a2eb96-11dc-4ef6-9dec-fcd753eafea4\" (UID: \"96a2eb96-11dc-4ef6-9dec-fcd753eafea4\") " Oct 09 15:02:33 crc kubenswrapper[4762]: I1009 15:02:33.674021 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96a2eb96-11dc-4ef6-9dec-fcd753eafea4-combined-ca-bundle\") pod \"96a2eb96-11dc-4ef6-9dec-fcd753eafea4\" (UID: \"96a2eb96-11dc-4ef6-9dec-fcd753eafea4\") " Oct 09 15:02:33 crc kubenswrapper[4762]: I1009 15:02:33.679304 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96a2eb96-11dc-4ef6-9dec-fcd753eafea4-kube-api-access-b7bxs" (OuterVolumeSpecName: "kube-api-access-b7bxs") pod "96a2eb96-11dc-4ef6-9dec-fcd753eafea4" (UID: "96a2eb96-11dc-4ef6-9dec-fcd753eafea4"). InnerVolumeSpecName "kube-api-access-b7bxs". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 15:02:33 crc kubenswrapper[4762]: I1009 15:02:33.730117 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96a2eb96-11dc-4ef6-9dec-fcd753eafea4-config-data" (OuterVolumeSpecName: "config-data") pod "96a2eb96-11dc-4ef6-9dec-fcd753eafea4" (UID: "96a2eb96-11dc-4ef6-9dec-fcd753eafea4"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:02:33 crc kubenswrapper[4762]: I1009 15:02:33.731893 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96a2eb96-11dc-4ef6-9dec-fcd753eafea4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "96a2eb96-11dc-4ef6-9dec-fcd753eafea4" (UID: "96a2eb96-11dc-4ef6-9dec-fcd753eafea4"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:02:33 crc kubenswrapper[4762]: I1009 15:02:33.776938 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b7bxs\" (UniqueName: \"kubernetes.io/projected/96a2eb96-11dc-4ef6-9dec-fcd753eafea4-kube-api-access-b7bxs\") on node \"crc\" DevicePath \"\"" Oct 09 15:02:33 crc kubenswrapper[4762]: I1009 15:02:33.776973 4762 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/96a2eb96-11dc-4ef6-9dec-fcd753eafea4-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 15:02:33 crc kubenswrapper[4762]: I1009 15:02:33.776983 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96a2eb96-11dc-4ef6-9dec-fcd753eafea4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 15:02:33 crc kubenswrapper[4762]: E1009 15:02:33.983614 4762 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="32741d5fb11453900a4986db0b82e546861eb2f926aa83babbf3872160023319" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Oct 09 15:02:33 crc kubenswrapper[4762]: E1009 15:02:33.985495 4762 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="32741d5fb11453900a4986db0b82e546861eb2f926aa83babbf3872160023319" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Oct 09 15:02:33 crc kubenswrapper[4762]: E1009 15:02:33.986862 4762 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="32741d5fb11453900a4986db0b82e546861eb2f926aa83babbf3872160023319" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Oct 09 15:02:33 crc kubenswrapper[4762]: E1009 15:02:33.986922 4762 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="86fd2721-354b-4c22-91e5-ba277c8b49ba" containerName="nova-scheduler-scheduler" Oct 09 15:02:34 crc kubenswrapper[4762]: I1009 15:02:34.300449 4762 generic.go:334] "Generic (PLEG): container finished" podID="96a2eb96-11dc-4ef6-9dec-fcd753eafea4" containerID="a33e18d0ec895c00a6524398c2c7b401269f98a3c4cfecce451ac2d0cbfa5039" exitCode=0 Oct 09 15:02:34 crc kubenswrapper[4762]: I1009 15:02:34.300833 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"96a2eb96-11dc-4ef6-9dec-fcd753eafea4","Type":"ContainerDied","Data":"a33e18d0ec895c00a6524398c2c7b401269f98a3c4cfecce451ac2d0cbfa5039"} Oct 09 15:02:34 crc kubenswrapper[4762]: I1009 15:02:34.300861 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"96a2eb96-11dc-4ef6-9dec-fcd753eafea4","Type":"ContainerDied","Data":"47a849ee73d6676b58cabeb62e6ca68a99ac5f0a2b73f50155545e3b30ea9341"} Oct 09 15:02:34 crc kubenswrapper[4762]: I1009 15:02:34.300877 4762 scope.go:117] "RemoveContainer" containerID="a33e18d0ec895c00a6524398c2c7b401269f98a3c4cfecce451ac2d0cbfa5039" Oct 09 15:02:34 crc kubenswrapper[4762]: I1009 15:02:34.301008 4762 util.go:48] 
"No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 09 15:02:34 crc kubenswrapper[4762]: I1009 15:02:34.355777 4762 scope.go:117] "RemoveContainer" containerID="a33e18d0ec895c00a6524398c2c7b401269f98a3c4cfecce451ac2d0cbfa5039" Oct 09 15:02:34 crc kubenswrapper[4762]: E1009 15:02:34.361198 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a33e18d0ec895c00a6524398c2c7b401269f98a3c4cfecce451ac2d0cbfa5039\": container with ID starting with a33e18d0ec895c00a6524398c2c7b401269f98a3c4cfecce451ac2d0cbfa5039 not found: ID does not exist" containerID="a33e18d0ec895c00a6524398c2c7b401269f98a3c4cfecce451ac2d0cbfa5039" Oct 09 15:02:34 crc kubenswrapper[4762]: I1009 15:02:34.361252 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a33e18d0ec895c00a6524398c2c7b401269f98a3c4cfecce451ac2d0cbfa5039"} err="failed to get container status \"a33e18d0ec895c00a6524398c2c7b401269f98a3c4cfecce451ac2d0cbfa5039\": rpc error: code = NotFound desc = could not find container \"a33e18d0ec895c00a6524398c2c7b401269f98a3c4cfecce451ac2d0cbfa5039\": container with ID starting with a33e18d0ec895c00a6524398c2c7b401269f98a3c4cfecce451ac2d0cbfa5039 not found: ID does not exist" Oct 09 15:02:34 crc kubenswrapper[4762]: I1009 15:02:34.369692 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 09 15:02:34 crc kubenswrapper[4762]: I1009 15:02:34.377527 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 09 15:02:34 crc kubenswrapper[4762]: I1009 15:02:34.397709 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 09 15:02:34 crc kubenswrapper[4762]: E1009 15:02:34.398234 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f910c1bd-7092-4524-8fd2-e3612bdb268c" containerName="init" Oct 09 15:02:34 crc kubenswrapper[4762]: I1009 15:02:34.398261 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="f910c1bd-7092-4524-8fd2-e3612bdb268c" containerName="init" Oct 09 15:02:34 crc kubenswrapper[4762]: E1009 15:02:34.398294 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="96a2eb96-11dc-4ef6-9dec-fcd753eafea4" containerName="nova-cell1-novncproxy-novncproxy" Oct 09 15:02:34 crc kubenswrapper[4762]: I1009 15:02:34.398303 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="96a2eb96-11dc-4ef6-9dec-fcd753eafea4" containerName="nova-cell1-novncproxy-novncproxy" Oct 09 15:02:34 crc kubenswrapper[4762]: E1009 15:02:34.398322 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f910c1bd-7092-4524-8fd2-e3612bdb268c" containerName="dnsmasq-dns" Oct 09 15:02:34 crc kubenswrapper[4762]: I1009 15:02:34.398330 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="f910c1bd-7092-4524-8fd2-e3612bdb268c" containerName="dnsmasq-dns" Oct 09 15:02:34 crc kubenswrapper[4762]: I1009 15:02:34.398569 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="f910c1bd-7092-4524-8fd2-e3612bdb268c" containerName="dnsmasq-dns" Oct 09 15:02:34 crc kubenswrapper[4762]: I1009 15:02:34.398595 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="96a2eb96-11dc-4ef6-9dec-fcd753eafea4" containerName="nova-cell1-novncproxy-novncproxy" Oct 09 15:02:34 crc kubenswrapper[4762]: I1009 15:02:34.399486 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 09 15:02:34 crc kubenswrapper[4762]: I1009 15:02:34.405294 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Oct 09 15:02:34 crc kubenswrapper[4762]: I1009 15:02:34.420052 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 09 15:02:34 crc kubenswrapper[4762]: I1009 15:02:34.592292 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/17576139-60b8-4084-ab82-dddbc2736e43-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"17576139-60b8-4084-ab82-dddbc2736e43\") " pod="openstack/nova-cell1-novncproxy-0" Oct 09 15:02:34 crc kubenswrapper[4762]: I1009 15:02:34.592754 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/17576139-60b8-4084-ab82-dddbc2736e43-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"17576139-60b8-4084-ab82-dddbc2736e43\") " pod="openstack/nova-cell1-novncproxy-0" Oct 09 15:02:34 crc kubenswrapper[4762]: I1009 15:02:34.592834 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-774x8\" (UniqueName: \"kubernetes.io/projected/17576139-60b8-4084-ab82-dddbc2736e43-kube-api-access-774x8\") pod \"nova-cell1-novncproxy-0\" (UID: \"17576139-60b8-4084-ab82-dddbc2736e43\") " pod="openstack/nova-cell1-novncproxy-0" Oct 09 15:02:34 crc kubenswrapper[4762]: I1009 15:02:34.694474 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/17576139-60b8-4084-ab82-dddbc2736e43-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"17576139-60b8-4084-ab82-dddbc2736e43\") " pod="openstack/nova-cell1-novncproxy-0" Oct 09 15:02:34 crc kubenswrapper[4762]: I1009 15:02:34.694545 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/17576139-60b8-4084-ab82-dddbc2736e43-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"17576139-60b8-4084-ab82-dddbc2736e43\") " pod="openstack/nova-cell1-novncproxy-0" Oct 09 15:02:34 crc kubenswrapper[4762]: I1009 15:02:34.694569 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-774x8\" (UniqueName: \"kubernetes.io/projected/17576139-60b8-4084-ab82-dddbc2736e43-kube-api-access-774x8\") pod \"nova-cell1-novncproxy-0\" (UID: \"17576139-60b8-4084-ab82-dddbc2736e43\") " pod="openstack/nova-cell1-novncproxy-0" Oct 09 15:02:34 crc kubenswrapper[4762]: I1009 15:02:34.700162 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/17576139-60b8-4084-ab82-dddbc2736e43-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"17576139-60b8-4084-ab82-dddbc2736e43\") " pod="openstack/nova-cell1-novncproxy-0" Oct 09 15:02:34 crc kubenswrapper[4762]: I1009 15:02:34.700346 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/17576139-60b8-4084-ab82-dddbc2736e43-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"17576139-60b8-4084-ab82-dddbc2736e43\") " pod="openstack/nova-cell1-novncproxy-0" Oct 09 15:02:34 crc kubenswrapper[4762]: I1009 15:02:34.721146 
4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-774x8\" (UniqueName: \"kubernetes.io/projected/17576139-60b8-4084-ab82-dddbc2736e43-kube-api-access-774x8\") pod \"nova-cell1-novncproxy-0\" (UID: \"17576139-60b8-4084-ab82-dddbc2736e43\") " pod="openstack/nova-cell1-novncproxy-0" Oct 09 15:02:34 crc kubenswrapper[4762]: I1009 15:02:34.721695 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 09 15:02:34 crc kubenswrapper[4762]: I1009 15:02:34.980730 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96a2eb96-11dc-4ef6-9dec-fcd753eafea4" path="/var/lib/kubelet/pods/96a2eb96-11dc-4ef6-9dec-fcd753eafea4/volumes" Oct 09 15:02:35 crc kubenswrapper[4762]: I1009 15:02:35.222565 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 09 15:02:35 crc kubenswrapper[4762]: W1009 15:02:35.229030 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod17576139_60b8_4084_ab82_dddbc2736e43.slice/crio-337a476f7942d256d6cb4a22e7499f8308aeb7412c3c630f224e00c1db310715 WatchSource:0}: Error finding container 337a476f7942d256d6cb4a22e7499f8308aeb7412c3c630f224e00c1db310715: Status 404 returned error can't find the container with id 337a476f7942d256d6cb4a22e7499f8308aeb7412c3c630f224e00c1db310715 Oct 09 15:02:35 crc kubenswrapper[4762]: I1009 15:02:35.319969 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"17576139-60b8-4084-ab82-dddbc2736e43","Type":"ContainerStarted","Data":"337a476f7942d256d6cb4a22e7499f8308aeb7412c3c630f224e00c1db310715"} Oct 09 15:02:35 crc kubenswrapper[4762]: I1009 15:02:35.322102 4762 generic.go:334] "Generic (PLEG): container finished" podID="f7a17d87-345b-4451-8d40-71321e9134bc" containerID="24fb88ee4db89227d1bba89ac38d3996ae7a6626c0d970e743ad5632790faff4" exitCode=0 Oct 09 15:02:35 crc kubenswrapper[4762]: I1009 15:02:35.322136 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"f7a17d87-345b-4451-8d40-71321e9134bc","Type":"ContainerDied","Data":"24fb88ee4db89227d1bba89ac38d3996ae7a6626c0d970e743ad5632790faff4"} Oct 09 15:02:35 crc kubenswrapper[4762]: I1009 15:02:35.501615 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Oct 09 15:02:35 crc kubenswrapper[4762]: E1009 15:02:35.538715 4762 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="edc931756ed71cf5993448a5246873dcfb5d1b50111573864185a409dbfa82cb" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Oct 09 15:02:35 crc kubenswrapper[4762]: E1009 15:02:35.541593 4762 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="edc931756ed71cf5993448a5246873dcfb5d1b50111573864185a409dbfa82cb" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Oct 09 15:02:35 crc kubenswrapper[4762]: E1009 15:02:35.542942 4762 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="edc931756ed71cf5993448a5246873dcfb5d1b50111573864185a409dbfa82cb" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Oct 09 15:02:35 crc kubenswrapper[4762]: E1009 15:02:35.543022 4762 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-cell1-conductor-0" podUID="1d97958c-5c80-4412-8277-d6ab937f9d0d" containerName="nova-cell1-conductor-conductor" Oct 09 15:02:35 crc kubenswrapper[4762]: I1009 15:02:35.610849 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f7a17d87-345b-4451-8d40-71321e9134bc-config-data\") pod \"f7a17d87-345b-4451-8d40-71321e9134bc\" (UID: \"f7a17d87-345b-4451-8d40-71321e9134bc\") " Oct 09 15:02:35 crc kubenswrapper[4762]: I1009 15:02:35.610999 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7a17d87-345b-4451-8d40-71321e9134bc-combined-ca-bundle\") pod \"f7a17d87-345b-4451-8d40-71321e9134bc\" (UID: \"f7a17d87-345b-4451-8d40-71321e9134bc\") " Oct 09 15:02:35 crc kubenswrapper[4762]: I1009 15:02:35.611086 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ljwbd\" (UniqueName: \"kubernetes.io/projected/f7a17d87-345b-4451-8d40-71321e9134bc-kube-api-access-ljwbd\") pod \"f7a17d87-345b-4451-8d40-71321e9134bc\" (UID: \"f7a17d87-345b-4451-8d40-71321e9134bc\") " Oct 09 15:02:35 crc kubenswrapper[4762]: I1009 15:02:35.616237 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f7a17d87-345b-4451-8d40-71321e9134bc-kube-api-access-ljwbd" (OuterVolumeSpecName: "kube-api-access-ljwbd") pod "f7a17d87-345b-4451-8d40-71321e9134bc" (UID: "f7a17d87-345b-4451-8d40-71321e9134bc"). InnerVolumeSpecName "kube-api-access-ljwbd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 15:02:35 crc kubenswrapper[4762]: I1009 15:02:35.635093 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f7a17d87-345b-4451-8d40-71321e9134bc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f7a17d87-345b-4451-8d40-71321e9134bc" (UID: "f7a17d87-345b-4451-8d40-71321e9134bc"). 
InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:02:35 crc kubenswrapper[4762]: I1009 15:02:35.637745 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f7a17d87-345b-4451-8d40-71321e9134bc-config-data" (OuterVolumeSpecName: "config-data") pod "f7a17d87-345b-4451-8d40-71321e9134bc" (UID: "f7a17d87-345b-4451-8d40-71321e9134bc"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:02:35 crc kubenswrapper[4762]: I1009 15:02:35.714322 4762 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f7a17d87-345b-4451-8d40-71321e9134bc-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 15:02:35 crc kubenswrapper[4762]: I1009 15:02:35.714366 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7a17d87-345b-4451-8d40-71321e9134bc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 15:02:35 crc kubenswrapper[4762]: I1009 15:02:35.714381 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ljwbd\" (UniqueName: \"kubernetes.io/projected/f7a17d87-345b-4451-8d40-71321e9134bc-kube-api-access-ljwbd\") on node \"crc\" DevicePath \"\"" Oct 09 15:02:36 crc kubenswrapper[4762]: I1009 15:02:36.108360 4762 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="916a7bbc-1cdb-4ff9-aba8-93f48e44d0bc" containerName="nova-metadata-log" probeResult="failure" output="Get \"http://10.217.1.78:8775/\": read tcp 10.217.0.2:55448->10.217.1.78:8775: read: connection reset by peer" Oct 09 15:02:36 crc kubenswrapper[4762]: I1009 15:02:36.109012 4762 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="916a7bbc-1cdb-4ff9-aba8-93f48e44d0bc" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"http://10.217.1.78:8775/\": read tcp 10.217.0.2:55460->10.217.1.78:8775: read: connection reset by peer" Oct 09 15:02:36 crc kubenswrapper[4762]: I1009 15:02:36.241591 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 09 15:02:36 crc kubenswrapper[4762]: I1009 15:02:36.327082 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0719eaf7-7d99-461f-87eb-a59afb72d0bf-config-data\") pod \"0719eaf7-7d99-461f-87eb-a59afb72d0bf\" (UID: \"0719eaf7-7d99-461f-87eb-a59afb72d0bf\") " Oct 09 15:02:36 crc kubenswrapper[4762]: I1009 15:02:36.327212 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8w6ss\" (UniqueName: \"kubernetes.io/projected/0719eaf7-7d99-461f-87eb-a59afb72d0bf-kube-api-access-8w6ss\") pod \"0719eaf7-7d99-461f-87eb-a59afb72d0bf\" (UID: \"0719eaf7-7d99-461f-87eb-a59afb72d0bf\") " Oct 09 15:02:36 crc kubenswrapper[4762]: I1009 15:02:36.327244 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0719eaf7-7d99-461f-87eb-a59afb72d0bf-combined-ca-bundle\") pod \"0719eaf7-7d99-461f-87eb-a59afb72d0bf\" (UID: \"0719eaf7-7d99-461f-87eb-a59afb72d0bf\") " Oct 09 15:02:36 crc kubenswrapper[4762]: I1009 15:02:36.327268 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0719eaf7-7d99-461f-87eb-a59afb72d0bf-logs\") pod \"0719eaf7-7d99-461f-87eb-a59afb72d0bf\" (UID: \"0719eaf7-7d99-461f-87eb-a59afb72d0bf\") " Oct 09 15:02:36 crc kubenswrapper[4762]: I1009 15:02:36.328089 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0719eaf7-7d99-461f-87eb-a59afb72d0bf-logs" (OuterVolumeSpecName: "logs") pod "0719eaf7-7d99-461f-87eb-a59afb72d0bf" (UID: "0719eaf7-7d99-461f-87eb-a59afb72d0bf"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 15:02:36 crc kubenswrapper[4762]: I1009 15:02:36.343055 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0719eaf7-7d99-461f-87eb-a59afb72d0bf-kube-api-access-8w6ss" (OuterVolumeSpecName: "kube-api-access-8w6ss") pod "0719eaf7-7d99-461f-87eb-a59afb72d0bf" (UID: "0719eaf7-7d99-461f-87eb-a59afb72d0bf"). InnerVolumeSpecName "kube-api-access-8w6ss". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 15:02:36 crc kubenswrapper[4762]: I1009 15:02:36.350157 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0719eaf7-7d99-461f-87eb-a59afb72d0bf-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0719eaf7-7d99-461f-87eb-a59afb72d0bf" (UID: "0719eaf7-7d99-461f-87eb-a59afb72d0bf"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:02:36 crc kubenswrapper[4762]: I1009 15:02:36.358672 4762 generic.go:334] "Generic (PLEG): container finished" podID="0719eaf7-7d99-461f-87eb-a59afb72d0bf" containerID="47ee8af1321c47367005acb90e0e1865f92048446ca2488f57eba1e99d8b9b0f" exitCode=0 Oct 09 15:02:36 crc kubenswrapper[4762]: I1009 15:02:36.358877 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 09 15:02:36 crc kubenswrapper[4762]: I1009 15:02:36.359067 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"0719eaf7-7d99-461f-87eb-a59afb72d0bf","Type":"ContainerDied","Data":"47ee8af1321c47367005acb90e0e1865f92048446ca2488f57eba1e99d8b9b0f"} Oct 09 15:02:36 crc kubenswrapper[4762]: I1009 15:02:36.359136 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"0719eaf7-7d99-461f-87eb-a59afb72d0bf","Type":"ContainerDied","Data":"0689ce6b5e86d2709a723e53d8f443b87a20a909f3ac23dda58263708539fe63"} Oct 09 15:02:36 crc kubenswrapper[4762]: I1009 15:02:36.359163 4762 scope.go:117] "RemoveContainer" containerID="47ee8af1321c47367005acb90e0e1865f92048446ca2488f57eba1e99d8b9b0f" Oct 09 15:02:36 crc kubenswrapper[4762]: I1009 15:02:36.363777 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"17576139-60b8-4084-ab82-dddbc2736e43","Type":"ContainerStarted","Data":"5b8c627ef436d9492353fb3f366de3e29fca2c3458e150414bd9a75784a1de08"} Oct 09 15:02:36 crc kubenswrapper[4762]: I1009 15:02:36.374331 4762 generic.go:334] "Generic (PLEG): container finished" podID="916a7bbc-1cdb-4ff9-aba8-93f48e44d0bc" containerID="3f0ba421cd0c7d22de9fe94627781f111e4d43969f14da04dd768d3e0c454e24" exitCode=0 Oct 09 15:02:36 crc kubenswrapper[4762]: I1009 15:02:36.374408 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"916a7bbc-1cdb-4ff9-aba8-93f48e44d0bc","Type":"ContainerDied","Data":"3f0ba421cd0c7d22de9fe94627781f111e4d43969f14da04dd768d3e0c454e24"} Oct 09 15:02:36 crc kubenswrapper[4762]: I1009 15:02:36.378816 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0719eaf7-7d99-461f-87eb-a59afb72d0bf-config-data" (OuterVolumeSpecName: "config-data") pod "0719eaf7-7d99-461f-87eb-a59afb72d0bf" (UID: "0719eaf7-7d99-461f-87eb-a59afb72d0bf"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:02:36 crc kubenswrapper[4762]: I1009 15:02:36.379559 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"f7a17d87-345b-4451-8d40-71321e9134bc","Type":"ContainerDied","Data":"9d71b93cf083919683c698276c68ba295f3bef4804a7e87e401b11da56e87826"} Oct 09 15:02:36 crc kubenswrapper[4762]: I1009 15:02:36.379703 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Oct 09 15:02:36 crc kubenswrapper[4762]: I1009 15:02:36.397138 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.397113726 podStartE2EDuration="2.397113726s" podCreationTimestamp="2025-10-09 15:02:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 15:02:36.385515174 +0000 UTC m=+5832.159306233" watchObservedRunningTime="2025-10-09 15:02:36.397113726 +0000 UTC m=+5832.170904775" Oct 09 15:02:36 crc kubenswrapper[4762]: I1009 15:02:36.430726 4762 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0719eaf7-7d99-461f-87eb-a59afb72d0bf-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 15:02:36 crc kubenswrapper[4762]: I1009 15:02:36.431052 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8w6ss\" (UniqueName: \"kubernetes.io/projected/0719eaf7-7d99-461f-87eb-a59afb72d0bf-kube-api-access-8w6ss\") on node \"crc\" DevicePath \"\"" Oct 09 15:02:36 crc kubenswrapper[4762]: I1009 15:02:36.431147 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0719eaf7-7d99-461f-87eb-a59afb72d0bf-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 15:02:36 crc kubenswrapper[4762]: I1009 15:02:36.431301 4762 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0719eaf7-7d99-461f-87eb-a59afb72d0bf-logs\") on node \"crc\" DevicePath \"\"" Oct 09 15:02:36 crc kubenswrapper[4762]: I1009 15:02:36.437576 4762 scope.go:117] "RemoveContainer" containerID="237d57cdf05ac66216be6a37caf4682604833d1e9ccd043b7ebe91cbf950fb9f" Oct 09 15:02:36 crc kubenswrapper[4762]: I1009 15:02:36.462578 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Oct 09 15:02:36 crc kubenswrapper[4762]: I1009 15:02:36.483009 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-0"] Oct 09 15:02:36 crc kubenswrapper[4762]: I1009 15:02:36.488802 4762 scope.go:117] "RemoveContainer" containerID="47ee8af1321c47367005acb90e0e1865f92048446ca2488f57eba1e99d8b9b0f" Oct 09 15:02:36 crc kubenswrapper[4762]: E1009 15:02:36.489255 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"47ee8af1321c47367005acb90e0e1865f92048446ca2488f57eba1e99d8b9b0f\": container with ID starting with 47ee8af1321c47367005acb90e0e1865f92048446ca2488f57eba1e99d8b9b0f not found: ID does not exist" containerID="47ee8af1321c47367005acb90e0e1865f92048446ca2488f57eba1e99d8b9b0f" Oct 09 15:02:36 crc kubenswrapper[4762]: I1009 15:02:36.489314 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"47ee8af1321c47367005acb90e0e1865f92048446ca2488f57eba1e99d8b9b0f"} err="failed to get container status \"47ee8af1321c47367005acb90e0e1865f92048446ca2488f57eba1e99d8b9b0f\": rpc error: code = NotFound desc = could not find container \"47ee8af1321c47367005acb90e0e1865f92048446ca2488f57eba1e99d8b9b0f\": container with ID starting with 47ee8af1321c47367005acb90e0e1865f92048446ca2488f57eba1e99d8b9b0f not found: ID does not exist" Oct 09 15:02:36 crc kubenswrapper[4762]: I1009 15:02:36.489348 4762 scope.go:117] "RemoveContainer" 
containerID="237d57cdf05ac66216be6a37caf4682604833d1e9ccd043b7ebe91cbf950fb9f" Oct 09 15:02:36 crc kubenswrapper[4762]: E1009 15:02:36.489693 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"237d57cdf05ac66216be6a37caf4682604833d1e9ccd043b7ebe91cbf950fb9f\": container with ID starting with 237d57cdf05ac66216be6a37caf4682604833d1e9ccd043b7ebe91cbf950fb9f not found: ID does not exist" containerID="237d57cdf05ac66216be6a37caf4682604833d1e9ccd043b7ebe91cbf950fb9f" Oct 09 15:02:36 crc kubenswrapper[4762]: I1009 15:02:36.489720 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"237d57cdf05ac66216be6a37caf4682604833d1e9ccd043b7ebe91cbf950fb9f"} err="failed to get container status \"237d57cdf05ac66216be6a37caf4682604833d1e9ccd043b7ebe91cbf950fb9f\": rpc error: code = NotFound desc = could not find container \"237d57cdf05ac66216be6a37caf4682604833d1e9ccd043b7ebe91cbf950fb9f\": container with ID starting with 237d57cdf05ac66216be6a37caf4682604833d1e9ccd043b7ebe91cbf950fb9f not found: ID does not exist" Oct 09 15:02:36 crc kubenswrapper[4762]: I1009 15:02:36.489739 4762 scope.go:117] "RemoveContainer" containerID="24fb88ee4db89227d1bba89ac38d3996ae7a6626c0d970e743ad5632790faff4" Oct 09 15:02:36 crc kubenswrapper[4762]: I1009 15:02:36.492590 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Oct 09 15:02:36 crc kubenswrapper[4762]: E1009 15:02:36.493201 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f7a17d87-345b-4451-8d40-71321e9134bc" containerName="nova-cell0-conductor-conductor" Oct 09 15:02:36 crc kubenswrapper[4762]: I1009 15:02:36.493229 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7a17d87-345b-4451-8d40-71321e9134bc" containerName="nova-cell0-conductor-conductor" Oct 09 15:02:36 crc kubenswrapper[4762]: E1009 15:02:36.493248 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0719eaf7-7d99-461f-87eb-a59afb72d0bf" containerName="nova-api-api" Oct 09 15:02:36 crc kubenswrapper[4762]: I1009 15:02:36.493258 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="0719eaf7-7d99-461f-87eb-a59afb72d0bf" containerName="nova-api-api" Oct 09 15:02:36 crc kubenswrapper[4762]: E1009 15:02:36.493271 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0719eaf7-7d99-461f-87eb-a59afb72d0bf" containerName="nova-api-log" Oct 09 15:02:36 crc kubenswrapper[4762]: I1009 15:02:36.493279 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="0719eaf7-7d99-461f-87eb-a59afb72d0bf" containerName="nova-api-log" Oct 09 15:02:36 crc kubenswrapper[4762]: I1009 15:02:36.493513 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="0719eaf7-7d99-461f-87eb-a59afb72d0bf" containerName="nova-api-api" Oct 09 15:02:36 crc kubenswrapper[4762]: I1009 15:02:36.493541 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="f7a17d87-345b-4451-8d40-71321e9134bc" containerName="nova-cell0-conductor-conductor" Oct 09 15:02:36 crc kubenswrapper[4762]: I1009 15:02:36.493562 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="0719eaf7-7d99-461f-87eb-a59afb72d0bf" containerName="nova-api-log" Oct 09 15:02:36 crc kubenswrapper[4762]: I1009 15:02:36.494621 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Oct 09 15:02:36 crc kubenswrapper[4762]: I1009 15:02:36.498371 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Oct 09 15:02:36 crc kubenswrapper[4762]: I1009 15:02:36.502438 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Oct 09 15:02:36 crc kubenswrapper[4762]: I1009 15:02:36.635751 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cb5ae0d7-b04d-4fef-bbb1-66cb10260905-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"cb5ae0d7-b04d-4fef-bbb1-66cb10260905\") " pod="openstack/nova-cell0-conductor-0" Oct 09 15:02:36 crc kubenswrapper[4762]: I1009 15:02:36.635876 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f9f4k\" (UniqueName: \"kubernetes.io/projected/cb5ae0d7-b04d-4fef-bbb1-66cb10260905-kube-api-access-f9f4k\") pod \"nova-cell0-conductor-0\" (UID: \"cb5ae0d7-b04d-4fef-bbb1-66cb10260905\") " pod="openstack/nova-cell0-conductor-0" Oct 09 15:02:36 crc kubenswrapper[4762]: I1009 15:02:36.635944 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cb5ae0d7-b04d-4fef-bbb1-66cb10260905-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"cb5ae0d7-b04d-4fef-bbb1-66cb10260905\") " pod="openstack/nova-cell0-conductor-0" Oct 09 15:02:36 crc kubenswrapper[4762]: I1009 15:02:36.739921 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cb5ae0d7-b04d-4fef-bbb1-66cb10260905-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"cb5ae0d7-b04d-4fef-bbb1-66cb10260905\") " pod="openstack/nova-cell0-conductor-0" Oct 09 15:02:36 crc kubenswrapper[4762]: I1009 15:02:36.740004 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cb5ae0d7-b04d-4fef-bbb1-66cb10260905-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"cb5ae0d7-b04d-4fef-bbb1-66cb10260905\") " pod="openstack/nova-cell0-conductor-0" Oct 09 15:02:36 crc kubenswrapper[4762]: I1009 15:02:36.740121 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f9f4k\" (UniqueName: \"kubernetes.io/projected/cb5ae0d7-b04d-4fef-bbb1-66cb10260905-kube-api-access-f9f4k\") pod \"nova-cell0-conductor-0\" (UID: \"cb5ae0d7-b04d-4fef-bbb1-66cb10260905\") " pod="openstack/nova-cell0-conductor-0" Oct 09 15:02:36 crc kubenswrapper[4762]: I1009 15:02:36.744620 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cb5ae0d7-b04d-4fef-bbb1-66cb10260905-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"cb5ae0d7-b04d-4fef-bbb1-66cb10260905\") " pod="openstack/nova-cell0-conductor-0" Oct 09 15:02:36 crc kubenswrapper[4762]: I1009 15:02:36.744781 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cb5ae0d7-b04d-4fef-bbb1-66cb10260905-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"cb5ae0d7-b04d-4fef-bbb1-66cb10260905\") " pod="openstack/nova-cell0-conductor-0" Oct 09 15:02:36 crc kubenswrapper[4762]: I1009 15:02:36.755525 4762 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f9f4k\" (UniqueName: \"kubernetes.io/projected/cb5ae0d7-b04d-4fef-bbb1-66cb10260905-kube-api-access-f9f4k\") pod \"nova-cell0-conductor-0\" (UID: \"cb5ae0d7-b04d-4fef-bbb1-66cb10260905\") " pod="openstack/nova-cell0-conductor-0" Oct 09 15:02:36 crc kubenswrapper[4762]: I1009 15:02:36.760957 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 09 15:02:36 crc kubenswrapper[4762]: I1009 15:02:36.773847 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 09 15:02:36 crc kubenswrapper[4762]: I1009 15:02:36.788471 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Oct 09 15:02:36 crc kubenswrapper[4762]: I1009 15:02:36.801145 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Oct 09 15:02:36 crc kubenswrapper[4762]: E1009 15:02:36.801727 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="916a7bbc-1cdb-4ff9-aba8-93f48e44d0bc" containerName="nova-metadata-metadata" Oct 09 15:02:36 crc kubenswrapper[4762]: I1009 15:02:36.801753 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="916a7bbc-1cdb-4ff9-aba8-93f48e44d0bc" containerName="nova-metadata-metadata" Oct 09 15:02:36 crc kubenswrapper[4762]: E1009 15:02:36.801790 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="916a7bbc-1cdb-4ff9-aba8-93f48e44d0bc" containerName="nova-metadata-log" Oct 09 15:02:36 crc kubenswrapper[4762]: I1009 15:02:36.801809 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="916a7bbc-1cdb-4ff9-aba8-93f48e44d0bc" containerName="nova-metadata-log" Oct 09 15:02:36 crc kubenswrapper[4762]: I1009 15:02:36.802021 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="916a7bbc-1cdb-4ff9-aba8-93f48e44d0bc" containerName="nova-metadata-log" Oct 09 15:02:36 crc kubenswrapper[4762]: I1009 15:02:36.802066 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="916a7bbc-1cdb-4ff9-aba8-93f48e44d0bc" containerName="nova-metadata-metadata" Oct 09 15:02:36 crc kubenswrapper[4762]: I1009 15:02:36.803347 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 09 15:02:36 crc kubenswrapper[4762]: I1009 15:02:36.808948 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Oct 09 15:02:36 crc kubenswrapper[4762]: I1009 15:02:36.817715 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 09 15:02:36 crc kubenswrapper[4762]: I1009 15:02:36.825576 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Oct 09 15:02:36 crc kubenswrapper[4762]: I1009 15:02:36.841010 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dqzxw\" (UniqueName: \"kubernetes.io/projected/916a7bbc-1cdb-4ff9-aba8-93f48e44d0bc-kube-api-access-dqzxw\") pod \"916a7bbc-1cdb-4ff9-aba8-93f48e44d0bc\" (UID: \"916a7bbc-1cdb-4ff9-aba8-93f48e44d0bc\") " Oct 09 15:02:36 crc kubenswrapper[4762]: I1009 15:02:36.841268 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/916a7bbc-1cdb-4ff9-aba8-93f48e44d0bc-config-data\") pod \"916a7bbc-1cdb-4ff9-aba8-93f48e44d0bc\" (UID: \"916a7bbc-1cdb-4ff9-aba8-93f48e44d0bc\") " Oct 09 15:02:36 crc kubenswrapper[4762]: I1009 15:02:36.841353 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/916a7bbc-1cdb-4ff9-aba8-93f48e44d0bc-combined-ca-bundle\") pod \"916a7bbc-1cdb-4ff9-aba8-93f48e44d0bc\" (UID: \"916a7bbc-1cdb-4ff9-aba8-93f48e44d0bc\") " Oct 09 15:02:36 crc kubenswrapper[4762]: I1009 15:02:36.841576 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/916a7bbc-1cdb-4ff9-aba8-93f48e44d0bc-logs\") pod \"916a7bbc-1cdb-4ff9-aba8-93f48e44d0bc\" (UID: \"916a7bbc-1cdb-4ff9-aba8-93f48e44d0bc\") " Oct 09 15:02:36 crc kubenswrapper[4762]: I1009 15:02:36.842525 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/916a7bbc-1cdb-4ff9-aba8-93f48e44d0bc-logs" (OuterVolumeSpecName: "logs") pod "916a7bbc-1cdb-4ff9-aba8-93f48e44d0bc" (UID: "916a7bbc-1cdb-4ff9-aba8-93f48e44d0bc"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 15:02:36 crc kubenswrapper[4762]: I1009 15:02:36.845113 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/916a7bbc-1cdb-4ff9-aba8-93f48e44d0bc-kube-api-access-dqzxw" (OuterVolumeSpecName: "kube-api-access-dqzxw") pod "916a7bbc-1cdb-4ff9-aba8-93f48e44d0bc" (UID: "916a7bbc-1cdb-4ff9-aba8-93f48e44d0bc"). InnerVolumeSpecName "kube-api-access-dqzxw". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 15:02:36 crc kubenswrapper[4762]: I1009 15:02:36.877828 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/916a7bbc-1cdb-4ff9-aba8-93f48e44d0bc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "916a7bbc-1cdb-4ff9-aba8-93f48e44d0bc" (UID: "916a7bbc-1cdb-4ff9-aba8-93f48e44d0bc"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:02:36 crc kubenswrapper[4762]: I1009 15:02:36.883002 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/916a7bbc-1cdb-4ff9-aba8-93f48e44d0bc-config-data" (OuterVolumeSpecName: "config-data") pod "916a7bbc-1cdb-4ff9-aba8-93f48e44d0bc" (UID: "916a7bbc-1cdb-4ff9-aba8-93f48e44d0bc"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:02:36 crc kubenswrapper[4762]: I1009 15:02:36.943845 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f962d3eb-c1dd-4738-97a9-971f450fef59-logs\") pod \"nova-api-0\" (UID: \"f962d3eb-c1dd-4738-97a9-971f450fef59\") " pod="openstack/nova-api-0" Oct 09 15:02:36 crc kubenswrapper[4762]: I1009 15:02:36.944246 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f962d3eb-c1dd-4738-97a9-971f450fef59-config-data\") pod \"nova-api-0\" (UID: \"f962d3eb-c1dd-4738-97a9-971f450fef59\") " pod="openstack/nova-api-0" Oct 09 15:02:36 crc kubenswrapper[4762]: I1009 15:02:36.944413 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x5j6p\" (UniqueName: \"kubernetes.io/projected/f962d3eb-c1dd-4738-97a9-971f450fef59-kube-api-access-x5j6p\") pod \"nova-api-0\" (UID: \"f962d3eb-c1dd-4738-97a9-971f450fef59\") " pod="openstack/nova-api-0" Oct 09 15:02:36 crc kubenswrapper[4762]: I1009 15:02:36.944500 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f962d3eb-c1dd-4738-97a9-971f450fef59-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"f962d3eb-c1dd-4738-97a9-971f450fef59\") " pod="openstack/nova-api-0" Oct 09 15:02:36 crc kubenswrapper[4762]: I1009 15:02:36.944580 4762 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/916a7bbc-1cdb-4ff9-aba8-93f48e44d0bc-logs\") on node \"crc\" DevicePath \"\"" Oct 09 15:02:36 crc kubenswrapper[4762]: I1009 15:02:36.944599 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dqzxw\" (UniqueName: \"kubernetes.io/projected/916a7bbc-1cdb-4ff9-aba8-93f48e44d0bc-kube-api-access-dqzxw\") on node \"crc\" DevicePath \"\"" Oct 09 15:02:36 crc kubenswrapper[4762]: I1009 15:02:36.944612 4762 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/916a7bbc-1cdb-4ff9-aba8-93f48e44d0bc-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 15:02:36 crc kubenswrapper[4762]: I1009 15:02:36.944623 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/916a7bbc-1cdb-4ff9-aba8-93f48e44d0bc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 15:02:36 crc kubenswrapper[4762]: I1009 15:02:36.988197 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0719eaf7-7d99-461f-87eb-a59afb72d0bf" path="/var/lib/kubelet/pods/0719eaf7-7d99-461f-87eb-a59afb72d0bf/volumes" Oct 09 15:02:36 crc kubenswrapper[4762]: I1009 15:02:36.989421 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f7a17d87-345b-4451-8d40-71321e9134bc" path="/var/lib/kubelet/pods/f7a17d87-345b-4451-8d40-71321e9134bc/volumes" Oct 09 15:02:37 crc kubenswrapper[4762]: I1009 15:02:37.045831 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f962d3eb-c1dd-4738-97a9-971f450fef59-logs\") pod \"nova-api-0\" (UID: \"f962d3eb-c1dd-4738-97a9-971f450fef59\") " pod="openstack/nova-api-0" Oct 09 15:02:37 crc kubenswrapper[4762]: I1009 15:02:37.045951 4762 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f962d3eb-c1dd-4738-97a9-971f450fef59-config-data\") pod \"nova-api-0\" (UID: \"f962d3eb-c1dd-4738-97a9-971f450fef59\") " pod="openstack/nova-api-0" Oct 09 15:02:37 crc kubenswrapper[4762]: I1009 15:02:37.046369 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f962d3eb-c1dd-4738-97a9-971f450fef59-logs\") pod \"nova-api-0\" (UID: \"f962d3eb-c1dd-4738-97a9-971f450fef59\") " pod="openstack/nova-api-0" Oct 09 15:02:37 crc kubenswrapper[4762]: I1009 15:02:37.047312 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x5j6p\" (UniqueName: \"kubernetes.io/projected/f962d3eb-c1dd-4738-97a9-971f450fef59-kube-api-access-x5j6p\") pod \"nova-api-0\" (UID: \"f962d3eb-c1dd-4738-97a9-971f450fef59\") " pod="openstack/nova-api-0" Oct 09 15:02:37 crc kubenswrapper[4762]: I1009 15:02:37.047869 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f962d3eb-c1dd-4738-97a9-971f450fef59-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"f962d3eb-c1dd-4738-97a9-971f450fef59\") " pod="openstack/nova-api-0" Oct 09 15:02:37 crc kubenswrapper[4762]: I1009 15:02:37.052150 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f962d3eb-c1dd-4738-97a9-971f450fef59-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"f962d3eb-c1dd-4738-97a9-971f450fef59\") " pod="openstack/nova-api-0" Oct 09 15:02:37 crc kubenswrapper[4762]: I1009 15:02:37.052810 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f962d3eb-c1dd-4738-97a9-971f450fef59-config-data\") pod \"nova-api-0\" (UID: \"f962d3eb-c1dd-4738-97a9-971f450fef59\") " pod="openstack/nova-api-0" Oct 09 15:02:37 crc kubenswrapper[4762]: I1009 15:02:37.064322 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x5j6p\" (UniqueName: \"kubernetes.io/projected/f962d3eb-c1dd-4738-97a9-971f450fef59-kube-api-access-x5j6p\") pod \"nova-api-0\" (UID: \"f962d3eb-c1dd-4738-97a9-971f450fef59\") " pod="openstack/nova-api-0" Oct 09 15:02:37 crc kubenswrapper[4762]: I1009 15:02:37.126407 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 09 15:02:37 crc kubenswrapper[4762]: I1009 15:02:37.305612 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Oct 09 15:02:37 crc kubenswrapper[4762]: I1009 15:02:37.404693 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"cb5ae0d7-b04d-4fef-bbb1-66cb10260905","Type":"ContainerStarted","Data":"0c4fd615b33fe42b573c4d373f0a9abfaf58be334a657ac3df499fca11252ba1"} Oct 09 15:02:37 crc kubenswrapper[4762]: I1009 15:02:37.413165 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"916a7bbc-1cdb-4ff9-aba8-93f48e44d0bc","Type":"ContainerDied","Data":"96da8ce0dc1873e3502f1fec7e10583798040d5f1a433dcc8b21d17fc30d27d9"} Oct 09 15:02:37 crc kubenswrapper[4762]: I1009 15:02:37.413248 4762 scope.go:117] "RemoveContainer" containerID="3f0ba421cd0c7d22de9fe94627781f111e4d43969f14da04dd768d3e0c454e24" Oct 09 15:02:37 crc kubenswrapper[4762]: I1009 15:02:37.413476 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 09 15:02:37 crc kubenswrapper[4762]: I1009 15:02:37.463771 4762 scope.go:117] "RemoveContainer" containerID="2aad9fa3bfe787bbaf51acb89a9c6dd57b3e820e06eda0485dc0c4fb5af34639" Oct 09 15:02:37 crc kubenswrapper[4762]: I1009 15:02:37.482595 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 09 15:02:37 crc kubenswrapper[4762]: I1009 15:02:37.494293 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Oct 09 15:02:37 crc kubenswrapper[4762]: I1009 15:02:37.509328 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Oct 09 15:02:37 crc kubenswrapper[4762]: I1009 15:02:37.511515 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 09 15:02:37 crc kubenswrapper[4762]: I1009 15:02:37.514002 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Oct 09 15:02:37 crc kubenswrapper[4762]: I1009 15:02:37.525568 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 09 15:02:37 crc kubenswrapper[4762]: I1009 15:02:37.642506 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 09 15:02:37 crc kubenswrapper[4762]: I1009 15:02:37.671407 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/66dac335-a776-429f-a37b-56ec1691e0c8-logs\") pod \"nova-metadata-0\" (UID: \"66dac335-a776-429f-a37b-56ec1691e0c8\") " pod="openstack/nova-metadata-0" Oct 09 15:02:37 crc kubenswrapper[4762]: I1009 15:02:37.671540 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/66dac335-a776-429f-a37b-56ec1691e0c8-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"66dac335-a776-429f-a37b-56ec1691e0c8\") " pod="openstack/nova-metadata-0" Oct 09 15:02:37 crc kubenswrapper[4762]: I1009 15:02:37.671582 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-469dz\" (UniqueName: \"kubernetes.io/projected/66dac335-a776-429f-a37b-56ec1691e0c8-kube-api-access-469dz\") pod \"nova-metadata-0\" (UID: \"66dac335-a776-429f-a37b-56ec1691e0c8\") " pod="openstack/nova-metadata-0" Oct 09 15:02:37 crc kubenswrapper[4762]: I1009 15:02:37.671603 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/66dac335-a776-429f-a37b-56ec1691e0c8-config-data\") pod \"nova-metadata-0\" (UID: \"66dac335-a776-429f-a37b-56ec1691e0c8\") " pod="openstack/nova-metadata-0" Oct 09 15:02:37 crc kubenswrapper[4762]: I1009 15:02:37.773913 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/66dac335-a776-429f-a37b-56ec1691e0c8-logs\") pod \"nova-metadata-0\" (UID: \"66dac335-a776-429f-a37b-56ec1691e0c8\") " pod="openstack/nova-metadata-0" Oct 09 15:02:37 crc kubenswrapper[4762]: I1009 15:02:37.774308 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/66dac335-a776-429f-a37b-56ec1691e0c8-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"66dac335-a776-429f-a37b-56ec1691e0c8\") " pod="openstack/nova-metadata-0" 
Oct 09 15:02:37 crc kubenswrapper[4762]: I1009 15:02:37.774345 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-469dz\" (UniqueName: \"kubernetes.io/projected/66dac335-a776-429f-a37b-56ec1691e0c8-kube-api-access-469dz\") pod \"nova-metadata-0\" (UID: \"66dac335-a776-429f-a37b-56ec1691e0c8\") " pod="openstack/nova-metadata-0" Oct 09 15:02:37 crc kubenswrapper[4762]: I1009 15:02:37.774370 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/66dac335-a776-429f-a37b-56ec1691e0c8-config-data\") pod \"nova-metadata-0\" (UID: \"66dac335-a776-429f-a37b-56ec1691e0c8\") " pod="openstack/nova-metadata-0" Oct 09 15:02:37 crc kubenswrapper[4762]: I1009 15:02:37.776103 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/66dac335-a776-429f-a37b-56ec1691e0c8-logs\") pod \"nova-metadata-0\" (UID: \"66dac335-a776-429f-a37b-56ec1691e0c8\") " pod="openstack/nova-metadata-0" Oct 09 15:02:37 crc kubenswrapper[4762]: I1009 15:02:37.783746 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/66dac335-a776-429f-a37b-56ec1691e0c8-config-data\") pod \"nova-metadata-0\" (UID: \"66dac335-a776-429f-a37b-56ec1691e0c8\") " pod="openstack/nova-metadata-0" Oct 09 15:02:37 crc kubenswrapper[4762]: I1009 15:02:37.784683 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/66dac335-a776-429f-a37b-56ec1691e0c8-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"66dac335-a776-429f-a37b-56ec1691e0c8\") " pod="openstack/nova-metadata-0" Oct 09 15:02:37 crc kubenswrapper[4762]: I1009 15:02:37.819850 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-469dz\" (UniqueName: \"kubernetes.io/projected/66dac335-a776-429f-a37b-56ec1691e0c8-kube-api-access-469dz\") pod \"nova-metadata-0\" (UID: \"66dac335-a776-429f-a37b-56ec1691e0c8\") " pod="openstack/nova-metadata-0" Oct 09 15:02:37 crc kubenswrapper[4762]: I1009 15:02:37.829815 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 09 15:02:38 crc kubenswrapper[4762]: I1009 15:02:38.008856 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Oct 09 15:02:38 crc kubenswrapper[4762]: I1009 15:02:38.087339 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/86fd2721-354b-4c22-91e5-ba277c8b49ba-config-data\") pod \"86fd2721-354b-4c22-91e5-ba277c8b49ba\" (UID: \"86fd2721-354b-4c22-91e5-ba277c8b49ba\") " Oct 09 15:02:38 crc kubenswrapper[4762]: I1009 15:02:38.087573 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/86fd2721-354b-4c22-91e5-ba277c8b49ba-combined-ca-bundle\") pod \"86fd2721-354b-4c22-91e5-ba277c8b49ba\" (UID: \"86fd2721-354b-4c22-91e5-ba277c8b49ba\") " Oct 09 15:02:38 crc kubenswrapper[4762]: I1009 15:02:38.088072 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-77khq\" (UniqueName: \"kubernetes.io/projected/86fd2721-354b-4c22-91e5-ba277c8b49ba-kube-api-access-77khq\") pod \"86fd2721-354b-4c22-91e5-ba277c8b49ba\" (UID: \"86fd2721-354b-4c22-91e5-ba277c8b49ba\") " Oct 09 15:02:38 crc kubenswrapper[4762]: I1009 15:02:38.092144 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/86fd2721-354b-4c22-91e5-ba277c8b49ba-kube-api-access-77khq" (OuterVolumeSpecName: "kube-api-access-77khq") pod "86fd2721-354b-4c22-91e5-ba277c8b49ba" (UID: "86fd2721-354b-4c22-91e5-ba277c8b49ba"). InnerVolumeSpecName "kube-api-access-77khq". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 15:02:38 crc kubenswrapper[4762]: I1009 15:02:38.111008 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/86fd2721-354b-4c22-91e5-ba277c8b49ba-config-data" (OuterVolumeSpecName: "config-data") pod "86fd2721-354b-4c22-91e5-ba277c8b49ba" (UID: "86fd2721-354b-4c22-91e5-ba277c8b49ba"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:02:38 crc kubenswrapper[4762]: I1009 15:02:38.111781 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/86fd2721-354b-4c22-91e5-ba277c8b49ba-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "86fd2721-354b-4c22-91e5-ba277c8b49ba" (UID: "86fd2721-354b-4c22-91e5-ba277c8b49ba"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:02:38 crc kubenswrapper[4762]: I1009 15:02:38.192721 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/86fd2721-354b-4c22-91e5-ba277c8b49ba-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 15:02:38 crc kubenswrapper[4762]: I1009 15:02:38.193188 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-77khq\" (UniqueName: \"kubernetes.io/projected/86fd2721-354b-4c22-91e5-ba277c8b49ba-kube-api-access-77khq\") on node \"crc\" DevicePath \"\"" Oct 09 15:02:38 crc kubenswrapper[4762]: I1009 15:02:38.193268 4762 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/86fd2721-354b-4c22-91e5-ba277c8b49ba-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 15:02:38 crc kubenswrapper[4762]: I1009 15:02:38.302877 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 09 15:02:38 crc kubenswrapper[4762]: W1009 15:02:38.312006 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod66dac335_a776_429f_a37b_56ec1691e0c8.slice/crio-12c77e5d07fc1f53d0f9a87d43b6bab39dfd3218906d019e54a39a91ea4cc833 WatchSource:0}: Error finding container 12c77e5d07fc1f53d0f9a87d43b6bab39dfd3218906d019e54a39a91ea4cc833: Status 404 returned error can't find the container with id 12c77e5d07fc1f53d0f9a87d43b6bab39dfd3218906d019e54a39a91ea4cc833 Oct 09 15:02:38 crc kubenswrapper[4762]: I1009 15:02:38.422684 4762 generic.go:334] "Generic (PLEG): container finished" podID="86fd2721-354b-4c22-91e5-ba277c8b49ba" containerID="32741d5fb11453900a4986db0b82e546861eb2f926aa83babbf3872160023319" exitCode=0 Oct 09 15:02:38 crc kubenswrapper[4762]: I1009 15:02:38.422747 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"86fd2721-354b-4c22-91e5-ba277c8b49ba","Type":"ContainerDied","Data":"32741d5fb11453900a4986db0b82e546861eb2f926aa83babbf3872160023319"} Oct 09 15:02:38 crc kubenswrapper[4762]: I1009 15:02:38.422889 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Oct 09 15:02:38 crc kubenswrapper[4762]: I1009 15:02:38.422946 4762 scope.go:117] "RemoveContainer" containerID="32741d5fb11453900a4986db0b82e546861eb2f926aa83babbf3872160023319" Oct 09 15:02:38 crc kubenswrapper[4762]: I1009 15:02:38.426800 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"86fd2721-354b-4c22-91e5-ba277c8b49ba","Type":"ContainerDied","Data":"3dc9373db0e9b12a8eeb6ed8295e591e88acae30906faf84d8f6f077465c2482"} Oct 09 15:02:38 crc kubenswrapper[4762]: I1009 15:02:38.426835 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"66dac335-a776-429f-a37b-56ec1691e0c8","Type":"ContainerStarted","Data":"12c77e5d07fc1f53d0f9a87d43b6bab39dfd3218906d019e54a39a91ea4cc833"} Oct 09 15:02:38 crc kubenswrapper[4762]: I1009 15:02:38.427590 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"f962d3eb-c1dd-4738-97a9-971f450fef59","Type":"ContainerStarted","Data":"cb38c41a804e3cabe8de7033c6b2ddfb8dbc929246868ac398768e832835b202"} Oct 09 15:02:38 crc kubenswrapper[4762]: I1009 15:02:38.427662 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"f962d3eb-c1dd-4738-97a9-971f450fef59","Type":"ContainerStarted","Data":"962de3aef6def46549360f3931b75c5d2559742f3be1a1bb3ad211b71e156e39"} Oct 09 15:02:38 crc kubenswrapper[4762]: I1009 15:02:38.427676 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"f962d3eb-c1dd-4738-97a9-971f450fef59","Type":"ContainerStarted","Data":"ffb1d622d976be3ef509a4c92d27500ef85a0f5365d3eefa3060a1fe3d4dc463"} Oct 09 15:02:38 crc kubenswrapper[4762]: I1009 15:02:38.432088 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"cb5ae0d7-b04d-4fef-bbb1-66cb10260905","Type":"ContainerStarted","Data":"7e564df4f511d5a6d19fd4a48e7ba297a305b918430d4b0ee25d9d5fe84570f0"} Oct 09 15:02:38 crc kubenswrapper[4762]: I1009 15:02:38.432212 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Oct 09 15:02:38 crc kubenswrapper[4762]: I1009 15:02:38.446024 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.445983326 podStartE2EDuration="2.445983326s" podCreationTimestamp="2025-10-09 15:02:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 15:02:38.442851665 +0000 UTC m=+5834.216642714" watchObservedRunningTime="2025-10-09 15:02:38.445983326 +0000 UTC m=+5834.219774365" Oct 09 15:02:38 crc kubenswrapper[4762]: I1009 15:02:38.455886 4762 scope.go:117] "RemoveContainer" containerID="32741d5fb11453900a4986db0b82e546861eb2f926aa83babbf3872160023319" Oct 09 15:02:38 crc kubenswrapper[4762]: E1009 15:02:38.461011 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"32741d5fb11453900a4986db0b82e546861eb2f926aa83babbf3872160023319\": container with ID starting with 32741d5fb11453900a4986db0b82e546861eb2f926aa83babbf3872160023319 not found: ID does not exist" containerID="32741d5fb11453900a4986db0b82e546861eb2f926aa83babbf3872160023319" Oct 09 15:02:38 crc kubenswrapper[4762]: I1009 15:02:38.461069 4762 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"32741d5fb11453900a4986db0b82e546861eb2f926aa83babbf3872160023319"} err="failed to get container status \"32741d5fb11453900a4986db0b82e546861eb2f926aa83babbf3872160023319\": rpc error: code = NotFound desc = could not find container \"32741d5fb11453900a4986db0b82e546861eb2f926aa83babbf3872160023319\": container with ID starting with 32741d5fb11453900a4986db0b82e546861eb2f926aa83babbf3872160023319 not found: ID does not exist" Oct 09 15:02:38 crc kubenswrapper[4762]: I1009 15:02:38.464745 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.464726355 podStartE2EDuration="2.464726355s" podCreationTimestamp="2025-10-09 15:02:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 15:02:38.461322415 +0000 UTC m=+5834.235113464" watchObservedRunningTime="2025-10-09 15:02:38.464726355 +0000 UTC m=+5834.238517394" Oct 09 15:02:38 crc kubenswrapper[4762]: I1009 15:02:38.491127 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Oct 09 15:02:38 crc kubenswrapper[4762]: I1009 15:02:38.502135 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Oct 09 15:02:38 crc kubenswrapper[4762]: I1009 15:02:38.510072 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Oct 09 15:02:38 crc kubenswrapper[4762]: E1009 15:02:38.510544 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="86fd2721-354b-4c22-91e5-ba277c8b49ba" containerName="nova-scheduler-scheduler" Oct 09 15:02:38 crc kubenswrapper[4762]: I1009 15:02:38.510565 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="86fd2721-354b-4c22-91e5-ba277c8b49ba" containerName="nova-scheduler-scheduler" Oct 09 15:02:38 crc kubenswrapper[4762]: I1009 15:02:38.510752 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="86fd2721-354b-4c22-91e5-ba277c8b49ba" containerName="nova-scheduler-scheduler" Oct 09 15:02:38 crc kubenswrapper[4762]: I1009 15:02:38.511432 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Oct 09 15:02:38 crc kubenswrapper[4762]: I1009 15:02:38.514539 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Oct 09 15:02:38 crc kubenswrapper[4762]: I1009 15:02:38.519758 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 09 15:02:38 crc kubenswrapper[4762]: I1009 15:02:38.602811 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-68k77\" (UniqueName: \"kubernetes.io/projected/e38ba711-b6dd-42aa-b526-3c170fea1b48-kube-api-access-68k77\") pod \"nova-scheduler-0\" (UID: \"e38ba711-b6dd-42aa-b526-3c170fea1b48\") " pod="openstack/nova-scheduler-0" Oct 09 15:02:38 crc kubenswrapper[4762]: I1009 15:02:38.602853 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e38ba711-b6dd-42aa-b526-3c170fea1b48-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"e38ba711-b6dd-42aa-b526-3c170fea1b48\") " pod="openstack/nova-scheduler-0" Oct 09 15:02:38 crc kubenswrapper[4762]: I1009 15:02:38.602894 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e38ba711-b6dd-42aa-b526-3c170fea1b48-config-data\") pod \"nova-scheduler-0\" (UID: \"e38ba711-b6dd-42aa-b526-3c170fea1b48\") " pod="openstack/nova-scheduler-0" Oct 09 15:02:38 crc kubenswrapper[4762]: I1009 15:02:38.704488 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e38ba711-b6dd-42aa-b526-3c170fea1b48-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"e38ba711-b6dd-42aa-b526-3c170fea1b48\") " pod="openstack/nova-scheduler-0" Oct 09 15:02:38 crc kubenswrapper[4762]: I1009 15:02:38.704533 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-68k77\" (UniqueName: \"kubernetes.io/projected/e38ba711-b6dd-42aa-b526-3c170fea1b48-kube-api-access-68k77\") pod \"nova-scheduler-0\" (UID: \"e38ba711-b6dd-42aa-b526-3c170fea1b48\") " pod="openstack/nova-scheduler-0" Oct 09 15:02:38 crc kubenswrapper[4762]: I1009 15:02:38.704568 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e38ba711-b6dd-42aa-b526-3c170fea1b48-config-data\") pod \"nova-scheduler-0\" (UID: \"e38ba711-b6dd-42aa-b526-3c170fea1b48\") " pod="openstack/nova-scheduler-0" Oct 09 15:02:38 crc kubenswrapper[4762]: I1009 15:02:38.712445 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e38ba711-b6dd-42aa-b526-3c170fea1b48-config-data\") pod \"nova-scheduler-0\" (UID: \"e38ba711-b6dd-42aa-b526-3c170fea1b48\") " pod="openstack/nova-scheduler-0" Oct 09 15:02:38 crc kubenswrapper[4762]: I1009 15:02:38.712510 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e38ba711-b6dd-42aa-b526-3c170fea1b48-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"e38ba711-b6dd-42aa-b526-3c170fea1b48\") " pod="openstack/nova-scheduler-0" Oct 09 15:02:38 crc kubenswrapper[4762]: I1009 15:02:38.737234 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-68k77\" (UniqueName: 
\"kubernetes.io/projected/e38ba711-b6dd-42aa-b526-3c170fea1b48-kube-api-access-68k77\") pod \"nova-scheduler-0\" (UID: \"e38ba711-b6dd-42aa-b526-3c170fea1b48\") " pod="openstack/nova-scheduler-0" Oct 09 15:02:38 crc kubenswrapper[4762]: I1009 15:02:38.919717 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 09 15:02:38 crc kubenswrapper[4762]: I1009 15:02:38.978486 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="86fd2721-354b-4c22-91e5-ba277c8b49ba" path="/var/lib/kubelet/pods/86fd2721-354b-4c22-91e5-ba277c8b49ba/volumes" Oct 09 15:02:38 crc kubenswrapper[4762]: I1009 15:02:38.979292 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="916a7bbc-1cdb-4ff9-aba8-93f48e44d0bc" path="/var/lib/kubelet/pods/916a7bbc-1cdb-4ff9-aba8-93f48e44d0bc/volumes" Oct 09 15:02:39 crc kubenswrapper[4762]: I1009 15:02:39.341541 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 09 15:02:39 crc kubenswrapper[4762]: W1009 15:02:39.347159 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode38ba711_b6dd_42aa_b526_3c170fea1b48.slice/crio-bf5178fe8e51d8f223e9f554ca8535b71a24ade323b47701942c852465c71230 WatchSource:0}: Error finding container bf5178fe8e51d8f223e9f554ca8535b71a24ade323b47701942c852465c71230: Status 404 returned error can't find the container with id bf5178fe8e51d8f223e9f554ca8535b71a24ade323b47701942c852465c71230 Oct 09 15:02:39 crc kubenswrapper[4762]: I1009 15:02:39.461398 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"66dac335-a776-429f-a37b-56ec1691e0c8","Type":"ContainerStarted","Data":"67600f189fe2409693d2e29080955b9f8156a17d49cb0308f7b361f62a93328b"} Oct 09 15:02:39 crc kubenswrapper[4762]: I1009 15:02:39.461651 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"66dac335-a776-429f-a37b-56ec1691e0c8","Type":"ContainerStarted","Data":"87cff1325fecaf8e8cfdd99e4c8c9bd4f23a4940e582f3edfc790b4c7e3e38fb"} Oct 09 15:02:39 crc kubenswrapper[4762]: I1009 15:02:39.467480 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"e38ba711-b6dd-42aa-b526-3c170fea1b48","Type":"ContainerStarted","Data":"bf5178fe8e51d8f223e9f554ca8535b71a24ade323b47701942c852465c71230"} Oct 09 15:02:39 crc kubenswrapper[4762]: I1009 15:02:39.488887 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.488867287 podStartE2EDuration="2.488867287s" podCreationTimestamp="2025-10-09 15:02:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 15:02:39.481189077 +0000 UTC m=+5835.254980116" watchObservedRunningTime="2025-10-09 15:02:39.488867287 +0000 UTC m=+5835.262658326" Oct 09 15:02:39 crc kubenswrapper[4762]: I1009 15:02:39.722937 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Oct 09 15:02:39 crc kubenswrapper[4762]: I1009 15:02:39.898932 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Oct 09 15:02:40 crc kubenswrapper[4762]: I1009 15:02:40.030442 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6898d\" (UniqueName: \"kubernetes.io/projected/1d97958c-5c80-4412-8277-d6ab937f9d0d-kube-api-access-6898d\") pod \"1d97958c-5c80-4412-8277-d6ab937f9d0d\" (UID: \"1d97958c-5c80-4412-8277-d6ab937f9d0d\") " Oct 09 15:02:40 crc kubenswrapper[4762]: I1009 15:02:40.030529 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1d97958c-5c80-4412-8277-d6ab937f9d0d-config-data\") pod \"1d97958c-5c80-4412-8277-d6ab937f9d0d\" (UID: \"1d97958c-5c80-4412-8277-d6ab937f9d0d\") " Oct 09 15:02:40 crc kubenswrapper[4762]: I1009 15:02:40.030677 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d97958c-5c80-4412-8277-d6ab937f9d0d-combined-ca-bundle\") pod \"1d97958c-5c80-4412-8277-d6ab937f9d0d\" (UID: \"1d97958c-5c80-4412-8277-d6ab937f9d0d\") " Oct 09 15:02:40 crc kubenswrapper[4762]: I1009 15:02:40.035450 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d97958c-5c80-4412-8277-d6ab937f9d0d-kube-api-access-6898d" (OuterVolumeSpecName: "kube-api-access-6898d") pod "1d97958c-5c80-4412-8277-d6ab937f9d0d" (UID: "1d97958c-5c80-4412-8277-d6ab937f9d0d"). InnerVolumeSpecName "kube-api-access-6898d". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 15:02:40 crc kubenswrapper[4762]: I1009 15:02:40.058990 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1d97958c-5c80-4412-8277-d6ab937f9d0d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1d97958c-5c80-4412-8277-d6ab937f9d0d" (UID: "1d97958c-5c80-4412-8277-d6ab937f9d0d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:02:40 crc kubenswrapper[4762]: I1009 15:02:40.059473 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1d97958c-5c80-4412-8277-d6ab937f9d0d-config-data" (OuterVolumeSpecName: "config-data") pod "1d97958c-5c80-4412-8277-d6ab937f9d0d" (UID: "1d97958c-5c80-4412-8277-d6ab937f9d0d"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:02:40 crc kubenswrapper[4762]: I1009 15:02:40.133285 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d97958c-5c80-4412-8277-d6ab937f9d0d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 15:02:40 crc kubenswrapper[4762]: I1009 15:02:40.133345 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6898d\" (UniqueName: \"kubernetes.io/projected/1d97958c-5c80-4412-8277-d6ab937f9d0d-kube-api-access-6898d\") on node \"crc\" DevicePath \"\"" Oct 09 15:02:40 crc kubenswrapper[4762]: I1009 15:02:40.133357 4762 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1d97958c-5c80-4412-8277-d6ab937f9d0d-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 15:02:40 crc kubenswrapper[4762]: I1009 15:02:40.482126 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"e38ba711-b6dd-42aa-b526-3c170fea1b48","Type":"ContainerStarted","Data":"15cb2c91808e21b1ad1a1979319a478cd829ba0b947ab509503e46c9bf29f406"} Oct 09 15:02:40 crc kubenswrapper[4762]: I1009 15:02:40.484124 4762 generic.go:334] "Generic (PLEG): container finished" podID="1d97958c-5c80-4412-8277-d6ab937f9d0d" containerID="edc931756ed71cf5993448a5246873dcfb5d1b50111573864185a409dbfa82cb" exitCode=0 Oct 09 15:02:40 crc kubenswrapper[4762]: I1009 15:02:40.484249 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"1d97958c-5c80-4412-8277-d6ab937f9d0d","Type":"ContainerDied","Data":"edc931756ed71cf5993448a5246873dcfb5d1b50111573864185a409dbfa82cb"} Oct 09 15:02:40 crc kubenswrapper[4762]: I1009 15:02:40.484264 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Oct 09 15:02:40 crc kubenswrapper[4762]: I1009 15:02:40.484315 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"1d97958c-5c80-4412-8277-d6ab937f9d0d","Type":"ContainerDied","Data":"25dd514855a987b3e3453b3c9406de8311a0b4ba59405a8040ac5ebc6fda9354"} Oct 09 15:02:40 crc kubenswrapper[4762]: I1009 15:02:40.484339 4762 scope.go:117] "RemoveContainer" containerID="edc931756ed71cf5993448a5246873dcfb5d1b50111573864185a409dbfa82cb" Oct 09 15:02:40 crc kubenswrapper[4762]: I1009 15:02:40.507812 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.5077832239999998 podStartE2EDuration="2.507783224s" podCreationTimestamp="2025-10-09 15:02:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 15:02:40.497037154 +0000 UTC m=+5836.270828193" watchObservedRunningTime="2025-10-09 15:02:40.507783224 +0000 UTC m=+5836.281574313" Oct 09 15:02:40 crc kubenswrapper[4762]: I1009 15:02:40.513327 4762 scope.go:117] "RemoveContainer" containerID="edc931756ed71cf5993448a5246873dcfb5d1b50111573864185a409dbfa82cb" Oct 09 15:02:40 crc kubenswrapper[4762]: E1009 15:02:40.515947 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"edc931756ed71cf5993448a5246873dcfb5d1b50111573864185a409dbfa82cb\": container with ID starting with edc931756ed71cf5993448a5246873dcfb5d1b50111573864185a409dbfa82cb not found: ID does not exist" containerID="edc931756ed71cf5993448a5246873dcfb5d1b50111573864185a409dbfa82cb" Oct 09 15:02:40 crc kubenswrapper[4762]: I1009 15:02:40.516102 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"edc931756ed71cf5993448a5246873dcfb5d1b50111573864185a409dbfa82cb"} err="failed to get container status \"edc931756ed71cf5993448a5246873dcfb5d1b50111573864185a409dbfa82cb\": rpc error: code = NotFound desc = could not find container \"edc931756ed71cf5993448a5246873dcfb5d1b50111573864185a409dbfa82cb\": container with ID starting with edc931756ed71cf5993448a5246873dcfb5d1b50111573864185a409dbfa82cb not found: ID does not exist" Oct 09 15:02:40 crc kubenswrapper[4762]: I1009 15:02:40.550704 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"] Oct 09 15:02:40 crc kubenswrapper[4762]: I1009 15:02:40.562130 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-0"] Oct 09 15:02:40 crc kubenswrapper[4762]: I1009 15:02:40.570739 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Oct 09 15:02:40 crc kubenswrapper[4762]: E1009 15:02:40.571272 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d97958c-5c80-4412-8277-d6ab937f9d0d" containerName="nova-cell1-conductor-conductor" Oct 09 15:02:40 crc kubenswrapper[4762]: I1009 15:02:40.571296 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d97958c-5c80-4412-8277-d6ab937f9d0d" containerName="nova-cell1-conductor-conductor" Oct 09 15:02:40 crc kubenswrapper[4762]: I1009 15:02:40.571505 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="1d97958c-5c80-4412-8277-d6ab937f9d0d" containerName="nova-cell1-conductor-conductor" Oct 09 15:02:40 crc kubenswrapper[4762]: I1009 15:02:40.572263 4762 util.go:30] "No 
sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Oct 09 15:02:40 crc kubenswrapper[4762]: I1009 15:02:40.574663 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Oct 09 15:02:40 crc kubenswrapper[4762]: I1009 15:02:40.579641 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Oct 09 15:02:40 crc kubenswrapper[4762]: I1009 15:02:40.645728 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3fdff501-cb37-47c4-92c7-f7db3c9c4c5a-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"3fdff501-cb37-47c4-92c7-f7db3c9c4c5a\") " pod="openstack/nova-cell1-conductor-0" Oct 09 15:02:40 crc kubenswrapper[4762]: I1009 15:02:40.645775 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-87s9w\" (UniqueName: \"kubernetes.io/projected/3fdff501-cb37-47c4-92c7-f7db3c9c4c5a-kube-api-access-87s9w\") pod \"nova-cell1-conductor-0\" (UID: \"3fdff501-cb37-47c4-92c7-f7db3c9c4c5a\") " pod="openstack/nova-cell1-conductor-0" Oct 09 15:02:40 crc kubenswrapper[4762]: I1009 15:02:40.645838 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3fdff501-cb37-47c4-92c7-f7db3c9c4c5a-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"3fdff501-cb37-47c4-92c7-f7db3c9c4c5a\") " pod="openstack/nova-cell1-conductor-0" Oct 09 15:02:40 crc kubenswrapper[4762]: I1009 15:02:40.748042 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3fdff501-cb37-47c4-92c7-f7db3c9c4c5a-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"3fdff501-cb37-47c4-92c7-f7db3c9c4c5a\") " pod="openstack/nova-cell1-conductor-0" Oct 09 15:02:40 crc kubenswrapper[4762]: I1009 15:02:40.748325 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3fdff501-cb37-47c4-92c7-f7db3c9c4c5a-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"3fdff501-cb37-47c4-92c7-f7db3c9c4c5a\") " pod="openstack/nova-cell1-conductor-0" Oct 09 15:02:40 crc kubenswrapper[4762]: I1009 15:02:40.748398 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-87s9w\" (UniqueName: \"kubernetes.io/projected/3fdff501-cb37-47c4-92c7-f7db3c9c4c5a-kube-api-access-87s9w\") pod \"nova-cell1-conductor-0\" (UID: \"3fdff501-cb37-47c4-92c7-f7db3c9c4c5a\") " pod="openstack/nova-cell1-conductor-0" Oct 09 15:02:40 crc kubenswrapper[4762]: I1009 15:02:40.762475 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3fdff501-cb37-47c4-92c7-f7db3c9c4c5a-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"3fdff501-cb37-47c4-92c7-f7db3c9c4c5a\") " pod="openstack/nova-cell1-conductor-0" Oct 09 15:02:40 crc kubenswrapper[4762]: I1009 15:02:40.764808 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3fdff501-cb37-47c4-92c7-f7db3c9c4c5a-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"3fdff501-cb37-47c4-92c7-f7db3c9c4c5a\") " pod="openstack/nova-cell1-conductor-0" Oct 09 15:02:40 crc kubenswrapper[4762]: I1009 
15:02:40.767042 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-87s9w\" (UniqueName: \"kubernetes.io/projected/3fdff501-cb37-47c4-92c7-f7db3c9c4c5a-kube-api-access-87s9w\") pod \"nova-cell1-conductor-0\" (UID: \"3fdff501-cb37-47c4-92c7-f7db3c9c4c5a\") " pod="openstack/nova-cell1-conductor-0" Oct 09 15:02:40 crc kubenswrapper[4762]: I1009 15:02:40.889287 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Oct 09 15:02:40 crc kubenswrapper[4762]: I1009 15:02:40.983460 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d97958c-5c80-4412-8277-d6ab937f9d0d" path="/var/lib/kubelet/pods/1d97958c-5c80-4412-8277-d6ab937f9d0d/volumes" Oct 09 15:02:41 crc kubenswrapper[4762]: I1009 15:02:41.328585 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Oct 09 15:02:41 crc kubenswrapper[4762]: I1009 15:02:41.493895 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"3fdff501-cb37-47c4-92c7-f7db3c9c4c5a","Type":"ContainerStarted","Data":"8c5225f1ed1c2dd5f9b8bd051f1b2a746f76b3ad9ee8758ee471811c43a98136"} Oct 09 15:02:42 crc kubenswrapper[4762]: I1009 15:02:42.507426 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"3fdff501-cb37-47c4-92c7-f7db3c9c4c5a","Type":"ContainerStarted","Data":"1312067d749b99aa2f44938ef957c3164f311ce78be283ecede708f2bdfa44ca"} Oct 09 15:02:42 crc kubenswrapper[4762]: I1009 15:02:42.508845 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Oct 09 15:02:42 crc kubenswrapper[4762]: I1009 15:02:42.525713 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.525614066 podStartE2EDuration="2.525614066s" podCreationTimestamp="2025-10-09 15:02:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 15:02:42.521928201 +0000 UTC m=+5838.295719260" watchObservedRunningTime="2025-10-09 15:02:42.525614066 +0000 UTC m=+5838.299405105" Oct 09 15:02:42 crc kubenswrapper[4762]: I1009 15:02:42.830417 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 09 15:02:42 crc kubenswrapper[4762]: I1009 15:02:42.831543 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 09 15:02:43 crc kubenswrapper[4762]: I1009 15:02:43.920618 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Oct 09 15:02:44 crc kubenswrapper[4762]: I1009 15:02:44.722748 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Oct 09 15:02:44 crc kubenswrapper[4762]: I1009 15:02:44.735069 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Oct 09 15:02:45 crc kubenswrapper[4762]: I1009 15:02:45.545018 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Oct 09 15:02:46 crc kubenswrapper[4762]: I1009 15:02:46.854661 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Oct 09 15:02:47 crc kubenswrapper[4762]: I1009 15:02:47.127245 4762 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 09 15:02:47 crc kubenswrapper[4762]: I1009 15:02:47.127325 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 09 15:02:47 crc kubenswrapper[4762]: I1009 15:02:47.830509 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Oct 09 15:02:47 crc kubenswrapper[4762]: I1009 15:02:47.830570 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Oct 09 15:02:48 crc kubenswrapper[4762]: I1009 15:02:48.209908 4762 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="f962d3eb-c1dd-4738-97a9-971f450fef59" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.1.87:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 09 15:02:48 crc kubenswrapper[4762]: I1009 15:02:48.209923 4762 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="f962d3eb-c1dd-4738-97a9-971f450fef59" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.1.87:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 09 15:02:48 crc kubenswrapper[4762]: I1009 15:02:48.913900 4762 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="66dac335-a776-429f-a37b-56ec1691e0c8" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"http://10.217.1.88:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 09 15:02:48 crc kubenswrapper[4762]: I1009 15:02:48.913933 4762 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="66dac335-a776-429f-a37b-56ec1691e0c8" containerName="nova-metadata-log" probeResult="failure" output="Get \"http://10.217.1.88:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 09 15:02:48 crc kubenswrapper[4762]: I1009 15:02:48.920390 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Oct 09 15:02:48 crc kubenswrapper[4762]: I1009 15:02:48.950344 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Oct 09 15:02:49 crc kubenswrapper[4762]: I1009 15:02:49.601757 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Oct 09 15:02:50 crc kubenswrapper[4762]: I1009 15:02:50.920323 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Oct 09 15:02:51 crc kubenswrapper[4762]: I1009 15:02:51.619834 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Oct 09 15:02:51 crc kubenswrapper[4762]: I1009 15:02:51.621413 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 09 15:02:51 crc kubenswrapper[4762]: I1009 15:02:51.624015 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Oct 09 15:02:51 crc kubenswrapper[4762]: I1009 15:02:51.636844 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 09 15:02:51 crc kubenswrapper[4762]: I1009 15:02:51.681555 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/63697ab0-5220-4209-be75-2f136eb3ab6e-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"63697ab0-5220-4209-be75-2f136eb3ab6e\") " pod="openstack/cinder-scheduler-0" Oct 09 15:02:51 crc kubenswrapper[4762]: I1009 15:02:51.681736 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9rgwz\" (UniqueName: \"kubernetes.io/projected/63697ab0-5220-4209-be75-2f136eb3ab6e-kube-api-access-9rgwz\") pod \"cinder-scheduler-0\" (UID: \"63697ab0-5220-4209-be75-2f136eb3ab6e\") " pod="openstack/cinder-scheduler-0" Oct 09 15:02:51 crc kubenswrapper[4762]: I1009 15:02:51.681879 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/63697ab0-5220-4209-be75-2f136eb3ab6e-config-data\") pod \"cinder-scheduler-0\" (UID: \"63697ab0-5220-4209-be75-2f136eb3ab6e\") " pod="openstack/cinder-scheduler-0" Oct 09 15:02:51 crc kubenswrapper[4762]: I1009 15:02:51.681976 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/63697ab0-5220-4209-be75-2f136eb3ab6e-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"63697ab0-5220-4209-be75-2f136eb3ab6e\") " pod="openstack/cinder-scheduler-0" Oct 09 15:02:51 crc kubenswrapper[4762]: I1009 15:02:51.683413 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/63697ab0-5220-4209-be75-2f136eb3ab6e-scripts\") pod \"cinder-scheduler-0\" (UID: \"63697ab0-5220-4209-be75-2f136eb3ab6e\") " pod="openstack/cinder-scheduler-0" Oct 09 15:02:51 crc kubenswrapper[4762]: I1009 15:02:51.683751 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/63697ab0-5220-4209-be75-2f136eb3ab6e-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"63697ab0-5220-4209-be75-2f136eb3ab6e\") " pod="openstack/cinder-scheduler-0" Oct 09 15:02:51 crc kubenswrapper[4762]: I1009 15:02:51.785282 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/63697ab0-5220-4209-be75-2f136eb3ab6e-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"63697ab0-5220-4209-be75-2f136eb3ab6e\") " pod="openstack/cinder-scheduler-0" Oct 09 15:02:51 crc kubenswrapper[4762]: I1009 15:02:51.785358 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9rgwz\" (UniqueName: \"kubernetes.io/projected/63697ab0-5220-4209-be75-2f136eb3ab6e-kube-api-access-9rgwz\") pod \"cinder-scheduler-0\" (UID: \"63697ab0-5220-4209-be75-2f136eb3ab6e\") " pod="openstack/cinder-scheduler-0" Oct 09 15:02:51 crc kubenswrapper[4762]: I1009 15:02:51.785405 4762 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/63697ab0-5220-4209-be75-2f136eb3ab6e-config-data\") pod \"cinder-scheduler-0\" (UID: \"63697ab0-5220-4209-be75-2f136eb3ab6e\") " pod="openstack/cinder-scheduler-0" Oct 09 15:02:51 crc kubenswrapper[4762]: I1009 15:02:51.785444 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/63697ab0-5220-4209-be75-2f136eb3ab6e-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"63697ab0-5220-4209-be75-2f136eb3ab6e\") " pod="openstack/cinder-scheduler-0" Oct 09 15:02:51 crc kubenswrapper[4762]: I1009 15:02:51.785475 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/63697ab0-5220-4209-be75-2f136eb3ab6e-scripts\") pod \"cinder-scheduler-0\" (UID: \"63697ab0-5220-4209-be75-2f136eb3ab6e\") " pod="openstack/cinder-scheduler-0" Oct 09 15:02:51 crc kubenswrapper[4762]: I1009 15:02:51.785556 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/63697ab0-5220-4209-be75-2f136eb3ab6e-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"63697ab0-5220-4209-be75-2f136eb3ab6e\") " pod="openstack/cinder-scheduler-0" Oct 09 15:02:51 crc kubenswrapper[4762]: I1009 15:02:51.785663 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/63697ab0-5220-4209-be75-2f136eb3ab6e-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"63697ab0-5220-4209-be75-2f136eb3ab6e\") " pod="openstack/cinder-scheduler-0" Oct 09 15:02:51 crc kubenswrapper[4762]: I1009 15:02:51.794889 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/63697ab0-5220-4209-be75-2f136eb3ab6e-scripts\") pod \"cinder-scheduler-0\" (UID: \"63697ab0-5220-4209-be75-2f136eb3ab6e\") " pod="openstack/cinder-scheduler-0" Oct 09 15:02:51 crc kubenswrapper[4762]: I1009 15:02:51.794945 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/63697ab0-5220-4209-be75-2f136eb3ab6e-config-data\") pod \"cinder-scheduler-0\" (UID: \"63697ab0-5220-4209-be75-2f136eb3ab6e\") " pod="openstack/cinder-scheduler-0" Oct 09 15:02:51 crc kubenswrapper[4762]: I1009 15:02:51.795469 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/63697ab0-5220-4209-be75-2f136eb3ab6e-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"63697ab0-5220-4209-be75-2f136eb3ab6e\") " pod="openstack/cinder-scheduler-0" Oct 09 15:02:51 crc kubenswrapper[4762]: I1009 15:02:51.804581 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/63697ab0-5220-4209-be75-2f136eb3ab6e-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"63697ab0-5220-4209-be75-2f136eb3ab6e\") " pod="openstack/cinder-scheduler-0" Oct 09 15:02:51 crc kubenswrapper[4762]: I1009 15:02:51.808758 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9rgwz\" (UniqueName: \"kubernetes.io/projected/63697ab0-5220-4209-be75-2f136eb3ab6e-kube-api-access-9rgwz\") pod \"cinder-scheduler-0\" (UID: \"63697ab0-5220-4209-be75-2f136eb3ab6e\") " pod="openstack/cinder-scheduler-0" Oct 
Oct 09 15:02:52 crc kubenswrapper[4762]: I1009 15:02:52.418796 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"]
Oct 09 15:02:52 crc kubenswrapper[4762]: I1009 15:02:52.608712 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"63697ab0-5220-4209-be75-2f136eb3ab6e","Type":"ContainerStarted","Data":"25d24b3e9a382ff6c329dd2ab651e46a770494052588a02a0cc2290b856f2deb"}
Oct 09 15:02:53 crc kubenswrapper[4762]: I1009 15:02:53.260820 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"]
Oct 09 15:02:53 crc kubenswrapper[4762]: I1009 15:02:53.261086 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="24951274-fd59-4f3d-bf68-ce86450e245e" containerName="cinder-api-log" containerID="cri-o://2717941d1e60950bc731a8c9d6203cc9e2bc7873335de8476f87ce9e554301ea" gracePeriod=30
Oct 09 15:02:53 crc kubenswrapper[4762]: I1009 15:02:53.261197 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="24951274-fd59-4f3d-bf68-ce86450e245e" containerName="cinder-api" containerID="cri-o://6174ea5ec5632d883860f887d599d4d0a0eb2d5738dafd6f0378924506747e7c" gracePeriod=30
Oct 09 15:02:53 crc kubenswrapper[4762]: I1009 15:02:53.620891 4762 generic.go:334] "Generic (PLEG): container finished" podID="24951274-fd59-4f3d-bf68-ce86450e245e" containerID="2717941d1e60950bc731a8c9d6203cc9e2bc7873335de8476f87ce9e554301ea" exitCode=143
Oct 09 15:02:53 crc kubenswrapper[4762]: I1009 15:02:53.621037 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"24951274-fd59-4f3d-bf68-ce86450e245e","Type":"ContainerDied","Data":"2717941d1e60950bc731a8c9d6203cc9e2bc7873335de8476f87ce9e554301ea"}
Oct 09 15:02:53 crc kubenswrapper[4762]: I1009 15:02:53.622806 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"63697ab0-5220-4209-be75-2f136eb3ab6e","Type":"ContainerStarted","Data":"e664ae483552515155c932a87eb4d6193ca1b1bc2e3c625c35d81c7542806c47"}
Oct 09 15:02:53 crc kubenswrapper[4762]: I1009 15:02:53.771969 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-volume-volume1-0"]
Oct 09 15:02:53 crc kubenswrapper[4762]: I1009 15:02:53.774066 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-volume-volume1-0"
Oct 09 15:02:53 crc kubenswrapper[4762]: I1009 15:02:53.775685 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-volume-volume1-config-data"
Oct 09 15:02:53 crc kubenswrapper[4762]: I1009 15:02:53.797303 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-volume-volume1-0"]
Oct 09 15:02:53 crc kubenswrapper[4762]: I1009 15:02:53.813139 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-backup-0"]
Oct 09 15:02:53 crc kubenswrapper[4762]: I1009 15:02:53.815152 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-backup-0"
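Worth noting in the records above: the API DELETE for cinder-api-0 makes the kubelet kill both containers "with a grace period" of 30 seconds, and cinder-api-log then finishes with exitCode=143, which is 128 + 15, i.e. the process exited on SIGTERM inside the grace window rather than being SIGKILLed at the deadline (that would report 137 = 128 + 9; the main cinder-api container later drains and exits 0). A small sketch of that exit-code convention:

    package main

    import (
        "fmt"
        "syscall"
    )

    // signalFromExitCode decodes the container-runtime convention of
    // reporting 128+N for a process terminated by signal N.
    // 143 -> signal 15 (SIGTERM), 137 -> signal 9 (SIGKILL).
    func signalFromExitCode(code int) (syscall.Signal, bool) {
        if code > 128 && code < 192 {
            return syscall.Signal(code - 128), true
        }
        return 0, false
    }

    func main() {
        for _, code := range []int{143, 137} {
            if sig, ok := signalFromExitCode(code); ok {
                // Prints the signal number and its textual description.
                fmt.Printf("exit %d = 128+%d (%v)\n", code, int(sig), sig)
            }
        }
    }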
Need to start a new one" pod="openstack/cinder-backup-0" Oct 09 15:02:53 crc kubenswrapper[4762]: I1009 15:02:53.819270 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-backup-config-data" Oct 09 15:02:53 crc kubenswrapper[4762]: I1009 15:02:53.827564 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/6a852936-bff5-48f8-9336-cffaeb34743f-lib-modules\") pod \"cinder-volume-volume1-0\" (UID: \"6a852936-bff5-48f8-9336-cffaeb34743f\") " pod="openstack/cinder-volume-volume1-0" Oct 09 15:02:53 crc kubenswrapper[4762]: I1009 15:02:53.830996 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/6a852936-bff5-48f8-9336-cffaeb34743f-dev\") pod \"cinder-volume-volume1-0\" (UID: \"6a852936-bff5-48f8-9336-cffaeb34743f\") " pod="openstack/cinder-volume-volume1-0" Oct 09 15:02:53 crc kubenswrapper[4762]: I1009 15:02:53.832052 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6a852936-bff5-48f8-9336-cffaeb34743f-config-data-custom\") pod \"cinder-volume-volume1-0\" (UID: \"6a852936-bff5-48f8-9336-cffaeb34743f\") " pod="openstack/cinder-volume-volume1-0" Oct 09 15:02:53 crc kubenswrapper[4762]: I1009 15:02:53.832450 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/6a852936-bff5-48f8-9336-cffaeb34743f-etc-iscsi\") pod \"cinder-volume-volume1-0\" (UID: \"6a852936-bff5-48f8-9336-cffaeb34743f\") " pod="openstack/cinder-volume-volume1-0" Oct 09 15:02:53 crc kubenswrapper[4762]: I1009 15:02:53.832619 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6a852936-bff5-48f8-9336-cffaeb34743f-combined-ca-bundle\") pod \"cinder-volume-volume1-0\" (UID: \"6a852936-bff5-48f8-9336-cffaeb34743f\") " pod="openstack/cinder-volume-volume1-0" Oct 09 15:02:53 crc kubenswrapper[4762]: I1009 15:02:53.833248 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/6a852936-bff5-48f8-9336-cffaeb34743f-ceph\") pod \"cinder-volume-volume1-0\" (UID: \"6a852936-bff5-48f8-9336-cffaeb34743f\") " pod="openstack/cinder-volume-volume1-0" Oct 09 15:02:53 crc kubenswrapper[4762]: I1009 15:02:53.833406 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/6a852936-bff5-48f8-9336-cffaeb34743f-etc-nvme\") pod \"cinder-volume-volume1-0\" (UID: \"6a852936-bff5-48f8-9336-cffaeb34743f\") " pod="openstack/cinder-volume-volume1-0" Oct 09 15:02:53 crc kubenswrapper[4762]: I1009 15:02:53.835433 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6a852936-bff5-48f8-9336-cffaeb34743f-config-data\") pod \"cinder-volume-volume1-0\" (UID: \"6a852936-bff5-48f8-9336-cffaeb34743f\") " pod="openstack/cinder-volume-volume1-0" Oct 09 15:02:53 crc kubenswrapper[4762]: I1009 15:02:53.835752 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run\" (UniqueName: 
\"kubernetes.io/host-path/6a852936-bff5-48f8-9336-cffaeb34743f-run\") pod \"cinder-volume-volume1-0\" (UID: \"6a852936-bff5-48f8-9336-cffaeb34743f\") " pod="openstack/cinder-volume-volume1-0" Oct 09 15:02:53 crc kubenswrapper[4762]: I1009 15:02:53.836129 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/6a852936-bff5-48f8-9336-cffaeb34743f-var-locks-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"6a852936-bff5-48f8-9336-cffaeb34743f\") " pod="openstack/cinder-volume-volume1-0" Oct 09 15:02:53 crc kubenswrapper[4762]: I1009 15:02:53.836519 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6a852936-bff5-48f8-9336-cffaeb34743f-scripts\") pod \"cinder-volume-volume1-0\" (UID: \"6a852936-bff5-48f8-9336-cffaeb34743f\") " pod="openstack/cinder-volume-volume1-0" Oct 09 15:02:53 crc kubenswrapper[4762]: I1009 15:02:53.836748 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p6rpf\" (UniqueName: \"kubernetes.io/projected/6a852936-bff5-48f8-9336-cffaeb34743f-kube-api-access-p6rpf\") pod \"cinder-volume-volume1-0\" (UID: \"6a852936-bff5-48f8-9336-cffaeb34743f\") " pod="openstack/cinder-volume-volume1-0" Oct 09 15:02:53 crc kubenswrapper[4762]: I1009 15:02:53.836906 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/6a852936-bff5-48f8-9336-cffaeb34743f-var-lib-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"6a852936-bff5-48f8-9336-cffaeb34743f\") " pod="openstack/cinder-volume-volume1-0" Oct 09 15:02:53 crc kubenswrapper[4762]: I1009 15:02:53.837171 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/6a852936-bff5-48f8-9336-cffaeb34743f-var-locks-brick\") pod \"cinder-volume-volume1-0\" (UID: \"6a852936-bff5-48f8-9336-cffaeb34743f\") " pod="openstack/cinder-volume-volume1-0" Oct 09 15:02:53 crc kubenswrapper[4762]: I1009 15:02:53.837258 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/6a852936-bff5-48f8-9336-cffaeb34743f-sys\") pod \"cinder-volume-volume1-0\" (UID: \"6a852936-bff5-48f8-9336-cffaeb34743f\") " pod="openstack/cinder-volume-volume1-0" Oct 09 15:02:53 crc kubenswrapper[4762]: I1009 15:02:53.837322 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6a852936-bff5-48f8-9336-cffaeb34743f-etc-machine-id\") pod \"cinder-volume-volume1-0\" (UID: \"6a852936-bff5-48f8-9336-cffaeb34743f\") " pod="openstack/cinder-volume-volume1-0" Oct 09 15:02:53 crc kubenswrapper[4762]: I1009 15:02:53.869079 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-backup-0"] Oct 09 15:02:53 crc kubenswrapper[4762]: I1009 15:02:53.938968 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/6a852936-bff5-48f8-9336-cffaeb34743f-etc-nvme\") pod \"cinder-volume-volume1-0\" (UID: \"6a852936-bff5-48f8-9336-cffaeb34743f\") " pod="openstack/cinder-volume-volume1-0" Oct 09 15:02:53 crc kubenswrapper[4762]: I1009 15:02:53.939020 4762 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/ce78c95b-85d3-453f-9922-2937d0e578f8-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"ce78c95b-85d3-453f-9922-2937d0e578f8\") " pod="openstack/cinder-backup-0" Oct 09 15:02:53 crc kubenswrapper[4762]: I1009 15:02:53.939042 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/ce78c95b-85d3-453f-9922-2937d0e578f8-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"ce78c95b-85d3-453f-9922-2937d0e578f8\") " pod="openstack/cinder-backup-0" Oct 09 15:02:53 crc kubenswrapper[4762]: I1009 15:02:53.939069 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6a852936-bff5-48f8-9336-cffaeb34743f-config-data\") pod \"cinder-volume-volume1-0\" (UID: \"6a852936-bff5-48f8-9336-cffaeb34743f\") " pod="openstack/cinder-volume-volume1-0" Oct 09 15:02:53 crc kubenswrapper[4762]: I1009 15:02:53.939096 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/6a852936-bff5-48f8-9336-cffaeb34743f-run\") pod \"cinder-volume-volume1-0\" (UID: \"6a852936-bff5-48f8-9336-cffaeb34743f\") " pod="openstack/cinder-volume-volume1-0" Oct 09 15:02:53 crc kubenswrapper[4762]: I1009 15:02:53.939133 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ce78c95b-85d3-453f-9922-2937d0e578f8-config-data\") pod \"cinder-backup-0\" (UID: \"ce78c95b-85d3-453f-9922-2937d0e578f8\") " pod="openstack/cinder-backup-0" Oct 09 15:02:53 crc kubenswrapper[4762]: I1009 15:02:53.939169 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ce78c95b-85d3-453f-9922-2937d0e578f8-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"ce78c95b-85d3-453f-9922-2937d0e578f8\") " pod="openstack/cinder-backup-0" Oct 09 15:02:53 crc kubenswrapper[4762]: I1009 15:02:53.939193 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/6a852936-bff5-48f8-9336-cffaeb34743f-var-locks-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"6a852936-bff5-48f8-9336-cffaeb34743f\") " pod="openstack/cinder-volume-volume1-0" Oct 09 15:02:53 crc kubenswrapper[4762]: I1009 15:02:53.939221 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p6rpf\" (UniqueName: \"kubernetes.io/projected/6a852936-bff5-48f8-9336-cffaeb34743f-kube-api-access-p6rpf\") pod \"cinder-volume-volume1-0\" (UID: \"6a852936-bff5-48f8-9336-cffaeb34743f\") " pod="openstack/cinder-volume-volume1-0" Oct 09 15:02:53 crc kubenswrapper[4762]: I1009 15:02:53.939237 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6a852936-bff5-48f8-9336-cffaeb34743f-scripts\") pod \"cinder-volume-volume1-0\" (UID: \"6a852936-bff5-48f8-9336-cffaeb34743f\") " pod="openstack/cinder-volume-volume1-0" Oct 09 15:02:53 crc kubenswrapper[4762]: I1009 15:02:53.939264 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-cinder\" (UniqueName: 
\"kubernetes.io/host-path/6a852936-bff5-48f8-9336-cffaeb34743f-var-lib-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"6a852936-bff5-48f8-9336-cffaeb34743f\") " pod="openstack/cinder-volume-volume1-0" Oct 09 15:02:53 crc kubenswrapper[4762]: I1009 15:02:53.939287 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lt4bs\" (UniqueName: \"kubernetes.io/projected/ce78c95b-85d3-453f-9922-2937d0e578f8-kube-api-access-lt4bs\") pod \"cinder-backup-0\" (UID: \"ce78c95b-85d3-453f-9922-2937d0e578f8\") " pod="openstack/cinder-backup-0" Oct 09 15:02:53 crc kubenswrapper[4762]: I1009 15:02:53.939326 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/6a852936-bff5-48f8-9336-cffaeb34743f-var-locks-brick\") pod \"cinder-volume-volume1-0\" (UID: \"6a852936-bff5-48f8-9336-cffaeb34743f\") " pod="openstack/cinder-volume-volume1-0" Oct 09 15:02:53 crc kubenswrapper[4762]: I1009 15:02:53.939361 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/ce78c95b-85d3-453f-9922-2937d0e578f8-run\") pod \"cinder-backup-0\" (UID: \"ce78c95b-85d3-453f-9922-2937d0e578f8\") " pod="openstack/cinder-backup-0" Oct 09 15:02:53 crc kubenswrapper[4762]: I1009 15:02:53.939378 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/6a852936-bff5-48f8-9336-cffaeb34743f-sys\") pod \"cinder-volume-volume1-0\" (UID: \"6a852936-bff5-48f8-9336-cffaeb34743f\") " pod="openstack/cinder-volume-volume1-0" Oct 09 15:02:53 crc kubenswrapper[4762]: I1009 15:02:53.939394 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/ce78c95b-85d3-453f-9922-2937d0e578f8-lib-modules\") pod \"cinder-backup-0\" (UID: \"ce78c95b-85d3-453f-9922-2937d0e578f8\") " pod="openstack/cinder-backup-0" Oct 09 15:02:53 crc kubenswrapper[4762]: I1009 15:02:53.939411 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ce78c95b-85d3-453f-9922-2937d0e578f8-config-data-custom\") pod \"cinder-backup-0\" (UID: \"ce78c95b-85d3-453f-9922-2937d0e578f8\") " pod="openstack/cinder-backup-0" Oct 09 15:02:53 crc kubenswrapper[4762]: I1009 15:02:53.939436 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6a852936-bff5-48f8-9336-cffaeb34743f-etc-machine-id\") pod \"cinder-volume-volume1-0\" (UID: \"6a852936-bff5-48f8-9336-cffaeb34743f\") " pod="openstack/cinder-volume-volume1-0" Oct 09 15:02:53 crc kubenswrapper[4762]: I1009 15:02:53.939460 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/6a852936-bff5-48f8-9336-cffaeb34743f-lib-modules\") pod \"cinder-volume-volume1-0\" (UID: \"6a852936-bff5-48f8-9336-cffaeb34743f\") " pod="openstack/cinder-volume-volume1-0" Oct 09 15:02:53 crc kubenswrapper[4762]: I1009 15:02:53.939476 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/6a852936-bff5-48f8-9336-cffaeb34743f-dev\") pod \"cinder-volume-volume1-0\" (UID: \"6a852936-bff5-48f8-9336-cffaeb34743f\") " 
pod="openstack/cinder-volume-volume1-0" Oct 09 15:02:53 crc kubenswrapper[4762]: I1009 15:02:53.939498 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ce78c95b-85d3-453f-9922-2937d0e578f8-scripts\") pod \"cinder-backup-0\" (UID: \"ce78c95b-85d3-453f-9922-2937d0e578f8\") " pod="openstack/cinder-backup-0" Oct 09 15:02:53 crc kubenswrapper[4762]: I1009 15:02:53.939516 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/ce78c95b-85d3-453f-9922-2937d0e578f8-dev\") pod \"cinder-backup-0\" (UID: \"ce78c95b-85d3-453f-9922-2937d0e578f8\") " pod="openstack/cinder-backup-0" Oct 09 15:02:53 crc kubenswrapper[4762]: I1009 15:02:53.939535 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce78c95b-85d3-453f-9922-2937d0e578f8-combined-ca-bundle\") pod \"cinder-backup-0\" (UID: \"ce78c95b-85d3-453f-9922-2937d0e578f8\") " pod="openstack/cinder-backup-0" Oct 09 15:02:53 crc kubenswrapper[4762]: I1009 15:02:53.939569 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/ce78c95b-85d3-453f-9922-2937d0e578f8-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"ce78c95b-85d3-453f-9922-2937d0e578f8\") " pod="openstack/cinder-backup-0" Oct 09 15:02:53 crc kubenswrapper[4762]: I1009 15:02:53.939585 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/ce78c95b-85d3-453f-9922-2937d0e578f8-etc-nvme\") pod \"cinder-backup-0\" (UID: \"ce78c95b-85d3-453f-9922-2937d0e578f8\") " pod="openstack/cinder-backup-0" Oct 09 15:02:53 crc kubenswrapper[4762]: I1009 15:02:53.939608 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6a852936-bff5-48f8-9336-cffaeb34743f-config-data-custom\") pod \"cinder-volume-volume1-0\" (UID: \"6a852936-bff5-48f8-9336-cffaeb34743f\") " pod="openstack/cinder-volume-volume1-0" Oct 09 15:02:53 crc kubenswrapper[4762]: I1009 15:02:53.939627 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/ce78c95b-85d3-453f-9922-2937d0e578f8-ceph\") pod \"cinder-backup-0\" (UID: \"ce78c95b-85d3-453f-9922-2937d0e578f8\") " pod="openstack/cinder-backup-0" Oct 09 15:02:53 crc kubenswrapper[4762]: I1009 15:02:53.939879 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/ce78c95b-85d3-453f-9922-2937d0e578f8-var-lib-cinder\") pod \"cinder-backup-0\" (UID: \"ce78c95b-85d3-453f-9922-2937d0e578f8\") " pod="openstack/cinder-backup-0" Oct 09 15:02:53 crc kubenswrapper[4762]: I1009 15:02:53.939901 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/ce78c95b-85d3-453f-9922-2937d0e578f8-sys\") pod \"cinder-backup-0\" (UID: \"ce78c95b-85d3-453f-9922-2937d0e578f8\") " pod="openstack/cinder-backup-0" Oct 09 15:02:53 crc kubenswrapper[4762]: I1009 15:02:53.939942 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-iscsi\" 
(UniqueName: \"kubernetes.io/host-path/6a852936-bff5-48f8-9336-cffaeb34743f-etc-iscsi\") pod \"cinder-volume-volume1-0\" (UID: \"6a852936-bff5-48f8-9336-cffaeb34743f\") " pod="openstack/cinder-volume-volume1-0" Oct 09 15:02:53 crc kubenswrapper[4762]: I1009 15:02:53.939963 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6a852936-bff5-48f8-9336-cffaeb34743f-combined-ca-bundle\") pod \"cinder-volume-volume1-0\" (UID: \"6a852936-bff5-48f8-9336-cffaeb34743f\") " pod="openstack/cinder-volume-volume1-0" Oct 09 15:02:53 crc kubenswrapper[4762]: I1009 15:02:53.939978 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/6a852936-bff5-48f8-9336-cffaeb34743f-ceph\") pod \"cinder-volume-volume1-0\" (UID: \"6a852936-bff5-48f8-9336-cffaeb34743f\") " pod="openstack/cinder-volume-volume1-0" Oct 09 15:02:53 crc kubenswrapper[4762]: I1009 15:02:53.941594 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/6a852936-bff5-48f8-9336-cffaeb34743f-var-locks-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"6a852936-bff5-48f8-9336-cffaeb34743f\") " pod="openstack/cinder-volume-volume1-0" Oct 09 15:02:53 crc kubenswrapper[4762]: I1009 15:02:53.941736 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/6a852936-bff5-48f8-9336-cffaeb34743f-etc-nvme\") pod \"cinder-volume-volume1-0\" (UID: \"6a852936-bff5-48f8-9336-cffaeb34743f\") " pod="openstack/cinder-volume-volume1-0" Oct 09 15:02:53 crc kubenswrapper[4762]: I1009 15:02:53.941844 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6a852936-bff5-48f8-9336-cffaeb34743f-etc-machine-id\") pod \"cinder-volume-volume1-0\" (UID: \"6a852936-bff5-48f8-9336-cffaeb34743f\") " pod="openstack/cinder-volume-volume1-0" Oct 09 15:02:53 crc kubenswrapper[4762]: I1009 15:02:53.941879 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/6a852936-bff5-48f8-9336-cffaeb34743f-lib-modules\") pod \"cinder-volume-volume1-0\" (UID: \"6a852936-bff5-48f8-9336-cffaeb34743f\") " pod="openstack/cinder-volume-volume1-0" Oct 09 15:02:53 crc kubenswrapper[4762]: I1009 15:02:53.941900 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/6a852936-bff5-48f8-9336-cffaeb34743f-dev\") pod \"cinder-volume-volume1-0\" (UID: \"6a852936-bff5-48f8-9336-cffaeb34743f\") " pod="openstack/cinder-volume-volume1-0" Oct 09 15:02:53 crc kubenswrapper[4762]: I1009 15:02:53.942816 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/6a852936-bff5-48f8-9336-cffaeb34743f-var-locks-brick\") pod \"cinder-volume-volume1-0\" (UID: \"6a852936-bff5-48f8-9336-cffaeb34743f\") " pod="openstack/cinder-volume-volume1-0" Oct 09 15:02:53 crc kubenswrapper[4762]: I1009 15:02:53.943048 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/6a852936-bff5-48f8-9336-cffaeb34743f-var-lib-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"6a852936-bff5-48f8-9336-cffaeb34743f\") " pod="openstack/cinder-volume-volume1-0" Oct 09 15:02:53 crc 
kubenswrapper[4762]: I1009 15:02:53.943213 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run\" (UniqueName: \"kubernetes.io/host-path/6a852936-bff5-48f8-9336-cffaeb34743f-run\") pod \"cinder-volume-volume1-0\" (UID: \"6a852936-bff5-48f8-9336-cffaeb34743f\") " pod="openstack/cinder-volume-volume1-0" Oct 09 15:02:53 crc kubenswrapper[4762]: I1009 15:02:53.943342 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/6a852936-bff5-48f8-9336-cffaeb34743f-sys\") pod \"cinder-volume-volume1-0\" (UID: \"6a852936-bff5-48f8-9336-cffaeb34743f\") " pod="openstack/cinder-volume-volume1-0" Oct 09 15:02:53 crc kubenswrapper[4762]: I1009 15:02:53.943473 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/6a852936-bff5-48f8-9336-cffaeb34743f-etc-iscsi\") pod \"cinder-volume-volume1-0\" (UID: \"6a852936-bff5-48f8-9336-cffaeb34743f\") " pod="openstack/cinder-volume-volume1-0" Oct 09 15:02:53 crc kubenswrapper[4762]: I1009 15:02:53.947954 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6a852936-bff5-48f8-9336-cffaeb34743f-config-data-custom\") pod \"cinder-volume-volume1-0\" (UID: \"6a852936-bff5-48f8-9336-cffaeb34743f\") " pod="openstack/cinder-volume-volume1-0" Oct 09 15:02:53 crc kubenswrapper[4762]: I1009 15:02:53.948445 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/6a852936-bff5-48f8-9336-cffaeb34743f-ceph\") pod \"cinder-volume-volume1-0\" (UID: \"6a852936-bff5-48f8-9336-cffaeb34743f\") " pod="openstack/cinder-volume-volume1-0" Oct 09 15:02:53 crc kubenswrapper[4762]: I1009 15:02:53.948516 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6a852936-bff5-48f8-9336-cffaeb34743f-combined-ca-bundle\") pod \"cinder-volume-volume1-0\" (UID: \"6a852936-bff5-48f8-9336-cffaeb34743f\") " pod="openstack/cinder-volume-volume1-0" Oct 09 15:02:53 crc kubenswrapper[4762]: I1009 15:02:53.948715 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6a852936-bff5-48f8-9336-cffaeb34743f-scripts\") pod \"cinder-volume-volume1-0\" (UID: \"6a852936-bff5-48f8-9336-cffaeb34743f\") " pod="openstack/cinder-volume-volume1-0" Oct 09 15:02:53 crc kubenswrapper[4762]: I1009 15:02:53.949059 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6a852936-bff5-48f8-9336-cffaeb34743f-config-data\") pod \"cinder-volume-volume1-0\" (UID: \"6a852936-bff5-48f8-9336-cffaeb34743f\") " pod="openstack/cinder-volume-volume1-0" Oct 09 15:02:53 crc kubenswrapper[4762]: I1009 15:02:53.958149 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p6rpf\" (UniqueName: \"kubernetes.io/projected/6a852936-bff5-48f8-9336-cffaeb34743f-kube-api-access-p6rpf\") pod \"cinder-volume-volume1-0\" (UID: \"6a852936-bff5-48f8-9336-cffaeb34743f\") " pod="openstack/cinder-volume-volume1-0" Oct 09 15:02:54 crc kubenswrapper[4762]: I1009 15:02:54.042382 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lt4bs\" (UniqueName: \"kubernetes.io/projected/ce78c95b-85d3-453f-9922-2937d0e578f8-kube-api-access-lt4bs\") pod \"cinder-backup-0\" (UID: 
\"ce78c95b-85d3-453f-9922-2937d0e578f8\") " pod="openstack/cinder-backup-0" Oct 09 15:02:54 crc kubenswrapper[4762]: I1009 15:02:54.042447 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/ce78c95b-85d3-453f-9922-2937d0e578f8-run\") pod \"cinder-backup-0\" (UID: \"ce78c95b-85d3-453f-9922-2937d0e578f8\") " pod="openstack/cinder-backup-0" Oct 09 15:02:54 crc kubenswrapper[4762]: I1009 15:02:54.042468 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/ce78c95b-85d3-453f-9922-2937d0e578f8-lib-modules\") pod \"cinder-backup-0\" (UID: \"ce78c95b-85d3-453f-9922-2937d0e578f8\") " pod="openstack/cinder-backup-0" Oct 09 15:02:54 crc kubenswrapper[4762]: I1009 15:02:54.042483 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ce78c95b-85d3-453f-9922-2937d0e578f8-config-data-custom\") pod \"cinder-backup-0\" (UID: \"ce78c95b-85d3-453f-9922-2937d0e578f8\") " pod="openstack/cinder-backup-0" Oct 09 15:02:54 crc kubenswrapper[4762]: I1009 15:02:54.042526 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ce78c95b-85d3-453f-9922-2937d0e578f8-scripts\") pod \"cinder-backup-0\" (UID: \"ce78c95b-85d3-453f-9922-2937d0e578f8\") " pod="openstack/cinder-backup-0" Oct 09 15:02:54 crc kubenswrapper[4762]: I1009 15:02:54.042542 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/ce78c95b-85d3-453f-9922-2937d0e578f8-dev\") pod \"cinder-backup-0\" (UID: \"ce78c95b-85d3-453f-9922-2937d0e578f8\") " pod="openstack/cinder-backup-0" Oct 09 15:02:54 crc kubenswrapper[4762]: I1009 15:02:54.042560 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce78c95b-85d3-453f-9922-2937d0e578f8-combined-ca-bundle\") pod \"cinder-backup-0\" (UID: \"ce78c95b-85d3-453f-9922-2937d0e578f8\") " pod="openstack/cinder-backup-0" Oct 09 15:02:54 crc kubenswrapper[4762]: I1009 15:02:54.042595 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/ce78c95b-85d3-453f-9922-2937d0e578f8-etc-nvme\") pod \"cinder-backup-0\" (UID: \"ce78c95b-85d3-453f-9922-2937d0e578f8\") " pod="openstack/cinder-backup-0" Oct 09 15:02:54 crc kubenswrapper[4762]: I1009 15:02:54.042615 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/ce78c95b-85d3-453f-9922-2937d0e578f8-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"ce78c95b-85d3-453f-9922-2937d0e578f8\") " pod="openstack/cinder-backup-0" Oct 09 15:02:54 crc kubenswrapper[4762]: I1009 15:02:54.042659 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/ce78c95b-85d3-453f-9922-2937d0e578f8-ceph\") pod \"cinder-backup-0\" (UID: \"ce78c95b-85d3-453f-9922-2937d0e578f8\") " pod="openstack/cinder-backup-0" Oct 09 15:02:54 crc kubenswrapper[4762]: I1009 15:02:54.042677 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/ce78c95b-85d3-453f-9922-2937d0e578f8-var-lib-cinder\") pod \"cinder-backup-0\" (UID: 
\"ce78c95b-85d3-453f-9922-2937d0e578f8\") " pod="openstack/cinder-backup-0" Oct 09 15:02:54 crc kubenswrapper[4762]: I1009 15:02:54.042702 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/ce78c95b-85d3-453f-9922-2937d0e578f8-sys\") pod \"cinder-backup-0\" (UID: \"ce78c95b-85d3-453f-9922-2937d0e578f8\") " pod="openstack/cinder-backup-0" Oct 09 15:02:54 crc kubenswrapper[4762]: I1009 15:02:54.042726 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/ce78c95b-85d3-453f-9922-2937d0e578f8-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"ce78c95b-85d3-453f-9922-2937d0e578f8\") " pod="openstack/cinder-backup-0" Oct 09 15:02:54 crc kubenswrapper[4762]: I1009 15:02:54.042743 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/ce78c95b-85d3-453f-9922-2937d0e578f8-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"ce78c95b-85d3-453f-9922-2937d0e578f8\") " pod="openstack/cinder-backup-0" Oct 09 15:02:54 crc kubenswrapper[4762]: I1009 15:02:54.042767 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ce78c95b-85d3-453f-9922-2937d0e578f8-config-data\") pod \"cinder-backup-0\" (UID: \"ce78c95b-85d3-453f-9922-2937d0e578f8\") " pod="openstack/cinder-backup-0" Oct 09 15:02:54 crc kubenswrapper[4762]: I1009 15:02:54.042794 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ce78c95b-85d3-453f-9922-2937d0e578f8-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"ce78c95b-85d3-453f-9922-2937d0e578f8\") " pod="openstack/cinder-backup-0" Oct 09 15:02:54 crc kubenswrapper[4762]: I1009 15:02:54.043141 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/ce78c95b-85d3-453f-9922-2937d0e578f8-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"ce78c95b-85d3-453f-9922-2937d0e578f8\") " pod="openstack/cinder-backup-0" Oct 09 15:02:54 crc kubenswrapper[4762]: I1009 15:02:54.043524 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run\" (UniqueName: \"kubernetes.io/host-path/ce78c95b-85d3-453f-9922-2937d0e578f8-run\") pod \"cinder-backup-0\" (UID: \"ce78c95b-85d3-453f-9922-2937d0e578f8\") " pod="openstack/cinder-backup-0" Oct 09 15:02:54 crc kubenswrapper[4762]: I1009 15:02:54.043581 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/ce78c95b-85d3-453f-9922-2937d0e578f8-lib-modules\") pod \"cinder-backup-0\" (UID: \"ce78c95b-85d3-453f-9922-2937d0e578f8\") " pod="openstack/cinder-backup-0" Oct 09 15:02:54 crc kubenswrapper[4762]: I1009 15:02:54.043623 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/ce78c95b-85d3-453f-9922-2937d0e578f8-dev\") pod \"cinder-backup-0\" (UID: \"ce78c95b-85d3-453f-9922-2937d0e578f8\") " pod="openstack/cinder-backup-0" Oct 09 15:02:54 crc kubenswrapper[4762]: I1009 15:02:54.043616 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/ce78c95b-85d3-453f-9922-2937d0e578f8-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"ce78c95b-85d3-453f-9922-2937d0e578f8\") " 
pod="openstack/cinder-backup-0" Oct 09 15:02:54 crc kubenswrapper[4762]: I1009 15:02:54.043748 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/ce78c95b-85d3-453f-9922-2937d0e578f8-var-lib-cinder\") pod \"cinder-backup-0\" (UID: \"ce78c95b-85d3-453f-9922-2937d0e578f8\") " pod="openstack/cinder-backup-0" Oct 09 15:02:54 crc kubenswrapper[4762]: I1009 15:02:54.044018 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/ce78c95b-85d3-453f-9922-2937d0e578f8-sys\") pod \"cinder-backup-0\" (UID: \"ce78c95b-85d3-453f-9922-2937d0e578f8\") " pod="openstack/cinder-backup-0" Oct 09 15:02:54 crc kubenswrapper[4762]: I1009 15:02:54.044076 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/ce78c95b-85d3-453f-9922-2937d0e578f8-etc-nvme\") pod \"cinder-backup-0\" (UID: \"ce78c95b-85d3-453f-9922-2937d0e578f8\") " pod="openstack/cinder-backup-0" Oct 09 15:02:54 crc kubenswrapper[4762]: I1009 15:02:54.044916 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ce78c95b-85d3-453f-9922-2937d0e578f8-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"ce78c95b-85d3-453f-9922-2937d0e578f8\") " pod="openstack/cinder-backup-0" Oct 09 15:02:54 crc kubenswrapper[4762]: I1009 15:02:54.045353 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/ce78c95b-85d3-453f-9922-2937d0e578f8-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"ce78c95b-85d3-453f-9922-2937d0e578f8\") " pod="openstack/cinder-backup-0" Oct 09 15:02:54 crc kubenswrapper[4762]: I1009 15:02:54.048399 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ce78c95b-85d3-453f-9922-2937d0e578f8-scripts\") pod \"cinder-backup-0\" (UID: \"ce78c95b-85d3-453f-9922-2937d0e578f8\") " pod="openstack/cinder-backup-0" Oct 09 15:02:54 crc kubenswrapper[4762]: I1009 15:02:54.048627 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/ce78c95b-85d3-453f-9922-2937d0e578f8-ceph\") pod \"cinder-backup-0\" (UID: \"ce78c95b-85d3-453f-9922-2937d0e578f8\") " pod="openstack/cinder-backup-0" Oct 09 15:02:54 crc kubenswrapper[4762]: I1009 15:02:54.051461 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ce78c95b-85d3-453f-9922-2937d0e578f8-config-data-custom\") pod \"cinder-backup-0\" (UID: \"ce78c95b-85d3-453f-9922-2937d0e578f8\") " pod="openstack/cinder-backup-0" Oct 09 15:02:54 crc kubenswrapper[4762]: I1009 15:02:54.051477 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ce78c95b-85d3-453f-9922-2937d0e578f8-config-data\") pod \"cinder-backup-0\" (UID: \"ce78c95b-85d3-453f-9922-2937d0e578f8\") " pod="openstack/cinder-backup-0" Oct 09 15:02:54 crc kubenswrapper[4762]: I1009 15:02:54.061076 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce78c95b-85d3-453f-9922-2937d0e578f8-combined-ca-bundle\") pod \"cinder-backup-0\" (UID: \"ce78c95b-85d3-453f-9922-2937d0e578f8\") " pod="openstack/cinder-backup-0" Oct 09 15:02:54 crc kubenswrapper[4762]: 
I1009 15:02:54.064845 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lt4bs\" (UniqueName: \"kubernetes.io/projected/ce78c95b-85d3-453f-9922-2937d0e578f8-kube-api-access-lt4bs\") pod \"cinder-backup-0\" (UID: \"ce78c95b-85d3-453f-9922-2937d0e578f8\") " pod="openstack/cinder-backup-0" Oct 09 15:02:54 crc kubenswrapper[4762]: I1009 15:02:54.099859 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-volume-volume1-0" Oct 09 15:02:54 crc kubenswrapper[4762]: I1009 15:02:54.155349 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-backup-0" Oct 09 15:02:54 crc kubenswrapper[4762]: I1009 15:02:54.647602 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"63697ab0-5220-4209-be75-2f136eb3ab6e","Type":"ContainerStarted","Data":"44950fd867feccdab41e5b990b7e79fc81224978e5ea7a1958589b368048972d"} Oct 09 15:02:54 crc kubenswrapper[4762]: I1009 15:02:54.653544 4762 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 09 15:02:54 crc kubenswrapper[4762]: I1009 15:02:54.655353 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-volume-volume1-0"] Oct 09 15:02:54 crc kubenswrapper[4762]: I1009 15:02:54.665026 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=3.665007295 podStartE2EDuration="3.665007295s" podCreationTimestamp="2025-10-09 15:02:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 15:02:54.663701021 +0000 UTC m=+5850.437492080" watchObservedRunningTime="2025-10-09 15:02:54.665007295 +0000 UTC m=+5850.438798334" Oct 09 15:02:54 crc kubenswrapper[4762]: I1009 15:02:54.777605 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-backup-0"] Oct 09 15:02:54 crc kubenswrapper[4762]: W1009 15:02:54.781159 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podce78c95b_85d3_453f_9922_2937d0e578f8.slice/crio-8b6b310a59cbbf2cd687fbe0ec016cd590edcace2d409f92c407e0297f129bde WatchSource:0}: Error finding container 8b6b310a59cbbf2cd687fbe0ec016cd590edcace2d409f92c407e0297f129bde: Status 404 returned error can't find the container with id 8b6b310a59cbbf2cd687fbe0ec016cd590edcace2d409f92c407e0297f129bde Oct 09 15:02:55 crc kubenswrapper[4762]: I1009 15:02:55.659244 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-backup-0" event={"ID":"ce78c95b-85d3-453f-9922-2937d0e578f8","Type":"ContainerStarted","Data":"8b6b310a59cbbf2cd687fbe0ec016cd590edcace2d409f92c407e0297f129bde"} Oct 09 15:02:55 crc kubenswrapper[4762]: I1009 15:02:55.662697 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-volume1-0" event={"ID":"6a852936-bff5-48f8-9336-cffaeb34743f","Type":"ContainerStarted","Data":"c61401538be744da23e94ae5bb85bb0137cdab5b14fcdf35934cd11f6a978740"} Oct 09 15:02:56 crc kubenswrapper[4762]: I1009 15:02:56.420886 4762 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cinder-api-0" podUID="24951274-fd59-4f3d-bf68-ce86450e245e" containerName="cinder-api" probeResult="failure" output="Get \"http://10.217.1.84:8776/healthcheck\": read tcp 10.217.0.2:37134->10.217.1.84:8776: read: connection reset by peer" Oct 09 
15:02:56 crc kubenswrapper[4762]: I1009 15:02:56.689566 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-volume1-0" event={"ID":"6a852936-bff5-48f8-9336-cffaeb34743f","Type":"ContainerStarted","Data":"5976e6c0fca0c1b558b79959b84f19ce3292c1c2c49f62cbeaf5ed9c3585359c"}
Oct 09 15:02:56 crc kubenswrapper[4762]: I1009 15:02:56.689630 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-volume1-0" event={"ID":"6a852936-bff5-48f8-9336-cffaeb34743f","Type":"ContainerStarted","Data":"0e69dbe4a882bf0595a8cdd89d14ac9adc2c7f01b3f97825334604a5f6be09b8"}
Oct 09 15:02:56 crc kubenswrapper[4762]: I1009 15:02:56.694690 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-backup-0" event={"ID":"ce78c95b-85d3-453f-9922-2937d0e578f8","Type":"ContainerStarted","Data":"6759e8a60b7a7a409572335314003a5eaddffa9948beb62a522642bec2ec66b6"}
Oct 09 15:02:56 crc kubenswrapper[4762]: I1009 15:02:56.694760 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-backup-0" event={"ID":"ce78c95b-85d3-453f-9922-2937d0e578f8","Type":"ContainerStarted","Data":"072de8900065a0ecf4b7f5aec51b935af819ac468e16141f19b8db16fe10a107"}
Oct 09 15:02:56 crc kubenswrapper[4762]: I1009 15:02:56.697155 4762 generic.go:334] "Generic (PLEG): container finished" podID="24951274-fd59-4f3d-bf68-ce86450e245e" containerID="6174ea5ec5632d883860f887d599d4d0a0eb2d5738dafd6f0378924506747e7c" exitCode=0
Oct 09 15:02:56 crc kubenswrapper[4762]: I1009 15:02:56.697187 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"24951274-fd59-4f3d-bf68-ce86450e245e","Type":"ContainerDied","Data":"6174ea5ec5632d883860f887d599d4d0a0eb2d5738dafd6f0378924506747e7c"}
Oct 09 15:02:56 crc kubenswrapper[4762]: I1009 15:02:56.715560 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-volume-volume1-0" podStartSLOduration=2.865748466 podStartE2EDuration="3.715512469s" podCreationTimestamp="2025-10-09 15:02:53 +0000 UTC" firstStartedPulling="2025-10-09 15:02:54.65330224 +0000 UTC m=+5850.427093279" lastFinishedPulling="2025-10-09 15:02:55.503066243 +0000 UTC m=+5851.276857282" observedRunningTime="2025-10-09 15:02:56.708874996 +0000 UTC m=+5852.482666035" watchObservedRunningTime="2025-10-09 15:02:56.715512469 +0000 UTC m=+5852.489303508"
Oct 09 15:02:56 crc kubenswrapper[4762]: I1009 15:02:56.759406 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-backup-0" podStartSLOduration=3.036009937 podStartE2EDuration="3.75938292s" podCreationTimestamp="2025-10-09 15:02:53 +0000 UTC" firstStartedPulling="2025-10-09 15:02:54.783526711 +0000 UTC m=+5850.557317750" lastFinishedPulling="2025-10-09 15:02:55.506899694 +0000 UTC m=+5851.280690733" observedRunningTime="2025-10-09 15:02:56.746571917 +0000 UTC m=+5852.520362956" watchObservedRunningTime="2025-10-09 15:02:56.75938292 +0000 UTC m=+5852.533173959"
Oct 09 15:02:56 crc kubenswrapper[4762]: I1009 15:02:56.902759 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0"
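The two "Observed pod startup duration" records above also show how podStartSLOduration relates to podStartE2EDuration: the SLO figure appears to exclude the image-pull window. Worked through for cinder-volume-volume1-0:

    pull window  = 15:02:55.503066243 - 15:02:54.653302240 = 0.849764003 s
    SLO duration = 3.715512469 s (E2E) - 0.849764003 s (pull) = 2.865748466 s

and likewise for cinder-backup-0: 3.75938292 - (15:02:55.506899694 - 15:02:54.783526711 = 0.723372983) = 3.036009937 s. For pods that needed no image pull (zero-valued firstStartedPulling/lastFinishedPulling timestamps, as with nova-cell1-conductor-0 and cinder-scheduler-0 earlier), the two durations are identical.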
Need to start a new one" pod="openstack/cinder-api-0" Oct 09 15:02:56 crc kubenswrapper[4762]: I1009 15:02:56.942346 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Oct 09 15:02:57 crc kubenswrapper[4762]: I1009 15:02:57.019643 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/24951274-fd59-4f3d-bf68-ce86450e245e-config-data\") pod \"24951274-fd59-4f3d-bf68-ce86450e245e\" (UID: \"24951274-fd59-4f3d-bf68-ce86450e245e\") " Oct 09 15:02:57 crc kubenswrapper[4762]: I1009 15:02:57.019821 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/24951274-fd59-4f3d-bf68-ce86450e245e-etc-machine-id\") pod \"24951274-fd59-4f3d-bf68-ce86450e245e\" (UID: \"24951274-fd59-4f3d-bf68-ce86450e245e\") " Oct 09 15:02:57 crc kubenswrapper[4762]: I1009 15:02:57.019915 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/24951274-fd59-4f3d-bf68-ce86450e245e-combined-ca-bundle\") pod \"24951274-fd59-4f3d-bf68-ce86450e245e\" (UID: \"24951274-fd59-4f3d-bf68-ce86450e245e\") " Oct 09 15:02:57 crc kubenswrapper[4762]: I1009 15:02:57.020041 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/24951274-fd59-4f3d-bf68-ce86450e245e-logs\") pod \"24951274-fd59-4f3d-bf68-ce86450e245e\" (UID: \"24951274-fd59-4f3d-bf68-ce86450e245e\") " Oct 09 15:02:57 crc kubenswrapper[4762]: I1009 15:02:57.020071 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4jlp\" (UniqueName: \"kubernetes.io/projected/24951274-fd59-4f3d-bf68-ce86450e245e-kube-api-access-s4jlp\") pod \"24951274-fd59-4f3d-bf68-ce86450e245e\" (UID: \"24951274-fd59-4f3d-bf68-ce86450e245e\") " Oct 09 15:02:57 crc kubenswrapper[4762]: I1009 15:02:57.020220 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/24951274-fd59-4f3d-bf68-ce86450e245e-scripts\") pod \"24951274-fd59-4f3d-bf68-ce86450e245e\" (UID: \"24951274-fd59-4f3d-bf68-ce86450e245e\") " Oct 09 15:02:57 crc kubenswrapper[4762]: I1009 15:02:57.020298 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/24951274-fd59-4f3d-bf68-ce86450e245e-config-data-custom\") pod \"24951274-fd59-4f3d-bf68-ce86450e245e\" (UID: \"24951274-fd59-4f3d-bf68-ce86450e245e\") " Oct 09 15:02:57 crc kubenswrapper[4762]: I1009 15:02:57.021372 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/24951274-fd59-4f3d-bf68-ce86450e245e-logs" (OuterVolumeSpecName: "logs") pod "24951274-fd59-4f3d-bf68-ce86450e245e" (UID: "24951274-fd59-4f3d-bf68-ce86450e245e"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 15:02:57 crc kubenswrapper[4762]: I1009 15:02:57.021468 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/24951274-fd59-4f3d-bf68-ce86450e245e-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "24951274-fd59-4f3d-bf68-ce86450e245e" (UID: "24951274-fd59-4f3d-bf68-ce86450e245e"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 15:02:57 crc kubenswrapper[4762]: I1009 15:02:57.045267 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/24951274-fd59-4f3d-bf68-ce86450e245e-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "24951274-fd59-4f3d-bf68-ce86450e245e" (UID: "24951274-fd59-4f3d-bf68-ce86450e245e"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:02:57 crc kubenswrapper[4762]: I1009 15:02:57.048789 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/24951274-fd59-4f3d-bf68-ce86450e245e-scripts" (OuterVolumeSpecName: "scripts") pod "24951274-fd59-4f3d-bf68-ce86450e245e" (UID: "24951274-fd59-4f3d-bf68-ce86450e245e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:02:57 crc kubenswrapper[4762]: I1009 15:02:57.071827 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/24951274-fd59-4f3d-bf68-ce86450e245e-kube-api-access-s4jlp" (OuterVolumeSpecName: "kube-api-access-s4jlp") pod "24951274-fd59-4f3d-bf68-ce86450e245e" (UID: "24951274-fd59-4f3d-bf68-ce86450e245e"). InnerVolumeSpecName "kube-api-access-s4jlp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 15:02:57 crc kubenswrapper[4762]: I1009 15:02:57.074889 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/24951274-fd59-4f3d-bf68-ce86450e245e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "24951274-fd59-4f3d-bf68-ce86450e245e" (UID: "24951274-fd59-4f3d-bf68-ce86450e245e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:02:57 crc kubenswrapper[4762]: I1009 15:02:57.081572 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/24951274-fd59-4f3d-bf68-ce86450e245e-config-data" (OuterVolumeSpecName: "config-data") pod "24951274-fd59-4f3d-bf68-ce86450e245e" (UID: "24951274-fd59-4f3d-bf68-ce86450e245e"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:02:57 crc kubenswrapper[4762]: I1009 15:02:57.123678 4762 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/24951274-fd59-4f3d-bf68-ce86450e245e-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 15:02:57 crc kubenswrapper[4762]: I1009 15:02:57.123713 4762 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/24951274-fd59-4f3d-bf68-ce86450e245e-etc-machine-id\") on node \"crc\" DevicePath \"\"" Oct 09 15:02:57 crc kubenswrapper[4762]: I1009 15:02:57.125849 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/24951274-fd59-4f3d-bf68-ce86450e245e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 15:02:57 crc kubenswrapper[4762]: I1009 15:02:57.125931 4762 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/24951274-fd59-4f3d-bf68-ce86450e245e-logs\") on node \"crc\" DevicePath \"\"" Oct 09 15:02:57 crc kubenswrapper[4762]: I1009 15:02:57.125964 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4jlp\" (UniqueName: \"kubernetes.io/projected/24951274-fd59-4f3d-bf68-ce86450e245e-kube-api-access-s4jlp\") on node \"crc\" DevicePath \"\"" Oct 09 15:02:57 crc kubenswrapper[4762]: I1009 15:02:57.125978 4762 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/24951274-fd59-4f3d-bf68-ce86450e245e-scripts\") on node \"crc\" DevicePath \"\"" Oct 09 15:02:57 crc kubenswrapper[4762]: I1009 15:02:57.126887 4762 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/24951274-fd59-4f3d-bf68-ce86450e245e-config-data-custom\") on node \"crc\" DevicePath \"\"" Oct 09 15:02:57 crc kubenswrapper[4762]: I1009 15:02:57.136798 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Oct 09 15:02:57 crc kubenswrapper[4762]: I1009 15:02:57.138363 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Oct 09 15:02:57 crc kubenswrapper[4762]: I1009 15:02:57.138463 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Oct 09 15:02:57 crc kubenswrapper[4762]: I1009 15:02:57.143956 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Oct 09 15:02:57 crc kubenswrapper[4762]: I1009 15:02:57.708682 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"24951274-fd59-4f3d-bf68-ce86450e245e","Type":"ContainerDied","Data":"68def4d8f02e49238cae6ae7cb5f6ca8477fe581444e72310dd06a461b82f3c6"} Oct 09 15:02:57 crc kubenswrapper[4762]: I1009 15:02:57.708759 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Oct 09 15:02:57 crc kubenswrapper[4762]: I1009 15:02:57.709111 4762 scope.go:117] "RemoveContainer" containerID="6174ea5ec5632d883860f887d599d4d0a0eb2d5738dafd6f0378924506747e7c" Oct 09 15:02:57 crc kubenswrapper[4762]: I1009 15:02:57.709261 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Oct 09 15:02:57 crc kubenswrapper[4762]: I1009 15:02:57.713295 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Oct 09 15:02:57 crc kubenswrapper[4762]: I1009 15:02:57.752469 4762 scope.go:117] "RemoveContainer" containerID="2717941d1e60950bc731a8c9d6203cc9e2bc7873335de8476f87ce9e554301ea" Oct 09 15:02:57 crc kubenswrapper[4762]: I1009 15:02:57.763973 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Oct 09 15:02:57 crc kubenswrapper[4762]: I1009 15:02:57.815466 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Oct 09 15:02:57 crc kubenswrapper[4762]: I1009 15:02:57.834613 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Oct 09 15:02:57 crc kubenswrapper[4762]: E1009 15:02:57.835196 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="24951274-fd59-4f3d-bf68-ce86450e245e" containerName="cinder-api-log" Oct 09 15:02:57 crc kubenswrapper[4762]: I1009 15:02:57.835214 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="24951274-fd59-4f3d-bf68-ce86450e245e" containerName="cinder-api-log" Oct 09 15:02:57 crc kubenswrapper[4762]: E1009 15:02:57.835229 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="24951274-fd59-4f3d-bf68-ce86450e245e" containerName="cinder-api" Oct 09 15:02:57 crc kubenswrapper[4762]: I1009 15:02:57.835235 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="24951274-fd59-4f3d-bf68-ce86450e245e" containerName="cinder-api" Oct 09 15:02:57 crc kubenswrapper[4762]: I1009 15:02:57.835420 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="24951274-fd59-4f3d-bf68-ce86450e245e" containerName="cinder-api" Oct 09 15:02:57 crc kubenswrapper[4762]: I1009 15:02:57.835447 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="24951274-fd59-4f3d-bf68-ce86450e245e" containerName="cinder-api-log" Oct 09 15:02:57 crc kubenswrapper[4762]: I1009 15:02:57.836594 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Oct 09 15:02:57 crc kubenswrapper[4762]: I1009 15:02:57.842139 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Oct 09 15:02:57 crc kubenswrapper[4762]: I1009 15:02:57.842823 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Oct 09 15:02:57 crc kubenswrapper[4762]: I1009 15:02:57.843145 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Oct 09 15:02:57 crc kubenswrapper[4762]: I1009 15:02:57.845217 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Oct 09 15:02:57 crc kubenswrapper[4762]: I1009 15:02:57.878469 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Oct 09 15:02:57 crc kubenswrapper[4762]: I1009 15:02:57.956875 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/967cd183-8741-455b-ba80-eda15fdc1681-config-data-custom\") pod \"cinder-api-0\" (UID: \"967cd183-8741-455b-ba80-eda15fdc1681\") " pod="openstack/cinder-api-0" Oct 09 15:02:57 crc kubenswrapper[4762]: I1009 15:02:57.956960 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/967cd183-8741-455b-ba80-eda15fdc1681-config-data\") pod \"cinder-api-0\" (UID: \"967cd183-8741-455b-ba80-eda15fdc1681\") " pod="openstack/cinder-api-0" Oct 09 15:02:57 crc kubenswrapper[4762]: I1009 15:02:57.957037 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/967cd183-8741-455b-ba80-eda15fdc1681-logs\") pod \"cinder-api-0\" (UID: \"967cd183-8741-455b-ba80-eda15fdc1681\") " pod="openstack/cinder-api-0" Oct 09 15:02:57 crc kubenswrapper[4762]: I1009 15:02:57.957181 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tfc98\" (UniqueName: \"kubernetes.io/projected/967cd183-8741-455b-ba80-eda15fdc1681-kube-api-access-tfc98\") pod \"cinder-api-0\" (UID: \"967cd183-8741-455b-ba80-eda15fdc1681\") " pod="openstack/cinder-api-0" Oct 09 15:02:57 crc kubenswrapper[4762]: I1009 15:02:57.957302 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/967cd183-8741-455b-ba80-eda15fdc1681-etc-machine-id\") pod \"cinder-api-0\" (UID: \"967cd183-8741-455b-ba80-eda15fdc1681\") " pod="openstack/cinder-api-0" Oct 09 15:02:57 crc kubenswrapper[4762]: I1009 15:02:57.957331 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/967cd183-8741-455b-ba80-eda15fdc1681-scripts\") pod \"cinder-api-0\" (UID: \"967cd183-8741-455b-ba80-eda15fdc1681\") " pod="openstack/cinder-api-0" Oct 09 15:02:57 crc kubenswrapper[4762]: I1009 15:02:57.957469 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/967cd183-8741-455b-ba80-eda15fdc1681-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"967cd183-8741-455b-ba80-eda15fdc1681\") " pod="openstack/cinder-api-0" Oct 09 15:02:58 crc kubenswrapper[4762]: I1009 15:02:58.059430 4762 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/967cd183-8741-455b-ba80-eda15fdc1681-config-data\") pod \"cinder-api-0\" (UID: \"967cd183-8741-455b-ba80-eda15fdc1681\") " pod="openstack/cinder-api-0" Oct 09 15:02:58 crc kubenswrapper[4762]: I1009 15:02:58.059551 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/967cd183-8741-455b-ba80-eda15fdc1681-logs\") pod \"cinder-api-0\" (UID: \"967cd183-8741-455b-ba80-eda15fdc1681\") " pod="openstack/cinder-api-0" Oct 09 15:02:58 crc kubenswrapper[4762]: I1009 15:02:58.059617 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tfc98\" (UniqueName: \"kubernetes.io/projected/967cd183-8741-455b-ba80-eda15fdc1681-kube-api-access-tfc98\") pod \"cinder-api-0\" (UID: \"967cd183-8741-455b-ba80-eda15fdc1681\") " pod="openstack/cinder-api-0" Oct 09 15:02:58 crc kubenswrapper[4762]: I1009 15:02:58.059681 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/967cd183-8741-455b-ba80-eda15fdc1681-etc-machine-id\") pod \"cinder-api-0\" (UID: \"967cd183-8741-455b-ba80-eda15fdc1681\") " pod="openstack/cinder-api-0" Oct 09 15:02:58 crc kubenswrapper[4762]: I1009 15:02:58.059704 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/967cd183-8741-455b-ba80-eda15fdc1681-scripts\") pod \"cinder-api-0\" (UID: \"967cd183-8741-455b-ba80-eda15fdc1681\") " pod="openstack/cinder-api-0" Oct 09 15:02:58 crc kubenswrapper[4762]: I1009 15:02:58.059760 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/967cd183-8741-455b-ba80-eda15fdc1681-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"967cd183-8741-455b-ba80-eda15fdc1681\") " pod="openstack/cinder-api-0" Oct 09 15:02:58 crc kubenswrapper[4762]: I1009 15:02:58.059844 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/967cd183-8741-455b-ba80-eda15fdc1681-config-data-custom\") pod \"cinder-api-0\" (UID: \"967cd183-8741-455b-ba80-eda15fdc1681\") " pod="openstack/cinder-api-0" Oct 09 15:02:58 crc kubenswrapper[4762]: I1009 15:02:58.060862 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/967cd183-8741-455b-ba80-eda15fdc1681-logs\") pod \"cinder-api-0\" (UID: \"967cd183-8741-455b-ba80-eda15fdc1681\") " pod="openstack/cinder-api-0" Oct 09 15:02:58 crc kubenswrapper[4762]: I1009 15:02:58.060942 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/967cd183-8741-455b-ba80-eda15fdc1681-etc-machine-id\") pod \"cinder-api-0\" (UID: \"967cd183-8741-455b-ba80-eda15fdc1681\") " pod="openstack/cinder-api-0" Oct 09 15:02:58 crc kubenswrapper[4762]: I1009 15:02:58.065549 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/967cd183-8741-455b-ba80-eda15fdc1681-config-data\") pod \"cinder-api-0\" (UID: \"967cd183-8741-455b-ba80-eda15fdc1681\") " pod="openstack/cinder-api-0" Oct 09 15:02:58 crc kubenswrapper[4762]: I1009 15:02:58.066950 4762 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/967cd183-8741-455b-ba80-eda15fdc1681-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"967cd183-8741-455b-ba80-eda15fdc1681\") " pod="openstack/cinder-api-0" Oct 09 15:02:58 crc kubenswrapper[4762]: I1009 15:02:58.067213 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/967cd183-8741-455b-ba80-eda15fdc1681-config-data-custom\") pod \"cinder-api-0\" (UID: \"967cd183-8741-455b-ba80-eda15fdc1681\") " pod="openstack/cinder-api-0" Oct 09 15:02:58 crc kubenswrapper[4762]: I1009 15:02:58.075474 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/967cd183-8741-455b-ba80-eda15fdc1681-scripts\") pod \"cinder-api-0\" (UID: \"967cd183-8741-455b-ba80-eda15fdc1681\") " pod="openstack/cinder-api-0" Oct 09 15:02:58 crc kubenswrapper[4762]: I1009 15:02:58.094351 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tfc98\" (UniqueName: \"kubernetes.io/projected/967cd183-8741-455b-ba80-eda15fdc1681-kube-api-access-tfc98\") pod \"cinder-api-0\" (UID: \"967cd183-8741-455b-ba80-eda15fdc1681\") " pod="openstack/cinder-api-0" Oct 09 15:02:58 crc kubenswrapper[4762]: I1009 15:02:58.165456 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Oct 09 15:02:58 crc kubenswrapper[4762]: I1009 15:02:58.665201 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Oct 09 15:02:58 crc kubenswrapper[4762]: I1009 15:02:58.722104 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"967cd183-8741-455b-ba80-eda15fdc1681","Type":"ContainerStarted","Data":"0cbfab83324dab28d493d641a0d47f1407d70d147d6f805037d3d9a9cb2d4804"} Oct 09 15:02:58 crc kubenswrapper[4762]: I1009 15:02:58.725046 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Oct 09 15:02:58 crc kubenswrapper[4762]: I1009 15:02:58.982743 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="24951274-fd59-4f3d-bf68-ce86450e245e" path="/var/lib/kubelet/pods/24951274-fd59-4f3d-bf68-ce86450e245e/volumes" Oct 09 15:02:59 crc kubenswrapper[4762]: I1009 15:02:59.101015 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-volume-volume1-0" Oct 09 15:02:59 crc kubenswrapper[4762]: I1009 15:02:59.155504 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-backup-0" Oct 09 15:02:59 crc kubenswrapper[4762]: I1009 15:02:59.741741 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"967cd183-8741-455b-ba80-eda15fdc1681","Type":"ContainerStarted","Data":"51f4872acbf25204423f0df4947ea620a7d0d17ceb62cf2da8bf1bdf3012fe5d"} Oct 09 15:03:00 crc kubenswrapper[4762]: I1009 15:03:00.754228 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"967cd183-8741-455b-ba80-eda15fdc1681","Type":"ContainerStarted","Data":"7fdc41c3da613bc9d53b3127d1c27a1433da45ee47692776e3662a5a0e536107"} Oct 09 15:03:00 crc kubenswrapper[4762]: I1009 15:03:00.776267 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=3.776247326 podStartE2EDuration="3.776247326s" podCreationTimestamp="2025-10-09 15:02:57 +0000 UTC" 
firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 15:03:00.770386074 +0000 UTC m=+5856.544177113" watchObservedRunningTime="2025-10-09 15:03:00.776247326 +0000 UTC m=+5856.550038365" Oct 09 15:03:01 crc kubenswrapper[4762]: I1009 15:03:01.761058 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Oct 09 15:03:02 crc kubenswrapper[4762]: I1009 15:03:02.136083 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Oct 09 15:03:02 crc kubenswrapper[4762]: I1009 15:03:02.175143 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 09 15:03:02 crc kubenswrapper[4762]: I1009 15:03:02.769519 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="63697ab0-5220-4209-be75-2f136eb3ab6e" containerName="cinder-scheduler" containerID="cri-o://e664ae483552515155c932a87eb4d6193ca1b1bc2e3c625c35d81c7542806c47" gracePeriod=30 Oct 09 15:03:02 crc kubenswrapper[4762]: I1009 15:03:02.769612 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="63697ab0-5220-4209-be75-2f136eb3ab6e" containerName="probe" containerID="cri-o://44950fd867feccdab41e5b990b7e79fc81224978e5ea7a1958589b368048972d" gracePeriod=30 Oct 09 15:03:03 crc kubenswrapper[4762]: I1009 15:03:03.782453 4762 generic.go:334] "Generic (PLEG): container finished" podID="63697ab0-5220-4209-be75-2f136eb3ab6e" containerID="44950fd867feccdab41e5b990b7e79fc81224978e5ea7a1958589b368048972d" exitCode=0 Oct 09 15:03:03 crc kubenswrapper[4762]: I1009 15:03:03.782515 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"63697ab0-5220-4209-be75-2f136eb3ab6e","Type":"ContainerDied","Data":"44950fd867feccdab41e5b990b7e79fc81224978e5ea7a1958589b368048972d"} Oct 09 15:03:04 crc kubenswrapper[4762]: I1009 15:03:04.336487 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-volume-volume1-0" Oct 09 15:03:04 crc kubenswrapper[4762]: I1009 15:03:04.389603 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-backup-0" Oct 09 15:03:06 crc kubenswrapper[4762]: I1009 15:03:06.819752 4762 generic.go:334] "Generic (PLEG): container finished" podID="63697ab0-5220-4209-be75-2f136eb3ab6e" containerID="e664ae483552515155c932a87eb4d6193ca1b1bc2e3c625c35d81c7542806c47" exitCode=0 Oct 09 15:03:06 crc kubenswrapper[4762]: I1009 15:03:06.819851 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"63697ab0-5220-4209-be75-2f136eb3ab6e","Type":"ContainerDied","Data":"e664ae483552515155c932a87eb4d6193ca1b1bc2e3c625c35d81c7542806c47"} Oct 09 15:03:07 crc kubenswrapper[4762]: I1009 15:03:07.935541 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 09 15:03:08 crc kubenswrapper[4762]: I1009 15:03:08.092355 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9rgwz\" (UniqueName: \"kubernetes.io/projected/63697ab0-5220-4209-be75-2f136eb3ab6e-kube-api-access-9rgwz\") pod \"63697ab0-5220-4209-be75-2f136eb3ab6e\" (UID: \"63697ab0-5220-4209-be75-2f136eb3ab6e\") " Oct 09 15:03:08 crc kubenswrapper[4762]: I1009 15:03:08.092775 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/63697ab0-5220-4209-be75-2f136eb3ab6e-etc-machine-id\") pod \"63697ab0-5220-4209-be75-2f136eb3ab6e\" (UID: \"63697ab0-5220-4209-be75-2f136eb3ab6e\") " Oct 09 15:03:08 crc kubenswrapper[4762]: I1009 15:03:08.092906 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/63697ab0-5220-4209-be75-2f136eb3ab6e-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "63697ab0-5220-4209-be75-2f136eb3ab6e" (UID: "63697ab0-5220-4209-be75-2f136eb3ab6e"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 15:03:08 crc kubenswrapper[4762]: I1009 15:03:08.092924 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/63697ab0-5220-4209-be75-2f136eb3ab6e-config-data-custom\") pod \"63697ab0-5220-4209-be75-2f136eb3ab6e\" (UID: \"63697ab0-5220-4209-be75-2f136eb3ab6e\") " Oct 09 15:03:08 crc kubenswrapper[4762]: I1009 15:03:08.092953 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/63697ab0-5220-4209-be75-2f136eb3ab6e-combined-ca-bundle\") pod \"63697ab0-5220-4209-be75-2f136eb3ab6e\" (UID: \"63697ab0-5220-4209-be75-2f136eb3ab6e\") " Oct 09 15:03:08 crc kubenswrapper[4762]: I1009 15:03:08.093058 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/63697ab0-5220-4209-be75-2f136eb3ab6e-scripts\") pod \"63697ab0-5220-4209-be75-2f136eb3ab6e\" (UID: \"63697ab0-5220-4209-be75-2f136eb3ab6e\") " Oct 09 15:03:08 crc kubenswrapper[4762]: I1009 15:03:08.093117 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/63697ab0-5220-4209-be75-2f136eb3ab6e-config-data\") pod \"63697ab0-5220-4209-be75-2f136eb3ab6e\" (UID: \"63697ab0-5220-4209-be75-2f136eb3ab6e\") " Oct 09 15:03:08 crc kubenswrapper[4762]: I1009 15:03:08.093949 4762 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/63697ab0-5220-4209-be75-2f136eb3ab6e-etc-machine-id\") on node \"crc\" DevicePath \"\"" Oct 09 15:03:08 crc kubenswrapper[4762]: I1009 15:03:08.106756 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/63697ab0-5220-4209-be75-2f136eb3ab6e-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "63697ab0-5220-4209-be75-2f136eb3ab6e" (UID: "63697ab0-5220-4209-be75-2f136eb3ab6e"). InnerVolumeSpecName "config-data-custom". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:03:08 crc kubenswrapper[4762]: I1009 15:03:08.112457 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/63697ab0-5220-4209-be75-2f136eb3ab6e-kube-api-access-9rgwz" (OuterVolumeSpecName: "kube-api-access-9rgwz") pod "63697ab0-5220-4209-be75-2f136eb3ab6e" (UID: "63697ab0-5220-4209-be75-2f136eb3ab6e"). InnerVolumeSpecName "kube-api-access-9rgwz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 15:03:08 crc kubenswrapper[4762]: I1009 15:03:08.129939 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/63697ab0-5220-4209-be75-2f136eb3ab6e-scripts" (OuterVolumeSpecName: "scripts") pod "63697ab0-5220-4209-be75-2f136eb3ab6e" (UID: "63697ab0-5220-4209-be75-2f136eb3ab6e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:03:08 crc kubenswrapper[4762]: I1009 15:03:08.150345 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/63697ab0-5220-4209-be75-2f136eb3ab6e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "63697ab0-5220-4209-be75-2f136eb3ab6e" (UID: "63697ab0-5220-4209-be75-2f136eb3ab6e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:03:08 crc kubenswrapper[4762]: I1009 15:03:08.195705 4762 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/63697ab0-5220-4209-be75-2f136eb3ab6e-config-data-custom\") on node \"crc\" DevicePath \"\"" Oct 09 15:03:08 crc kubenswrapper[4762]: I1009 15:03:08.195745 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/63697ab0-5220-4209-be75-2f136eb3ab6e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 15:03:08 crc kubenswrapper[4762]: I1009 15:03:08.195758 4762 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/63697ab0-5220-4209-be75-2f136eb3ab6e-scripts\") on node \"crc\" DevicePath \"\"" Oct 09 15:03:08 crc kubenswrapper[4762]: I1009 15:03:08.195770 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9rgwz\" (UniqueName: \"kubernetes.io/projected/63697ab0-5220-4209-be75-2f136eb3ab6e-kube-api-access-9rgwz\") on node \"crc\" DevicePath \"\"" Oct 09 15:03:08 crc kubenswrapper[4762]: I1009 15:03:08.200085 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/63697ab0-5220-4209-be75-2f136eb3ab6e-config-data" (OuterVolumeSpecName: "config-data") pod "63697ab0-5220-4209-be75-2f136eb3ab6e" (UID: "63697ab0-5220-4209-be75-2f136eb3ab6e"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:03:08 crc kubenswrapper[4762]: I1009 15:03:08.298026 4762 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/63697ab0-5220-4209-be75-2f136eb3ab6e-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 15:03:08 crc kubenswrapper[4762]: I1009 15:03:08.850913 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"63697ab0-5220-4209-be75-2f136eb3ab6e","Type":"ContainerDied","Data":"25d24b3e9a382ff6c329dd2ab651e46a770494052588a02a0cc2290b856f2deb"} Oct 09 15:03:08 crc kubenswrapper[4762]: I1009 15:03:08.850974 4762 scope.go:117] "RemoveContainer" containerID="44950fd867feccdab41e5b990b7e79fc81224978e5ea7a1958589b368048972d" Oct 09 15:03:08 crc kubenswrapper[4762]: I1009 15:03:08.851129 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 09 15:03:08 crc kubenswrapper[4762]: I1009 15:03:08.903092 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 09 15:03:08 crc kubenswrapper[4762]: I1009 15:03:08.913568 4762 scope.go:117] "RemoveContainer" containerID="e664ae483552515155c932a87eb4d6193ca1b1bc2e3c625c35d81c7542806c47" Oct 09 15:03:08 crc kubenswrapper[4762]: I1009 15:03:08.921815 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 09 15:03:08 crc kubenswrapper[4762]: I1009 15:03:08.956544 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Oct 09 15:03:08 crc kubenswrapper[4762]: E1009 15:03:08.957008 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="63697ab0-5220-4209-be75-2f136eb3ab6e" containerName="cinder-scheduler" Oct 09 15:03:08 crc kubenswrapper[4762]: I1009 15:03:08.957021 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="63697ab0-5220-4209-be75-2f136eb3ab6e" containerName="cinder-scheduler" Oct 09 15:03:08 crc kubenswrapper[4762]: E1009 15:03:08.957042 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="63697ab0-5220-4209-be75-2f136eb3ab6e" containerName="probe" Oct 09 15:03:08 crc kubenswrapper[4762]: I1009 15:03:08.957048 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="63697ab0-5220-4209-be75-2f136eb3ab6e" containerName="probe" Oct 09 15:03:08 crc kubenswrapper[4762]: I1009 15:03:08.957349 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="63697ab0-5220-4209-be75-2f136eb3ab6e" containerName="cinder-scheduler" Oct 09 15:03:08 crc kubenswrapper[4762]: I1009 15:03:08.957374 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="63697ab0-5220-4209-be75-2f136eb3ab6e" containerName="probe" Oct 09 15:03:08 crc kubenswrapper[4762]: I1009 15:03:08.958825 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 09 15:03:08 crc kubenswrapper[4762]: I1009 15:03:08.961298 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Oct 09 15:03:08 crc kubenswrapper[4762]: I1009 15:03:08.983053 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="63697ab0-5220-4209-be75-2f136eb3ab6e" path="/var/lib/kubelet/pods/63697ab0-5220-4209-be75-2f136eb3ab6e/volumes" Oct 09 15:03:08 crc kubenswrapper[4762]: I1009 15:03:08.983734 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 09 15:03:09 crc kubenswrapper[4762]: I1009 15:03:09.118269 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2xwtr\" (UniqueName: \"kubernetes.io/projected/a98fcbc6-94cf-4c55-8cf2-79d469088e45-kube-api-access-2xwtr\") pod \"cinder-scheduler-0\" (UID: \"a98fcbc6-94cf-4c55-8cf2-79d469088e45\") " pod="openstack/cinder-scheduler-0" Oct 09 15:03:09 crc kubenswrapper[4762]: I1009 15:03:09.118308 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a98fcbc6-94cf-4c55-8cf2-79d469088e45-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"a98fcbc6-94cf-4c55-8cf2-79d469088e45\") " pod="openstack/cinder-scheduler-0" Oct 09 15:03:09 crc kubenswrapper[4762]: I1009 15:03:09.118345 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a98fcbc6-94cf-4c55-8cf2-79d469088e45-config-data\") pod \"cinder-scheduler-0\" (UID: \"a98fcbc6-94cf-4c55-8cf2-79d469088e45\") " pod="openstack/cinder-scheduler-0" Oct 09 15:03:09 crc kubenswrapper[4762]: I1009 15:03:09.118384 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a98fcbc6-94cf-4c55-8cf2-79d469088e45-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"a98fcbc6-94cf-4c55-8cf2-79d469088e45\") " pod="openstack/cinder-scheduler-0" Oct 09 15:03:09 crc kubenswrapper[4762]: I1009 15:03:09.118416 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a98fcbc6-94cf-4c55-8cf2-79d469088e45-scripts\") pod \"cinder-scheduler-0\" (UID: \"a98fcbc6-94cf-4c55-8cf2-79d469088e45\") " pod="openstack/cinder-scheduler-0" Oct 09 15:03:09 crc kubenswrapper[4762]: I1009 15:03:09.118465 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a98fcbc6-94cf-4c55-8cf2-79d469088e45-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"a98fcbc6-94cf-4c55-8cf2-79d469088e45\") " pod="openstack/cinder-scheduler-0" Oct 09 15:03:09 crc kubenswrapper[4762]: I1009 15:03:09.220593 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a98fcbc6-94cf-4c55-8cf2-79d469088e45-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"a98fcbc6-94cf-4c55-8cf2-79d469088e45\") " pod="openstack/cinder-scheduler-0" Oct 09 15:03:09 crc kubenswrapper[4762]: I1009 15:03:09.220974 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2xwtr\" (UniqueName: 
\"kubernetes.io/projected/a98fcbc6-94cf-4c55-8cf2-79d469088e45-kube-api-access-2xwtr\") pod \"cinder-scheduler-0\" (UID: \"a98fcbc6-94cf-4c55-8cf2-79d469088e45\") " pod="openstack/cinder-scheduler-0" Oct 09 15:03:09 crc kubenswrapper[4762]: I1009 15:03:09.222004 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a98fcbc6-94cf-4c55-8cf2-79d469088e45-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"a98fcbc6-94cf-4c55-8cf2-79d469088e45\") " pod="openstack/cinder-scheduler-0" Oct 09 15:03:09 crc kubenswrapper[4762]: I1009 15:03:09.222157 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a98fcbc6-94cf-4c55-8cf2-79d469088e45-config-data\") pod \"cinder-scheduler-0\" (UID: \"a98fcbc6-94cf-4c55-8cf2-79d469088e45\") " pod="openstack/cinder-scheduler-0" Oct 09 15:03:09 crc kubenswrapper[4762]: I1009 15:03:09.222309 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a98fcbc6-94cf-4c55-8cf2-79d469088e45-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"a98fcbc6-94cf-4c55-8cf2-79d469088e45\") " pod="openstack/cinder-scheduler-0" Oct 09 15:03:09 crc kubenswrapper[4762]: I1009 15:03:09.222441 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a98fcbc6-94cf-4c55-8cf2-79d469088e45-scripts\") pod \"cinder-scheduler-0\" (UID: \"a98fcbc6-94cf-4c55-8cf2-79d469088e45\") " pod="openstack/cinder-scheduler-0" Oct 09 15:03:09 crc kubenswrapper[4762]: I1009 15:03:09.227310 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a98fcbc6-94cf-4c55-8cf2-79d469088e45-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"a98fcbc6-94cf-4c55-8cf2-79d469088e45\") " pod="openstack/cinder-scheduler-0" Oct 09 15:03:09 crc kubenswrapper[4762]: I1009 15:03:09.232142 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a98fcbc6-94cf-4c55-8cf2-79d469088e45-config-data\") pod \"cinder-scheduler-0\" (UID: \"a98fcbc6-94cf-4c55-8cf2-79d469088e45\") " pod="openstack/cinder-scheduler-0" Oct 09 15:03:09 crc kubenswrapper[4762]: I1009 15:03:09.232311 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a98fcbc6-94cf-4c55-8cf2-79d469088e45-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"a98fcbc6-94cf-4c55-8cf2-79d469088e45\") " pod="openstack/cinder-scheduler-0" Oct 09 15:03:09 crc kubenswrapper[4762]: I1009 15:03:09.233417 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a98fcbc6-94cf-4c55-8cf2-79d469088e45-scripts\") pod \"cinder-scheduler-0\" (UID: \"a98fcbc6-94cf-4c55-8cf2-79d469088e45\") " pod="openstack/cinder-scheduler-0" Oct 09 15:03:09 crc kubenswrapper[4762]: I1009 15:03:09.234384 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a98fcbc6-94cf-4c55-8cf2-79d469088e45-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"a98fcbc6-94cf-4c55-8cf2-79d469088e45\") " pod="openstack/cinder-scheduler-0" Oct 09 15:03:09 crc kubenswrapper[4762]: I1009 15:03:09.243490 4762 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-2xwtr\" (UniqueName: \"kubernetes.io/projected/a98fcbc6-94cf-4c55-8cf2-79d469088e45-kube-api-access-2xwtr\") pod \"cinder-scheduler-0\" (UID: \"a98fcbc6-94cf-4c55-8cf2-79d469088e45\") " pod="openstack/cinder-scheduler-0" Oct 09 15:03:09 crc kubenswrapper[4762]: I1009 15:03:09.277549 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 09 15:03:09 crc kubenswrapper[4762]: I1009 15:03:09.744717 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 09 15:03:09 crc kubenswrapper[4762]: W1009 15:03:09.747492 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda98fcbc6_94cf_4c55_8cf2_79d469088e45.slice/crio-6edf674f3622d0778f16635b0ea612a77a61248e847dc1e260d9f9021713e4f6 WatchSource:0}: Error finding container 6edf674f3622d0778f16635b0ea612a77a61248e847dc1e260d9f9021713e4f6: Status 404 returned error can't find the container with id 6edf674f3622d0778f16635b0ea612a77a61248e847dc1e260d9f9021713e4f6 Oct 09 15:03:09 crc kubenswrapper[4762]: I1009 15:03:09.863610 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"a98fcbc6-94cf-4c55-8cf2-79d469088e45","Type":"ContainerStarted","Data":"6edf674f3622d0778f16635b0ea612a77a61248e847dc1e260d9f9021713e4f6"} Oct 09 15:03:10 crc kubenswrapper[4762]: I1009 15:03:10.134830 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Oct 09 15:03:10 crc kubenswrapper[4762]: I1009 15:03:10.878477 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"a98fcbc6-94cf-4c55-8cf2-79d469088e45","Type":"ContainerStarted","Data":"3623429a1c18603890f262fc9c09c0012b9a50be94628a98192f184eadccc01c"} Oct 09 15:03:11 crc kubenswrapper[4762]: I1009 15:03:11.892193 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"a98fcbc6-94cf-4c55-8cf2-79d469088e45","Type":"ContainerStarted","Data":"1b86c421309ee2a76eedfe2bd149c94cdc7fbf9948dc9d531cdf5141b22f0155"} Oct 09 15:03:11 crc kubenswrapper[4762]: I1009 15:03:11.912391 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=3.9123681169999998 podStartE2EDuration="3.912368117s" podCreationTimestamp="2025-10-09 15:03:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 15:03:11.907778717 +0000 UTC m=+5867.681569796" watchObservedRunningTime="2025-10-09 15:03:11.912368117 +0000 UTC m=+5867.686159166" Oct 09 15:03:14 crc kubenswrapper[4762]: I1009 15:03:14.277805 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Oct 09 15:03:19 crc kubenswrapper[4762]: I1009 15:03:19.486806 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Oct 09 15:03:35 crc kubenswrapper[4762]: I1009 15:03:35.062172 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-7bttc"] Oct 09 15:03:35 crc kubenswrapper[4762]: I1009 15:03:35.074952 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-7bttc"] Oct 09 15:03:36 crc kubenswrapper[4762]: I1009 15:03:36.983861 4762 kubelet_volumes.go:163] 
"Cleaned up orphaned pod volumes dir" podUID="5dcaedad-b38e-4fbb-ac6c-db2eea247d60" path="/var/lib/kubelet/pods/5dcaedad-b38e-4fbb-ac6c-db2eea247d60/volumes" Oct 09 15:03:45 crc kubenswrapper[4762]: I1009 15:03:45.038371 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-49cd-account-create-gn5rt"] Oct 09 15:03:45 crc kubenswrapper[4762]: I1009 15:03:45.047133 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-49cd-account-create-gn5rt"] Oct 09 15:03:46 crc kubenswrapper[4762]: I1009 15:03:46.979173 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fdba07a6-2fbb-454b-9d69-be7db43f4d3b" path="/var/lib/kubelet/pods/fdba07a6-2fbb-454b-9d69-be7db43f4d3b/volumes" Oct 09 15:03:51 crc kubenswrapper[4762]: I1009 15:03:51.032482 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-4ztzj"] Oct 09 15:03:51 crc kubenswrapper[4762]: I1009 15:03:51.062921 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-4ztzj"] Oct 09 15:03:52 crc kubenswrapper[4762]: I1009 15:03:52.974728 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5bf666c8-1160-4e16-9dc6-3ebcff957d3b" path="/var/lib/kubelet/pods/5bf666c8-1160-4e16-9dc6-3ebcff957d3b/volumes" Oct 09 15:04:08 crc kubenswrapper[4762]: I1009 15:04:08.034437 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-h2vz7"] Oct 09 15:04:08 crc kubenswrapper[4762]: I1009 15:04:08.046349 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-h2vz7"] Oct 09 15:04:08 crc kubenswrapper[4762]: I1009 15:04:08.976529 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="525c4acb-a6b7-41d8-88d3-228e8833a6cc" path="/var/lib/kubelet/pods/525c4acb-a6b7-41d8-88d3-228e8833a6cc/volumes" Oct 09 15:04:33 crc kubenswrapper[4762]: I1009 15:04:33.012519 4762 scope.go:117] "RemoveContainer" containerID="dd40c729d048bb87fb25830d2b9cd9b8a791b91dc3d2291420821cdc7671d43e" Oct 09 15:04:33 crc kubenswrapper[4762]: I1009 15:04:33.054706 4762 scope.go:117] "RemoveContainer" containerID="c542fb80f5c9fd84c36d89efb7ea6bf26e405f7f51f1f4db5c6cfa2fe8f17758" Oct 09 15:04:33 crc kubenswrapper[4762]: I1009 15:04:33.095555 4762 scope.go:117] "RemoveContainer" containerID="7e0f56b6876bdc835a0d3fcf23df65e00d7573a63f8e07a618a1dd4d25b7c338" Oct 09 15:04:33 crc kubenswrapper[4762]: I1009 15:04:33.141265 4762 scope.go:117] "RemoveContainer" containerID="1e54dee4e73901b9126d7b01c15f75cb3978e236c6017212be1ebd23df48dd5d" Oct 09 15:04:41 crc kubenswrapper[4762]: I1009 15:04:41.969456 4762 patch_prober.go:28] interesting pod/machine-config-daemon-5v6hv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 15:04:41 crc kubenswrapper[4762]: I1009 15:04:41.969997 4762 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 15:04:57 crc kubenswrapper[4762]: I1009 15:04:57.189658 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-jphs8"] Oct 09 15:04:57 crc kubenswrapper[4762]: I1009 
15:04:57.192894 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-jphs8" Oct 09 15:04:57 crc kubenswrapper[4762]: I1009 15:04:57.196319 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-622d9" Oct 09 15:04:57 crc kubenswrapper[4762]: I1009 15:04:57.196461 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts" Oct 09 15:04:57 crc kubenswrapper[4762]: I1009 15:04:57.199711 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-7pjmm"] Oct 09 15:04:57 crc kubenswrapper[4762]: I1009 15:04:57.202218 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-7pjmm" Oct 09 15:04:57 crc kubenswrapper[4762]: I1009 15:04:57.220653 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b14659e8-2771-4473-8ea7-a0d598c1030a-scripts\") pod \"ovn-controller-jphs8\" (UID: \"b14659e8-2771-4473-8ea7-a0d598c1030a\") " pod="openstack/ovn-controller-jphs8" Oct 09 15:04:57 crc kubenswrapper[4762]: I1009 15:04:57.220984 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/b14659e8-2771-4473-8ea7-a0d598c1030a-var-run\") pod \"ovn-controller-jphs8\" (UID: \"b14659e8-2771-4473-8ea7-a0d598c1030a\") " pod="openstack/ovn-controller-jphs8" Oct 09 15:04:57 crc kubenswrapper[4762]: I1009 15:04:57.221024 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lqr97\" (UniqueName: \"kubernetes.io/projected/b14659e8-2771-4473-8ea7-a0d598c1030a-kube-api-access-lqr97\") pod \"ovn-controller-jphs8\" (UID: \"b14659e8-2771-4473-8ea7-a0d598c1030a\") " pod="openstack/ovn-controller-jphs8" Oct 09 15:04:57 crc kubenswrapper[4762]: I1009 15:04:57.221122 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/b14659e8-2771-4473-8ea7-a0d598c1030a-var-log-ovn\") pod \"ovn-controller-jphs8\" (UID: \"b14659e8-2771-4473-8ea7-a0d598c1030a\") " pod="openstack/ovn-controller-jphs8" Oct 09 15:04:57 crc kubenswrapper[4762]: I1009 15:04:57.221318 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/b14659e8-2771-4473-8ea7-a0d598c1030a-var-run-ovn\") pod \"ovn-controller-jphs8\" (UID: \"b14659e8-2771-4473-8ea7-a0d598c1030a\") " pod="openstack/ovn-controller-jphs8" Oct 09 15:04:57 crc kubenswrapper[4762]: I1009 15:04:57.224321 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-jphs8"] Oct 09 15:04:57 crc kubenswrapper[4762]: I1009 15:04:57.236591 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-7pjmm"] Oct 09 15:04:57 crc kubenswrapper[4762]: I1009 15:04:57.322608 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-62z2g\" (UniqueName: \"kubernetes.io/projected/7f289c54-5049-4ded-b960-57885a9525dd-kube-api-access-62z2g\") pod \"ovn-controller-ovs-7pjmm\" (UID: \"7f289c54-5049-4ded-b960-57885a9525dd\") " pod="openstack/ovn-controller-ovs-7pjmm" Oct 09 15:04:57 crc kubenswrapper[4762]: I1009 15:04:57.322666 4762 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/7f289c54-5049-4ded-b960-57885a9525dd-etc-ovs\") pod \"ovn-controller-ovs-7pjmm\" (UID: \"7f289c54-5049-4ded-b960-57885a9525dd\") " pod="openstack/ovn-controller-ovs-7pjmm" Oct 09 15:04:57 crc kubenswrapper[4762]: I1009 15:04:57.322724 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7f289c54-5049-4ded-b960-57885a9525dd-scripts\") pod \"ovn-controller-ovs-7pjmm\" (UID: \"7f289c54-5049-4ded-b960-57885a9525dd\") " pod="openstack/ovn-controller-ovs-7pjmm" Oct 09 15:04:57 crc kubenswrapper[4762]: I1009 15:04:57.322813 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/b14659e8-2771-4473-8ea7-a0d598c1030a-var-run-ovn\") pod \"ovn-controller-jphs8\" (UID: \"b14659e8-2771-4473-8ea7-a0d598c1030a\") " pod="openstack/ovn-controller-jphs8" Oct 09 15:04:57 crc kubenswrapper[4762]: I1009 15:04:57.322843 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b14659e8-2771-4473-8ea7-a0d598c1030a-scripts\") pod \"ovn-controller-jphs8\" (UID: \"b14659e8-2771-4473-8ea7-a0d598c1030a\") " pod="openstack/ovn-controller-jphs8" Oct 09 15:04:57 crc kubenswrapper[4762]: I1009 15:04:57.322875 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/b14659e8-2771-4473-8ea7-a0d598c1030a-var-run\") pod \"ovn-controller-jphs8\" (UID: \"b14659e8-2771-4473-8ea7-a0d598c1030a\") " pod="openstack/ovn-controller-jphs8" Oct 09 15:04:57 crc kubenswrapper[4762]: I1009 15:04:57.322908 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lqr97\" (UniqueName: \"kubernetes.io/projected/b14659e8-2771-4473-8ea7-a0d598c1030a-kube-api-access-lqr97\") pod \"ovn-controller-jphs8\" (UID: \"b14659e8-2771-4473-8ea7-a0d598c1030a\") " pod="openstack/ovn-controller-jphs8" Oct 09 15:04:57 crc kubenswrapper[4762]: I1009 15:04:57.322934 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/7f289c54-5049-4ded-b960-57885a9525dd-var-run\") pod \"ovn-controller-ovs-7pjmm\" (UID: \"7f289c54-5049-4ded-b960-57885a9525dd\") " pod="openstack/ovn-controller-ovs-7pjmm" Oct 09 15:04:57 crc kubenswrapper[4762]: I1009 15:04:57.322988 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/7f289c54-5049-4ded-b960-57885a9525dd-var-lib\") pod \"ovn-controller-ovs-7pjmm\" (UID: \"7f289c54-5049-4ded-b960-57885a9525dd\") " pod="openstack/ovn-controller-ovs-7pjmm" Oct 09 15:04:57 crc kubenswrapper[4762]: I1009 15:04:57.323020 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/7f289c54-5049-4ded-b960-57885a9525dd-var-log\") pod \"ovn-controller-ovs-7pjmm\" (UID: \"7f289c54-5049-4ded-b960-57885a9525dd\") " pod="openstack/ovn-controller-ovs-7pjmm" Oct 09 15:04:57 crc kubenswrapper[4762]: I1009 15:04:57.323046 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: 
\"kubernetes.io/host-path/b14659e8-2771-4473-8ea7-a0d598c1030a-var-log-ovn\") pod \"ovn-controller-jphs8\" (UID: \"b14659e8-2771-4473-8ea7-a0d598c1030a\") " pod="openstack/ovn-controller-jphs8" Oct 09 15:04:57 crc kubenswrapper[4762]: I1009 15:04:57.323343 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/b14659e8-2771-4473-8ea7-a0d598c1030a-var-log-ovn\") pod \"ovn-controller-jphs8\" (UID: \"b14659e8-2771-4473-8ea7-a0d598c1030a\") " pod="openstack/ovn-controller-jphs8" Oct 09 15:04:57 crc kubenswrapper[4762]: I1009 15:04:57.323423 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/b14659e8-2771-4473-8ea7-a0d598c1030a-var-run-ovn\") pod \"ovn-controller-jphs8\" (UID: \"b14659e8-2771-4473-8ea7-a0d598c1030a\") " pod="openstack/ovn-controller-jphs8" Oct 09 15:04:57 crc kubenswrapper[4762]: I1009 15:04:57.323440 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/b14659e8-2771-4473-8ea7-a0d598c1030a-var-run\") pod \"ovn-controller-jphs8\" (UID: \"b14659e8-2771-4473-8ea7-a0d598c1030a\") " pod="openstack/ovn-controller-jphs8" Oct 09 15:04:57 crc kubenswrapper[4762]: I1009 15:04:57.325414 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b14659e8-2771-4473-8ea7-a0d598c1030a-scripts\") pod \"ovn-controller-jphs8\" (UID: \"b14659e8-2771-4473-8ea7-a0d598c1030a\") " pod="openstack/ovn-controller-jphs8" Oct 09 15:04:57 crc kubenswrapper[4762]: I1009 15:04:57.346601 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lqr97\" (UniqueName: \"kubernetes.io/projected/b14659e8-2771-4473-8ea7-a0d598c1030a-kube-api-access-lqr97\") pod \"ovn-controller-jphs8\" (UID: \"b14659e8-2771-4473-8ea7-a0d598c1030a\") " pod="openstack/ovn-controller-jphs8" Oct 09 15:04:57 crc kubenswrapper[4762]: I1009 15:04:57.424969 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-62z2g\" (UniqueName: \"kubernetes.io/projected/7f289c54-5049-4ded-b960-57885a9525dd-kube-api-access-62z2g\") pod \"ovn-controller-ovs-7pjmm\" (UID: \"7f289c54-5049-4ded-b960-57885a9525dd\") " pod="openstack/ovn-controller-ovs-7pjmm" Oct 09 15:04:57 crc kubenswrapper[4762]: I1009 15:04:57.425021 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/7f289c54-5049-4ded-b960-57885a9525dd-etc-ovs\") pod \"ovn-controller-ovs-7pjmm\" (UID: \"7f289c54-5049-4ded-b960-57885a9525dd\") " pod="openstack/ovn-controller-ovs-7pjmm" Oct 09 15:04:57 crc kubenswrapper[4762]: I1009 15:04:57.425041 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7f289c54-5049-4ded-b960-57885a9525dd-scripts\") pod \"ovn-controller-ovs-7pjmm\" (UID: \"7f289c54-5049-4ded-b960-57885a9525dd\") " pod="openstack/ovn-controller-ovs-7pjmm" Oct 09 15:04:57 crc kubenswrapper[4762]: I1009 15:04:57.425107 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/7f289c54-5049-4ded-b960-57885a9525dd-var-run\") pod \"ovn-controller-ovs-7pjmm\" (UID: \"7f289c54-5049-4ded-b960-57885a9525dd\") " pod="openstack/ovn-controller-ovs-7pjmm" Oct 09 15:04:57 crc kubenswrapper[4762]: I1009 15:04:57.425149 
4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/7f289c54-5049-4ded-b960-57885a9525dd-var-lib\") pod \"ovn-controller-ovs-7pjmm\" (UID: \"7f289c54-5049-4ded-b960-57885a9525dd\") " pod="openstack/ovn-controller-ovs-7pjmm"
Oct 09 15:04:57 crc kubenswrapper[4762]: I1009 15:04:57.425172 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/7f289c54-5049-4ded-b960-57885a9525dd-var-log\") pod \"ovn-controller-ovs-7pjmm\" (UID: \"7f289c54-5049-4ded-b960-57885a9525dd\") " pod="openstack/ovn-controller-ovs-7pjmm"
Oct 09 15:04:57 crc kubenswrapper[4762]: I1009 15:04:57.425323 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/7f289c54-5049-4ded-b960-57885a9525dd-var-lib\") pod \"ovn-controller-ovs-7pjmm\" (UID: \"7f289c54-5049-4ded-b960-57885a9525dd\") " pod="openstack/ovn-controller-ovs-7pjmm"
Oct 09 15:04:57 crc kubenswrapper[4762]: I1009 15:04:57.425326 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/7f289c54-5049-4ded-b960-57885a9525dd-var-run\") pod \"ovn-controller-ovs-7pjmm\" (UID: \"7f289c54-5049-4ded-b960-57885a9525dd\") " pod="openstack/ovn-controller-ovs-7pjmm"
Oct 09 15:04:57 crc kubenswrapper[4762]: I1009 15:04:57.425342 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/7f289c54-5049-4ded-b960-57885a9525dd-etc-ovs\") pod \"ovn-controller-ovs-7pjmm\" (UID: \"7f289c54-5049-4ded-b960-57885a9525dd\") " pod="openstack/ovn-controller-ovs-7pjmm"
Oct 09 15:04:57 crc kubenswrapper[4762]: I1009 15:04:57.425388 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/7f289c54-5049-4ded-b960-57885a9525dd-var-log\") pod \"ovn-controller-ovs-7pjmm\" (UID: \"7f289c54-5049-4ded-b960-57885a9525dd\") " pod="openstack/ovn-controller-ovs-7pjmm"
Oct 09 15:04:57 crc kubenswrapper[4762]: I1009 15:04:57.427396 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7f289c54-5049-4ded-b960-57885a9525dd-scripts\") pod \"ovn-controller-ovs-7pjmm\" (UID: \"7f289c54-5049-4ded-b960-57885a9525dd\") " pod="openstack/ovn-controller-ovs-7pjmm"
Oct 09 15:04:57 crc kubenswrapper[4762]: I1009 15:04:57.440161 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-62z2g\" (UniqueName: \"kubernetes.io/projected/7f289c54-5049-4ded-b960-57885a9525dd-kube-api-access-62z2g\") pod \"ovn-controller-ovs-7pjmm\" (UID: \"7f289c54-5049-4ded-b960-57885a9525dd\") " pod="openstack/ovn-controller-ovs-7pjmm"
Oct 09 15:04:57 crc kubenswrapper[4762]: I1009 15:04:57.533390 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-jphs8"
Oct 09 15:04:57 crc kubenswrapper[4762]: I1009 15:04:57.546831 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-7pjmm"
Oct 09 15:04:58 crc kubenswrapper[4762]: I1009 15:04:58.072078 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-jphs8"]
Oct 09 15:04:58 crc kubenswrapper[4762]: W1009 15:04:58.400168 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7f289c54_5049_4ded_b960_57885a9525dd.slice/crio-dcd2149a825f389b71870f0c5930a62217057f516f3d347190dc34b7a86b3962 WatchSource:0}: Error finding container dcd2149a825f389b71870f0c5930a62217057f516f3d347190dc34b7a86b3962: Status 404 returned error can't find the container with id dcd2149a825f389b71870f0c5930a62217057f516f3d347190dc34b7a86b3962
Oct 09 15:04:58 crc kubenswrapper[4762]: I1009 15:04:58.403798 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-7pjmm"]
Oct 09 15:04:58 crc kubenswrapper[4762]: I1009 15:04:58.862844 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-tcrqk"]
Oct 09 15:04:58 crc kubenswrapper[4762]: I1009 15:04:58.864413 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-tcrqk"
Oct 09 15:04:58 crc kubenswrapper[4762]: I1009 15:04:58.874531 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config"
Oct 09 15:04:58 crc kubenswrapper[4762]: I1009 15:04:58.876297 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-tcrqk"]
Oct 09 15:04:58 crc kubenswrapper[4762]: I1009 15:04:58.930908 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-7pjmm" event={"ID":"7f289c54-5049-4ded-b960-57885a9525dd","Type":"ContainerStarted","Data":"30a1c33a7d4083808d38f412ee735e25f51b30cc7ba10d3d9fbb20556dd97407"}
Oct 09 15:04:58 crc kubenswrapper[4762]: I1009 15:04:58.930961 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-7pjmm" event={"ID":"7f289c54-5049-4ded-b960-57885a9525dd","Type":"ContainerStarted","Data":"dcd2149a825f389b71870f0c5930a62217057f516f3d347190dc34b7a86b3962"}
Oct 09 15:04:58 crc kubenswrapper[4762]: I1009 15:04:58.933677 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-jphs8" event={"ID":"b14659e8-2771-4473-8ea7-a0d598c1030a","Type":"ContainerStarted","Data":"f69ce8ec224d4192050b29e765ec3f9f38884759d9b0ceb808f93703f059e7fb"}
Oct 09 15:04:58 crc kubenswrapper[4762]: I1009 15:04:58.933722 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-jphs8" event={"ID":"b14659e8-2771-4473-8ea7-a0d598c1030a","Type":"ContainerStarted","Data":"03b8844ca1bd99bcf43c027da20e7365c7224745779b2f008d7998ffce293058"}
Oct 09 15:04:58 crc kubenswrapper[4762]: I1009 15:04:58.934022 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-jphs8"
Oct 09 15:04:58 crc kubenswrapper[4762]: I1009 15:04:58.972066 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-jphs8" podStartSLOduration=1.9720454699999999 podStartE2EDuration="1.97204547s" podCreationTimestamp="2025-10-09 15:04:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 15:04:58.967599084 +0000 UTC m=+5974.741390153" watchObservedRunningTime="2025-10-09 15:04:58.97204547 +0000 UTC m=+5974.745836509"
Oct 09 15:04:59 crc kubenswrapper[4762]: I1009 15:04:59.055804 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/347ebf92-b7d3-4407-b556-afe6c5121d88-ovn-rundir\") pod \"ovn-controller-metrics-tcrqk\" (UID: \"347ebf92-b7d3-4407-b556-afe6c5121d88\") " pod="openstack/ovn-controller-metrics-tcrqk"
Oct 09 15:04:59 crc kubenswrapper[4762]: I1009 15:04:59.055880 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x4gbk\" (UniqueName: \"kubernetes.io/projected/347ebf92-b7d3-4407-b556-afe6c5121d88-kube-api-access-x4gbk\") pod \"ovn-controller-metrics-tcrqk\" (UID: \"347ebf92-b7d3-4407-b556-afe6c5121d88\") " pod="openstack/ovn-controller-metrics-tcrqk"
Oct 09 15:04:59 crc kubenswrapper[4762]: I1009 15:04:59.056071 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/347ebf92-b7d3-4407-b556-afe6c5121d88-ovs-rundir\") pod \"ovn-controller-metrics-tcrqk\" (UID: \"347ebf92-b7d3-4407-b556-afe6c5121d88\") " pod="openstack/ovn-controller-metrics-tcrqk"
Oct 09 15:04:59 crc kubenswrapper[4762]: I1009 15:04:59.056117 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/347ebf92-b7d3-4407-b556-afe6c5121d88-config\") pod \"ovn-controller-metrics-tcrqk\" (UID: \"347ebf92-b7d3-4407-b556-afe6c5121d88\") " pod="openstack/ovn-controller-metrics-tcrqk"
Oct 09 15:04:59 crc kubenswrapper[4762]: I1009 15:04:59.157457 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/347ebf92-b7d3-4407-b556-afe6c5121d88-config\") pod \"ovn-controller-metrics-tcrqk\" (UID: \"347ebf92-b7d3-4407-b556-afe6c5121d88\") " pod="openstack/ovn-controller-metrics-tcrqk"
Oct 09 15:04:59 crc kubenswrapper[4762]: I1009 15:04:59.157521 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/347ebf92-b7d3-4407-b556-afe6c5121d88-ovn-rundir\") pod \"ovn-controller-metrics-tcrqk\" (UID: \"347ebf92-b7d3-4407-b556-afe6c5121d88\") " pod="openstack/ovn-controller-metrics-tcrqk"
Oct 09 15:04:59 crc kubenswrapper[4762]: I1009 15:04:59.157573 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x4gbk\" (UniqueName: \"kubernetes.io/projected/347ebf92-b7d3-4407-b556-afe6c5121d88-kube-api-access-x4gbk\") pod \"ovn-controller-metrics-tcrqk\" (UID: \"347ebf92-b7d3-4407-b556-afe6c5121d88\") " pod="openstack/ovn-controller-metrics-tcrqk"
Oct 09 15:04:59 crc kubenswrapper[4762]: I1009 15:04:59.157731 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/347ebf92-b7d3-4407-b556-afe6c5121d88-ovs-rundir\") pod \"ovn-controller-metrics-tcrqk\" (UID: \"347ebf92-b7d3-4407-b556-afe6c5121d88\") " pod="openstack/ovn-controller-metrics-tcrqk"
Oct 09 15:04:59 crc kubenswrapper[4762]: I1009 15:04:59.157889 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/347ebf92-b7d3-4407-b556-afe6c5121d88-ovn-rundir\") pod \"ovn-controller-metrics-tcrqk\" (UID: \"347ebf92-b7d3-4407-b556-afe6c5121d88\") " pod="openstack/ovn-controller-metrics-tcrqk"
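[editor's note] The reconciler entries above trace the kubelet volume lifecycle for openstack/ovn-controller-metrics-tcrqk: VerifyControllerAttachedVolume (reconciler_common.go:245), then MountVolume started (reconciler_common.go:218), then MountVolume.SetUp succeeded (operation_generator.go:637), covering two host-path volumes (ovn-rundir, ovs-rundir), one configmap volume (config), and a projected service-account token (kube-api-access-x4gbk). A minimal Go sketch of a volume list that would drive this sequence, built with the upstream k8s.io/api/core/v1 types; the host paths are assumptions for illustration only, since the log records just the volume names, while the configmap name matches the reflector cache line logged at 15:04:58.874531:

package main

import (
	"fmt"

	corev1 "k8s.io/api/core/v1"
)

func main() {
	dirType := corev1.HostPathDirectory
	volumes := []corev1.Volume{
		// Host-path volumes named in the reconciler log; the concrete
		// paths below are assumed, they do not appear in the log itself.
		{Name: "ovn-rundir", VolumeSource: corev1.VolumeSource{
			HostPath: &corev1.HostPathVolumeSource{Path: "/var/run/ovn", Type: &dirType},
		}},
		{Name: "ovs-rundir", VolumeSource: corev1.VolumeSource{
			HostPath: &corev1.HostPathVolumeSource{Path: "/var/run/openvswitch", Type: &dirType},
		}},
		// ConfigMap-backed volume; the name matches the "Caches populated
		// for *v1.ConfigMap" entry above.
		{Name: "config", VolumeSource: corev1.VolumeSource{
			ConfigMap: &corev1.ConfigMapVolumeSource{
				LocalObjectReference: corev1.LocalObjectReference{Name: "ovncontroller-metrics-config"},
			},
		}},
	}
	// kube-api-access-* projected token volumes are injected by the API
	// server rather than declared in the pod spec, so none is listed here.
	for _, v := range volumes {
		fmt.Println("declared volume:", v.Name)
	}
}

Each declared volume then shows up in the log exactly once per lifecycle step, which is why the same UniqueName string recurs across the three message types.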
Oct 09 15:04:59 crc kubenswrapper[4762]: I1009 15:04:59.157927 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/347ebf92-b7d3-4407-b556-afe6c5121d88-ovs-rundir\") pod \"ovn-controller-metrics-tcrqk\" (UID: \"347ebf92-b7d3-4407-b556-afe6c5121d88\") " pod="openstack/ovn-controller-metrics-tcrqk"
Oct 09 15:04:59 crc kubenswrapper[4762]: I1009 15:04:59.158239 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/347ebf92-b7d3-4407-b556-afe6c5121d88-config\") pod \"ovn-controller-metrics-tcrqk\" (UID: \"347ebf92-b7d3-4407-b556-afe6c5121d88\") " pod="openstack/ovn-controller-metrics-tcrqk"
Oct 09 15:04:59 crc kubenswrapper[4762]: I1009 15:04:59.181384 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x4gbk\" (UniqueName: \"kubernetes.io/projected/347ebf92-b7d3-4407-b556-afe6c5121d88-kube-api-access-x4gbk\") pod \"ovn-controller-metrics-tcrqk\" (UID: \"347ebf92-b7d3-4407-b556-afe6c5121d88\") " pod="openstack/ovn-controller-metrics-tcrqk"
Oct 09 15:04:59 crc kubenswrapper[4762]: I1009 15:04:59.190288 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-tcrqk"
Oct 09 15:04:59 crc kubenswrapper[4762]: I1009 15:04:59.669160 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-db-create-9hzrj"]
Oct 09 15:04:59 crc kubenswrapper[4762]: I1009 15:04:59.670899 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-db-create-9hzrj"
Oct 09 15:04:59 crc kubenswrapper[4762]: I1009 15:04:59.676568 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-db-create-9hzrj"]
Oct 09 15:04:59 crc kubenswrapper[4762]: I1009 15:04:59.755761 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-tcrqk"]
Oct 09 15:04:59 crc kubenswrapper[4762]: I1009 15:04:59.778018 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vzmnk\" (UniqueName: \"kubernetes.io/projected/ada7d5e9-569f-48b8-94c9-f7bad43412d2-kube-api-access-vzmnk\") pod \"octavia-db-create-9hzrj\" (UID: \"ada7d5e9-569f-48b8-94c9-f7bad43412d2\") " pod="openstack/octavia-db-create-9hzrj"
Oct 09 15:04:59 crc kubenswrapper[4762]: I1009 15:04:59.879512 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vzmnk\" (UniqueName: \"kubernetes.io/projected/ada7d5e9-569f-48b8-94c9-f7bad43412d2-kube-api-access-vzmnk\") pod \"octavia-db-create-9hzrj\" (UID: \"ada7d5e9-569f-48b8-94c9-f7bad43412d2\") " pod="openstack/octavia-db-create-9hzrj"
Oct 09 15:04:59 crc kubenswrapper[4762]: I1009 15:04:59.903744 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vzmnk\" (UniqueName: \"kubernetes.io/projected/ada7d5e9-569f-48b8-94c9-f7bad43412d2-kube-api-access-vzmnk\") pod \"octavia-db-create-9hzrj\" (UID: \"ada7d5e9-569f-48b8-94c9-f7bad43412d2\") " pod="openstack/octavia-db-create-9hzrj"
Oct 09 15:04:59 crc kubenswrapper[4762]: I1009 15:04:59.955236 4762 generic.go:334] "Generic (PLEG): container finished" podID="7f289c54-5049-4ded-b960-57885a9525dd" containerID="30a1c33a7d4083808d38f412ee735e25f51b30cc7ba10d3d9fbb20556dd97407" exitCode=0
Oct 09 15:04:59 crc kubenswrapper[4762]: I1009 15:04:59.955301 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-7pjmm" event={"ID":"7f289c54-5049-4ded-b960-57885a9525dd","Type":"ContainerDied","Data":"30a1c33a7d4083808d38f412ee735e25f51b30cc7ba10d3d9fbb20556dd97407"}
Oct 09 15:04:59 crc kubenswrapper[4762]: I1009 15:04:59.963398 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-tcrqk" event={"ID":"347ebf92-b7d3-4407-b556-afe6c5121d88","Type":"ContainerStarted","Data":"4ebc2ecd24e02c09928e4816af9937bf9a00277ce51e31307e0f2b9d900d38cc"}
Oct 09 15:05:00 crc kubenswrapper[4762]: I1009 15:05:00.006241 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-db-create-9hzrj"
Oct 09 15:05:00 crc kubenswrapper[4762]: I1009 15:05:00.515982 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-db-create-9hzrj"]
Oct 09 15:05:00 crc kubenswrapper[4762]: W1009 15:05:00.526147 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podada7d5e9_569f_48b8_94c9_f7bad43412d2.slice/crio-fdf1732c2120cc1697b4dd4d7c7ef8dc37d1ce791b7aa0ce6861e559a68e7d28 WatchSource:0}: Error finding container fdf1732c2120cc1697b4dd4d7c7ef8dc37d1ce791b7aa0ce6861e559a68e7d28: Status 404 returned error can't find the container with id fdf1732c2120cc1697b4dd4d7c7ef8dc37d1ce791b7aa0ce6861e559a68e7d28
Oct 09 15:05:00 crc kubenswrapper[4762]: I1009 15:05:00.984651 4762 generic.go:334] "Generic (PLEG): container finished" podID="ada7d5e9-569f-48b8-94c9-f7bad43412d2" containerID="ea479bf12a2cda7843b77ba1f296d0d80aa847a30a5992e10468e82257f7b79d" exitCode=0
Oct 09 15:05:00 crc kubenswrapper[4762]: I1009 15:05:00.995511 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-db-create-9hzrj" event={"ID":"ada7d5e9-569f-48b8-94c9-f7bad43412d2","Type":"ContainerDied","Data":"ea479bf12a2cda7843b77ba1f296d0d80aa847a30a5992e10468e82257f7b79d"}
Oct 09 15:05:00 crc kubenswrapper[4762]: I1009 15:05:00.995565 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-7pjmm"
Oct 09 15:05:00 crc kubenswrapper[4762]: I1009 15:05:00.995576 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-db-create-9hzrj" event={"ID":"ada7d5e9-569f-48b8-94c9-f7bad43412d2","Type":"ContainerStarted","Data":"fdf1732c2120cc1697b4dd4d7c7ef8dc37d1ce791b7aa0ce6861e559a68e7d28"}
Oct 09 15:05:00 crc kubenswrapper[4762]: I1009 15:05:00.995587 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-7pjmm" event={"ID":"7f289c54-5049-4ded-b960-57885a9525dd","Type":"ContainerStarted","Data":"9bc4890b58ccfea59b0788ff351446ceb3377f911c3662f7fb1f7aeb2bc7509e"}
Oct 09 15:05:00 crc kubenswrapper[4762]: I1009 15:05:00.995597 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-7pjmm" event={"ID":"7f289c54-5049-4ded-b960-57885a9525dd","Type":"ContainerStarted","Data":"908b393c1b5267de5be168f2a589c0b39151227af347d2c254f84027eb6c4ca5"}
Oct 09 15:05:00 crc kubenswrapper[4762]: I1009 15:05:00.995606 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-7pjmm"
Oct 09 15:05:00 crc kubenswrapper[4762]: I1009 15:05:00.995896 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-tcrqk" event={"ID":"347ebf92-b7d3-4407-b556-afe6c5121d88","Type":"ContainerStarted","Data":"49ebf00bc4db1ec7b9b2115b3d23ad5a84f1fb82e3fd1aacbb991db8e975cf14"}
Oct 09 15:05:01 crc kubenswrapper[4762]: I1009 15:05:01.020505 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-7pjmm" podStartSLOduration=4.020487249 podStartE2EDuration="4.020487249s" podCreationTimestamp="2025-10-09 15:04:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 15:05:01.018729864 +0000 UTC m=+5976.792520913" watchObservedRunningTime="2025-10-09 15:05:01.020487249 +0000 UTC m=+5976.794278278"
Oct 09 15:05:01 crc kubenswrapper[4762]: I1009 15:05:01.037970 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-tcrqk" podStartSLOduration=3.037949063 podStartE2EDuration="3.037949063s" podCreationTimestamp="2025-10-09 15:04:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 15:05:01.033791036 +0000 UTC m=+5976.807582085" watchObservedRunningTime="2025-10-09 15:05:01.037949063 +0000 UTC m=+5976.811740112"
Oct 09 15:05:02 crc kubenswrapper[4762]: I1009 15:05:02.308431 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-db-create-9hzrj"
Oct 09 15:05:02 crc kubenswrapper[4762]: I1009 15:05:02.429274 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vzmnk\" (UniqueName: \"kubernetes.io/projected/ada7d5e9-569f-48b8-94c9-f7bad43412d2-kube-api-access-vzmnk\") pod \"ada7d5e9-569f-48b8-94c9-f7bad43412d2\" (UID: \"ada7d5e9-569f-48b8-94c9-f7bad43412d2\") "
Oct 09 15:05:02 crc kubenswrapper[4762]: I1009 15:05:02.439009 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ada7d5e9-569f-48b8-94c9-f7bad43412d2-kube-api-access-vzmnk" (OuterVolumeSpecName: "kube-api-access-vzmnk") pod "ada7d5e9-569f-48b8-94c9-f7bad43412d2" (UID: "ada7d5e9-569f-48b8-94c9-f7bad43412d2"). InnerVolumeSpecName "kube-api-access-vzmnk". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 09 15:05:02 crc kubenswrapper[4762]: I1009 15:05:02.532012 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vzmnk\" (UniqueName: \"kubernetes.io/projected/ada7d5e9-569f-48b8-94c9-f7bad43412d2-kube-api-access-vzmnk\") on node \"crc\" DevicePath \"\""
Oct 09 15:05:03 crc kubenswrapper[4762]: I1009 15:05:03.013554 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-db-create-9hzrj" event={"ID":"ada7d5e9-569f-48b8-94c9-f7bad43412d2","Type":"ContainerDied","Data":"fdf1732c2120cc1697b4dd4d7c7ef8dc37d1ce791b7aa0ce6861e559a68e7d28"}
Oct 09 15:05:03 crc kubenswrapper[4762]: I1009 15:05:03.013824 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-db-create-9hzrj"
Oct 09 15:05:03 crc kubenswrapper[4762]: I1009 15:05:03.013839 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fdf1732c2120cc1697b4dd4d7c7ef8dc37d1ce791b7aa0ce6861e559a68e7d28"
Oct 09 15:05:11 crc kubenswrapper[4762]: I1009 15:05:11.969416 4762 patch_prober.go:28] interesting pod/machine-config-daemon-5v6hv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 09 15:05:11 crc kubenswrapper[4762]: I1009 15:05:11.970016 4762 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 09 15:05:12 crc kubenswrapper[4762]: I1009 15:05:12.813684 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-cc4e-account-create-twt7d"]
Oct 09 15:05:12 crc kubenswrapper[4762]: E1009 15:05:12.814218 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ada7d5e9-569f-48b8-94c9-f7bad43412d2" containerName="mariadb-database-create"
Oct 09 15:05:12 crc kubenswrapper[4762]: I1009 15:05:12.814243 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="ada7d5e9-569f-48b8-94c9-f7bad43412d2" containerName="mariadb-database-create"
Oct 09 15:05:12 crc kubenswrapper[4762]: I1009 15:05:12.814510 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="ada7d5e9-569f-48b8-94c9-f7bad43412d2" containerName="mariadb-database-create"
Oct 09 15:05:12 crc kubenswrapper[4762]: I1009 15:05:12.815364 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-cc4e-account-create-twt7d"
Oct 09 15:05:12 crc kubenswrapper[4762]: I1009 15:05:12.818544 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-db-secret"
Oct 09 15:05:12 crc kubenswrapper[4762]: I1009 15:05:12.822974 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-cc4e-account-create-twt7d"]
Oct 09 15:05:12 crc kubenswrapper[4762]: I1009 15:05:12.936241 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z9kxn\" (UniqueName: \"kubernetes.io/projected/a6762a3f-8854-4975-a726-e82b92256895-kube-api-access-z9kxn\") pod \"octavia-cc4e-account-create-twt7d\" (UID: \"a6762a3f-8854-4975-a726-e82b92256895\") " pod="openstack/octavia-cc4e-account-create-twt7d"
Oct 09 15:05:13 crc kubenswrapper[4762]: I1009 15:05:13.039403 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z9kxn\" (UniqueName: \"kubernetes.io/projected/a6762a3f-8854-4975-a726-e82b92256895-kube-api-access-z9kxn\") pod \"octavia-cc4e-account-create-twt7d\" (UID: \"a6762a3f-8854-4975-a726-e82b92256895\") " pod="openstack/octavia-cc4e-account-create-twt7d"
Oct 09 15:05:13 crc kubenswrapper[4762]: I1009 15:05:13.061164 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z9kxn\" (UniqueName: \"kubernetes.io/projected/a6762a3f-8854-4975-a726-e82b92256895-kube-api-access-z9kxn\") pod \"octavia-cc4e-account-create-twt7d\" (UID: \"a6762a3f-8854-4975-a726-e82b92256895\") " pod="openstack/octavia-cc4e-account-create-twt7d"
Oct 09 15:05:13 crc kubenswrapper[4762]: I1009 15:05:13.148077 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-cc4e-account-create-twt7d"
Oct 09 15:05:13 crc kubenswrapper[4762]: I1009 15:05:13.623882 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-cc4e-account-create-twt7d"]
Oct 09 15:05:14 crc kubenswrapper[4762]: I1009 15:05:14.139131 4762 generic.go:334] "Generic (PLEG): container finished" podID="a6762a3f-8854-4975-a726-e82b92256895" containerID="c37576a22e1fd9a21099d056236bb751b369c2df294fb807b7ba3ffd2299cfd2" exitCode=0
Oct 09 15:05:14 crc kubenswrapper[4762]: I1009 15:05:14.139189 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-cc4e-account-create-twt7d" event={"ID":"a6762a3f-8854-4975-a726-e82b92256895","Type":"ContainerDied","Data":"c37576a22e1fd9a21099d056236bb751b369c2df294fb807b7ba3ffd2299cfd2"}
Oct 09 15:05:14 crc kubenswrapper[4762]: I1009 15:05:14.139234 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-cc4e-account-create-twt7d" event={"ID":"a6762a3f-8854-4975-a726-e82b92256895","Type":"ContainerStarted","Data":"1c5536aa2f9d9fe305c3faf3c9ab4f46072d7a2e17d59d1182dfd4d792630b07"}
Oct 09 15:05:15 crc kubenswrapper[4762]: I1009 15:05:15.446205 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-cc4e-account-create-twt7d"
Oct 09 15:05:15 crc kubenswrapper[4762]: I1009 15:05:15.589998 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z9kxn\" (UniqueName: \"kubernetes.io/projected/a6762a3f-8854-4975-a726-e82b92256895-kube-api-access-z9kxn\") pod \"a6762a3f-8854-4975-a726-e82b92256895\" (UID: \"a6762a3f-8854-4975-a726-e82b92256895\") "
Oct 09 15:05:15 crc kubenswrapper[4762]: I1009 15:05:15.594844 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a6762a3f-8854-4975-a726-e82b92256895-kube-api-access-z9kxn" (OuterVolumeSpecName: "kube-api-access-z9kxn") pod "a6762a3f-8854-4975-a726-e82b92256895" (UID: "a6762a3f-8854-4975-a726-e82b92256895"). InnerVolumeSpecName "kube-api-access-z9kxn". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 09 15:05:15 crc kubenswrapper[4762]: I1009 15:05:15.692996 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z9kxn\" (UniqueName: \"kubernetes.io/projected/a6762a3f-8854-4975-a726-e82b92256895-kube-api-access-z9kxn\") on node \"crc\" DevicePath \"\""
Oct 09 15:05:16 crc kubenswrapper[4762]: I1009 15:05:16.158300 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-cc4e-account-create-twt7d" event={"ID":"a6762a3f-8854-4975-a726-e82b92256895","Type":"ContainerDied","Data":"1c5536aa2f9d9fe305c3faf3c9ab4f46072d7a2e17d59d1182dfd4d792630b07"}
Oct 09 15:05:16 crc kubenswrapper[4762]: I1009 15:05:16.158360 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-cc4e-account-create-twt7d"
Oct 09 15:05:16 crc kubenswrapper[4762]: I1009 15:05:16.158375 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1c5536aa2f9d9fe305c3faf3c9ab4f46072d7a2e17d59d1182dfd4d792630b07"
Oct 09 15:05:18 crc kubenswrapper[4762]: I1009 15:05:18.882708 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-persistence-db-create-nc8ls"]
Oct 09 15:05:18 crc kubenswrapper[4762]: E1009 15:05:18.884482 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6762a3f-8854-4975-a726-e82b92256895" containerName="mariadb-account-create"
Oct 09 15:05:18 crc kubenswrapper[4762]: I1009 15:05:18.884516 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6762a3f-8854-4975-a726-e82b92256895" containerName="mariadb-account-create"
Oct 09 15:05:18 crc kubenswrapper[4762]: I1009 15:05:18.885327 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="a6762a3f-8854-4975-a726-e82b92256895" containerName="mariadb-account-create"
Oct 09 15:05:18 crc kubenswrapper[4762]: I1009 15:05:18.886703 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-persistence-db-create-nc8ls"
Oct 09 15:05:18 crc kubenswrapper[4762]: I1009 15:05:18.962239 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kfb2j\" (UniqueName: \"kubernetes.io/projected/a6e5304c-1aea-4e8c-9211-0f001f520cbe-kube-api-access-kfb2j\") pod \"octavia-persistence-db-create-nc8ls\" (UID: \"a6e5304c-1aea-4e8c-9211-0f001f520cbe\") " pod="openstack/octavia-persistence-db-create-nc8ls"
Oct 09 15:05:19 crc kubenswrapper[4762]: I1009 15:05:19.012423 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-persistence-db-create-nc8ls"]
Oct 09 15:05:19 crc kubenswrapper[4762]: I1009 15:05:19.065015 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kfb2j\" (UniqueName: \"kubernetes.io/projected/a6e5304c-1aea-4e8c-9211-0f001f520cbe-kube-api-access-kfb2j\") pod \"octavia-persistence-db-create-nc8ls\" (UID: \"a6e5304c-1aea-4e8c-9211-0f001f520cbe\") " pod="openstack/octavia-persistence-db-create-nc8ls"
Oct 09 15:05:19 crc kubenswrapper[4762]: I1009 15:05:19.091444 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kfb2j\" (UniqueName: \"kubernetes.io/projected/a6e5304c-1aea-4e8c-9211-0f001f520cbe-kube-api-access-kfb2j\") pod \"octavia-persistence-db-create-nc8ls\" (UID: \"a6e5304c-1aea-4e8c-9211-0f001f520cbe\") " pod="openstack/octavia-persistence-db-create-nc8ls"
Oct 09 15:05:19 crc kubenswrapper[4762]: I1009 15:05:19.228379 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-persistence-db-create-nc8ls"
Oct 09 15:05:19 crc kubenswrapper[4762]: W1009 15:05:19.713876 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda6e5304c_1aea_4e8c_9211_0f001f520cbe.slice/crio-4bbb80b39686493a2daadd604b0e097328e9f7b3f20d162807fd2ba649b5d994 WatchSource:0}: Error finding container 4bbb80b39686493a2daadd604b0e097328e9f7b3f20d162807fd2ba649b5d994: Status 404 returned error can't find the container with id 4bbb80b39686493a2daadd604b0e097328e9f7b3f20d162807fd2ba649b5d994
Oct 09 15:05:19 crc kubenswrapper[4762]: I1009 15:05:19.731560 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-persistence-db-create-nc8ls"]
Oct 09 15:05:20 crc kubenswrapper[4762]: I1009 15:05:20.199392 4762 generic.go:334] "Generic (PLEG): container finished" podID="a6e5304c-1aea-4e8c-9211-0f001f520cbe" containerID="270f067d8caa967c131fa9e1dcdc8d537699fc5e7f182152cbc155705521729a" exitCode=0
Oct 09 15:05:20 crc kubenswrapper[4762]: I1009 15:05:20.199439 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-persistence-db-create-nc8ls" event={"ID":"a6e5304c-1aea-4e8c-9211-0f001f520cbe","Type":"ContainerDied","Data":"270f067d8caa967c131fa9e1dcdc8d537699fc5e7f182152cbc155705521729a"}
Oct 09 15:05:20 crc kubenswrapper[4762]: I1009 15:05:20.199463 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-persistence-db-create-nc8ls" event={"ID":"a6e5304c-1aea-4e8c-9211-0f001f520cbe","Type":"ContainerStarted","Data":"4bbb80b39686493a2daadd604b0e097328e9f7b3f20d162807fd2ba649b5d994"}
Oct 09 15:05:21 crc kubenswrapper[4762]: I1009 15:05:21.570206 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-persistence-db-create-nc8ls"
Oct 09 15:05:21 crc kubenswrapper[4762]: I1009 15:05:21.716977 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfb2j\" (UniqueName: \"kubernetes.io/projected/a6e5304c-1aea-4e8c-9211-0f001f520cbe-kube-api-access-kfb2j\") pod \"a6e5304c-1aea-4e8c-9211-0f001f520cbe\" (UID: \"a6e5304c-1aea-4e8c-9211-0f001f520cbe\") "
Oct 09 15:05:21 crc kubenswrapper[4762]: I1009 15:05:21.721868 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a6e5304c-1aea-4e8c-9211-0f001f520cbe-kube-api-access-kfb2j" (OuterVolumeSpecName: "kube-api-access-kfb2j") pod "a6e5304c-1aea-4e8c-9211-0f001f520cbe" (UID: "a6e5304c-1aea-4e8c-9211-0f001f520cbe"). InnerVolumeSpecName "kube-api-access-kfb2j". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 09 15:05:21 crc kubenswrapper[4762]: I1009 15:05:21.819850 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfb2j\" (UniqueName: \"kubernetes.io/projected/a6e5304c-1aea-4e8c-9211-0f001f520cbe-kube-api-access-kfb2j\") on node \"crc\" DevicePath \"\""
Oct 09 15:05:22 crc kubenswrapper[4762]: I1009 15:05:22.219138 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-persistence-db-create-nc8ls" event={"ID":"a6e5304c-1aea-4e8c-9211-0f001f520cbe","Type":"ContainerDied","Data":"4bbb80b39686493a2daadd604b0e097328e9f7b3f20d162807fd2ba649b5d994"}
Oct 09 15:05:22 crc kubenswrapper[4762]: I1009 15:05:22.219179 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4bbb80b39686493a2daadd604b0e097328e9f7b3f20d162807fd2ba649b5d994"
Oct 09 15:05:22 crc kubenswrapper[4762]: I1009 15:05:22.219179 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-persistence-db-create-nc8ls"
Oct 09 15:05:30 crc kubenswrapper[4762]: I1009 15:05:30.040855 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-c94a-account-create-86xpl"]
Oct 09 15:05:30 crc kubenswrapper[4762]: E1009 15:05:30.042998 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6e5304c-1aea-4e8c-9211-0f001f520cbe" containerName="mariadb-database-create"
Oct 09 15:05:30 crc kubenswrapper[4762]: I1009 15:05:30.043098 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6e5304c-1aea-4e8c-9211-0f001f520cbe" containerName="mariadb-database-create"
Oct 09 15:05:30 crc kubenswrapper[4762]: I1009 15:05:30.043394 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="a6e5304c-1aea-4e8c-9211-0f001f520cbe" containerName="mariadb-database-create"
Oct 09 15:05:30 crc kubenswrapper[4762]: I1009 15:05:30.044264 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-c94a-account-create-86xpl"
Oct 09 15:05:30 crc kubenswrapper[4762]: I1009 15:05:30.047653 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-persistence-db-secret"
Oct 09 15:05:30 crc kubenswrapper[4762]: I1009 15:05:30.058398 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-c94a-account-create-86xpl"]
Oct 09 15:05:30 crc kubenswrapper[4762]: I1009 15:05:30.177751 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5pf9m\" (UniqueName: \"kubernetes.io/projected/ac9a0b13-d6a5-4ec1-8277-c76d624988fb-kube-api-access-5pf9m\") pod \"octavia-c94a-account-create-86xpl\" (UID: \"ac9a0b13-d6a5-4ec1-8277-c76d624988fb\") " pod="openstack/octavia-c94a-account-create-86xpl"
Oct 09 15:05:30 crc kubenswrapper[4762]: I1009 15:05:30.280108 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5pf9m\" (UniqueName: \"kubernetes.io/projected/ac9a0b13-d6a5-4ec1-8277-c76d624988fb-kube-api-access-5pf9m\") pod \"octavia-c94a-account-create-86xpl\" (UID: \"ac9a0b13-d6a5-4ec1-8277-c76d624988fb\") " pod="openstack/octavia-c94a-account-create-86xpl"
Oct 09 15:05:30 crc kubenswrapper[4762]: I1009 15:05:30.308220 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5pf9m\" (UniqueName: \"kubernetes.io/projected/ac9a0b13-d6a5-4ec1-8277-c76d624988fb-kube-api-access-5pf9m\") pod \"octavia-c94a-account-create-86xpl\" (UID: \"ac9a0b13-d6a5-4ec1-8277-c76d624988fb\") " pod="openstack/octavia-c94a-account-create-86xpl"
Oct 09 15:05:30 crc kubenswrapper[4762]: I1009 15:05:30.403557 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-c94a-account-create-86xpl"
Oct 09 15:05:30 crc kubenswrapper[4762]: I1009 15:05:30.862096 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-c94a-account-create-86xpl"]
Oct 09 15:05:31 crc kubenswrapper[4762]: I1009 15:05:31.308614 4762 generic.go:334] "Generic (PLEG): container finished" podID="ac9a0b13-d6a5-4ec1-8277-c76d624988fb" containerID="be530af523124f4655c6e0b7eb7bb440dbfcabe2dc0ef39f909c7224d80df346" exitCode=0
Oct 09 15:05:31 crc kubenswrapper[4762]: I1009 15:05:31.308675 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-c94a-account-create-86xpl" event={"ID":"ac9a0b13-d6a5-4ec1-8277-c76d624988fb","Type":"ContainerDied","Data":"be530af523124f4655c6e0b7eb7bb440dbfcabe2dc0ef39f909c7224d80df346"}
Oct 09 15:05:31 crc kubenswrapper[4762]: I1009 15:05:31.308708 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-c94a-account-create-86xpl" event={"ID":"ac9a0b13-d6a5-4ec1-8277-c76d624988fb","Type":"ContainerStarted","Data":"79eff117ccf7d5b3b87fc4ba44d735c4b278c3c1dd6674cda404f10b8d4eb839"}
Oct 09 15:05:32 crc kubenswrapper[4762]: I1009 15:05:32.580590 4762 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-jphs8" podUID="b14659e8-2771-4473-8ea7-a0d598c1030a" containerName="ovn-controller" probeResult="failure" output=<
Oct 09 15:05:32 crc kubenswrapper[4762]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status
Oct 09 15:05:32 crc kubenswrapper[4762]: >
Oct 09 15:05:32 crc kubenswrapper[4762]: I1009 15:05:32.597320 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-7pjmm"
Oct 09 15:05:32 crc kubenswrapper[4762]: I1009 15:05:32.598266 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-7pjmm"
Oct 09 15:05:32 crc kubenswrapper[4762]: I1009 15:05:32.669865 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-c94a-account-create-86xpl"
Oct 09 15:05:32 crc kubenswrapper[4762]: I1009 15:05:32.717468 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-jphs8-config-q97pr"]
Oct 09 15:05:32 crc kubenswrapper[4762]: E1009 15:05:32.717892 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac9a0b13-d6a5-4ec1-8277-c76d624988fb" containerName="mariadb-account-create"
Oct 09 15:05:32 crc kubenswrapper[4762]: I1009 15:05:32.717903 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac9a0b13-d6a5-4ec1-8277-c76d624988fb" containerName="mariadb-account-create"
Oct 09 15:05:32 crc kubenswrapper[4762]: I1009 15:05:32.718104 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="ac9a0b13-d6a5-4ec1-8277-c76d624988fb" containerName="mariadb-account-create"
Oct 09 15:05:32 crc kubenswrapper[4762]: I1009 15:05:32.718766 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-jphs8-config-q97pr"
Oct 09 15:05:32 crc kubenswrapper[4762]: I1009 15:05:32.721942 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts"
Oct 09 15:05:32 crc kubenswrapper[4762]: I1009 15:05:32.727457 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5pf9m\" (UniqueName: \"kubernetes.io/projected/ac9a0b13-d6a5-4ec1-8277-c76d624988fb-kube-api-access-5pf9m\") pod \"ac9a0b13-d6a5-4ec1-8277-c76d624988fb\" (UID: \"ac9a0b13-d6a5-4ec1-8277-c76d624988fb\") "
Oct 09 15:05:32 crc kubenswrapper[4762]: I1009 15:05:32.736840 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ac9a0b13-d6a5-4ec1-8277-c76d624988fb-kube-api-access-5pf9m" (OuterVolumeSpecName: "kube-api-access-5pf9m") pod "ac9a0b13-d6a5-4ec1-8277-c76d624988fb" (UID: "ac9a0b13-d6a5-4ec1-8277-c76d624988fb"). InnerVolumeSpecName "kube-api-access-5pf9m". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 09 15:05:32 crc kubenswrapper[4762]: I1009 15:05:32.736841 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-jphs8-config-q97pr"]
Oct 09 15:05:32 crc kubenswrapper[4762]: I1009 15:05:32.830153 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7acfda05-1a10-4495-ad90-8977bea27683-scripts\") pod \"ovn-controller-jphs8-config-q97pr\" (UID: \"7acfda05-1a10-4495-ad90-8977bea27683\") " pod="openstack/ovn-controller-jphs8-config-q97pr"
Oct 09 15:05:32 crc kubenswrapper[4762]: I1009 15:05:32.830208 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w2tcz\" (UniqueName: \"kubernetes.io/projected/7acfda05-1a10-4495-ad90-8977bea27683-kube-api-access-w2tcz\") pod \"ovn-controller-jphs8-config-q97pr\" (UID: \"7acfda05-1a10-4495-ad90-8977bea27683\") " pod="openstack/ovn-controller-jphs8-config-q97pr"
Oct 09 15:05:32 crc kubenswrapper[4762]: I1009 15:05:32.830277 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/7acfda05-1a10-4495-ad90-8977bea27683-additional-scripts\") pod \"ovn-controller-jphs8-config-q97pr\" (UID: \"7acfda05-1a10-4495-ad90-8977bea27683\") " pod="openstack/ovn-controller-jphs8-config-q97pr"
Oct 09 15:05:32 crc kubenswrapper[4762]: I1009 15:05:32.830319 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/7acfda05-1a10-4495-ad90-8977bea27683-var-run-ovn\") pod \"ovn-controller-jphs8-config-q97pr\" (UID: \"7acfda05-1a10-4495-ad90-8977bea27683\") " pod="openstack/ovn-controller-jphs8-config-q97pr"
Oct 09 15:05:32 crc kubenswrapper[4762]: I1009 15:05:32.830371 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/7acfda05-1a10-4495-ad90-8977bea27683-var-run\") pod \"ovn-controller-jphs8-config-q97pr\" (UID: \"7acfda05-1a10-4495-ad90-8977bea27683\") " pod="openstack/ovn-controller-jphs8-config-q97pr"
Oct 09 15:05:32 crc kubenswrapper[4762]: I1009 15:05:32.830416 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/7acfda05-1a10-4495-ad90-8977bea27683-var-log-ovn\") pod \"ovn-controller-jphs8-config-q97pr\" (UID: \"7acfda05-1a10-4495-ad90-8977bea27683\") " pod="openstack/ovn-controller-jphs8-config-q97pr"
Oct 09 15:05:32 crc kubenswrapper[4762]: I1009 15:05:32.830503 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5pf9m\" (UniqueName: \"kubernetes.io/projected/ac9a0b13-d6a5-4ec1-8277-c76d624988fb-kube-api-access-5pf9m\") on node \"crc\" DevicePath \"\""
Oct 09 15:05:32 crc kubenswrapper[4762]: I1009 15:05:32.932104 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/7acfda05-1a10-4495-ad90-8977bea27683-additional-scripts\") pod \"ovn-controller-jphs8-config-q97pr\" (UID: \"7acfda05-1a10-4495-ad90-8977bea27683\") " pod="openstack/ovn-controller-jphs8-config-q97pr"
Oct 09 15:05:32 crc kubenswrapper[4762]: I1009 15:05:32.932167 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/7acfda05-1a10-4495-ad90-8977bea27683-var-run-ovn\") pod \"ovn-controller-jphs8-config-q97pr\" (UID: \"7acfda05-1a10-4495-ad90-8977bea27683\") " pod="openstack/ovn-controller-jphs8-config-q97pr"
Oct 09 15:05:32 crc kubenswrapper[4762]: I1009 15:05:32.932238 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/7acfda05-1a10-4495-ad90-8977bea27683-var-run\") pod \"ovn-controller-jphs8-config-q97pr\" (UID: \"7acfda05-1a10-4495-ad90-8977bea27683\") " pod="openstack/ovn-controller-jphs8-config-q97pr"
Oct 09 15:05:32 crc kubenswrapper[4762]: I1009 15:05:32.932289 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/7acfda05-1a10-4495-ad90-8977bea27683-var-log-ovn\") pod \"ovn-controller-jphs8-config-q97pr\" (UID: \"7acfda05-1a10-4495-ad90-8977bea27683\") " pod="openstack/ovn-controller-jphs8-config-q97pr"
Oct 09 15:05:32 crc kubenswrapper[4762]: I1009 15:05:32.932337 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7acfda05-1a10-4495-ad90-8977bea27683-scripts\") pod \"ovn-controller-jphs8-config-q97pr\" (UID: \"7acfda05-1a10-4495-ad90-8977bea27683\") " pod="openstack/ovn-controller-jphs8-config-q97pr"
Oct 09 15:05:32 crc kubenswrapper[4762]: I1009 15:05:32.932358 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w2tcz\" (UniqueName: \"kubernetes.io/projected/7acfda05-1a10-4495-ad90-8977bea27683-kube-api-access-w2tcz\") pod \"ovn-controller-jphs8-config-q97pr\" (UID: \"7acfda05-1a10-4495-ad90-8977bea27683\") " pod="openstack/ovn-controller-jphs8-config-q97pr"
Oct 09 15:05:32 crc kubenswrapper[4762]: I1009 15:05:32.932909 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/7acfda05-1a10-4495-ad90-8977bea27683-var-run\") pod \"ovn-controller-jphs8-config-q97pr\" (UID: \"7acfda05-1a10-4495-ad90-8977bea27683\") " pod="openstack/ovn-controller-jphs8-config-q97pr"
Oct 09 15:05:32 crc kubenswrapper[4762]: I1009 15:05:32.932951 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/7acfda05-1a10-4495-ad90-8977bea27683-var-log-ovn\") pod \"ovn-controller-jphs8-config-q97pr\" (UID: \"7acfda05-1a10-4495-ad90-8977bea27683\") " pod="openstack/ovn-controller-jphs8-config-q97pr"
Oct 09 15:05:32 crc kubenswrapper[4762]: I1009 15:05:32.932950 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/7acfda05-1a10-4495-ad90-8977bea27683-var-run-ovn\") pod \"ovn-controller-jphs8-config-q97pr\" (UID: \"7acfda05-1a10-4495-ad90-8977bea27683\") " pod="openstack/ovn-controller-jphs8-config-q97pr"
Oct 09 15:05:32 crc kubenswrapper[4762]: I1009 15:05:32.932950 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/7acfda05-1a10-4495-ad90-8977bea27683-additional-scripts\") pod \"ovn-controller-jphs8-config-q97pr\" (UID: \"7acfda05-1a10-4495-ad90-8977bea27683\") " pod="openstack/ovn-controller-jphs8-config-q97pr"
Oct 09 15:05:32 crc kubenswrapper[4762]: I1009 15:05:32.934432 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7acfda05-1a10-4495-ad90-8977bea27683-scripts\") pod \"ovn-controller-jphs8-config-q97pr\" (UID: \"7acfda05-1a10-4495-ad90-8977bea27683\") " pod="openstack/ovn-controller-jphs8-config-q97pr"
Oct 09 15:05:32 crc kubenswrapper[4762]: I1009 15:05:32.952406 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w2tcz\" (UniqueName: \"kubernetes.io/projected/7acfda05-1a10-4495-ad90-8977bea27683-kube-api-access-w2tcz\") pod \"ovn-controller-jphs8-config-q97pr\" (UID: \"7acfda05-1a10-4495-ad90-8977bea27683\") " pod="openstack/ovn-controller-jphs8-config-q97pr"
Oct 09 15:05:33 crc kubenswrapper[4762]: I1009 15:05:33.084868 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-jphs8-config-q97pr"
Oct 09 15:05:33 crc kubenswrapper[4762]: I1009 15:05:33.328103 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-c94a-account-create-86xpl"
Oct 09 15:05:33 crc kubenswrapper[4762]: I1009 15:05:33.328087 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-c94a-account-create-86xpl" event={"ID":"ac9a0b13-d6a5-4ec1-8277-c76d624988fb","Type":"ContainerDied","Data":"79eff117ccf7d5b3b87fc4ba44d735c4b278c3c1dd6674cda404f10b8d4eb839"}
Oct 09 15:05:33 crc kubenswrapper[4762]: I1009 15:05:33.328892 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="79eff117ccf7d5b3b87fc4ba44d735c4b278c3c1dd6674cda404f10b8d4eb839"
Oct 09 15:05:33 crc kubenswrapper[4762]: W1009 15:05:33.528532 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7acfda05_1a10_4495_ad90_8977bea27683.slice/crio-e146dea1a3107e1d3688a53e17bfa97fc12a6ecd29ea244dd1b7c6262c927eea WatchSource:0}: Error finding container e146dea1a3107e1d3688a53e17bfa97fc12a6ecd29ea244dd1b7c6262c927eea: Status 404 returned error can't find the container with id e146dea1a3107e1d3688a53e17bfa97fc12a6ecd29ea244dd1b7c6262c927eea
Oct 09 15:05:33 crc kubenswrapper[4762]: I1009 15:05:33.532525 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-jphs8-config-q97pr"]
Oct 09 15:05:34 crc kubenswrapper[4762]: I1009 15:05:34.337915 4762 generic.go:334] "Generic (PLEG): container finished" podID="7acfda05-1a10-4495-ad90-8977bea27683" containerID="c6cd31d9581e951d64f5fefcb58c1bd8dbc0109ae37cdf49d5e929f69962296f" exitCode=0
Oct 09 15:05:34 crc kubenswrapper[4762]: I1009 15:05:34.338023 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-jphs8-config-q97pr" event={"ID":"7acfda05-1a10-4495-ad90-8977bea27683","Type":"ContainerDied","Data":"c6cd31d9581e951d64f5fefcb58c1bd8dbc0109ae37cdf49d5e929f69962296f"}
Oct 09 15:05:34 crc kubenswrapper[4762]: I1009 15:05:34.338204 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-jphs8-config-q97pr" event={"ID":"7acfda05-1a10-4495-ad90-8977bea27683","Type":"ContainerStarted","Data":"e146dea1a3107e1d3688a53e17bfa97fc12a6ecd29ea244dd1b7c6262c927eea"}
Oct 09 15:05:35 crc kubenswrapper[4762]: I1009 15:05:35.703976 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-jphs8-config-q97pr"
Oct 09 15:05:35 crc kubenswrapper[4762]: I1009 15:05:35.785591 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7acfda05-1a10-4495-ad90-8977bea27683-scripts\") pod \"7acfda05-1a10-4495-ad90-8977bea27683\" (UID: \"7acfda05-1a10-4495-ad90-8977bea27683\") "
Oct 09 15:05:35 crc kubenswrapper[4762]: I1009 15:05:35.785725 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/7acfda05-1a10-4495-ad90-8977bea27683-additional-scripts\") pod \"7acfda05-1a10-4495-ad90-8977bea27683\" (UID: \"7acfda05-1a10-4495-ad90-8977bea27683\") "
Oct 09 15:05:35 crc kubenswrapper[4762]: I1009 15:05:35.785840 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/7acfda05-1a10-4495-ad90-8977bea27683-var-run\") pod \"7acfda05-1a10-4495-ad90-8977bea27683\" (UID: \"7acfda05-1a10-4495-ad90-8977bea27683\") "
Oct 09 15:05:35 crc kubenswrapper[4762]: I1009 15:05:35.785943 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7acfda05-1a10-4495-ad90-8977bea27683-var-run" (OuterVolumeSpecName: "var-run") pod "7acfda05-1a10-4495-ad90-8977bea27683" (UID: "7acfda05-1a10-4495-ad90-8977bea27683"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 09 15:05:35 crc kubenswrapper[4762]: I1009 15:05:35.785950 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w2tcz\" (UniqueName: \"kubernetes.io/projected/7acfda05-1a10-4495-ad90-8977bea27683-kube-api-access-w2tcz\") pod \"7acfda05-1a10-4495-ad90-8977bea27683\" (UID: \"7acfda05-1a10-4495-ad90-8977bea27683\") "
Oct 09 15:05:35 crc kubenswrapper[4762]: I1009 15:05:35.786015 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/7acfda05-1a10-4495-ad90-8977bea27683-var-log-ovn\") pod \"7acfda05-1a10-4495-ad90-8977bea27683\" (UID: \"7acfda05-1a10-4495-ad90-8977bea27683\") "
Oct 09 15:05:35 crc kubenswrapper[4762]: I1009 15:05:35.786044 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/7acfda05-1a10-4495-ad90-8977bea27683-var-run-ovn\") pod \"7acfda05-1a10-4495-ad90-8977bea27683\" (UID: \"7acfda05-1a10-4495-ad90-8977bea27683\") "
Oct 09 15:05:35 crc kubenswrapper[4762]: I1009 15:05:35.786113 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7acfda05-1a10-4495-ad90-8977bea27683-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "7acfda05-1a10-4495-ad90-8977bea27683" (UID: "7acfda05-1a10-4495-ad90-8977bea27683"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 09 15:05:35 crc kubenswrapper[4762]: I1009 15:05:35.786195 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7acfda05-1a10-4495-ad90-8977bea27683-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "7acfda05-1a10-4495-ad90-8977bea27683" (UID: "7acfda05-1a10-4495-ad90-8977bea27683"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 09 15:05:35 crc kubenswrapper[4762]: I1009 15:05:35.786708 4762 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/7acfda05-1a10-4495-ad90-8977bea27683-var-run\") on node \"crc\" DevicePath \"\""
Oct 09 15:05:35 crc kubenswrapper[4762]: I1009 15:05:35.786729 4762 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/7acfda05-1a10-4495-ad90-8977bea27683-var-run-ovn\") on node \"crc\" DevicePath \"\""
Oct 09 15:05:35 crc kubenswrapper[4762]: I1009 15:05:35.786749 4762 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/7acfda05-1a10-4495-ad90-8977bea27683-var-log-ovn\") on node \"crc\" DevicePath \"\""
Oct 09 15:05:35 crc kubenswrapper[4762]: I1009 15:05:35.787300 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7acfda05-1a10-4495-ad90-8977bea27683-scripts" (OuterVolumeSpecName: "scripts") pod "7acfda05-1a10-4495-ad90-8977bea27683" (UID: "7acfda05-1a10-4495-ad90-8977bea27683"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 09 15:05:35 crc kubenswrapper[4762]: I1009 15:05:35.787786 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7acfda05-1a10-4495-ad90-8977bea27683-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "7acfda05-1a10-4495-ad90-8977bea27683" (UID: "7acfda05-1a10-4495-ad90-8977bea27683"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 09 15:05:35 crc kubenswrapper[4762]: I1009 15:05:35.794190 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7acfda05-1a10-4495-ad90-8977bea27683-kube-api-access-w2tcz" (OuterVolumeSpecName: "kube-api-access-w2tcz") pod "7acfda05-1a10-4495-ad90-8977bea27683" (UID: "7acfda05-1a10-4495-ad90-8977bea27683"). InnerVolumeSpecName "kube-api-access-w2tcz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 09 15:05:35 crc kubenswrapper[4762]: I1009 15:05:35.888394 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w2tcz\" (UniqueName: \"kubernetes.io/projected/7acfda05-1a10-4495-ad90-8977bea27683-kube-api-access-w2tcz\") on node \"crc\" DevicePath \"\""
Oct 09 15:05:35 crc kubenswrapper[4762]: I1009 15:05:35.888440 4762 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7acfda05-1a10-4495-ad90-8977bea27683-scripts\") on node \"crc\" DevicePath \"\""
Oct 09 15:05:35 crc kubenswrapper[4762]: I1009 15:05:35.888453 4762 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/7acfda05-1a10-4495-ad90-8977bea27683-additional-scripts\") on node \"crc\" DevicePath \"\""
Oct 09 15:05:36 crc kubenswrapper[4762]: I1009 15:05:36.360361 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-jphs8-config-q97pr" event={"ID":"7acfda05-1a10-4495-ad90-8977bea27683","Type":"ContainerDied","Data":"e146dea1a3107e1d3688a53e17bfa97fc12a6ecd29ea244dd1b7c6262c927eea"}
Oct 09 15:05:36 crc kubenswrapper[4762]: I1009 15:05:36.360410 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e146dea1a3107e1d3688a53e17bfa97fc12a6ecd29ea244dd1b7c6262c927eea"
Oct 09 15:05:36 crc kubenswrapper[4762]: I1009 15:05:36.360456 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-jphs8-config-q97pr"
Oct 09 15:05:36 crc kubenswrapper[4762]: I1009 15:05:36.828832 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-jphs8-config-q97pr"]
Oct 09 15:05:36 crc kubenswrapper[4762]: I1009 15:05:36.841069 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-jphs8-config-q97pr"]
Oct 09 15:05:36 crc kubenswrapper[4762]: I1009 15:05:36.953673 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-jphs8-config-7dlzm"]
Oct 09 15:05:36 crc kubenswrapper[4762]: E1009 15:05:36.954088 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7acfda05-1a10-4495-ad90-8977bea27683" containerName="ovn-config"
Oct 09 15:05:36 crc kubenswrapper[4762]: I1009 15:05:36.954103 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="7acfda05-1a10-4495-ad90-8977bea27683" containerName="ovn-config"
Oct 09 15:05:36 crc kubenswrapper[4762]: I1009 15:05:36.954318 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="7acfda05-1a10-4495-ad90-8977bea27683" containerName="ovn-config"
Oct 09 15:05:36 crc kubenswrapper[4762]: I1009 15:05:36.955015 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-jphs8-config-7dlzm"
Oct 09 15:05:36 crc kubenswrapper[4762]: I1009 15:05:36.959269 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts"
Oct 09 15:05:36 crc kubenswrapper[4762]: I1009 15:05:36.983403 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7acfda05-1a10-4495-ad90-8977bea27683" path="/var/lib/kubelet/pods/7acfda05-1a10-4495-ad90-8977bea27683/volumes"
Oct 09 15:05:36 crc kubenswrapper[4762]: I1009 15:05:36.983977 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-jphs8-config-7dlzm"]
Oct 09 15:05:37 crc kubenswrapper[4762]: I1009 15:05:37.020973 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r5l72\" (UniqueName: \"kubernetes.io/projected/a81dbbbf-52e2-48ba-859c-420f3549833d-kube-api-access-r5l72\") pod \"ovn-controller-jphs8-config-7dlzm\" (UID: \"a81dbbbf-52e2-48ba-859c-420f3549833d\") " pod="openstack/ovn-controller-jphs8-config-7dlzm"
Oct 09 15:05:37 crc kubenswrapper[4762]: I1009 15:05:37.021406 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/a81dbbbf-52e2-48ba-859c-420f3549833d-var-log-ovn\") pod \"ovn-controller-jphs8-config-7dlzm\" (UID: \"a81dbbbf-52e2-48ba-859c-420f3549833d\") " pod="openstack/ovn-controller-jphs8-config-7dlzm"
Oct 09 15:05:37 crc kubenswrapper[4762]: I1009 15:05:37.021494 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/a81dbbbf-52e2-48ba-859c-420f3549833d-additional-scripts\") pod \"ovn-controller-jphs8-config-7dlzm\" (UID: \"a81dbbbf-52e2-48ba-859c-420f3549833d\") " pod="openstack/ovn-controller-jphs8-config-7dlzm"
Oct 09 15:05:37 crc kubenswrapper[4762]: I1009 15:05:37.021711 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/a81dbbbf-52e2-48ba-859c-420f3549833d-var-run\") pod \"ovn-controller-jphs8-config-7dlzm\" (UID: \"a81dbbbf-52e2-48ba-859c-420f3549833d\") " pod="openstack/ovn-controller-jphs8-config-7dlzm"
Oct 09 15:05:37 crc kubenswrapper[4762]: I1009 15:05:37.021750 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/a81dbbbf-52e2-48ba-859c-420f3549833d-var-run-ovn\") pod \"ovn-controller-jphs8-config-7dlzm\" (UID: \"a81dbbbf-52e2-48ba-859c-420f3549833d\") " pod="openstack/ovn-controller-jphs8-config-7dlzm"
Oct 09 15:05:37 crc kubenswrapper[4762]: I1009 15:05:37.021802 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a81dbbbf-52e2-48ba-859c-420f3549833d-scripts\") pod \"ovn-controller-jphs8-config-7dlzm\" (UID: \"a81dbbbf-52e2-48ba-859c-420f3549833d\") " pod="openstack/ovn-controller-jphs8-config-7dlzm"
Oct 09 15:05:37 crc kubenswrapper[4762]: I1009 15:05:37.058875 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-api-6fdffd4774-9qpx5"]
Oct 09 15:05:37 crc kubenswrapper[4762]: I1009 15:05:37.060976 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-api-6fdffd4774-9qpx5"
Oct 09 15:05:37 crc kubenswrapper[4762]: I1009 15:05:37.067911 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-api-scripts"
Oct 09 15:05:37 crc kubenswrapper[4762]: I1009 15:05:37.068022 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-octavia-dockercfg-l4zct"
Oct 09 15:05:37 crc kubenswrapper[4762]: I1009 15:05:37.068276 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-api-config-data"
Oct 09 15:05:37 crc kubenswrapper[4762]: I1009 15:05:37.084679 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-api-6fdffd4774-9qpx5"]
Oct 09 15:05:37 crc kubenswrapper[4762]: I1009 15:05:37.123216 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/a81dbbbf-52e2-48ba-859c-420f3549833d-var-log-ovn\") pod \"ovn-controller-jphs8-config-7dlzm\" (UID: \"a81dbbbf-52e2-48ba-859c-420f3549833d\") " pod="openstack/ovn-controller-jphs8-config-7dlzm"
Oct 09 15:05:37 crc kubenswrapper[4762]: I1009 15:05:37.123300 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/824b22e5-15dc-4560-be38-879a39e3175f-config-data\") pod \"octavia-api-6fdffd4774-9qpx5\" (UID: \"824b22e5-15dc-4560-be38-879a39e3175f\") " pod="openstack/octavia-api-6fdffd4774-9qpx5"
Oct 09 15:05:37 crc kubenswrapper[4762]: I1009 15:05:37.123357 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/a81dbbbf-52e2-48ba-859c-420f3549833d-additional-scripts\") pod \"ovn-controller-jphs8-config-7dlzm\" (UID: \"a81dbbbf-52e2-48ba-859c-420f3549833d\") " pod="openstack/ovn-controller-jphs8-config-7dlzm"
Oct 09 15:05:37 crc kubenswrapper[4762]: I1009 15:05:37.123447 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/a81dbbbf-52e2-48ba-859c-420f3549833d-var-run\") pod \"ovn-controller-jphs8-config-7dlzm\" (UID: \"a81dbbbf-52e2-48ba-859c-420f3549833d\") " pod="openstack/ovn-controller-jphs8-config-7dlzm"
Oct 09 15:05:37 crc kubenswrapper[4762]: I1009 15:05:37.123473 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/824b22e5-15dc-4560-be38-879a39e3175f-combined-ca-bundle\") pod \"octavia-api-6fdffd4774-9qpx5\" (UID: \"824b22e5-15dc-4560-be38-879a39e3175f\") " pod="openstack/octavia-api-6fdffd4774-9qpx5"
Oct 09 15:05:37 crc kubenswrapper[4762]: I1009 15:05:37.123499 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/a81dbbbf-52e2-48ba-859c-420f3549833d-var-run-ovn\") pod \"ovn-controller-jphs8-config-7dlzm\" (UID: \"a81dbbbf-52e2-48ba-859c-420f3549833d\") " pod="openstack/ovn-controller-jphs8-config-7dlzm"
Oct 09 15:05:37 crc kubenswrapper[4762]: I1009 15:05:37.123541 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a81dbbbf-52e2-48ba-859c-420f3549833d-scripts\") pod \"ovn-controller-jphs8-config-7dlzm\" (UID: \"a81dbbbf-52e2-48ba-859c-420f3549833d\") " pod="openstack/ovn-controller-jphs8-config-7dlzm"
Oct 09 15:05:37 crc kubenswrapper[4762]: I1009 15:05:37.123577 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/824b22e5-15dc-4560-be38-879a39e3175f-config-data-merged\") pod \"octavia-api-6fdffd4774-9qpx5\" (UID: \"824b22e5-15dc-4560-be38-879a39e3175f\") " pod="openstack/octavia-api-6fdffd4774-9qpx5"
Oct 09 15:05:37 crc kubenswrapper[4762]: I1009 15:05:37.123603 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/824b22e5-15dc-4560-be38-879a39e3175f-scripts\") pod \"octavia-api-6fdffd4774-9qpx5\" (UID: \"824b22e5-15dc-4560-be38-879a39e3175f\") " pod="openstack/octavia-api-6fdffd4774-9qpx5"
Oct 09 15:05:37 crc kubenswrapper[4762]: I1009 15:05:37.124254 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"octavia-run\" (UniqueName: \"kubernetes.io/empty-dir/824b22e5-15dc-4560-be38-879a39e3175f-octavia-run\") pod \"octavia-api-6fdffd4774-9qpx5\" (UID: \"824b22e5-15dc-4560-be38-879a39e3175f\") " pod="openstack/octavia-api-6fdffd4774-9qpx5"
Oct 09 15:05:37 crc kubenswrapper[4762]: I1009 15:05:37.124295 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r5l72\" (UniqueName: \"kubernetes.io/projected/a81dbbbf-52e2-48ba-859c-420f3549833d-kube-api-access-r5l72\") pod \"ovn-controller-jphs8-config-7dlzm\" (UID: \"a81dbbbf-52e2-48ba-859c-420f3549833d\") " pod="openstack/ovn-controller-jphs8-config-7dlzm"
Oct 09 15:05:37 crc kubenswrapper[4762]: I1009 15:05:37.124321 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/a81dbbbf-52e2-48ba-859c-420f3549833d-var-log-ovn\") pod \"ovn-controller-jphs8-config-7dlzm\" (UID: \"a81dbbbf-52e2-48ba-859c-420f3549833d\") " pod="openstack/ovn-controller-jphs8-config-7dlzm"
Oct 09 15:05:37 crc kubenswrapper[4762]: I1009 15:05:37.124445 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/a81dbbbf-52e2-48ba-859c-420f3549833d-var-run-ovn\") pod \"ovn-controller-jphs8-config-7dlzm\" (UID: \"a81dbbbf-52e2-48ba-859c-420f3549833d\") " pod="openstack/ovn-controller-jphs8-config-7dlzm"
Oct 09 15:05:37 crc kubenswrapper[4762]: I1009 15:05:37.124513 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/a81dbbbf-52e2-48ba-859c-420f3549833d-var-run\") pod \"ovn-controller-jphs8-config-7dlzm\" (UID: \"a81dbbbf-52e2-48ba-859c-420f3549833d\") " pod="openstack/ovn-controller-jphs8-config-7dlzm"
Oct 09 15:05:37 crc kubenswrapper[4762]: I1009 15:05:37.126786 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/a81dbbbf-52e2-48ba-859c-420f3549833d-additional-scripts\") pod \"ovn-controller-jphs8-config-7dlzm\" (UID: \"a81dbbbf-52e2-48ba-859c-420f3549833d\") " pod="openstack/ovn-controller-jphs8-config-7dlzm"
Oct 09 15:05:37 crc kubenswrapper[4762]: I1009 15:05:37.129174 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a81dbbbf-52e2-48ba-859c-420f3549833d-scripts\") pod \"ovn-controller-jphs8-config-7dlzm\" (UID: \"a81dbbbf-52e2-48ba-859c-420f3549833d\") " pod="openstack/ovn-controller-jphs8-config-7dlzm"
Oct 09 15:05:37 crc kubenswrapper[4762]: I1009 15:05:37.145652 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r5l72\" (UniqueName: \"kubernetes.io/projected/a81dbbbf-52e2-48ba-859c-420f3549833d-kube-api-access-r5l72\") pod \"ovn-controller-jphs8-config-7dlzm\" (UID: \"a81dbbbf-52e2-48ba-859c-420f3549833d\") " pod="openstack/ovn-controller-jphs8-config-7dlzm"
Oct 09 15:05:37 crc kubenswrapper[4762]: I1009 15:05:37.225367 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/824b22e5-15dc-4560-be38-879a39e3175f-config-data\") pod \"octavia-api-6fdffd4774-9qpx5\" (UID: \"824b22e5-15dc-4560-be38-879a39e3175f\") " pod="openstack/octavia-api-6fdffd4774-9qpx5"
Oct 09 15:05:37 crc kubenswrapper[4762]: I1009 15:05:37.225510 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/824b22e5-15dc-4560-be38-879a39e3175f-combined-ca-bundle\") pod \"octavia-api-6fdffd4774-9qpx5\" (UID: \"824b22e5-15dc-4560-be38-879a39e3175f\") " pod="openstack/octavia-api-6fdffd4774-9qpx5"
Oct 09 15:05:37 crc kubenswrapper[4762]: I1009 15:05:37.225569 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/824b22e5-15dc-4560-be38-879a39e3175f-config-data-merged\") pod \"octavia-api-6fdffd4774-9qpx5\" (UID: \"824b22e5-15dc-4560-be38-879a39e3175f\") " pod="openstack/octavia-api-6fdffd4774-9qpx5"
Oct 09 15:05:37 crc kubenswrapper[4762]: I1009 15:05:37.225587 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/824b22e5-15dc-4560-be38-879a39e3175f-scripts\") pod \"octavia-api-6fdffd4774-9qpx5\" (UID: \"824b22e5-15dc-4560-be38-879a39e3175f\") " pod="openstack/octavia-api-6fdffd4774-9qpx5"
Oct 09 15:05:37 crc kubenswrapper[4762]: I1009 15:05:37.225607 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"octavia-run\" (UniqueName: \"kubernetes.io/empty-dir/824b22e5-15dc-4560-be38-879a39e3175f-octavia-run\") pod \"octavia-api-6fdffd4774-9qpx5\" (UID: \"824b22e5-15dc-4560-be38-879a39e3175f\") " pod="openstack/octavia-api-6fdffd4774-9qpx5"
Oct 09 15:05:37 crc kubenswrapper[4762]: I1009 15:05:37.226103 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"octavia-run\" (UniqueName: \"kubernetes.io/empty-dir/824b22e5-15dc-4560-be38-879a39e3175f-octavia-run\") pod \"octavia-api-6fdffd4774-9qpx5\" (UID: \"824b22e5-15dc-4560-be38-879a39e3175f\") " pod="openstack/octavia-api-6fdffd4774-9qpx5"
Oct 09 15:05:37 crc kubenswrapper[4762]: I1009 15:05:37.226854 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/824b22e5-15dc-4560-be38-879a39e3175f-config-data-merged\") pod \"octavia-api-6fdffd4774-9qpx5\" (UID: \"824b22e5-15dc-4560-be38-879a39e3175f\") " pod="openstack/octavia-api-6fdffd4774-9qpx5"
Oct 09 15:05:37 crc kubenswrapper[4762]: I1009 15:05:37.229291 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/824b22e5-15dc-4560-be38-879a39e3175f-config-data\") pod \"octavia-api-6fdffd4774-9qpx5\" (UID: \"824b22e5-15dc-4560-be38-879a39e3175f\") " pod="openstack/octavia-api-6fdffd4774-9qpx5"
Oct 09 15:05:37 crc kubenswrapper[4762]: I1009 15:05:37.230914 4762 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/824b22e5-15dc-4560-be38-879a39e3175f-combined-ca-bundle\") pod \"octavia-api-6fdffd4774-9qpx5\" (UID: \"824b22e5-15dc-4560-be38-879a39e3175f\") " pod="openstack/octavia-api-6fdffd4774-9qpx5" Oct 09 15:05:37 crc kubenswrapper[4762]: I1009 15:05:37.231387 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/824b22e5-15dc-4560-be38-879a39e3175f-scripts\") pod \"octavia-api-6fdffd4774-9qpx5\" (UID: \"824b22e5-15dc-4560-be38-879a39e3175f\") " pod="openstack/octavia-api-6fdffd4774-9qpx5" Oct 09 15:05:37 crc kubenswrapper[4762]: I1009 15:05:37.281341 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-jphs8-config-7dlzm" Oct 09 15:05:37 crc kubenswrapper[4762]: I1009 15:05:37.383094 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-api-6fdffd4774-9qpx5" Oct 09 15:05:37 crc kubenswrapper[4762]: I1009 15:05:37.585812 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-jphs8" Oct 09 15:05:37 crc kubenswrapper[4762]: I1009 15:05:37.734765 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-jphs8-config-7dlzm"] Oct 09 15:05:37 crc kubenswrapper[4762]: W1009 15:05:37.737385 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda81dbbbf_52e2_48ba_859c_420f3549833d.slice/crio-64a9f1c06d94380b7bd37f923a5b1e6286b8384aeb278bdc61570a2a672c17e8 WatchSource:0}: Error finding container 64a9f1c06d94380b7bd37f923a5b1e6286b8384aeb278bdc61570a2a672c17e8: Status 404 returned error can't find the container with id 64a9f1c06d94380b7bd37f923a5b1e6286b8384aeb278bdc61570a2a672c17e8 Oct 09 15:05:37 crc kubenswrapper[4762]: W1009 15:05:37.931917 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod824b22e5_15dc_4560_be38_879a39e3175f.slice/crio-a48be05dd95c7ed326ab51f0bb9e32dd113406e945ad85d83a298654d34a2fb9 WatchSource:0}: Error finding container a48be05dd95c7ed326ab51f0bb9e32dd113406e945ad85d83a298654d34a2fb9: Status 404 returned error can't find the container with id a48be05dd95c7ed326ab51f0bb9e32dd113406e945ad85d83a298654d34a2fb9 Oct 09 15:05:37 crc kubenswrapper[4762]: I1009 15:05:37.932388 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-api-6fdffd4774-9qpx5"] Oct 09 15:05:38 crc kubenswrapper[4762]: I1009 15:05:38.381998 4762 generic.go:334] "Generic (PLEG): container finished" podID="a81dbbbf-52e2-48ba-859c-420f3549833d" containerID="72b64cc39ceea069396f8934db28b6d909b2efe6402843ec664d576a17929d72" exitCode=0 Oct 09 15:05:38 crc kubenswrapper[4762]: I1009 15:05:38.382095 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-jphs8-config-7dlzm" event={"ID":"a81dbbbf-52e2-48ba-859c-420f3549833d","Type":"ContainerDied","Data":"72b64cc39ceea069396f8934db28b6d909b2efe6402843ec664d576a17929d72"} Oct 09 15:05:38 crc kubenswrapper[4762]: I1009 15:05:38.382337 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-jphs8-config-7dlzm" event={"ID":"a81dbbbf-52e2-48ba-859c-420f3549833d","Type":"ContainerStarted","Data":"64a9f1c06d94380b7bd37f923a5b1e6286b8384aeb278bdc61570a2a672c17e8"} Oct 09 15:05:38 crc kubenswrapper[4762]: 
I1009 15:05:38.384406 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-api-6fdffd4774-9qpx5" event={"ID":"824b22e5-15dc-4560-be38-879a39e3175f","Type":"ContainerStarted","Data":"a48be05dd95c7ed326ab51f0bb9e32dd113406e945ad85d83a298654d34a2fb9"} Oct 09 15:05:39 crc kubenswrapper[4762]: I1009 15:05:39.941801 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-jphs8-config-7dlzm" Oct 09 15:05:39 crc kubenswrapper[4762]: I1009 15:05:39.993237 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a81dbbbf-52e2-48ba-859c-420f3549833d-scripts\") pod \"a81dbbbf-52e2-48ba-859c-420f3549833d\" (UID: \"a81dbbbf-52e2-48ba-859c-420f3549833d\") " Oct 09 15:05:39 crc kubenswrapper[4762]: I1009 15:05:39.993296 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/a81dbbbf-52e2-48ba-859c-420f3549833d-var-run-ovn\") pod \"a81dbbbf-52e2-48ba-859c-420f3549833d\" (UID: \"a81dbbbf-52e2-48ba-859c-420f3549833d\") " Oct 09 15:05:39 crc kubenswrapper[4762]: I1009 15:05:39.993322 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/a81dbbbf-52e2-48ba-859c-420f3549833d-var-run\") pod \"a81dbbbf-52e2-48ba-859c-420f3549833d\" (UID: \"a81dbbbf-52e2-48ba-859c-420f3549833d\") " Oct 09 15:05:39 crc kubenswrapper[4762]: I1009 15:05:39.993349 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/a81dbbbf-52e2-48ba-859c-420f3549833d-var-log-ovn\") pod \"a81dbbbf-52e2-48ba-859c-420f3549833d\" (UID: \"a81dbbbf-52e2-48ba-859c-420f3549833d\") " Oct 09 15:05:39 crc kubenswrapper[4762]: I1009 15:05:39.993380 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r5l72\" (UniqueName: \"kubernetes.io/projected/a81dbbbf-52e2-48ba-859c-420f3549833d-kube-api-access-r5l72\") pod \"a81dbbbf-52e2-48ba-859c-420f3549833d\" (UID: \"a81dbbbf-52e2-48ba-859c-420f3549833d\") " Oct 09 15:05:39 crc kubenswrapper[4762]: I1009 15:05:39.993596 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/a81dbbbf-52e2-48ba-859c-420f3549833d-additional-scripts\") pod \"a81dbbbf-52e2-48ba-859c-420f3549833d\" (UID: \"a81dbbbf-52e2-48ba-859c-420f3549833d\") " Oct 09 15:05:39 crc kubenswrapper[4762]: I1009 15:05:39.993843 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a81dbbbf-52e2-48ba-859c-420f3549833d-var-run" (OuterVolumeSpecName: "var-run") pod "a81dbbbf-52e2-48ba-859c-420f3549833d" (UID: "a81dbbbf-52e2-48ba-859c-420f3549833d"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 15:05:39 crc kubenswrapper[4762]: I1009 15:05:39.994265 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a81dbbbf-52e2-48ba-859c-420f3549833d-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "a81dbbbf-52e2-48ba-859c-420f3549833d" (UID: "a81dbbbf-52e2-48ba-859c-420f3549833d"). InnerVolumeSpecName "var-run-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 15:05:39 crc kubenswrapper[4762]: I1009 15:05:39.994724 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a81dbbbf-52e2-48ba-859c-420f3549833d-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "a81dbbbf-52e2-48ba-859c-420f3549833d" (UID: "a81dbbbf-52e2-48ba-859c-420f3549833d"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 15:05:39 crc kubenswrapper[4762]: I1009 15:05:39.995076 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a81dbbbf-52e2-48ba-859c-420f3549833d-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "a81dbbbf-52e2-48ba-859c-420f3549833d" (UID: "a81dbbbf-52e2-48ba-859c-420f3549833d"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 15:05:39 crc kubenswrapper[4762]: I1009 15:05:39.995118 4762 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/a81dbbbf-52e2-48ba-859c-420f3549833d-var-run-ovn\") on node \"crc\" DevicePath \"\"" Oct 09 15:05:39 crc kubenswrapper[4762]: I1009 15:05:39.995160 4762 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/a81dbbbf-52e2-48ba-859c-420f3549833d-var-run\") on node \"crc\" DevicePath \"\"" Oct 09 15:05:39 crc kubenswrapper[4762]: I1009 15:05:39.995171 4762 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/a81dbbbf-52e2-48ba-859c-420f3549833d-var-log-ovn\") on node \"crc\" DevicePath \"\"" Oct 09 15:05:39 crc kubenswrapper[4762]: I1009 15:05:39.995383 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a81dbbbf-52e2-48ba-859c-420f3549833d-scripts" (OuterVolumeSpecName: "scripts") pod "a81dbbbf-52e2-48ba-859c-420f3549833d" (UID: "a81dbbbf-52e2-48ba-859c-420f3549833d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 15:05:40 crc kubenswrapper[4762]: I1009 15:05:40.018600 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a81dbbbf-52e2-48ba-859c-420f3549833d-kube-api-access-r5l72" (OuterVolumeSpecName: "kube-api-access-r5l72") pod "a81dbbbf-52e2-48ba-859c-420f3549833d" (UID: "a81dbbbf-52e2-48ba-859c-420f3549833d"). InnerVolumeSpecName "kube-api-access-r5l72". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 15:05:40 crc kubenswrapper[4762]: I1009 15:05:40.096823 4762 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/a81dbbbf-52e2-48ba-859c-420f3549833d-additional-scripts\") on node \"crc\" DevicePath \"\"" Oct 09 15:05:40 crc kubenswrapper[4762]: I1009 15:05:40.096871 4762 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a81dbbbf-52e2-48ba-859c-420f3549833d-scripts\") on node \"crc\" DevicePath \"\"" Oct 09 15:05:40 crc kubenswrapper[4762]: I1009 15:05:40.096883 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r5l72\" (UniqueName: \"kubernetes.io/projected/a81dbbbf-52e2-48ba-859c-420f3549833d-kube-api-access-r5l72\") on node \"crc\" DevicePath \"\"" Oct 09 15:05:40 crc kubenswrapper[4762]: I1009 15:05:40.403566 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-jphs8-config-7dlzm" event={"ID":"a81dbbbf-52e2-48ba-859c-420f3549833d","Type":"ContainerDied","Data":"64a9f1c06d94380b7bd37f923a5b1e6286b8384aeb278bdc61570a2a672c17e8"} Oct 09 15:05:40 crc kubenswrapper[4762]: I1009 15:05:40.403944 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="64a9f1c06d94380b7bd37f923a5b1e6286b8384aeb278bdc61570a2a672c17e8" Oct 09 15:05:40 crc kubenswrapper[4762]: I1009 15:05:40.404039 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-jphs8-config-7dlzm" Oct 09 15:05:41 crc kubenswrapper[4762]: I1009 15:05:41.024053 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-jphs8-config-7dlzm"] Oct 09 15:05:41 crc kubenswrapper[4762]: I1009 15:05:41.034155 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-jphs8-config-7dlzm"] Oct 09 15:05:41 crc kubenswrapper[4762]: I1009 15:05:41.969291 4762 patch_prober.go:28] interesting pod/machine-config-daemon-5v6hv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 15:05:41 crc kubenswrapper[4762]: I1009 15:05:41.969383 4762 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 15:05:41 crc kubenswrapper[4762]: I1009 15:05:41.969451 4762 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" Oct 09 15:05:41 crc kubenswrapper[4762]: I1009 15:05:41.971047 4762 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"35c7aebddc7d19d2cc79200fbf40a9f94ad48013c10b612bd476e919aed06c38"} pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 09 15:05:41 crc kubenswrapper[4762]: I1009 15:05:41.971135 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" 
podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" containerID="cri-o://35c7aebddc7d19d2cc79200fbf40a9f94ad48013c10b612bd476e919aed06c38" gracePeriod=600 Oct 09 15:05:42 crc kubenswrapper[4762]: I1009 15:05:42.427987 4762 generic.go:334] "Generic (PLEG): container finished" podID="366049a3-acf6-488c-9f93-4557528d6d14" containerID="35c7aebddc7d19d2cc79200fbf40a9f94ad48013c10b612bd476e919aed06c38" exitCode=0 Oct 09 15:05:42 crc kubenswrapper[4762]: I1009 15:05:42.428101 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" event={"ID":"366049a3-acf6-488c-9f93-4557528d6d14","Type":"ContainerDied","Data":"35c7aebddc7d19d2cc79200fbf40a9f94ad48013c10b612bd476e919aed06c38"} Oct 09 15:05:42 crc kubenswrapper[4762]: I1009 15:05:42.428313 4762 scope.go:117] "RemoveContainer" containerID="1a0181075c60fb6cad88970ff9a288267c74d0e4b2ca24f09d7ac8986dc7d5ce" Oct 09 15:05:42 crc kubenswrapper[4762]: I1009 15:05:42.979210 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a81dbbbf-52e2-48ba-859c-420f3549833d" path="/var/lib/kubelet/pods/a81dbbbf-52e2-48ba-859c-420f3549833d/volumes" Oct 09 15:05:46 crc kubenswrapper[4762]: E1009 15:05:46.537918 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:05:47 crc kubenswrapper[4762]: I1009 15:05:47.479677 4762 scope.go:117] "RemoveContainer" containerID="35c7aebddc7d19d2cc79200fbf40a9f94ad48013c10b612bd476e919aed06c38" Oct 09 15:05:47 crc kubenswrapper[4762]: I1009 15:05:47.480673 4762 generic.go:334] "Generic (PLEG): container finished" podID="824b22e5-15dc-4560-be38-879a39e3175f" containerID="1fde2f9781eb4b84affd63969694e2cd5a4f4ac305a5598641957177bc9c18e8" exitCode=0 Oct 09 15:05:47 crc kubenswrapper[4762]: I1009 15:05:47.480720 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-api-6fdffd4774-9qpx5" event={"ID":"824b22e5-15dc-4560-be38-879a39e3175f","Type":"ContainerDied","Data":"1fde2f9781eb4b84affd63969694e2cd5a4f4ac305a5598641957177bc9c18e8"} Oct 09 15:05:47 crc kubenswrapper[4762]: E1009 15:05:47.480904 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:05:48 crc kubenswrapper[4762]: I1009 15:05:48.491807 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-api-6fdffd4774-9qpx5" event={"ID":"824b22e5-15dc-4560-be38-879a39e3175f","Type":"ContainerStarted","Data":"13e83aa7b25bedead63f59bb532e500b8945e947746d85bf397f89da998db90f"} Oct 09 15:05:48 crc kubenswrapper[4762]: I1009 15:05:48.493148 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-api-6fdffd4774-9qpx5" 
event={"ID":"824b22e5-15dc-4560-be38-879a39e3175f","Type":"ContainerStarted","Data":"fd7ff18f245e9ba73c6fdf7d26b7bde9485166400e04f6b89910a94cbb6d8d0b"} Oct 09 15:05:48 crc kubenswrapper[4762]: I1009 15:05:48.493290 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/octavia-api-6fdffd4774-9qpx5" Oct 09 15:05:48 crc kubenswrapper[4762]: I1009 15:05:48.493393 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/octavia-api-6fdffd4774-9qpx5" Oct 09 15:05:48 crc kubenswrapper[4762]: I1009 15:05:48.523752 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/octavia-api-6fdffd4774-9qpx5" podStartSLOduration=2.859277014 podStartE2EDuration="11.523729527s" podCreationTimestamp="2025-10-09 15:05:37 +0000 UTC" firstStartedPulling="2025-10-09 15:05:37.937249017 +0000 UTC m=+6013.711040056" lastFinishedPulling="2025-10-09 15:05:46.60170152 +0000 UTC m=+6022.375492569" observedRunningTime="2025-10-09 15:05:48.51651676 +0000 UTC m=+6024.290307799" watchObservedRunningTime="2025-10-09 15:05:48.523729527 +0000 UTC m=+6024.297520566" Oct 09 15:05:56 crc kubenswrapper[4762]: I1009 15:05:56.640097 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/octavia-api-6fdffd4774-9qpx5" Oct 09 15:05:56 crc kubenswrapper[4762]: I1009 15:05:56.932982 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/octavia-api-6fdffd4774-9qpx5" Oct 09 15:05:59 crc kubenswrapper[4762]: I1009 15:05:59.458117 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-rsyslog-j9l4k"] Oct 09 15:05:59 crc kubenswrapper[4762]: E1009 15:05:59.458839 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a81dbbbf-52e2-48ba-859c-420f3549833d" containerName="ovn-config" Oct 09 15:05:59 crc kubenswrapper[4762]: I1009 15:05:59.458858 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="a81dbbbf-52e2-48ba-859c-420f3549833d" containerName="ovn-config" Oct 09 15:05:59 crc kubenswrapper[4762]: I1009 15:05:59.459044 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="a81dbbbf-52e2-48ba-859c-420f3549833d" containerName="ovn-config" Oct 09 15:05:59 crc kubenswrapper[4762]: I1009 15:05:59.460096 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-rsyslog-j9l4k" Oct 09 15:05:59 crc kubenswrapper[4762]: I1009 15:05:59.462199 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-rsyslog-config-data" Oct 09 15:05:59 crc kubenswrapper[4762]: I1009 15:05:59.464610 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"octavia-hmport-map" Oct 09 15:05:59 crc kubenswrapper[4762]: I1009 15:05:59.466140 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-rsyslog-scripts" Oct 09 15:05:59 crc kubenswrapper[4762]: I1009 15:05:59.480826 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-rsyslog-j9l4k"] Oct 09 15:05:59 crc kubenswrapper[4762]: I1009 15:05:59.526276 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/417ca96a-987f-4daf-aacf-1115ce687ca3-config-data-merged\") pod \"octavia-rsyslog-j9l4k\" (UID: \"417ca96a-987f-4daf-aacf-1115ce687ca3\") " pod="openstack/octavia-rsyslog-j9l4k" Oct 09 15:05:59 crc kubenswrapper[4762]: I1009 15:05:59.526338 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/417ca96a-987f-4daf-aacf-1115ce687ca3-scripts\") pod \"octavia-rsyslog-j9l4k\" (UID: \"417ca96a-987f-4daf-aacf-1115ce687ca3\") " pod="openstack/octavia-rsyslog-j9l4k" Oct 09 15:05:59 crc kubenswrapper[4762]: I1009 15:05:59.526389 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/417ca96a-987f-4daf-aacf-1115ce687ca3-hm-ports\") pod \"octavia-rsyslog-j9l4k\" (UID: \"417ca96a-987f-4daf-aacf-1115ce687ca3\") " pod="openstack/octavia-rsyslog-j9l4k" Oct 09 15:05:59 crc kubenswrapper[4762]: I1009 15:05:59.526525 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/417ca96a-987f-4daf-aacf-1115ce687ca3-config-data\") pod \"octavia-rsyslog-j9l4k\" (UID: \"417ca96a-987f-4daf-aacf-1115ce687ca3\") " pod="openstack/octavia-rsyslog-j9l4k" Oct 09 15:05:59 crc kubenswrapper[4762]: I1009 15:05:59.629688 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/417ca96a-987f-4daf-aacf-1115ce687ca3-config-data-merged\") pod \"octavia-rsyslog-j9l4k\" (UID: \"417ca96a-987f-4daf-aacf-1115ce687ca3\") " pod="openstack/octavia-rsyslog-j9l4k" Oct 09 15:05:59 crc kubenswrapper[4762]: I1009 15:05:59.629756 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/417ca96a-987f-4daf-aacf-1115ce687ca3-scripts\") pod \"octavia-rsyslog-j9l4k\" (UID: \"417ca96a-987f-4daf-aacf-1115ce687ca3\") " pod="openstack/octavia-rsyslog-j9l4k" Oct 09 15:05:59 crc kubenswrapper[4762]: I1009 15:05:59.629800 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/417ca96a-987f-4daf-aacf-1115ce687ca3-hm-ports\") pod \"octavia-rsyslog-j9l4k\" (UID: \"417ca96a-987f-4daf-aacf-1115ce687ca3\") " pod="openstack/octavia-rsyslog-j9l4k" Oct 09 15:05:59 crc kubenswrapper[4762]: I1009 15:05:59.629836 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/417ca96a-987f-4daf-aacf-1115ce687ca3-config-data\") pod \"octavia-rsyslog-j9l4k\" (UID: \"417ca96a-987f-4daf-aacf-1115ce687ca3\") " pod="openstack/octavia-rsyslog-j9l4k" Oct 09 15:05:59 crc kubenswrapper[4762]: I1009 15:05:59.631026 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/417ca96a-987f-4daf-aacf-1115ce687ca3-config-data-merged\") pod \"octavia-rsyslog-j9l4k\" (UID: \"417ca96a-987f-4daf-aacf-1115ce687ca3\") " pod="openstack/octavia-rsyslog-j9l4k" Oct 09 15:05:59 crc kubenswrapper[4762]: I1009 15:05:59.631811 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/417ca96a-987f-4daf-aacf-1115ce687ca3-hm-ports\") pod \"octavia-rsyslog-j9l4k\" (UID: \"417ca96a-987f-4daf-aacf-1115ce687ca3\") " pod="openstack/octavia-rsyslog-j9l4k" Oct 09 15:05:59 crc kubenswrapper[4762]: I1009 15:05:59.637361 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/417ca96a-987f-4daf-aacf-1115ce687ca3-config-data\") pod \"octavia-rsyslog-j9l4k\" (UID: \"417ca96a-987f-4daf-aacf-1115ce687ca3\") " pod="openstack/octavia-rsyslog-j9l4k" Oct 09 15:05:59 crc kubenswrapper[4762]: I1009 15:05:59.638024 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/417ca96a-987f-4daf-aacf-1115ce687ca3-scripts\") pod \"octavia-rsyslog-j9l4k\" (UID: \"417ca96a-987f-4daf-aacf-1115ce687ca3\") " pod="openstack/octavia-rsyslog-j9l4k" Oct 09 15:05:59 crc kubenswrapper[4762]: I1009 15:05:59.796753 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-rsyslog-j9l4k" Oct 09 15:06:00 crc kubenswrapper[4762]: I1009 15:06:00.380004 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-rsyslog-j9l4k"] Oct 09 15:06:00 crc kubenswrapper[4762]: I1009 15:06:00.540617 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-image-upload-59f8cff499-c7zrf"] Oct 09 15:06:00 crc kubenswrapper[4762]: I1009 15:06:00.542903 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-image-upload-59f8cff499-c7zrf" Oct 09 15:06:00 crc kubenswrapper[4762]: I1009 15:06:00.546618 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-config-data" Oct 09 15:06:00 crc kubenswrapper[4762]: I1009 15:06:00.584861 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-image-upload-59f8cff499-c7zrf"] Oct 09 15:06:00 crc kubenswrapper[4762]: I1009 15:06:00.628258 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-rsyslog-j9l4k" event={"ID":"417ca96a-987f-4daf-aacf-1115ce687ca3","Type":"ContainerStarted","Data":"8c271ca278b7930280f240ed93cebe90ab4a04515161ee1c4f6d6519ce9b4013"} Oct 09 15:06:00 crc kubenswrapper[4762]: I1009 15:06:00.648898 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"amphora-image\" (UniqueName: \"kubernetes.io/empty-dir/305ad0a3-36f8-4b79-a031-e6f0bec6f888-amphora-image\") pod \"octavia-image-upload-59f8cff499-c7zrf\" (UID: \"305ad0a3-36f8-4b79-a031-e6f0bec6f888\") " pod="openstack/octavia-image-upload-59f8cff499-c7zrf" Oct 09 15:06:00 crc kubenswrapper[4762]: I1009 15:06:00.649192 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/305ad0a3-36f8-4b79-a031-e6f0bec6f888-httpd-config\") pod \"octavia-image-upload-59f8cff499-c7zrf\" (UID: \"305ad0a3-36f8-4b79-a031-e6f0bec6f888\") " pod="openstack/octavia-image-upload-59f8cff499-c7zrf" Oct 09 15:06:00 crc kubenswrapper[4762]: I1009 15:06:00.752758 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/305ad0a3-36f8-4b79-a031-e6f0bec6f888-httpd-config\") pod \"octavia-image-upload-59f8cff499-c7zrf\" (UID: \"305ad0a3-36f8-4b79-a031-e6f0bec6f888\") " pod="openstack/octavia-image-upload-59f8cff499-c7zrf" Oct 09 15:06:00 crc kubenswrapper[4762]: I1009 15:06:00.752820 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"amphora-image\" (UniqueName: \"kubernetes.io/empty-dir/305ad0a3-36f8-4b79-a031-e6f0bec6f888-amphora-image\") pod \"octavia-image-upload-59f8cff499-c7zrf\" (UID: \"305ad0a3-36f8-4b79-a031-e6f0bec6f888\") " pod="openstack/octavia-image-upload-59f8cff499-c7zrf" Oct 09 15:06:00 crc kubenswrapper[4762]: I1009 15:06:00.753251 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"amphora-image\" (UniqueName: \"kubernetes.io/empty-dir/305ad0a3-36f8-4b79-a031-e6f0bec6f888-amphora-image\") pod \"octavia-image-upload-59f8cff499-c7zrf\" (UID: \"305ad0a3-36f8-4b79-a031-e6f0bec6f888\") " pod="openstack/octavia-image-upload-59f8cff499-c7zrf" Oct 09 15:06:00 crc kubenswrapper[4762]: I1009 15:06:00.759137 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/305ad0a3-36f8-4b79-a031-e6f0bec6f888-httpd-config\") pod \"octavia-image-upload-59f8cff499-c7zrf\" (UID: \"305ad0a3-36f8-4b79-a031-e6f0bec6f888\") " pod="openstack/octavia-image-upload-59f8cff499-c7zrf" Oct 09 15:06:00 crc kubenswrapper[4762]: I1009 15:06:00.881759 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-image-upload-59f8cff499-c7zrf" Oct 09 15:06:01 crc kubenswrapper[4762]: I1009 15:06:01.414479 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-image-upload-59f8cff499-c7zrf"] Oct 09 15:06:01 crc kubenswrapper[4762]: W1009 15:06:01.418863 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod305ad0a3_36f8_4b79_a031_e6f0bec6f888.slice/crio-240bb7238b4d7aed47041cc69050b853fcbe90ab3cf7efc92cba80d5c778647e WatchSource:0}: Error finding container 240bb7238b4d7aed47041cc69050b853fcbe90ab3cf7efc92cba80d5c778647e: Status 404 returned error can't find the container with id 240bb7238b4d7aed47041cc69050b853fcbe90ab3cf7efc92cba80d5c778647e Oct 09 15:06:01 crc kubenswrapper[4762]: I1009 15:06:01.640474 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-59f8cff499-c7zrf" event={"ID":"305ad0a3-36f8-4b79-a031-e6f0bec6f888","Type":"ContainerStarted","Data":"240bb7238b4d7aed47041cc69050b853fcbe90ab3cf7efc92cba80d5c778647e"} Oct 09 15:06:01 crc kubenswrapper[4762]: I1009 15:06:01.965904 4762 scope.go:117] "RemoveContainer" containerID="35c7aebddc7d19d2cc79200fbf40a9f94ad48013c10b612bd476e919aed06c38" Oct 09 15:06:01 crc kubenswrapper[4762]: E1009 15:06:01.967150 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:06:02 crc kubenswrapper[4762]: I1009 15:06:02.665964 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-rsyslog-j9l4k" event={"ID":"417ca96a-987f-4daf-aacf-1115ce687ca3","Type":"ContainerStarted","Data":"df2b000c5747579d1b7991f0a36738848f3abe7c44881a83f6b824c6cae2026a"} Oct 09 15:06:05 crc kubenswrapper[4762]: I1009 15:06:05.705914 4762 generic.go:334] "Generic (PLEG): container finished" podID="417ca96a-987f-4daf-aacf-1115ce687ca3" containerID="df2b000c5747579d1b7991f0a36738848f3abe7c44881a83f6b824c6cae2026a" exitCode=0 Oct 09 15:06:05 crc kubenswrapper[4762]: I1009 15:06:05.706490 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-rsyslog-j9l4k" event={"ID":"417ca96a-987f-4daf-aacf-1115ce687ca3","Type":"ContainerDied","Data":"df2b000c5747579d1b7991f0a36738848f3abe7c44881a83f6b824c6cae2026a"} Oct 09 15:06:06 crc kubenswrapper[4762]: I1009 15:06:06.571684 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-healthmanager-ws564"] Oct 09 15:06:06 crc kubenswrapper[4762]: I1009 15:06:06.585308 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-healthmanager-ws564" Oct 09 15:06:06 crc kubenswrapper[4762]: I1009 15:06:06.596119 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-certs-secret" Oct 09 15:06:06 crc kubenswrapper[4762]: I1009 15:06:06.596689 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-healthmanager-scripts" Oct 09 15:06:06 crc kubenswrapper[4762]: I1009 15:06:06.596873 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-healthmanager-config-data" Oct 09 15:06:06 crc kubenswrapper[4762]: I1009 15:06:06.597242 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-healthmanager-ws564"] Oct 09 15:06:06 crc kubenswrapper[4762]: I1009 15:06:06.726585 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4369b0bf-d12c-4398-b07f-554fef20c094-config-data\") pod \"octavia-healthmanager-ws564\" (UID: \"4369b0bf-d12c-4398-b07f-554fef20c094\") " pod="openstack/octavia-healthmanager-ws564" Oct 09 15:06:06 crc kubenswrapper[4762]: I1009 15:06:06.726725 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/4369b0bf-d12c-4398-b07f-554fef20c094-hm-ports\") pod \"octavia-healthmanager-ws564\" (UID: \"4369b0bf-d12c-4398-b07f-554fef20c094\") " pod="openstack/octavia-healthmanager-ws564" Oct 09 15:06:06 crc kubenswrapper[4762]: I1009 15:06:06.726799 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/4369b0bf-d12c-4398-b07f-554fef20c094-amphora-certs\") pod \"octavia-healthmanager-ws564\" (UID: \"4369b0bf-d12c-4398-b07f-554fef20c094\") " pod="openstack/octavia-healthmanager-ws564" Oct 09 15:06:06 crc kubenswrapper[4762]: I1009 15:06:06.726867 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4369b0bf-d12c-4398-b07f-554fef20c094-scripts\") pod \"octavia-healthmanager-ws564\" (UID: \"4369b0bf-d12c-4398-b07f-554fef20c094\") " pod="openstack/octavia-healthmanager-ws564" Oct 09 15:06:06 crc kubenswrapper[4762]: I1009 15:06:06.726904 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/4369b0bf-d12c-4398-b07f-554fef20c094-config-data-merged\") pod \"octavia-healthmanager-ws564\" (UID: \"4369b0bf-d12c-4398-b07f-554fef20c094\") " pod="openstack/octavia-healthmanager-ws564" Oct 09 15:06:06 crc kubenswrapper[4762]: I1009 15:06:06.726953 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4369b0bf-d12c-4398-b07f-554fef20c094-combined-ca-bundle\") pod \"octavia-healthmanager-ws564\" (UID: \"4369b0bf-d12c-4398-b07f-554fef20c094\") " pod="openstack/octavia-healthmanager-ws564" Oct 09 15:06:06 crc kubenswrapper[4762]: I1009 15:06:06.827864 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/4369b0bf-d12c-4398-b07f-554fef20c094-amphora-certs\") pod \"octavia-healthmanager-ws564\" (UID: \"4369b0bf-d12c-4398-b07f-554fef20c094\") " pod="openstack/octavia-healthmanager-ws564" Oct 09 15:06:06 
crc kubenswrapper[4762]: I1009 15:06:06.827939 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4369b0bf-d12c-4398-b07f-554fef20c094-scripts\") pod \"octavia-healthmanager-ws564\" (UID: \"4369b0bf-d12c-4398-b07f-554fef20c094\") " pod="openstack/octavia-healthmanager-ws564" Oct 09 15:06:06 crc kubenswrapper[4762]: I1009 15:06:06.827971 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/4369b0bf-d12c-4398-b07f-554fef20c094-config-data-merged\") pod \"octavia-healthmanager-ws564\" (UID: \"4369b0bf-d12c-4398-b07f-554fef20c094\") " pod="openstack/octavia-healthmanager-ws564" Oct 09 15:06:06 crc kubenswrapper[4762]: I1009 15:06:06.828423 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/4369b0bf-d12c-4398-b07f-554fef20c094-config-data-merged\") pod \"octavia-healthmanager-ws564\" (UID: \"4369b0bf-d12c-4398-b07f-554fef20c094\") " pod="openstack/octavia-healthmanager-ws564" Oct 09 15:06:06 crc kubenswrapper[4762]: I1009 15:06:06.829219 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4369b0bf-d12c-4398-b07f-554fef20c094-combined-ca-bundle\") pod \"octavia-healthmanager-ws564\" (UID: \"4369b0bf-d12c-4398-b07f-554fef20c094\") " pod="openstack/octavia-healthmanager-ws564" Oct 09 15:06:06 crc kubenswrapper[4762]: I1009 15:06:06.829623 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4369b0bf-d12c-4398-b07f-554fef20c094-config-data\") pod \"octavia-healthmanager-ws564\" (UID: \"4369b0bf-d12c-4398-b07f-554fef20c094\") " pod="openstack/octavia-healthmanager-ws564" Oct 09 15:06:06 crc kubenswrapper[4762]: I1009 15:06:06.829724 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/4369b0bf-d12c-4398-b07f-554fef20c094-hm-ports\") pod \"octavia-healthmanager-ws564\" (UID: \"4369b0bf-d12c-4398-b07f-554fef20c094\") " pod="openstack/octavia-healthmanager-ws564" Oct 09 15:06:06 crc kubenswrapper[4762]: I1009 15:06:06.830809 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/4369b0bf-d12c-4398-b07f-554fef20c094-hm-ports\") pod \"octavia-healthmanager-ws564\" (UID: \"4369b0bf-d12c-4398-b07f-554fef20c094\") " pod="openstack/octavia-healthmanager-ws564" Oct 09 15:06:06 crc kubenswrapper[4762]: I1009 15:06:06.834785 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/4369b0bf-d12c-4398-b07f-554fef20c094-amphora-certs\") pod \"octavia-healthmanager-ws564\" (UID: \"4369b0bf-d12c-4398-b07f-554fef20c094\") " pod="openstack/octavia-healthmanager-ws564" Oct 09 15:06:06 crc kubenswrapper[4762]: I1009 15:06:06.835210 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4369b0bf-d12c-4398-b07f-554fef20c094-scripts\") pod \"octavia-healthmanager-ws564\" (UID: \"4369b0bf-d12c-4398-b07f-554fef20c094\") " pod="openstack/octavia-healthmanager-ws564" Oct 09 15:06:06 crc kubenswrapper[4762]: I1009 15:06:06.835893 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" 
(UniqueName: \"kubernetes.io/secret/4369b0bf-d12c-4398-b07f-554fef20c094-combined-ca-bundle\") pod \"octavia-healthmanager-ws564\" (UID: \"4369b0bf-d12c-4398-b07f-554fef20c094\") " pod="openstack/octavia-healthmanager-ws564" Oct 09 15:06:06 crc kubenswrapper[4762]: I1009 15:06:06.835892 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4369b0bf-d12c-4398-b07f-554fef20c094-config-data\") pod \"octavia-healthmanager-ws564\" (UID: \"4369b0bf-d12c-4398-b07f-554fef20c094\") " pod="openstack/octavia-healthmanager-ws564" Oct 09 15:06:06 crc kubenswrapper[4762]: I1009 15:06:06.913294 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-healthmanager-ws564" Oct 09 15:06:07 crc kubenswrapper[4762]: I1009 15:06:07.567355 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-healthmanager-ws564"] Oct 09 15:06:07 crc kubenswrapper[4762]: I1009 15:06:07.730241 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-healthmanager-ws564" event={"ID":"4369b0bf-d12c-4398-b07f-554fef20c094","Type":"ContainerStarted","Data":"3e97f0426b8e45324caa73886d794aa5fc324eda5162403628802aaeae79903a"} Oct 09 15:06:08 crc kubenswrapper[4762]: I1009 15:06:08.365117 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-housekeeping-czjdc"] Oct 09 15:06:08 crc kubenswrapper[4762]: I1009 15:06:08.367484 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-housekeeping-czjdc" Oct 09 15:06:08 crc kubenswrapper[4762]: I1009 15:06:08.370485 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-housekeeping-scripts" Oct 09 15:06:08 crc kubenswrapper[4762]: I1009 15:06:08.370711 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-housekeeping-config-data" Oct 09 15:06:08 crc kubenswrapper[4762]: I1009 15:06:08.398008 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-housekeeping-czjdc"] Oct 09 15:06:08 crc kubenswrapper[4762]: I1009 15:06:08.472075 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3fe23699-5018-4d7a-8f7f-102303da2fef-config-data\") pod \"octavia-housekeeping-czjdc\" (UID: \"3fe23699-5018-4d7a-8f7f-102303da2fef\") " pod="openstack/octavia-housekeeping-czjdc" Oct 09 15:06:08 crc kubenswrapper[4762]: I1009 15:06:08.472139 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/3fe23699-5018-4d7a-8f7f-102303da2fef-hm-ports\") pod \"octavia-housekeeping-czjdc\" (UID: \"3fe23699-5018-4d7a-8f7f-102303da2fef\") " pod="openstack/octavia-housekeeping-czjdc" Oct 09 15:06:08 crc kubenswrapper[4762]: I1009 15:06:08.472285 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/3fe23699-5018-4d7a-8f7f-102303da2fef-amphora-certs\") pod \"octavia-housekeeping-czjdc\" (UID: \"3fe23699-5018-4d7a-8f7f-102303da2fef\") " pod="openstack/octavia-housekeeping-czjdc" Oct 09 15:06:08 crc kubenswrapper[4762]: I1009 15:06:08.472392 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/3fe23699-5018-4d7a-8f7f-102303da2fef-combined-ca-bundle\") pod \"octavia-housekeeping-czjdc\" (UID: \"3fe23699-5018-4d7a-8f7f-102303da2fef\") " pod="openstack/octavia-housekeeping-czjdc" Oct 09 15:06:08 crc kubenswrapper[4762]: I1009 15:06:08.472436 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/3fe23699-5018-4d7a-8f7f-102303da2fef-config-data-merged\") pod \"octavia-housekeeping-czjdc\" (UID: \"3fe23699-5018-4d7a-8f7f-102303da2fef\") " pod="openstack/octavia-housekeeping-czjdc" Oct 09 15:06:08 crc kubenswrapper[4762]: I1009 15:06:08.472529 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3fe23699-5018-4d7a-8f7f-102303da2fef-scripts\") pod \"octavia-housekeeping-czjdc\" (UID: \"3fe23699-5018-4d7a-8f7f-102303da2fef\") " pod="openstack/octavia-housekeeping-czjdc" Oct 09 15:06:08 crc kubenswrapper[4762]: I1009 15:06:08.574314 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/3fe23699-5018-4d7a-8f7f-102303da2fef-amphora-certs\") pod \"octavia-housekeeping-czjdc\" (UID: \"3fe23699-5018-4d7a-8f7f-102303da2fef\") " pod="openstack/octavia-housekeeping-czjdc" Oct 09 15:06:08 crc kubenswrapper[4762]: I1009 15:06:08.574383 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3fe23699-5018-4d7a-8f7f-102303da2fef-combined-ca-bundle\") pod \"octavia-housekeeping-czjdc\" (UID: \"3fe23699-5018-4d7a-8f7f-102303da2fef\") " pod="openstack/octavia-housekeeping-czjdc" Oct 09 15:06:08 crc kubenswrapper[4762]: I1009 15:06:08.574409 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/3fe23699-5018-4d7a-8f7f-102303da2fef-config-data-merged\") pod \"octavia-housekeeping-czjdc\" (UID: \"3fe23699-5018-4d7a-8f7f-102303da2fef\") " pod="openstack/octavia-housekeeping-czjdc" Oct 09 15:06:08 crc kubenswrapper[4762]: I1009 15:06:08.574453 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3fe23699-5018-4d7a-8f7f-102303da2fef-scripts\") pod \"octavia-housekeeping-czjdc\" (UID: \"3fe23699-5018-4d7a-8f7f-102303da2fef\") " pod="openstack/octavia-housekeeping-czjdc" Oct 09 15:06:08 crc kubenswrapper[4762]: I1009 15:06:08.574577 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3fe23699-5018-4d7a-8f7f-102303da2fef-config-data\") pod \"octavia-housekeeping-czjdc\" (UID: \"3fe23699-5018-4d7a-8f7f-102303da2fef\") " pod="openstack/octavia-housekeeping-czjdc" Oct 09 15:06:08 crc kubenswrapper[4762]: I1009 15:06:08.574615 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/3fe23699-5018-4d7a-8f7f-102303da2fef-hm-ports\") pod \"octavia-housekeeping-czjdc\" (UID: \"3fe23699-5018-4d7a-8f7f-102303da2fef\") " pod="openstack/octavia-housekeeping-czjdc" Oct 09 15:06:08 crc kubenswrapper[4762]: I1009 15:06:08.575876 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/3fe23699-5018-4d7a-8f7f-102303da2fef-config-data-merged\") pod 
\"octavia-housekeeping-czjdc\" (UID: \"3fe23699-5018-4d7a-8f7f-102303da2fef\") " pod="openstack/octavia-housekeeping-czjdc" Oct 09 15:06:08 crc kubenswrapper[4762]: I1009 15:06:08.576380 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/3fe23699-5018-4d7a-8f7f-102303da2fef-hm-ports\") pod \"octavia-housekeeping-czjdc\" (UID: \"3fe23699-5018-4d7a-8f7f-102303da2fef\") " pod="openstack/octavia-housekeeping-czjdc" Oct 09 15:06:08 crc kubenswrapper[4762]: I1009 15:06:08.580739 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3fe23699-5018-4d7a-8f7f-102303da2fef-combined-ca-bundle\") pod \"octavia-housekeeping-czjdc\" (UID: \"3fe23699-5018-4d7a-8f7f-102303da2fef\") " pod="openstack/octavia-housekeeping-czjdc" Oct 09 15:06:08 crc kubenswrapper[4762]: I1009 15:06:08.580812 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/3fe23699-5018-4d7a-8f7f-102303da2fef-amphora-certs\") pod \"octavia-housekeeping-czjdc\" (UID: \"3fe23699-5018-4d7a-8f7f-102303da2fef\") " pod="openstack/octavia-housekeeping-czjdc" Oct 09 15:06:08 crc kubenswrapper[4762]: I1009 15:06:08.580909 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3fe23699-5018-4d7a-8f7f-102303da2fef-config-data\") pod \"octavia-housekeeping-czjdc\" (UID: \"3fe23699-5018-4d7a-8f7f-102303da2fef\") " pod="openstack/octavia-housekeeping-czjdc" Oct 09 15:06:08 crc kubenswrapper[4762]: I1009 15:06:08.580959 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3fe23699-5018-4d7a-8f7f-102303da2fef-scripts\") pod \"octavia-housekeeping-czjdc\" (UID: \"3fe23699-5018-4d7a-8f7f-102303da2fef\") " pod="openstack/octavia-housekeeping-czjdc" Oct 09 15:06:08 crc kubenswrapper[4762]: I1009 15:06:08.697264 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-housekeeping-czjdc" Oct 09 15:06:08 crc kubenswrapper[4762]: I1009 15:06:08.742914 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-healthmanager-ws564" event={"ID":"4369b0bf-d12c-4398-b07f-554fef20c094","Type":"ContainerStarted","Data":"75e3dee4cbab2b374b8a5dde1c6fbb0d97c71f1756ce724e50d425879aa6fadc"} Oct 09 15:06:10 crc kubenswrapper[4762]: I1009 15:06:10.313199 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-db-sync-xp9xs"] Oct 09 15:06:10 crc kubenswrapper[4762]: I1009 15:06:10.315968 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-db-sync-xp9xs" Oct 09 15:06:10 crc kubenswrapper[4762]: I1009 15:06:10.321585 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-scripts" Oct 09 15:06:10 crc kubenswrapper[4762]: I1009 15:06:10.344415 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-db-sync-xp9xs"] Oct 09 15:06:10 crc kubenswrapper[4762]: I1009 15:06:10.413010 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/32e5f6a8-e10a-43f3-a35f-69803d46fdee-config-data\") pod \"octavia-db-sync-xp9xs\" (UID: \"32e5f6a8-e10a-43f3-a35f-69803d46fdee\") " pod="openstack/octavia-db-sync-xp9xs" Oct 09 15:06:10 crc kubenswrapper[4762]: I1009 15:06:10.413286 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/32e5f6a8-e10a-43f3-a35f-69803d46fdee-config-data-merged\") pod \"octavia-db-sync-xp9xs\" (UID: \"32e5f6a8-e10a-43f3-a35f-69803d46fdee\") " pod="openstack/octavia-db-sync-xp9xs" Oct 09 15:06:10 crc kubenswrapper[4762]: I1009 15:06:10.413329 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32e5f6a8-e10a-43f3-a35f-69803d46fdee-combined-ca-bundle\") pod \"octavia-db-sync-xp9xs\" (UID: \"32e5f6a8-e10a-43f3-a35f-69803d46fdee\") " pod="openstack/octavia-db-sync-xp9xs" Oct 09 15:06:10 crc kubenswrapper[4762]: I1009 15:06:10.413455 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/32e5f6a8-e10a-43f3-a35f-69803d46fdee-scripts\") pod \"octavia-db-sync-xp9xs\" (UID: \"32e5f6a8-e10a-43f3-a35f-69803d46fdee\") " pod="openstack/octavia-db-sync-xp9xs" Oct 09 15:06:10 crc kubenswrapper[4762]: I1009 15:06:10.515400 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32e5f6a8-e10a-43f3-a35f-69803d46fdee-combined-ca-bundle\") pod \"octavia-db-sync-xp9xs\" (UID: \"32e5f6a8-e10a-43f3-a35f-69803d46fdee\") " pod="openstack/octavia-db-sync-xp9xs" Oct 09 15:06:10 crc kubenswrapper[4762]: I1009 15:06:10.515532 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/32e5f6a8-e10a-43f3-a35f-69803d46fdee-scripts\") pod \"octavia-db-sync-xp9xs\" (UID: \"32e5f6a8-e10a-43f3-a35f-69803d46fdee\") " pod="openstack/octavia-db-sync-xp9xs" Oct 09 15:06:10 crc kubenswrapper[4762]: I1009 15:06:10.515586 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/32e5f6a8-e10a-43f3-a35f-69803d46fdee-config-data\") pod \"octavia-db-sync-xp9xs\" (UID: \"32e5f6a8-e10a-43f3-a35f-69803d46fdee\") " pod="openstack/octavia-db-sync-xp9xs" Oct 09 15:06:10 crc kubenswrapper[4762]: I1009 15:06:10.515658 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/32e5f6a8-e10a-43f3-a35f-69803d46fdee-config-data-merged\") pod \"octavia-db-sync-xp9xs\" (UID: \"32e5f6a8-e10a-43f3-a35f-69803d46fdee\") " pod="openstack/octavia-db-sync-xp9xs" Oct 09 15:06:10 crc kubenswrapper[4762]: I1009 15:06:10.516106 4762 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/32e5f6a8-e10a-43f3-a35f-69803d46fdee-config-data-merged\") pod \"octavia-db-sync-xp9xs\" (UID: \"32e5f6a8-e10a-43f3-a35f-69803d46fdee\") " pod="openstack/octavia-db-sync-xp9xs" Oct 09 15:06:10 crc kubenswrapper[4762]: I1009 15:06:10.522556 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32e5f6a8-e10a-43f3-a35f-69803d46fdee-combined-ca-bundle\") pod \"octavia-db-sync-xp9xs\" (UID: \"32e5f6a8-e10a-43f3-a35f-69803d46fdee\") " pod="openstack/octavia-db-sync-xp9xs" Oct 09 15:06:10 crc kubenswrapper[4762]: I1009 15:06:10.523914 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/32e5f6a8-e10a-43f3-a35f-69803d46fdee-scripts\") pod \"octavia-db-sync-xp9xs\" (UID: \"32e5f6a8-e10a-43f3-a35f-69803d46fdee\") " pod="openstack/octavia-db-sync-xp9xs" Oct 09 15:06:10 crc kubenswrapper[4762]: I1009 15:06:10.538554 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/32e5f6a8-e10a-43f3-a35f-69803d46fdee-config-data\") pod \"octavia-db-sync-xp9xs\" (UID: \"32e5f6a8-e10a-43f3-a35f-69803d46fdee\") " pod="openstack/octavia-db-sync-xp9xs" Oct 09 15:06:10 crc kubenswrapper[4762]: I1009 15:06:10.655105 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-db-sync-xp9xs" Oct 09 15:06:10 crc kubenswrapper[4762]: I1009 15:06:10.762851 4762 generic.go:334] "Generic (PLEG): container finished" podID="4369b0bf-d12c-4398-b07f-554fef20c094" containerID="75e3dee4cbab2b374b8a5dde1c6fbb0d97c71f1756ce724e50d425879aa6fadc" exitCode=0 Oct 09 15:06:10 crc kubenswrapper[4762]: I1009 15:06:10.762905 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-healthmanager-ws564" event={"ID":"4369b0bf-d12c-4398-b07f-554fef20c094","Type":"ContainerDied","Data":"75e3dee4cbab2b374b8a5dde1c6fbb0d97c71f1756ce724e50d425879aa6fadc"} Oct 09 15:06:13 crc kubenswrapper[4762]: I1009 15:06:13.965408 4762 scope.go:117] "RemoveContainer" containerID="35c7aebddc7d19d2cc79200fbf40a9f94ad48013c10b612bd476e919aed06c38" Oct 09 15:06:13 crc kubenswrapper[4762]: E1009 15:06:13.966120 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:06:15 crc kubenswrapper[4762]: I1009 15:06:15.256756 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-worker-rdxn6"] Oct 09 15:06:15 crc kubenswrapper[4762]: I1009 15:06:15.259073 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-worker-rdxn6" Oct 09 15:06:15 crc kubenswrapper[4762]: I1009 15:06:15.262449 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-worker-scripts" Oct 09 15:06:15 crc kubenswrapper[4762]: I1009 15:06:15.262754 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-worker-config-data" Oct 09 15:06:15 crc kubenswrapper[4762]: I1009 15:06:15.266403 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-worker-rdxn6"] Oct 09 15:06:15 crc kubenswrapper[4762]: I1009 15:06:15.425035 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/2710d4c1-ab14-4206-b148-b5347d99f703-hm-ports\") pod \"octavia-worker-rdxn6\" (UID: \"2710d4c1-ab14-4206-b148-b5347d99f703\") " pod="openstack/octavia-worker-rdxn6" Oct 09 15:06:15 crc kubenswrapper[4762]: I1009 15:06:15.425123 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/2710d4c1-ab14-4206-b148-b5347d99f703-config-data-merged\") pod \"octavia-worker-rdxn6\" (UID: \"2710d4c1-ab14-4206-b148-b5347d99f703\") " pod="openstack/octavia-worker-rdxn6" Oct 09 15:06:15 crc kubenswrapper[4762]: I1009 15:06:15.425196 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2710d4c1-ab14-4206-b148-b5347d99f703-config-data\") pod \"octavia-worker-rdxn6\" (UID: \"2710d4c1-ab14-4206-b148-b5347d99f703\") " pod="openstack/octavia-worker-rdxn6" Oct 09 15:06:15 crc kubenswrapper[4762]: I1009 15:06:15.425470 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2710d4c1-ab14-4206-b148-b5347d99f703-combined-ca-bundle\") pod \"octavia-worker-rdxn6\" (UID: \"2710d4c1-ab14-4206-b148-b5347d99f703\") " pod="openstack/octavia-worker-rdxn6" Oct 09 15:06:15 crc kubenswrapper[4762]: I1009 15:06:15.425617 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/2710d4c1-ab14-4206-b148-b5347d99f703-amphora-certs\") pod \"octavia-worker-rdxn6\" (UID: \"2710d4c1-ab14-4206-b148-b5347d99f703\") " pod="openstack/octavia-worker-rdxn6" Oct 09 15:06:15 crc kubenswrapper[4762]: I1009 15:06:15.425677 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2710d4c1-ab14-4206-b148-b5347d99f703-scripts\") pod \"octavia-worker-rdxn6\" (UID: \"2710d4c1-ab14-4206-b148-b5347d99f703\") " pod="openstack/octavia-worker-rdxn6" Oct 09 15:06:15 crc kubenswrapper[4762]: I1009 15:06:15.542732 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2710d4c1-ab14-4206-b148-b5347d99f703-combined-ca-bundle\") pod \"octavia-worker-rdxn6\" (UID: \"2710d4c1-ab14-4206-b148-b5347d99f703\") " pod="openstack/octavia-worker-rdxn6" Oct 09 15:06:15 crc kubenswrapper[4762]: I1009 15:06:15.542810 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/2710d4c1-ab14-4206-b148-b5347d99f703-amphora-certs\") pod \"octavia-worker-rdxn6\" (UID: 
\"2710d4c1-ab14-4206-b148-b5347d99f703\") " pod="openstack/octavia-worker-rdxn6" Oct 09 15:06:15 crc kubenswrapper[4762]: I1009 15:06:15.542832 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2710d4c1-ab14-4206-b148-b5347d99f703-scripts\") pod \"octavia-worker-rdxn6\" (UID: \"2710d4c1-ab14-4206-b148-b5347d99f703\") " pod="openstack/octavia-worker-rdxn6" Oct 09 15:06:15 crc kubenswrapper[4762]: I1009 15:06:15.542943 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/2710d4c1-ab14-4206-b148-b5347d99f703-hm-ports\") pod \"octavia-worker-rdxn6\" (UID: \"2710d4c1-ab14-4206-b148-b5347d99f703\") " pod="openstack/octavia-worker-rdxn6" Oct 09 15:06:15 crc kubenswrapper[4762]: I1009 15:06:15.543747 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/2710d4c1-ab14-4206-b148-b5347d99f703-config-data-merged\") pod \"octavia-worker-rdxn6\" (UID: \"2710d4c1-ab14-4206-b148-b5347d99f703\") " pod="openstack/octavia-worker-rdxn6" Oct 09 15:06:15 crc kubenswrapper[4762]: I1009 15:06:15.543781 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2710d4c1-ab14-4206-b148-b5347d99f703-config-data\") pod \"octavia-worker-rdxn6\" (UID: \"2710d4c1-ab14-4206-b148-b5347d99f703\") " pod="openstack/octavia-worker-rdxn6" Oct 09 15:06:15 crc kubenswrapper[4762]: I1009 15:06:15.544239 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/2710d4c1-ab14-4206-b148-b5347d99f703-config-data-merged\") pod \"octavia-worker-rdxn6\" (UID: \"2710d4c1-ab14-4206-b148-b5347d99f703\") " pod="openstack/octavia-worker-rdxn6" Oct 09 15:06:15 crc kubenswrapper[4762]: I1009 15:06:15.544486 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/2710d4c1-ab14-4206-b148-b5347d99f703-hm-ports\") pod \"octavia-worker-rdxn6\" (UID: \"2710d4c1-ab14-4206-b148-b5347d99f703\") " pod="openstack/octavia-worker-rdxn6" Oct 09 15:06:15 crc kubenswrapper[4762]: I1009 15:06:15.552226 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2710d4c1-ab14-4206-b148-b5347d99f703-combined-ca-bundle\") pod \"octavia-worker-rdxn6\" (UID: \"2710d4c1-ab14-4206-b148-b5347d99f703\") " pod="openstack/octavia-worker-rdxn6" Oct 09 15:06:15 crc kubenswrapper[4762]: I1009 15:06:15.559954 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2710d4c1-ab14-4206-b148-b5347d99f703-scripts\") pod \"octavia-worker-rdxn6\" (UID: \"2710d4c1-ab14-4206-b148-b5347d99f703\") " pod="openstack/octavia-worker-rdxn6" Oct 09 15:06:15 crc kubenswrapper[4762]: I1009 15:06:15.567684 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2710d4c1-ab14-4206-b148-b5347d99f703-config-data\") pod \"octavia-worker-rdxn6\" (UID: \"2710d4c1-ab14-4206-b148-b5347d99f703\") " pod="openstack/octavia-worker-rdxn6" Oct 09 15:06:15 crc kubenswrapper[4762]: I1009 15:06:15.568664 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"amphora-certs\" (UniqueName: 
\"kubernetes.io/secret/2710d4c1-ab14-4206-b148-b5347d99f703-amphora-certs\") pod \"octavia-worker-rdxn6\" (UID: \"2710d4c1-ab14-4206-b148-b5347d99f703\") " pod="openstack/octavia-worker-rdxn6" Oct 09 15:06:15 crc kubenswrapper[4762]: I1009 15:06:15.590187 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-worker-rdxn6" Oct 09 15:06:16 crc kubenswrapper[4762]: E1009 15:06:16.370757 4762 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/gthiemonge/octavia-amphora-image:latest" Oct 09 15:06:16 crc kubenswrapper[4762]: E1009 15:06:16.371246 4762 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/gthiemonge/octavia-amphora-image,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:DEST_DIR,Value:/usr/local/apache2/htdocs,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:amphora-image,ReadOnly:false,MountPath:/usr/local/apache2/htdocs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod octavia-image-upload-59f8cff499-c7zrf_openstack(305ad0a3-36f8-4b79-a031-e6f0bec6f888): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 09 15:06:16 crc kubenswrapper[4762]: E1009 15:06:16.373689 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/octavia-image-upload-59f8cff499-c7zrf" podUID="305ad0a3-36f8-4b79-a031-e6f0bec6f888" Oct 09 15:06:16 crc kubenswrapper[4762]: I1009 15:06:16.662987 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-housekeeping-czjdc"] Oct 09 15:06:16 crc kubenswrapper[4762]: W1009 15:06:16.670083 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3fe23699_5018_4d7a_8f7f_102303da2fef.slice/crio-7c15eeb9f06a201a24c35ce3f6bb9c8f585d21a13cdc21d8e9cd420d7c4f2259 WatchSource:0}: Error finding container 7c15eeb9f06a201a24c35ce3f6bb9c8f585d21a13cdc21d8e9cd420d7c4f2259: Status 404 returned error can't find the container with id 7c15eeb9f06a201a24c35ce3f6bb9c8f585d21a13cdc21d8e9cd420d7c4f2259 Oct 09 15:06:16 crc kubenswrapper[4762]: I1009 15:06:16.724714 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-db-sync-xp9xs"] Oct 09 15:06:16 crc kubenswrapper[4762]: I1009 15:06:16.834441 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-rsyslog-j9l4k" 
event={"ID":"417ca96a-987f-4daf-aacf-1115ce687ca3","Type":"ContainerStarted","Data":"f60a00f931055c21f059c1fc79bee5ec5414a44e55eb15aa6ad5935078b13092"} Oct 09 15:06:16 crc kubenswrapper[4762]: I1009 15:06:16.835771 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/octavia-rsyslog-j9l4k" Oct 09 15:06:16 crc kubenswrapper[4762]: I1009 15:06:16.837575 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-housekeeping-czjdc" event={"ID":"3fe23699-5018-4d7a-8f7f-102303da2fef","Type":"ContainerStarted","Data":"7c15eeb9f06a201a24c35ce3f6bb9c8f585d21a13cdc21d8e9cd420d7c4f2259"} Oct 09 15:06:16 crc kubenswrapper[4762]: I1009 15:06:16.839695 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-db-sync-xp9xs" event={"ID":"32e5f6a8-e10a-43f3-a35f-69803d46fdee","Type":"ContainerStarted","Data":"b94a4447c13e728c66ce1d77f84217935fa8bd88af51eebba475fce8c100aa3a"} Oct 09 15:06:16 crc kubenswrapper[4762]: E1009 15:06:16.840793 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/gthiemonge/octavia-amphora-image\\\"\"" pod="openstack/octavia-image-upload-59f8cff499-c7zrf" podUID="305ad0a3-36f8-4b79-a031-e6f0bec6f888" Oct 09 15:06:16 crc kubenswrapper[4762]: I1009 15:06:16.874923 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/octavia-rsyslog-j9l4k" podStartSLOduration=2.160033504 podStartE2EDuration="17.87489633s" podCreationTimestamp="2025-10-09 15:05:59 +0000 UTC" firstStartedPulling="2025-10-09 15:06:00.390426388 +0000 UTC m=+6036.164217427" lastFinishedPulling="2025-10-09 15:06:16.105289214 +0000 UTC m=+6051.879080253" observedRunningTime="2025-10-09 15:06:16.85030176 +0000 UTC m=+6052.624092799" watchObservedRunningTime="2025-10-09 15:06:16.87489633 +0000 UTC m=+6052.648687379" Oct 09 15:06:16 crc kubenswrapper[4762]: I1009 15:06:16.913114 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-worker-rdxn6"] Oct 09 15:06:16 crc kubenswrapper[4762]: W1009 15:06:16.919350 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2710d4c1_ab14_4206_b148_b5347d99f703.slice/crio-1ae79c2d8d4952e7f18883c4a761cd444eeca511ff6bcc802a1cb09293cc9794 WatchSource:0}: Error finding container 1ae79c2d8d4952e7f18883c4a761cd444eeca511ff6bcc802a1cb09293cc9794: Status 404 returned error can't find the container with id 1ae79c2d8d4952e7f18883c4a761cd444eeca511ff6bcc802a1cb09293cc9794 Oct 09 15:06:17 crc kubenswrapper[4762]: I1009 15:06:17.848548 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-worker-rdxn6" event={"ID":"2710d4c1-ab14-4206-b148-b5347d99f703","Type":"ContainerStarted","Data":"1ae79c2d8d4952e7f18883c4a761cd444eeca511ff6bcc802a1cb09293cc9794"} Oct 09 15:06:17 crc kubenswrapper[4762]: I1009 15:06:17.850354 4762 generic.go:334] "Generic (PLEG): container finished" podID="32e5f6a8-e10a-43f3-a35f-69803d46fdee" containerID="351e2a2a523ed33daf056da115a78fa2f6486907aa98cf76860348785e3c578b" exitCode=0 Oct 09 15:06:17 crc kubenswrapper[4762]: I1009 15:06:17.850467 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-db-sync-xp9xs" event={"ID":"32e5f6a8-e10a-43f3-a35f-69803d46fdee","Type":"ContainerDied","Data":"351e2a2a523ed33daf056da115a78fa2f6486907aa98cf76860348785e3c578b"} Oct 09 15:06:17 crc kubenswrapper[4762]: I1009 15:06:17.855094 
4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-healthmanager-ws564" event={"ID":"4369b0bf-d12c-4398-b07f-554fef20c094","Type":"ContainerStarted","Data":"0165cb0edf243e06db5a0c96eddafe113149872daffb29035f775d380f85219b"} Oct 09 15:06:17 crc kubenswrapper[4762]: I1009 15:06:17.855816 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/octavia-healthmanager-ws564" Oct 09 15:06:17 crc kubenswrapper[4762]: I1009 15:06:17.898787 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/octavia-healthmanager-ws564" podStartSLOduration=11.898770015 podStartE2EDuration="11.898770015s" podCreationTimestamp="2025-10-09 15:06:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 15:06:17.886323892 +0000 UTC m=+6053.660114931" watchObservedRunningTime="2025-10-09 15:06:17.898770015 +0000 UTC m=+6053.672561054" Oct 09 15:06:18 crc kubenswrapper[4762]: I1009 15:06:18.875620 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-housekeeping-czjdc" event={"ID":"3fe23699-5018-4d7a-8f7f-102303da2fef","Type":"ContainerStarted","Data":"41f3abed9da43c33ef0bc3a82a74d29c434789ea2518cb66363000c389e05471"} Oct 09 15:06:18 crc kubenswrapper[4762]: I1009 15:06:18.879988 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-db-sync-xp9xs" event={"ID":"32e5f6a8-e10a-43f3-a35f-69803d46fdee","Type":"ContainerStarted","Data":"1be7fd6d28bed47e86c368413296198b0d95137726284d48aa391358e1e093e2"} Oct 09 15:06:18 crc kubenswrapper[4762]: I1009 15:06:18.918919 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/octavia-db-sync-xp9xs" podStartSLOduration=8.918898724 podStartE2EDuration="8.918898724s" podCreationTimestamp="2025-10-09 15:06:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 15:06:18.912340863 +0000 UTC m=+6054.686131902" watchObservedRunningTime="2025-10-09 15:06:18.918898724 +0000 UTC m=+6054.692689763" Oct 09 15:06:19 crc kubenswrapper[4762]: I1009 15:06:19.891826 4762 generic.go:334] "Generic (PLEG): container finished" podID="3fe23699-5018-4d7a-8f7f-102303da2fef" containerID="41f3abed9da43c33ef0bc3a82a74d29c434789ea2518cb66363000c389e05471" exitCode=0 Oct 09 15:06:19 crc kubenswrapper[4762]: I1009 15:06:19.892179 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-housekeeping-czjdc" event={"ID":"3fe23699-5018-4d7a-8f7f-102303da2fef","Type":"ContainerDied","Data":"41f3abed9da43c33ef0bc3a82a74d29c434789ea2518cb66363000c389e05471"} Oct 09 15:06:19 crc kubenswrapper[4762]: I1009 15:06:19.897292 4762 generic.go:334] "Generic (PLEG): container finished" podID="2710d4c1-ab14-4206-b148-b5347d99f703" containerID="696a7636e1c1155ff2e6eed53ad26b02d8eddbba31de3f5fb67b3335c1725b2f" exitCode=0 Oct 09 15:06:19 crc kubenswrapper[4762]: I1009 15:06:19.897393 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-worker-rdxn6" event={"ID":"2710d4c1-ab14-4206-b148-b5347d99f703","Type":"ContainerDied","Data":"696a7636e1c1155ff2e6eed53ad26b02d8eddbba31de3f5fb67b3335c1725b2f"} Oct 09 15:06:20 crc kubenswrapper[4762]: I1009 15:06:20.918268 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-housekeeping-czjdc" 
event={"ID":"3fe23699-5018-4d7a-8f7f-102303da2fef","Type":"ContainerStarted","Data":"f495a975b20f1a05869f7afc39498bfbcf2ea6db11abad551450c38447f04ca9"} Oct 09 15:06:20 crc kubenswrapper[4762]: I1009 15:06:20.919440 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/octavia-housekeeping-czjdc" Oct 09 15:06:20 crc kubenswrapper[4762]: I1009 15:06:20.927112 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-worker-rdxn6" event={"ID":"2710d4c1-ab14-4206-b148-b5347d99f703","Type":"ContainerStarted","Data":"6c93e4aacc65c8db68e2854f385ee7e2edc3db50dbe6deaa76ed70198cf89eaa"} Oct 09 15:06:20 crc kubenswrapper[4762]: I1009 15:06:20.928015 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/octavia-worker-rdxn6" Oct 09 15:06:20 crc kubenswrapper[4762]: I1009 15:06:20.943524 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/octavia-housekeeping-czjdc" podStartSLOduration=11.253765422 podStartE2EDuration="12.943509923s" podCreationTimestamp="2025-10-09 15:06:08 +0000 UTC" firstStartedPulling="2025-10-09 15:06:16.673213679 +0000 UTC m=+6052.447004718" lastFinishedPulling="2025-10-09 15:06:18.36295818 +0000 UTC m=+6054.136749219" observedRunningTime="2025-10-09 15:06:20.93686793 +0000 UTC m=+6056.710658969" watchObservedRunningTime="2025-10-09 15:06:20.943509923 +0000 UTC m=+6056.717300962" Oct 09 15:06:20 crc kubenswrapper[4762]: I1009 15:06:20.961067 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/octavia-worker-rdxn6" podStartSLOduration=4.519910332 podStartE2EDuration="5.96104935s" podCreationTimestamp="2025-10-09 15:06:15 +0000 UTC" firstStartedPulling="2025-10-09 15:06:16.923165257 +0000 UTC m=+6052.696956296" lastFinishedPulling="2025-10-09 15:06:18.364304275 +0000 UTC m=+6054.138095314" observedRunningTime="2025-10-09 15:06:20.95989717 +0000 UTC m=+6056.733688219" watchObservedRunningTime="2025-10-09 15:06:20.96104935 +0000 UTC m=+6056.734840389" Oct 09 15:06:21 crc kubenswrapper[4762]: I1009 15:06:21.938454 4762 generic.go:334] "Generic (PLEG): container finished" podID="32e5f6a8-e10a-43f3-a35f-69803d46fdee" containerID="1be7fd6d28bed47e86c368413296198b0d95137726284d48aa391358e1e093e2" exitCode=0 Oct 09 15:06:21 crc kubenswrapper[4762]: I1009 15:06:21.938522 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-db-sync-xp9xs" event={"ID":"32e5f6a8-e10a-43f3-a35f-69803d46fdee","Type":"ContainerDied","Data":"1be7fd6d28bed47e86c368413296198b0d95137726284d48aa391358e1e093e2"} Oct 09 15:06:21 crc kubenswrapper[4762]: I1009 15:06:21.952465 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/octavia-healthmanager-ws564" Oct 09 15:06:23 crc kubenswrapper[4762]: I1009 15:06:23.328507 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-db-sync-xp9xs" Oct 09 15:06:23 crc kubenswrapper[4762]: I1009 15:06:23.408387 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/32e5f6a8-e10a-43f3-a35f-69803d46fdee-config-data\") pod \"32e5f6a8-e10a-43f3-a35f-69803d46fdee\" (UID: \"32e5f6a8-e10a-43f3-a35f-69803d46fdee\") " Oct 09 15:06:23 crc kubenswrapper[4762]: I1009 15:06:23.408882 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/32e5f6a8-e10a-43f3-a35f-69803d46fdee-scripts\") pod \"32e5f6a8-e10a-43f3-a35f-69803d46fdee\" (UID: \"32e5f6a8-e10a-43f3-a35f-69803d46fdee\") " Oct 09 15:06:23 crc kubenswrapper[4762]: I1009 15:06:23.408920 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/32e5f6a8-e10a-43f3-a35f-69803d46fdee-config-data-merged\") pod \"32e5f6a8-e10a-43f3-a35f-69803d46fdee\" (UID: \"32e5f6a8-e10a-43f3-a35f-69803d46fdee\") " Oct 09 15:06:23 crc kubenswrapper[4762]: I1009 15:06:23.409173 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32e5f6a8-e10a-43f3-a35f-69803d46fdee-combined-ca-bundle\") pod \"32e5f6a8-e10a-43f3-a35f-69803d46fdee\" (UID: \"32e5f6a8-e10a-43f3-a35f-69803d46fdee\") " Oct 09 15:06:23 crc kubenswrapper[4762]: I1009 15:06:23.417043 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/32e5f6a8-e10a-43f3-a35f-69803d46fdee-config-data" (OuterVolumeSpecName: "config-data") pod "32e5f6a8-e10a-43f3-a35f-69803d46fdee" (UID: "32e5f6a8-e10a-43f3-a35f-69803d46fdee"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:06:23 crc kubenswrapper[4762]: I1009 15:06:23.418964 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/32e5f6a8-e10a-43f3-a35f-69803d46fdee-scripts" (OuterVolumeSpecName: "scripts") pod "32e5f6a8-e10a-43f3-a35f-69803d46fdee" (UID: "32e5f6a8-e10a-43f3-a35f-69803d46fdee"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:06:23 crc kubenswrapper[4762]: I1009 15:06:23.443370 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/32e5f6a8-e10a-43f3-a35f-69803d46fdee-config-data-merged" (OuterVolumeSpecName: "config-data-merged") pod "32e5f6a8-e10a-43f3-a35f-69803d46fdee" (UID: "32e5f6a8-e10a-43f3-a35f-69803d46fdee"). InnerVolumeSpecName "config-data-merged". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 15:06:23 crc kubenswrapper[4762]: I1009 15:06:23.449928 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/32e5f6a8-e10a-43f3-a35f-69803d46fdee-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "32e5f6a8-e10a-43f3-a35f-69803d46fdee" (UID: "32e5f6a8-e10a-43f3-a35f-69803d46fdee"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:06:23 crc kubenswrapper[4762]: I1009 15:06:23.511436 4762 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/32e5f6a8-e10a-43f3-a35f-69803d46fdee-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 15:06:23 crc kubenswrapper[4762]: I1009 15:06:23.511472 4762 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/32e5f6a8-e10a-43f3-a35f-69803d46fdee-scripts\") on node \"crc\" DevicePath \"\"" Oct 09 15:06:23 crc kubenswrapper[4762]: I1009 15:06:23.511480 4762 reconciler_common.go:293] "Volume detached for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/32e5f6a8-e10a-43f3-a35f-69803d46fdee-config-data-merged\") on node \"crc\" DevicePath \"\"" Oct 09 15:06:23 crc kubenswrapper[4762]: I1009 15:06:23.511492 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32e5f6a8-e10a-43f3-a35f-69803d46fdee-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 15:06:23 crc kubenswrapper[4762]: I1009 15:06:23.963353 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-db-sync-xp9xs" event={"ID":"32e5f6a8-e10a-43f3-a35f-69803d46fdee","Type":"ContainerDied","Data":"b94a4447c13e728c66ce1d77f84217935fa8bd88af51eebba475fce8c100aa3a"} Oct 09 15:06:23 crc kubenswrapper[4762]: I1009 15:06:23.963396 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b94a4447c13e728c66ce1d77f84217935fa8bd88af51eebba475fce8c100aa3a" Oct 09 15:06:23 crc kubenswrapper[4762]: I1009 15:06:23.963463 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-db-sync-xp9xs" Oct 09 15:06:26 crc kubenswrapper[4762]: I1009 15:06:26.965930 4762 scope.go:117] "RemoveContainer" containerID="35c7aebddc7d19d2cc79200fbf40a9f94ad48013c10b612bd476e919aed06c38" Oct 09 15:06:26 crc kubenswrapper[4762]: E1009 15:06:26.967039 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:06:29 crc kubenswrapper[4762]: I1009 15:06:29.832405 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/octavia-rsyslog-j9l4k" Oct 09 15:06:30 crc kubenswrapper[4762]: I1009 15:06:30.622235 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/octavia-worker-rdxn6" Oct 09 15:06:32 crc kubenswrapper[4762]: I1009 15:06:32.047257 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-f6csc"] Oct 09 15:06:32 crc kubenswrapper[4762]: I1009 15:06:32.057104 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-f6csc"] Oct 09 15:06:32 crc kubenswrapper[4762]: I1009 15:06:32.981383 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f6edccc4-0e19-4df1-818f-d78f24277b7a" path="/var/lib/kubelet/pods/f6edccc4-0e19-4df1-818f-d78f24277b7a/volumes" Oct 09 15:06:33 crc kubenswrapper[4762]: I1009 15:06:33.053709 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/octavia-image-upload-59f8cff499-c7zrf" event={"ID":"305ad0a3-36f8-4b79-a031-e6f0bec6f888","Type":"ContainerStarted","Data":"9ad1ef9522e7c1f08eab2e273435f3dc49772c94ce91212d9dc6c8d1c38e120d"} Oct 09 15:06:33 crc kubenswrapper[4762]: I1009 15:06:33.322877 4762 scope.go:117] "RemoveContainer" containerID="46f0bfbc887f24ecb585feaba4b2e1be8003d0e6c3082ffb0a2b2c2548083d6b" Oct 09 15:06:37 crc kubenswrapper[4762]: I1009 15:06:37.965442 4762 scope.go:117] "RemoveContainer" containerID="35c7aebddc7d19d2cc79200fbf40a9f94ad48013c10b612bd476e919aed06c38" Oct 09 15:06:37 crc kubenswrapper[4762]: E1009 15:06:37.966312 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:06:38 crc kubenswrapper[4762]: I1009 15:06:38.730630 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/octavia-housekeeping-czjdc" Oct 09 15:06:41 crc kubenswrapper[4762]: I1009 15:06:41.026360 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-9559-account-create-vwzr8"] Oct 09 15:06:41 crc kubenswrapper[4762]: I1009 15:06:41.034188 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-9559-account-create-vwzr8"] Oct 09 15:06:42 crc kubenswrapper[4762]: I1009 15:06:42.978861 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="df1781b9-04ea-4f11-af32-7af6bb2c6f07" path="/var/lib/kubelet/pods/df1781b9-04ea-4f11-af32-7af6bb2c6f07/volumes" Oct 09 15:06:48 crc kubenswrapper[4762]: I1009 15:06:48.969082 4762 scope.go:117] "RemoveContainer" containerID="35c7aebddc7d19d2cc79200fbf40a9f94ad48013c10b612bd476e919aed06c38" Oct 09 15:06:48 crc kubenswrapper[4762]: E1009 15:06:48.969891 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:06:49 crc kubenswrapper[4762]: I1009 15:06:49.058894 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-9ccf2"] Oct 09 15:06:49 crc kubenswrapper[4762]: I1009 15:06:49.070707 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-9ccf2"] Oct 09 15:06:50 crc kubenswrapper[4762]: I1009 15:06:50.976515 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d6b92ff5-92c1-4a4a-ada1-d17af7ccb82d" path="/var/lib/kubelet/pods/d6b92ff5-92c1-4a4a-ada1-d17af7ccb82d/volumes" Oct 09 15:06:54 crc kubenswrapper[4762]: I1009 15:06:54.280195 4762 generic.go:334] "Generic (PLEG): container finished" podID="305ad0a3-36f8-4b79-a031-e6f0bec6f888" containerID="9ad1ef9522e7c1f08eab2e273435f3dc49772c94ce91212d9dc6c8d1c38e120d" exitCode=0 Oct 09 15:06:54 crc kubenswrapper[4762]: I1009 15:06:54.280279 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-59f8cff499-c7zrf" 
event={"ID":"305ad0a3-36f8-4b79-a031-e6f0bec6f888","Type":"ContainerDied","Data":"9ad1ef9522e7c1f08eab2e273435f3dc49772c94ce91212d9dc6c8d1c38e120d"} Oct 09 15:06:56 crc kubenswrapper[4762]: I1009 15:06:56.303718 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-59f8cff499-c7zrf" event={"ID":"305ad0a3-36f8-4b79-a031-e6f0bec6f888","Type":"ContainerStarted","Data":"543196df987c8252cd5630ac25328740c69421308c411e049d84bf7774e4fd3e"} Oct 09 15:06:56 crc kubenswrapper[4762]: I1009 15:06:56.335835 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/octavia-image-upload-59f8cff499-c7zrf" podStartSLOduration=1.9843907459999999 podStartE2EDuration="56.335812683s" podCreationTimestamp="2025-10-09 15:06:00 +0000 UTC" firstStartedPulling="2025-10-09 15:06:01.428230326 +0000 UTC m=+6037.202021365" lastFinishedPulling="2025-10-09 15:06:55.779652263 +0000 UTC m=+6091.553443302" observedRunningTime="2025-10-09 15:06:56.319453807 +0000 UTC m=+6092.093244846" watchObservedRunningTime="2025-10-09 15:06:56.335812683 +0000 UTC m=+6092.109603722" Oct 09 15:07:03 crc kubenswrapper[4762]: I1009 15:07:03.965702 4762 scope.go:117] "RemoveContainer" containerID="35c7aebddc7d19d2cc79200fbf40a9f94ad48013c10b612bd476e919aed06c38" Oct 09 15:07:03 crc kubenswrapper[4762]: E1009 15:07:03.966693 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:07:14 crc kubenswrapper[4762]: I1009 15:07:14.972565 4762 scope.go:117] "RemoveContainer" containerID="35c7aebddc7d19d2cc79200fbf40a9f94ad48013c10b612bd476e919aed06c38" Oct 09 15:07:14 crc kubenswrapper[4762]: E1009 15:07:14.973771 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:07:18 crc kubenswrapper[4762]: I1009 15:07:18.047295 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-hxl4x"] Oct 09 15:07:18 crc kubenswrapper[4762]: I1009 15:07:18.057134 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-hxl4x"] Oct 09 15:07:18 crc kubenswrapper[4762]: I1009 15:07:18.949594 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/octavia-image-upload-59f8cff499-c7zrf"] Oct 09 15:07:18 crc kubenswrapper[4762]: I1009 15:07:18.950223 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/octavia-image-upload-59f8cff499-c7zrf" podUID="305ad0a3-36f8-4b79-a031-e6f0bec6f888" containerName="octavia-amphora-httpd" containerID="cri-o://543196df987c8252cd5630ac25328740c69421308c411e049d84bf7774e4fd3e" gracePeriod=30 Oct 09 15:07:18 crc kubenswrapper[4762]: I1009 15:07:18.980052 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="db783ecf-d652-4d2a-9f3f-f24914722aa8" 
path="/var/lib/kubelet/pods/db783ecf-d652-4d2a-9f3f-f24914722aa8/volumes" Oct 09 15:07:19 crc kubenswrapper[4762]: I1009 15:07:19.541654 4762 generic.go:334] "Generic (PLEG): container finished" podID="305ad0a3-36f8-4b79-a031-e6f0bec6f888" containerID="543196df987c8252cd5630ac25328740c69421308c411e049d84bf7774e4fd3e" exitCode=0 Oct 09 15:07:19 crc kubenswrapper[4762]: I1009 15:07:19.541964 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-59f8cff499-c7zrf" event={"ID":"305ad0a3-36f8-4b79-a031-e6f0bec6f888","Type":"ContainerDied","Data":"543196df987c8252cd5630ac25328740c69421308c411e049d84bf7774e4fd3e"} Oct 09 15:07:19 crc kubenswrapper[4762]: I1009 15:07:19.541997 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-59f8cff499-c7zrf" event={"ID":"305ad0a3-36f8-4b79-a031-e6f0bec6f888","Type":"ContainerDied","Data":"240bb7238b4d7aed47041cc69050b853fcbe90ab3cf7efc92cba80d5c778647e"} Oct 09 15:07:19 crc kubenswrapper[4762]: I1009 15:07:19.542012 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="240bb7238b4d7aed47041cc69050b853fcbe90ab3cf7efc92cba80d5c778647e" Oct 09 15:07:19 crc kubenswrapper[4762]: I1009 15:07:19.553206 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-image-upload-59f8cff499-c7zrf" Oct 09 15:07:19 crc kubenswrapper[4762]: I1009 15:07:19.739491 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"amphora-image\" (UniqueName: \"kubernetes.io/empty-dir/305ad0a3-36f8-4b79-a031-e6f0bec6f888-amphora-image\") pod \"305ad0a3-36f8-4b79-a031-e6f0bec6f888\" (UID: \"305ad0a3-36f8-4b79-a031-e6f0bec6f888\") " Oct 09 15:07:19 crc kubenswrapper[4762]: I1009 15:07:19.739750 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/305ad0a3-36f8-4b79-a031-e6f0bec6f888-httpd-config\") pod \"305ad0a3-36f8-4b79-a031-e6f0bec6f888\" (UID: \"305ad0a3-36f8-4b79-a031-e6f0bec6f888\") " Oct 09 15:07:19 crc kubenswrapper[4762]: I1009 15:07:19.768022 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/305ad0a3-36f8-4b79-a031-e6f0bec6f888-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "305ad0a3-36f8-4b79-a031-e6f0bec6f888" (UID: "305ad0a3-36f8-4b79-a031-e6f0bec6f888"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:07:19 crc kubenswrapper[4762]: I1009 15:07:19.789398 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/305ad0a3-36f8-4b79-a031-e6f0bec6f888-amphora-image" (OuterVolumeSpecName: "amphora-image") pod "305ad0a3-36f8-4b79-a031-e6f0bec6f888" (UID: "305ad0a3-36f8-4b79-a031-e6f0bec6f888"). InnerVolumeSpecName "amphora-image". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 15:07:19 crc kubenswrapper[4762]: I1009 15:07:19.843093 4762 reconciler_common.go:293] "Volume detached for volume \"amphora-image\" (UniqueName: \"kubernetes.io/empty-dir/305ad0a3-36f8-4b79-a031-e6f0bec6f888-amphora-image\") on node \"crc\" DevicePath \"\"" Oct 09 15:07:19 crc kubenswrapper[4762]: I1009 15:07:19.843133 4762 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/305ad0a3-36f8-4b79-a031-e6f0bec6f888-httpd-config\") on node \"crc\" DevicePath \"\"" Oct 09 15:07:20 crc kubenswrapper[4762]: I1009 15:07:20.549766 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-image-upload-59f8cff499-c7zrf" Oct 09 15:07:20 crc kubenswrapper[4762]: I1009 15:07:20.583991 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/octavia-image-upload-59f8cff499-c7zrf"] Oct 09 15:07:20 crc kubenswrapper[4762]: I1009 15:07:20.592400 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/octavia-image-upload-59f8cff499-c7zrf"] Oct 09 15:07:20 crc kubenswrapper[4762]: I1009 15:07:20.980714 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="305ad0a3-36f8-4b79-a031-e6f0bec6f888" path="/var/lib/kubelet/pods/305ad0a3-36f8-4b79-a031-e6f0bec6f888/volumes" Oct 09 15:07:25 crc kubenswrapper[4762]: I1009 15:07:25.965850 4762 scope.go:117] "RemoveContainer" containerID="35c7aebddc7d19d2cc79200fbf40a9f94ad48013c10b612bd476e919aed06c38" Oct 09 15:07:25 crc kubenswrapper[4762]: E1009 15:07:25.966475 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:07:30 crc kubenswrapper[4762]: I1009 15:07:30.029929 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-11f8-account-create-8r2bz"] Oct 09 15:07:30 crc kubenswrapper[4762]: I1009 15:07:30.039093 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-11f8-account-create-8r2bz"] Oct 09 15:07:30 crc kubenswrapper[4762]: I1009 15:07:30.944660 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-qk6pn"] Oct 09 15:07:30 crc kubenswrapper[4762]: E1009 15:07:30.945169 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="305ad0a3-36f8-4b79-a031-e6f0bec6f888" containerName="octavia-amphora-httpd" Oct 09 15:07:30 crc kubenswrapper[4762]: I1009 15:07:30.945189 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="305ad0a3-36f8-4b79-a031-e6f0bec6f888" containerName="octavia-amphora-httpd" Oct 09 15:07:30 crc kubenswrapper[4762]: E1009 15:07:30.945203 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32e5f6a8-e10a-43f3-a35f-69803d46fdee" containerName="init" Oct 09 15:07:30 crc kubenswrapper[4762]: I1009 15:07:30.945211 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="32e5f6a8-e10a-43f3-a35f-69803d46fdee" containerName="init" Oct 09 15:07:30 crc kubenswrapper[4762]: E1009 15:07:30.945235 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="305ad0a3-36f8-4b79-a031-e6f0bec6f888" containerName="init" Oct 09 15:07:30 crc 
kubenswrapper[4762]: I1009 15:07:30.945243 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="305ad0a3-36f8-4b79-a031-e6f0bec6f888" containerName="init" Oct 09 15:07:30 crc kubenswrapper[4762]: E1009 15:07:30.945273 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32e5f6a8-e10a-43f3-a35f-69803d46fdee" containerName="octavia-db-sync" Oct 09 15:07:30 crc kubenswrapper[4762]: I1009 15:07:30.945281 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="32e5f6a8-e10a-43f3-a35f-69803d46fdee" containerName="octavia-db-sync" Oct 09 15:07:30 crc kubenswrapper[4762]: I1009 15:07:30.945542 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="32e5f6a8-e10a-43f3-a35f-69803d46fdee" containerName="octavia-db-sync" Oct 09 15:07:30 crc kubenswrapper[4762]: I1009 15:07:30.945577 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="305ad0a3-36f8-4b79-a031-e6f0bec6f888" containerName="octavia-amphora-httpd" Oct 09 15:07:30 crc kubenswrapper[4762]: I1009 15:07:30.947359 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-qk6pn" Oct 09 15:07:30 crc kubenswrapper[4762]: I1009 15:07:30.960287 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-qk6pn"] Oct 09 15:07:30 crc kubenswrapper[4762]: I1009 15:07:30.984791 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9f26744c-c79a-497e-86e3-671990b949cf" path="/var/lib/kubelet/pods/9f26744c-c79a-497e-86e3-671990b949cf/volumes" Oct 09 15:07:31 crc kubenswrapper[4762]: I1009 15:07:31.061007 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/42f3766b-e64a-4b77-8793-74fcccdefe75-utilities\") pod \"redhat-operators-qk6pn\" (UID: \"42f3766b-e64a-4b77-8793-74fcccdefe75\") " pod="openshift-marketplace/redhat-operators-qk6pn" Oct 09 15:07:31 crc kubenswrapper[4762]: I1009 15:07:31.061135 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-thm86\" (UniqueName: \"kubernetes.io/projected/42f3766b-e64a-4b77-8793-74fcccdefe75-kube-api-access-thm86\") pod \"redhat-operators-qk6pn\" (UID: \"42f3766b-e64a-4b77-8793-74fcccdefe75\") " pod="openshift-marketplace/redhat-operators-qk6pn" Oct 09 15:07:31 crc kubenswrapper[4762]: I1009 15:07:31.061253 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/42f3766b-e64a-4b77-8793-74fcccdefe75-catalog-content\") pod \"redhat-operators-qk6pn\" (UID: \"42f3766b-e64a-4b77-8793-74fcccdefe75\") " pod="openshift-marketplace/redhat-operators-qk6pn" Oct 09 15:07:31 crc kubenswrapper[4762]: I1009 15:07:31.162734 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-thm86\" (UniqueName: \"kubernetes.io/projected/42f3766b-e64a-4b77-8793-74fcccdefe75-kube-api-access-thm86\") pod \"redhat-operators-qk6pn\" (UID: \"42f3766b-e64a-4b77-8793-74fcccdefe75\") " pod="openshift-marketplace/redhat-operators-qk6pn" Oct 09 15:07:31 crc kubenswrapper[4762]: I1009 15:07:31.163117 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/42f3766b-e64a-4b77-8793-74fcccdefe75-catalog-content\") pod \"redhat-operators-qk6pn\" (UID: \"42f3766b-e64a-4b77-8793-74fcccdefe75\") " 
pod="openshift-marketplace/redhat-operators-qk6pn" Oct 09 15:07:31 crc kubenswrapper[4762]: I1009 15:07:31.163184 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/42f3766b-e64a-4b77-8793-74fcccdefe75-utilities\") pod \"redhat-operators-qk6pn\" (UID: \"42f3766b-e64a-4b77-8793-74fcccdefe75\") " pod="openshift-marketplace/redhat-operators-qk6pn" Oct 09 15:07:31 crc kubenswrapper[4762]: I1009 15:07:31.163724 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/42f3766b-e64a-4b77-8793-74fcccdefe75-utilities\") pod \"redhat-operators-qk6pn\" (UID: \"42f3766b-e64a-4b77-8793-74fcccdefe75\") " pod="openshift-marketplace/redhat-operators-qk6pn" Oct 09 15:07:31 crc kubenswrapper[4762]: I1009 15:07:31.163863 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/42f3766b-e64a-4b77-8793-74fcccdefe75-catalog-content\") pod \"redhat-operators-qk6pn\" (UID: \"42f3766b-e64a-4b77-8793-74fcccdefe75\") " pod="openshift-marketplace/redhat-operators-qk6pn" Oct 09 15:07:31 crc kubenswrapper[4762]: I1009 15:07:31.185970 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-thm86\" (UniqueName: \"kubernetes.io/projected/42f3766b-e64a-4b77-8793-74fcccdefe75-kube-api-access-thm86\") pod \"redhat-operators-qk6pn\" (UID: \"42f3766b-e64a-4b77-8793-74fcccdefe75\") " pod="openshift-marketplace/redhat-operators-qk6pn" Oct 09 15:07:31 crc kubenswrapper[4762]: I1009 15:07:31.284617 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-qk6pn" Oct 09 15:07:31 crc kubenswrapper[4762]: I1009 15:07:31.754711 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-qk6pn"] Oct 09 15:07:31 crc kubenswrapper[4762]: W1009 15:07:31.764853 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod42f3766b_e64a_4b77_8793_74fcccdefe75.slice/crio-06c0b427c0374ff3083b9cfb408982c520154e48bcac81ed71ee992e5748013e WatchSource:0}: Error finding container 06c0b427c0374ff3083b9cfb408982c520154e48bcac81ed71ee992e5748013e: Status 404 returned error can't find the container with id 06c0b427c0374ff3083b9cfb408982c520154e48bcac81ed71ee992e5748013e Oct 09 15:07:32 crc kubenswrapper[4762]: I1009 15:07:32.658648 4762 generic.go:334] "Generic (PLEG): container finished" podID="42f3766b-e64a-4b77-8793-74fcccdefe75" containerID="ad9cefe0614b590bdbc16520a522adb0563e823299b5916e151bf0d350004ce7" exitCode=0 Oct 09 15:07:32 crc kubenswrapper[4762]: I1009 15:07:32.658684 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qk6pn" event={"ID":"42f3766b-e64a-4b77-8793-74fcccdefe75","Type":"ContainerDied","Data":"ad9cefe0614b590bdbc16520a522adb0563e823299b5916e151bf0d350004ce7"} Oct 09 15:07:32 crc kubenswrapper[4762]: I1009 15:07:32.658950 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qk6pn" event={"ID":"42f3766b-e64a-4b77-8793-74fcccdefe75","Type":"ContainerStarted","Data":"06c0b427c0374ff3083b9cfb408982c520154e48bcac81ed71ee992e5748013e"} Oct 09 15:07:33 crc kubenswrapper[4762]: I1009 15:07:33.415555 4762 scope.go:117] "RemoveContainer" containerID="0191add82cf7774005aa261d004e69809edb71882a4814d6e3f21ce13afa00e8" 
Oct 09 15:07:33 crc kubenswrapper[4762]: I1009 15:07:33.441658 4762 scope.go:117] "RemoveContainer" containerID="82554243ec96fd4f77cb39fd64c953e2d7914c8fb29579f17b5cccf81d0b62c0" Oct 09 15:07:33 crc kubenswrapper[4762]: I1009 15:07:33.527296 4762 scope.go:117] "RemoveContainer" containerID="b4e08d50e4999114c3a7c9bfc2ed189dfb9cf8355fb5bc69672bcefcef912162" Oct 09 15:07:33 crc kubenswrapper[4762]: I1009 15:07:33.564691 4762 scope.go:117] "RemoveContainer" containerID="b2a7512f49583bfa03624913cb2fe31a8e8cfc8af65db7142a5c1c571d761056" Oct 09 15:07:33 crc kubenswrapper[4762]: I1009 15:07:33.623156 4762 scope.go:117] "RemoveContainer" containerID="b099ada6a97df818a3b684c9ee23e0829f81d05f2900ee0dad2f2c9f6aef4121" Oct 09 15:07:33 crc kubenswrapper[4762]: I1009 15:07:33.646359 4762 scope.go:117] "RemoveContainer" containerID="76a4aa6c17e32c18f840bcebadb8bb9329686c7c962ae308ef41b16b02d10316" Oct 09 15:07:34 crc kubenswrapper[4762]: I1009 15:07:34.719496 4762 generic.go:334] "Generic (PLEG): container finished" podID="42f3766b-e64a-4b77-8793-74fcccdefe75" containerID="61bb6de1707cd6807d9fa19da0c9286c2df813d6357db343db5163ba9ce88423" exitCode=0 Oct 09 15:07:34 crc kubenswrapper[4762]: I1009 15:07:34.719553 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qk6pn" event={"ID":"42f3766b-e64a-4b77-8793-74fcccdefe75","Type":"ContainerDied","Data":"61bb6de1707cd6807d9fa19da0c9286c2df813d6357db343db5163ba9ce88423"} Oct 09 15:07:35 crc kubenswrapper[4762]: I1009 15:07:35.739758 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qk6pn" event={"ID":"42f3766b-e64a-4b77-8793-74fcccdefe75","Type":"ContainerStarted","Data":"b29df7c708cf80702307fe1d6dc4e03f610dae98e37fbe5709020f4054836cd2"} Oct 09 15:07:35 crc kubenswrapper[4762]: I1009 15:07:35.763834 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-qk6pn" podStartSLOduration=3.278440324 podStartE2EDuration="5.763811059s" podCreationTimestamp="2025-10-09 15:07:30 +0000 UTC" firstStartedPulling="2025-10-09 15:07:32.660584518 +0000 UTC m=+6128.434375567" lastFinishedPulling="2025-10-09 15:07:35.145955263 +0000 UTC m=+6130.919746302" observedRunningTime="2025-10-09 15:07:35.755139612 +0000 UTC m=+6131.528930651" watchObservedRunningTime="2025-10-09 15:07:35.763811059 +0000 UTC m=+6131.537602098" Oct 09 15:07:37 crc kubenswrapper[4762]: I1009 15:07:37.964840 4762 scope.go:117] "RemoveContainer" containerID="35c7aebddc7d19d2cc79200fbf40a9f94ad48013c10b612bd476e919aed06c38" Oct 09 15:07:37 crc kubenswrapper[4762]: E1009 15:07:37.966102 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:07:38 crc kubenswrapper[4762]: I1009 15:07:38.040279 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-nd6kt"] Oct 09 15:07:38 crc kubenswrapper[4762]: I1009 15:07:38.049091 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-nd6kt"] Oct 09 15:07:38 crc kubenswrapper[4762]: I1009 15:07:38.984363 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="e522e18c-0226-4f6b-bb52-74435e991373" path="/var/lib/kubelet/pods/e522e18c-0226-4f6b-bb52-74435e991373/volumes" Oct 09 15:07:41 crc kubenswrapper[4762]: I1009 15:07:41.285140 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-qk6pn" Oct 09 15:07:41 crc kubenswrapper[4762]: I1009 15:07:41.285537 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-qk6pn" Oct 09 15:07:41 crc kubenswrapper[4762]: I1009 15:07:41.340434 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-qk6pn" Oct 09 15:07:41 crc kubenswrapper[4762]: I1009 15:07:41.880675 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-qk6pn" Oct 09 15:07:41 crc kubenswrapper[4762]: I1009 15:07:41.936612 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-qk6pn"] Oct 09 15:07:43 crc kubenswrapper[4762]: I1009 15:07:43.828708 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-qk6pn" podUID="42f3766b-e64a-4b77-8793-74fcccdefe75" containerName="registry-server" containerID="cri-o://b29df7c708cf80702307fe1d6dc4e03f610dae98e37fbe5709020f4054836cd2" gracePeriod=2 Oct 09 15:07:43 crc kubenswrapper[4762]: E1009 15:07:43.883435 4762 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod42f3766b_e64a_4b77_8793_74fcccdefe75.slice/crio-b29df7c708cf80702307fe1d6dc4e03f610dae98e37fbe5709020f4054836cd2.scope\": RecentStats: unable to find data in memory cache]" Oct 09 15:07:44 crc kubenswrapper[4762]: I1009 15:07:44.305999 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-qk6pn" Oct 09 15:07:44 crc kubenswrapper[4762]: I1009 15:07:44.492872 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/42f3766b-e64a-4b77-8793-74fcccdefe75-catalog-content\") pod \"42f3766b-e64a-4b77-8793-74fcccdefe75\" (UID: \"42f3766b-e64a-4b77-8793-74fcccdefe75\") " Oct 09 15:07:44 crc kubenswrapper[4762]: I1009 15:07:44.492987 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-thm86\" (UniqueName: \"kubernetes.io/projected/42f3766b-e64a-4b77-8793-74fcccdefe75-kube-api-access-thm86\") pod \"42f3766b-e64a-4b77-8793-74fcccdefe75\" (UID: \"42f3766b-e64a-4b77-8793-74fcccdefe75\") " Oct 09 15:07:44 crc kubenswrapper[4762]: I1009 15:07:44.493177 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/42f3766b-e64a-4b77-8793-74fcccdefe75-utilities\") pod \"42f3766b-e64a-4b77-8793-74fcccdefe75\" (UID: \"42f3766b-e64a-4b77-8793-74fcccdefe75\") " Oct 09 15:07:44 crc kubenswrapper[4762]: I1009 15:07:44.495094 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/42f3766b-e64a-4b77-8793-74fcccdefe75-utilities" (OuterVolumeSpecName: "utilities") pod "42f3766b-e64a-4b77-8793-74fcccdefe75" (UID: "42f3766b-e64a-4b77-8793-74fcccdefe75"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 15:07:44 crc kubenswrapper[4762]: I1009 15:07:44.506075 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/42f3766b-e64a-4b77-8793-74fcccdefe75-kube-api-access-thm86" (OuterVolumeSpecName: "kube-api-access-thm86") pod "42f3766b-e64a-4b77-8793-74fcccdefe75" (UID: "42f3766b-e64a-4b77-8793-74fcccdefe75"). InnerVolumeSpecName "kube-api-access-thm86". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 15:07:44 crc kubenswrapper[4762]: I1009 15:07:44.596073 4762 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/42f3766b-e64a-4b77-8793-74fcccdefe75-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 15:07:44 crc kubenswrapper[4762]: I1009 15:07:44.596106 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-thm86\" (UniqueName: \"kubernetes.io/projected/42f3766b-e64a-4b77-8793-74fcccdefe75-kube-api-access-thm86\") on node \"crc\" DevicePath \"\"" Oct 09 15:07:44 crc kubenswrapper[4762]: I1009 15:07:44.666802 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/42f3766b-e64a-4b77-8793-74fcccdefe75-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "42f3766b-e64a-4b77-8793-74fcccdefe75" (UID: "42f3766b-e64a-4b77-8793-74fcccdefe75"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 15:07:44 crc kubenswrapper[4762]: I1009 15:07:44.698564 4762 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/42f3766b-e64a-4b77-8793-74fcccdefe75-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 15:07:44 crc kubenswrapper[4762]: I1009 15:07:44.844587 4762 generic.go:334] "Generic (PLEG): container finished" podID="42f3766b-e64a-4b77-8793-74fcccdefe75" containerID="b29df7c708cf80702307fe1d6dc4e03f610dae98e37fbe5709020f4054836cd2" exitCode=0 Oct 09 15:07:44 crc kubenswrapper[4762]: I1009 15:07:44.844657 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qk6pn" event={"ID":"42f3766b-e64a-4b77-8793-74fcccdefe75","Type":"ContainerDied","Data":"b29df7c708cf80702307fe1d6dc4e03f610dae98e37fbe5709020f4054836cd2"} Oct 09 15:07:44 crc kubenswrapper[4762]: I1009 15:07:44.844685 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-qk6pn" Oct 09 15:07:44 crc kubenswrapper[4762]: I1009 15:07:44.844715 4762 scope.go:117] "RemoveContainer" containerID="b29df7c708cf80702307fe1d6dc4e03f610dae98e37fbe5709020f4054836cd2" Oct 09 15:07:44 crc kubenswrapper[4762]: I1009 15:07:44.844699 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qk6pn" event={"ID":"42f3766b-e64a-4b77-8793-74fcccdefe75","Type":"ContainerDied","Data":"06c0b427c0374ff3083b9cfb408982c520154e48bcac81ed71ee992e5748013e"} Oct 09 15:07:44 crc kubenswrapper[4762]: I1009 15:07:44.873847 4762 scope.go:117] "RemoveContainer" containerID="61bb6de1707cd6807d9fa19da0c9286c2df813d6357db343db5163ba9ce88423" Oct 09 15:07:44 crc kubenswrapper[4762]: I1009 15:07:44.893048 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-qk6pn"] Oct 09 15:07:44 crc kubenswrapper[4762]: I1009 15:07:44.904823 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-qk6pn"] Oct 09 15:07:44 crc kubenswrapper[4762]: I1009 15:07:44.905918 4762 scope.go:117] "RemoveContainer" containerID="ad9cefe0614b590bdbc16520a522adb0563e823299b5916e151bf0d350004ce7" Oct 09 15:07:44 crc kubenswrapper[4762]: I1009 15:07:44.954303 4762 scope.go:117] "RemoveContainer" containerID="b29df7c708cf80702307fe1d6dc4e03f610dae98e37fbe5709020f4054836cd2" Oct 09 15:07:44 crc kubenswrapper[4762]: E1009 15:07:44.954987 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b29df7c708cf80702307fe1d6dc4e03f610dae98e37fbe5709020f4054836cd2\": container with ID starting with b29df7c708cf80702307fe1d6dc4e03f610dae98e37fbe5709020f4054836cd2 not found: ID does not exist" containerID="b29df7c708cf80702307fe1d6dc4e03f610dae98e37fbe5709020f4054836cd2" Oct 09 15:07:44 crc kubenswrapper[4762]: I1009 15:07:44.955036 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b29df7c708cf80702307fe1d6dc4e03f610dae98e37fbe5709020f4054836cd2"} err="failed to get container status \"b29df7c708cf80702307fe1d6dc4e03f610dae98e37fbe5709020f4054836cd2\": rpc error: code = NotFound desc = could not find container \"b29df7c708cf80702307fe1d6dc4e03f610dae98e37fbe5709020f4054836cd2\": container with ID starting with b29df7c708cf80702307fe1d6dc4e03f610dae98e37fbe5709020f4054836cd2 not found: ID does not exist" Oct 09 15:07:44 crc kubenswrapper[4762]: I1009 15:07:44.955068 4762 scope.go:117] "RemoveContainer" containerID="61bb6de1707cd6807d9fa19da0c9286c2df813d6357db343db5163ba9ce88423" Oct 09 15:07:44 crc kubenswrapper[4762]: E1009 15:07:44.955536 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"61bb6de1707cd6807d9fa19da0c9286c2df813d6357db343db5163ba9ce88423\": container with ID starting with 61bb6de1707cd6807d9fa19da0c9286c2df813d6357db343db5163ba9ce88423 not found: ID does not exist" containerID="61bb6de1707cd6807d9fa19da0c9286c2df813d6357db343db5163ba9ce88423" Oct 09 15:07:44 crc kubenswrapper[4762]: I1009 15:07:44.955579 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"61bb6de1707cd6807d9fa19da0c9286c2df813d6357db343db5163ba9ce88423"} err="failed to get container status \"61bb6de1707cd6807d9fa19da0c9286c2df813d6357db343db5163ba9ce88423\": rpc error: code = NotFound desc = could not find container 
\"61bb6de1707cd6807d9fa19da0c9286c2df813d6357db343db5163ba9ce88423\": container with ID starting with 61bb6de1707cd6807d9fa19da0c9286c2df813d6357db343db5163ba9ce88423 not found: ID does not exist" Oct 09 15:07:44 crc kubenswrapper[4762]: I1009 15:07:44.955603 4762 scope.go:117] "RemoveContainer" containerID="ad9cefe0614b590bdbc16520a522adb0563e823299b5916e151bf0d350004ce7" Oct 09 15:07:44 crc kubenswrapper[4762]: E1009 15:07:44.956038 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ad9cefe0614b590bdbc16520a522adb0563e823299b5916e151bf0d350004ce7\": container with ID starting with ad9cefe0614b590bdbc16520a522adb0563e823299b5916e151bf0d350004ce7 not found: ID does not exist" containerID="ad9cefe0614b590bdbc16520a522adb0563e823299b5916e151bf0d350004ce7" Oct 09 15:07:44 crc kubenswrapper[4762]: I1009 15:07:44.956068 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ad9cefe0614b590bdbc16520a522adb0563e823299b5916e151bf0d350004ce7"} err="failed to get container status \"ad9cefe0614b590bdbc16520a522adb0563e823299b5916e151bf0d350004ce7\": rpc error: code = NotFound desc = could not find container \"ad9cefe0614b590bdbc16520a522adb0563e823299b5916e151bf0d350004ce7\": container with ID starting with ad9cefe0614b590bdbc16520a522adb0563e823299b5916e151bf0d350004ce7 not found: ID does not exist" Oct 09 15:07:44 crc kubenswrapper[4762]: I1009 15:07:44.981906 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="42f3766b-e64a-4b77-8793-74fcccdefe75" path="/var/lib/kubelet/pods/42f3766b-e64a-4b77-8793-74fcccdefe75/volumes" Oct 09 15:07:49 crc kubenswrapper[4762]: I1009 15:07:49.965892 4762 scope.go:117] "RemoveContainer" containerID="35c7aebddc7d19d2cc79200fbf40a9f94ad48013c10b612bd476e919aed06c38" Oct 09 15:07:49 crc kubenswrapper[4762]: E1009 15:07:49.966819 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:08:03 crc kubenswrapper[4762]: I1009 15:08:03.408987 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-6dbdd4f7ff-nxf7g"] Oct 09 15:08:03 crc kubenswrapper[4762]: E1009 15:08:03.414354 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="42f3766b-e64a-4b77-8793-74fcccdefe75" containerName="extract-content" Oct 09 15:08:03 crc kubenswrapper[4762]: I1009 15:08:03.414384 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="42f3766b-e64a-4b77-8793-74fcccdefe75" containerName="extract-content" Oct 09 15:08:03 crc kubenswrapper[4762]: E1009 15:08:03.414411 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="42f3766b-e64a-4b77-8793-74fcccdefe75" containerName="registry-server" Oct 09 15:08:03 crc kubenswrapper[4762]: I1009 15:08:03.414418 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="42f3766b-e64a-4b77-8793-74fcccdefe75" containerName="registry-server" Oct 09 15:08:03 crc kubenswrapper[4762]: E1009 15:08:03.414427 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="42f3766b-e64a-4b77-8793-74fcccdefe75" containerName="extract-utilities" Oct 09 15:08:03 crc kubenswrapper[4762]: 
I1009 15:08:03.414434 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="42f3766b-e64a-4b77-8793-74fcccdefe75" containerName="extract-utilities" Oct 09 15:08:03 crc kubenswrapper[4762]: I1009 15:08:03.414677 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="42f3766b-e64a-4b77-8793-74fcccdefe75" containerName="registry-server" Oct 09 15:08:03 crc kubenswrapper[4762]: I1009 15:08:03.415684 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-6dbdd4f7ff-nxf7g" Oct 09 15:08:03 crc kubenswrapper[4762]: I1009 15:08:03.418144 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-scripts" Oct 09 15:08:03 crc kubenswrapper[4762]: I1009 15:08:03.418332 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon-horizon-dockercfg-btr59" Oct 09 15:08:03 crc kubenswrapper[4762]: I1009 15:08:03.418548 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon" Oct 09 15:08:03 crc kubenswrapper[4762]: I1009 15:08:03.424319 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-config-data" Oct 09 15:08:03 crc kubenswrapper[4762]: I1009 15:08:03.426532 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-6dbdd4f7ff-nxf7g"] Oct 09 15:08:03 crc kubenswrapper[4762]: I1009 15:08:03.510138 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 09 15:08:03 crc kubenswrapper[4762]: I1009 15:08:03.510386 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="4103bdae-6031-44fc-b303-057c8e736522" containerName="glance-log" containerID="cri-o://80c961758dba56cc8b42c9001e9c413045ee23035c3a73f19e76a8b6d9acdfe9" gracePeriod=30 Oct 09 15:08:03 crc kubenswrapper[4762]: I1009 15:08:03.511110 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="4103bdae-6031-44fc-b303-057c8e736522" containerName="glance-httpd" containerID="cri-o://fd1c0ea3a8cb306001de4697dbca9a41f4d87c20a678b5e27ebe877b47b21d44" gracePeriod=30 Oct 09 15:08:03 crc kubenswrapper[4762]: I1009 15:08:03.554140 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-789b448649-k6n2g"] Oct 09 15:08:03 crc kubenswrapper[4762]: I1009 15:08:03.556117 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-789b448649-k6n2g" Oct 09 15:08:03 crc kubenswrapper[4762]: I1009 15:08:03.572055 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-789b448649-k6n2g"] Oct 09 15:08:03 crc kubenswrapper[4762]: I1009 15:08:03.580102 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hthvb\" (UniqueName: \"kubernetes.io/projected/1a94127b-6d6f-478e-a3ef-1ef9af954f1f-kube-api-access-hthvb\") pod \"horizon-6dbdd4f7ff-nxf7g\" (UID: \"1a94127b-6d6f-478e-a3ef-1ef9af954f1f\") " pod="openstack/horizon-6dbdd4f7ff-nxf7g" Oct 09 15:08:03 crc kubenswrapper[4762]: I1009 15:08:03.580151 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/1a94127b-6d6f-478e-a3ef-1ef9af954f1f-horizon-secret-key\") pod \"horizon-6dbdd4f7ff-nxf7g\" (UID: \"1a94127b-6d6f-478e-a3ef-1ef9af954f1f\") " pod="openstack/horizon-6dbdd4f7ff-nxf7g" Oct 09 15:08:03 crc kubenswrapper[4762]: I1009 15:08:03.580209 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1a94127b-6d6f-478e-a3ef-1ef9af954f1f-logs\") pod \"horizon-6dbdd4f7ff-nxf7g\" (UID: \"1a94127b-6d6f-478e-a3ef-1ef9af954f1f\") " pod="openstack/horizon-6dbdd4f7ff-nxf7g" Oct 09 15:08:03 crc kubenswrapper[4762]: I1009 15:08:03.580288 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1a94127b-6d6f-478e-a3ef-1ef9af954f1f-config-data\") pod \"horizon-6dbdd4f7ff-nxf7g\" (UID: \"1a94127b-6d6f-478e-a3ef-1ef9af954f1f\") " pod="openstack/horizon-6dbdd4f7ff-nxf7g" Oct 09 15:08:03 crc kubenswrapper[4762]: I1009 15:08:03.581118 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1a94127b-6d6f-478e-a3ef-1ef9af954f1f-scripts\") pod \"horizon-6dbdd4f7ff-nxf7g\" (UID: \"1a94127b-6d6f-478e-a3ef-1ef9af954f1f\") " pod="openstack/horizon-6dbdd4f7ff-nxf7g" Oct 09 15:08:03 crc kubenswrapper[4762]: I1009 15:08:03.620775 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 09 15:08:03 crc kubenswrapper[4762]: I1009 15:08:03.622460 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="15358f7d-4847-4efc-b8e9-828a992f5bce" containerName="glance-log" containerID="cri-o://62f47e82a0f87475769613f0d9ed3cf973df67d747a5a1ad8e91672e853f070b" gracePeriod=30 Oct 09 15:08:03 crc kubenswrapper[4762]: I1009 15:08:03.623096 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="15358f7d-4847-4efc-b8e9-828a992f5bce" containerName="glance-httpd" containerID="cri-o://70304c6746eee214fd29bb7fa8f5a1bf48fff3539a97e9aa1c580bed630a4889" gracePeriod=30 Oct 09 15:08:03 crc kubenswrapper[4762]: I1009 15:08:03.683147 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/85ac6699-379e-419f-9ea6-7ea695694279-horizon-secret-key\") pod \"horizon-789b448649-k6n2g\" (UID: \"85ac6699-379e-419f-9ea6-7ea695694279\") " pod="openstack/horizon-789b448649-k6n2g" Oct 09 15:08:03 crc kubenswrapper[4762]: I1009 
15:08:03.683214 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/85ac6699-379e-419f-9ea6-7ea695694279-config-data\") pod \"horizon-789b448649-k6n2g\" (UID: \"85ac6699-379e-419f-9ea6-7ea695694279\") " pod="openstack/horizon-789b448649-k6n2g" Oct 09 15:08:03 crc kubenswrapper[4762]: I1009 15:08:03.683255 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/85ac6699-379e-419f-9ea6-7ea695694279-logs\") pod \"horizon-789b448649-k6n2g\" (UID: \"85ac6699-379e-419f-9ea6-7ea695694279\") " pod="openstack/horizon-789b448649-k6n2g" Oct 09 15:08:03 crc kubenswrapper[4762]: I1009 15:08:03.683301 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hthvb\" (UniqueName: \"kubernetes.io/projected/1a94127b-6d6f-478e-a3ef-1ef9af954f1f-kube-api-access-hthvb\") pod \"horizon-6dbdd4f7ff-nxf7g\" (UID: \"1a94127b-6d6f-478e-a3ef-1ef9af954f1f\") " pod="openstack/horizon-6dbdd4f7ff-nxf7g" Oct 09 15:08:03 crc kubenswrapper[4762]: I1009 15:08:03.683324 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/1a94127b-6d6f-478e-a3ef-1ef9af954f1f-horizon-secret-key\") pod \"horizon-6dbdd4f7ff-nxf7g\" (UID: \"1a94127b-6d6f-478e-a3ef-1ef9af954f1f\") " pod="openstack/horizon-6dbdd4f7ff-nxf7g" Oct 09 15:08:03 crc kubenswrapper[4762]: I1009 15:08:03.683359 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/85ac6699-379e-419f-9ea6-7ea695694279-scripts\") pod \"horizon-789b448649-k6n2g\" (UID: \"85ac6699-379e-419f-9ea6-7ea695694279\") " pod="openstack/horizon-789b448649-k6n2g" Oct 09 15:08:03 crc kubenswrapper[4762]: I1009 15:08:03.683393 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1a94127b-6d6f-478e-a3ef-1ef9af954f1f-logs\") pod \"horizon-6dbdd4f7ff-nxf7g\" (UID: \"1a94127b-6d6f-478e-a3ef-1ef9af954f1f\") " pod="openstack/horizon-6dbdd4f7ff-nxf7g" Oct 09 15:08:03 crc kubenswrapper[4762]: I1009 15:08:03.683435 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ft42n\" (UniqueName: \"kubernetes.io/projected/85ac6699-379e-419f-9ea6-7ea695694279-kube-api-access-ft42n\") pod \"horizon-789b448649-k6n2g\" (UID: \"85ac6699-379e-419f-9ea6-7ea695694279\") " pod="openstack/horizon-789b448649-k6n2g" Oct 09 15:08:03 crc kubenswrapper[4762]: I1009 15:08:03.683476 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1a94127b-6d6f-478e-a3ef-1ef9af954f1f-config-data\") pod \"horizon-6dbdd4f7ff-nxf7g\" (UID: \"1a94127b-6d6f-478e-a3ef-1ef9af954f1f\") " pod="openstack/horizon-6dbdd4f7ff-nxf7g" Oct 09 15:08:03 crc kubenswrapper[4762]: I1009 15:08:03.683567 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1a94127b-6d6f-478e-a3ef-1ef9af954f1f-scripts\") pod \"horizon-6dbdd4f7ff-nxf7g\" (UID: \"1a94127b-6d6f-478e-a3ef-1ef9af954f1f\") " pod="openstack/horizon-6dbdd4f7ff-nxf7g" Oct 09 15:08:03 crc kubenswrapper[4762]: I1009 15:08:03.685007 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"logs\" (UniqueName: \"kubernetes.io/empty-dir/1a94127b-6d6f-478e-a3ef-1ef9af954f1f-logs\") pod \"horizon-6dbdd4f7ff-nxf7g\" (UID: \"1a94127b-6d6f-478e-a3ef-1ef9af954f1f\") " pod="openstack/horizon-6dbdd4f7ff-nxf7g" Oct 09 15:08:03 crc kubenswrapper[4762]: I1009 15:08:03.686176 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1a94127b-6d6f-478e-a3ef-1ef9af954f1f-config-data\") pod \"horizon-6dbdd4f7ff-nxf7g\" (UID: \"1a94127b-6d6f-478e-a3ef-1ef9af954f1f\") " pod="openstack/horizon-6dbdd4f7ff-nxf7g" Oct 09 15:08:03 crc kubenswrapper[4762]: I1009 15:08:03.686182 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1a94127b-6d6f-478e-a3ef-1ef9af954f1f-scripts\") pod \"horizon-6dbdd4f7ff-nxf7g\" (UID: \"1a94127b-6d6f-478e-a3ef-1ef9af954f1f\") " pod="openstack/horizon-6dbdd4f7ff-nxf7g" Oct 09 15:08:03 crc kubenswrapper[4762]: I1009 15:08:03.689303 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/1a94127b-6d6f-478e-a3ef-1ef9af954f1f-horizon-secret-key\") pod \"horizon-6dbdd4f7ff-nxf7g\" (UID: \"1a94127b-6d6f-478e-a3ef-1ef9af954f1f\") " pod="openstack/horizon-6dbdd4f7ff-nxf7g" Oct 09 15:08:03 crc kubenswrapper[4762]: I1009 15:08:03.699400 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hthvb\" (UniqueName: \"kubernetes.io/projected/1a94127b-6d6f-478e-a3ef-1ef9af954f1f-kube-api-access-hthvb\") pod \"horizon-6dbdd4f7ff-nxf7g\" (UID: \"1a94127b-6d6f-478e-a3ef-1ef9af954f1f\") " pod="openstack/horizon-6dbdd4f7ff-nxf7g" Oct 09 15:08:03 crc kubenswrapper[4762]: I1009 15:08:03.735351 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-6dbdd4f7ff-nxf7g" Oct 09 15:08:03 crc kubenswrapper[4762]: I1009 15:08:03.784920 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/85ac6699-379e-419f-9ea6-7ea695694279-horizon-secret-key\") pod \"horizon-789b448649-k6n2g\" (UID: \"85ac6699-379e-419f-9ea6-7ea695694279\") " pod="openstack/horizon-789b448649-k6n2g" Oct 09 15:08:03 crc kubenswrapper[4762]: I1009 15:08:03.784993 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/85ac6699-379e-419f-9ea6-7ea695694279-config-data\") pod \"horizon-789b448649-k6n2g\" (UID: \"85ac6699-379e-419f-9ea6-7ea695694279\") " pod="openstack/horizon-789b448649-k6n2g" Oct 09 15:08:03 crc kubenswrapper[4762]: I1009 15:08:03.785040 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/85ac6699-379e-419f-9ea6-7ea695694279-logs\") pod \"horizon-789b448649-k6n2g\" (UID: \"85ac6699-379e-419f-9ea6-7ea695694279\") " pod="openstack/horizon-789b448649-k6n2g" Oct 09 15:08:03 crc kubenswrapper[4762]: I1009 15:08:03.785097 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/85ac6699-379e-419f-9ea6-7ea695694279-scripts\") pod \"horizon-789b448649-k6n2g\" (UID: \"85ac6699-379e-419f-9ea6-7ea695694279\") " pod="openstack/horizon-789b448649-k6n2g" Oct 09 15:08:03 crc kubenswrapper[4762]: I1009 15:08:03.785161 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ft42n\" (UniqueName: \"kubernetes.io/projected/85ac6699-379e-419f-9ea6-7ea695694279-kube-api-access-ft42n\") pod \"horizon-789b448649-k6n2g\" (UID: \"85ac6699-379e-419f-9ea6-7ea695694279\") " pod="openstack/horizon-789b448649-k6n2g" Oct 09 15:08:03 crc kubenswrapper[4762]: I1009 15:08:03.786881 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/85ac6699-379e-419f-9ea6-7ea695694279-logs\") pod \"horizon-789b448649-k6n2g\" (UID: \"85ac6699-379e-419f-9ea6-7ea695694279\") " pod="openstack/horizon-789b448649-k6n2g" Oct 09 15:08:03 crc kubenswrapper[4762]: I1009 15:08:03.788040 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/85ac6699-379e-419f-9ea6-7ea695694279-config-data\") pod \"horizon-789b448649-k6n2g\" (UID: \"85ac6699-379e-419f-9ea6-7ea695694279\") " pod="openstack/horizon-789b448649-k6n2g" Oct 09 15:08:03 crc kubenswrapper[4762]: I1009 15:08:03.788420 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/85ac6699-379e-419f-9ea6-7ea695694279-scripts\") pod \"horizon-789b448649-k6n2g\" (UID: \"85ac6699-379e-419f-9ea6-7ea695694279\") " pod="openstack/horizon-789b448649-k6n2g" Oct 09 15:08:03 crc kubenswrapper[4762]: I1009 15:08:03.790719 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/85ac6699-379e-419f-9ea6-7ea695694279-horizon-secret-key\") pod \"horizon-789b448649-k6n2g\" (UID: \"85ac6699-379e-419f-9ea6-7ea695694279\") " pod="openstack/horizon-789b448649-k6n2g" Oct 09 15:08:03 crc kubenswrapper[4762]: I1009 15:08:03.802510 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-ft42n\" (UniqueName: \"kubernetes.io/projected/85ac6699-379e-419f-9ea6-7ea695694279-kube-api-access-ft42n\") pod \"horizon-789b448649-k6n2g\" (UID: \"85ac6699-379e-419f-9ea6-7ea695694279\") " pod="openstack/horizon-789b448649-k6n2g" Oct 09 15:08:04 crc kubenswrapper[4762]: I1009 15:08:03.886151 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-789b448649-k6n2g" Oct 09 15:08:04 crc kubenswrapper[4762]: I1009 15:08:03.966299 4762 scope.go:117] "RemoveContainer" containerID="35c7aebddc7d19d2cc79200fbf40a9f94ad48013c10b612bd476e919aed06c38" Oct 09 15:08:04 crc kubenswrapper[4762]: E1009 15:08:03.966496 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:08:04 crc kubenswrapper[4762]: I1009 15:08:04.097790 4762 generic.go:334] "Generic (PLEG): container finished" podID="15358f7d-4847-4efc-b8e9-828a992f5bce" containerID="62f47e82a0f87475769613f0d9ed3cf973df67d747a5a1ad8e91672e853f070b" exitCode=143 Oct 09 15:08:04 crc kubenswrapper[4762]: I1009 15:08:04.098618 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"15358f7d-4847-4efc-b8e9-828a992f5bce","Type":"ContainerDied","Data":"62f47e82a0f87475769613f0d9ed3cf973df67d747a5a1ad8e91672e853f070b"} Oct 09 15:08:04 crc kubenswrapper[4762]: I1009 15:08:04.108319 4762 generic.go:334] "Generic (PLEG): container finished" podID="4103bdae-6031-44fc-b303-057c8e736522" containerID="80c961758dba56cc8b42c9001e9c413045ee23035c3a73f19e76a8b6d9acdfe9" exitCode=143 Oct 09 15:08:04 crc kubenswrapper[4762]: I1009 15:08:04.108379 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"4103bdae-6031-44fc-b303-057c8e736522","Type":"ContainerDied","Data":"80c961758dba56cc8b42c9001e9c413045ee23035c3a73f19e76a8b6d9acdfe9"} Oct 09 15:08:04 crc kubenswrapper[4762]: I1009 15:08:04.229738 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-6dbdd4f7ff-nxf7g"] Oct 09 15:08:04 crc kubenswrapper[4762]: I1009 15:08:04.270269 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-5bdb8689c-79w7r"] Oct 09 15:08:04 crc kubenswrapper[4762]: I1009 15:08:04.272052 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-5bdb8689c-79w7r" Oct 09 15:08:04 crc kubenswrapper[4762]: I1009 15:08:04.282942 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-5bdb8689c-79w7r"] Oct 09 15:08:04 crc kubenswrapper[4762]: I1009 15:08:04.413991 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a961ac97-2010-40c4-9c5a-a68222cb2a8b-scripts\") pod \"horizon-5bdb8689c-79w7r\" (UID: \"a961ac97-2010-40c4-9c5a-a68222cb2a8b\") " pod="openstack/horizon-5bdb8689c-79w7r" Oct 09 15:08:04 crc kubenswrapper[4762]: I1009 15:08:04.414071 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a961ac97-2010-40c4-9c5a-a68222cb2a8b-logs\") pod \"horizon-5bdb8689c-79w7r\" (UID: \"a961ac97-2010-40c4-9c5a-a68222cb2a8b\") " pod="openstack/horizon-5bdb8689c-79w7r" Oct 09 15:08:04 crc kubenswrapper[4762]: I1009 15:08:04.414137 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/a961ac97-2010-40c4-9c5a-a68222cb2a8b-horizon-secret-key\") pod \"horizon-5bdb8689c-79w7r\" (UID: \"a961ac97-2010-40c4-9c5a-a68222cb2a8b\") " pod="openstack/horizon-5bdb8689c-79w7r" Oct 09 15:08:04 crc kubenswrapper[4762]: I1009 15:08:04.414159 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a961ac97-2010-40c4-9c5a-a68222cb2a8b-config-data\") pod \"horizon-5bdb8689c-79w7r\" (UID: \"a961ac97-2010-40c4-9c5a-a68222cb2a8b\") " pod="openstack/horizon-5bdb8689c-79w7r" Oct 09 15:08:04 crc kubenswrapper[4762]: I1009 15:08:04.414174 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cvwwc\" (UniqueName: \"kubernetes.io/projected/a961ac97-2010-40c4-9c5a-a68222cb2a8b-kube-api-access-cvwwc\") pod \"horizon-5bdb8689c-79w7r\" (UID: \"a961ac97-2010-40c4-9c5a-a68222cb2a8b\") " pod="openstack/horizon-5bdb8689c-79w7r" Oct 09 15:08:04 crc kubenswrapper[4762]: I1009 15:08:04.520319 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a961ac97-2010-40c4-9c5a-a68222cb2a8b-logs\") pod \"horizon-5bdb8689c-79w7r\" (UID: \"a961ac97-2010-40c4-9c5a-a68222cb2a8b\") " pod="openstack/horizon-5bdb8689c-79w7r" Oct 09 15:08:04 crc kubenswrapper[4762]: I1009 15:08:04.520466 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/a961ac97-2010-40c4-9c5a-a68222cb2a8b-horizon-secret-key\") pod \"horizon-5bdb8689c-79w7r\" (UID: \"a961ac97-2010-40c4-9c5a-a68222cb2a8b\") " pod="openstack/horizon-5bdb8689c-79w7r" Oct 09 15:08:04 crc kubenswrapper[4762]: I1009 15:08:04.520500 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a961ac97-2010-40c4-9c5a-a68222cb2a8b-config-data\") pod \"horizon-5bdb8689c-79w7r\" (UID: \"a961ac97-2010-40c4-9c5a-a68222cb2a8b\") " pod="openstack/horizon-5bdb8689c-79w7r" Oct 09 15:08:04 crc kubenswrapper[4762]: I1009 15:08:04.520523 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cvwwc\" (UniqueName: 
\"kubernetes.io/projected/a961ac97-2010-40c4-9c5a-a68222cb2a8b-kube-api-access-cvwwc\") pod \"horizon-5bdb8689c-79w7r\" (UID: \"a961ac97-2010-40c4-9c5a-a68222cb2a8b\") " pod="openstack/horizon-5bdb8689c-79w7r" Oct 09 15:08:04 crc kubenswrapper[4762]: I1009 15:08:04.520656 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a961ac97-2010-40c4-9c5a-a68222cb2a8b-scripts\") pod \"horizon-5bdb8689c-79w7r\" (UID: \"a961ac97-2010-40c4-9c5a-a68222cb2a8b\") " pod="openstack/horizon-5bdb8689c-79w7r" Oct 09 15:08:04 crc kubenswrapper[4762]: I1009 15:08:04.521549 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a961ac97-2010-40c4-9c5a-a68222cb2a8b-scripts\") pod \"horizon-5bdb8689c-79w7r\" (UID: \"a961ac97-2010-40c4-9c5a-a68222cb2a8b\") " pod="openstack/horizon-5bdb8689c-79w7r" Oct 09 15:08:04 crc kubenswrapper[4762]: I1009 15:08:04.522420 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a961ac97-2010-40c4-9c5a-a68222cb2a8b-logs\") pod \"horizon-5bdb8689c-79w7r\" (UID: \"a961ac97-2010-40c4-9c5a-a68222cb2a8b\") " pod="openstack/horizon-5bdb8689c-79w7r" Oct 09 15:08:04 crc kubenswrapper[4762]: I1009 15:08:04.522669 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a961ac97-2010-40c4-9c5a-a68222cb2a8b-config-data\") pod \"horizon-5bdb8689c-79w7r\" (UID: \"a961ac97-2010-40c4-9c5a-a68222cb2a8b\") " pod="openstack/horizon-5bdb8689c-79w7r" Oct 09 15:08:04 crc kubenswrapper[4762]: I1009 15:08:04.527272 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/a961ac97-2010-40c4-9c5a-a68222cb2a8b-horizon-secret-key\") pod \"horizon-5bdb8689c-79w7r\" (UID: \"a961ac97-2010-40c4-9c5a-a68222cb2a8b\") " pod="openstack/horizon-5bdb8689c-79w7r" Oct 09 15:08:04 crc kubenswrapper[4762]: I1009 15:08:04.541237 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cvwwc\" (UniqueName: \"kubernetes.io/projected/a961ac97-2010-40c4-9c5a-a68222cb2a8b-kube-api-access-cvwwc\") pod \"horizon-5bdb8689c-79w7r\" (UID: \"a961ac97-2010-40c4-9c5a-a68222cb2a8b\") " pod="openstack/horizon-5bdb8689c-79w7r" Oct 09 15:08:04 crc kubenswrapper[4762]: I1009 15:08:04.657648 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-5bdb8689c-79w7r" Oct 09 15:08:04 crc kubenswrapper[4762]: I1009 15:08:04.877795 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-789b448649-k6n2g"] Oct 09 15:08:04 crc kubenswrapper[4762]: I1009 15:08:04.900821 4762 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 09 15:08:04 crc kubenswrapper[4762]: I1009 15:08:04.918378 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-6dbdd4f7ff-nxf7g"] Oct 09 15:08:04 crc kubenswrapper[4762]: W1009 15:08:04.924209 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1a94127b_6d6f_478e_a3ef_1ef9af954f1f.slice/crio-4f4413ec45f4dcb95f1b585f53998075e43d42b3a7390715f80b6a7a1a31edcb WatchSource:0}: Error finding container 4f4413ec45f4dcb95f1b585f53998075e43d42b3a7390715f80b6a7a1a31edcb: Status 404 returned error can't find the container with id 4f4413ec45f4dcb95f1b585f53998075e43d42b3a7390715f80b6a7a1a31edcb Oct 09 15:08:05 crc kubenswrapper[4762]: I1009 15:08:05.119134 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-789b448649-k6n2g" event={"ID":"85ac6699-379e-419f-9ea6-7ea695694279","Type":"ContainerStarted","Data":"c8fe5b744f6aa7da62b6ea2d9b34e72276dc48f5fad40981d575bbafd15f25d9"} Oct 09 15:08:05 crc kubenswrapper[4762]: I1009 15:08:05.121046 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6dbdd4f7ff-nxf7g" event={"ID":"1a94127b-6d6f-478e-a3ef-1ef9af954f1f","Type":"ContainerStarted","Data":"4f4413ec45f4dcb95f1b585f53998075e43d42b3a7390715f80b6a7a1a31edcb"} Oct 09 15:08:05 crc kubenswrapper[4762]: W1009 15:08:05.184235 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda961ac97_2010_40c4_9c5a_a68222cb2a8b.slice/crio-f39c3f602da3062baf07677c917a0a61b5c2c9dfe3f3e870c4db858979eadf34 WatchSource:0}: Error finding container f39c3f602da3062baf07677c917a0a61b5c2c9dfe3f3e870c4db858979eadf34: Status 404 returned error can't find the container with id f39c3f602da3062baf07677c917a0a61b5c2c9dfe3f3e870c4db858979eadf34 Oct 09 15:08:05 crc kubenswrapper[4762]: I1009 15:08:05.184404 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-5bdb8689c-79w7r"] Oct 09 15:08:06 crc kubenswrapper[4762]: I1009 15:08:06.139054 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5bdb8689c-79w7r" event={"ID":"a961ac97-2010-40c4-9c5a-a68222cb2a8b","Type":"ContainerStarted","Data":"f39c3f602da3062baf07677c917a0a61b5c2c9dfe3f3e870c4db858979eadf34"} Oct 09 15:08:07 crc kubenswrapper[4762]: I1009 15:08:07.164987 4762 generic.go:334] "Generic (PLEG): container finished" podID="4103bdae-6031-44fc-b303-057c8e736522" containerID="fd1c0ea3a8cb306001de4697dbca9a41f4d87c20a678b5e27ebe877b47b21d44" exitCode=0 Oct 09 15:08:07 crc kubenswrapper[4762]: I1009 15:08:07.165068 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"4103bdae-6031-44fc-b303-057c8e736522","Type":"ContainerDied","Data":"fd1c0ea3a8cb306001de4697dbca9a41f4d87c20a678b5e27ebe877b47b21d44"} Oct 09 15:08:07 crc kubenswrapper[4762]: I1009 15:08:07.173132 4762 generic.go:334] "Generic (PLEG): container finished" podID="15358f7d-4847-4efc-b8e9-828a992f5bce" containerID="70304c6746eee214fd29bb7fa8f5a1bf48fff3539a97e9aa1c580bed630a4889" exitCode=0 Oct 09 
15:08:07 crc kubenswrapper[4762]: I1009 15:08:07.173185 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"15358f7d-4847-4efc-b8e9-828a992f5bce","Type":"ContainerDied","Data":"70304c6746eee214fd29bb7fa8f5a1bf48fff3539a97e9aa1c580bed630a4889"} Oct 09 15:08:07 crc kubenswrapper[4762]: I1009 15:08:07.387145 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 09 15:08:07 crc kubenswrapper[4762]: I1009 15:08:07.489027 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15358f7d-4847-4efc-b8e9-828a992f5bce-combined-ca-bundle\") pod \"15358f7d-4847-4efc-b8e9-828a992f5bce\" (UID: \"15358f7d-4847-4efc-b8e9-828a992f5bce\") " Oct 09 15:08:07 crc kubenswrapper[4762]: I1009 15:08:07.489549 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/15358f7d-4847-4efc-b8e9-828a992f5bce-scripts\") pod \"15358f7d-4847-4efc-b8e9-828a992f5bce\" (UID: \"15358f7d-4847-4efc-b8e9-828a992f5bce\") " Oct 09 15:08:07 crc kubenswrapper[4762]: I1009 15:08:07.489575 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/15358f7d-4847-4efc-b8e9-828a992f5bce-ceph\") pod \"15358f7d-4847-4efc-b8e9-828a992f5bce\" (UID: \"15358f7d-4847-4efc-b8e9-828a992f5bce\") " Oct 09 15:08:07 crc kubenswrapper[4762]: I1009 15:08:07.489612 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/15358f7d-4847-4efc-b8e9-828a992f5bce-logs\") pod \"15358f7d-4847-4efc-b8e9-828a992f5bce\" (UID: \"15358f7d-4847-4efc-b8e9-828a992f5bce\") " Oct 09 15:08:07 crc kubenswrapper[4762]: I1009 15:08:07.489718 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/15358f7d-4847-4efc-b8e9-828a992f5bce-httpd-run\") pod \"15358f7d-4847-4efc-b8e9-828a992f5bce\" (UID: \"15358f7d-4847-4efc-b8e9-828a992f5bce\") " Oct 09 15:08:07 crc kubenswrapper[4762]: I1009 15:08:07.489765 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b9r9s\" (UniqueName: \"kubernetes.io/projected/15358f7d-4847-4efc-b8e9-828a992f5bce-kube-api-access-b9r9s\") pod \"15358f7d-4847-4efc-b8e9-828a992f5bce\" (UID: \"15358f7d-4847-4efc-b8e9-828a992f5bce\") " Oct 09 15:08:07 crc kubenswrapper[4762]: I1009 15:08:07.489885 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/15358f7d-4847-4efc-b8e9-828a992f5bce-config-data\") pod \"15358f7d-4847-4efc-b8e9-828a992f5bce\" (UID: \"15358f7d-4847-4efc-b8e9-828a992f5bce\") " Oct 09 15:08:07 crc kubenswrapper[4762]: I1009 15:08:07.490725 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/15358f7d-4847-4efc-b8e9-828a992f5bce-logs" (OuterVolumeSpecName: "logs") pod "15358f7d-4847-4efc-b8e9-828a992f5bce" (UID: "15358f7d-4847-4efc-b8e9-828a992f5bce"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 15:08:07 crc kubenswrapper[4762]: I1009 15:08:07.491198 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/15358f7d-4847-4efc-b8e9-828a992f5bce-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "15358f7d-4847-4efc-b8e9-828a992f5bce" (UID: "15358f7d-4847-4efc-b8e9-828a992f5bce"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 15:08:07 crc kubenswrapper[4762]: I1009 15:08:07.496182 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/15358f7d-4847-4efc-b8e9-828a992f5bce-ceph" (OuterVolumeSpecName: "ceph") pod "15358f7d-4847-4efc-b8e9-828a992f5bce" (UID: "15358f7d-4847-4efc-b8e9-828a992f5bce"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 15:08:07 crc kubenswrapper[4762]: I1009 15:08:07.496486 4762 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/15358f7d-4847-4efc-b8e9-828a992f5bce-ceph\") on node \"crc\" DevicePath \"\"" Oct 09 15:08:07 crc kubenswrapper[4762]: I1009 15:08:07.496505 4762 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/15358f7d-4847-4efc-b8e9-828a992f5bce-logs\") on node \"crc\" DevicePath \"\"" Oct 09 15:08:07 crc kubenswrapper[4762]: I1009 15:08:07.496514 4762 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/15358f7d-4847-4efc-b8e9-828a992f5bce-httpd-run\") on node \"crc\" DevicePath \"\"" Oct 09 15:08:07 crc kubenswrapper[4762]: I1009 15:08:07.498790 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/15358f7d-4847-4efc-b8e9-828a992f5bce-kube-api-access-b9r9s" (OuterVolumeSpecName: "kube-api-access-b9r9s") pod "15358f7d-4847-4efc-b8e9-828a992f5bce" (UID: "15358f7d-4847-4efc-b8e9-828a992f5bce"). InnerVolumeSpecName "kube-api-access-b9r9s". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 15:08:07 crc kubenswrapper[4762]: I1009 15:08:07.505390 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/15358f7d-4847-4efc-b8e9-828a992f5bce-scripts" (OuterVolumeSpecName: "scripts") pod "15358f7d-4847-4efc-b8e9-828a992f5bce" (UID: "15358f7d-4847-4efc-b8e9-828a992f5bce"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:08:07 crc kubenswrapper[4762]: I1009 15:08:07.522840 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/15358f7d-4847-4efc-b8e9-828a992f5bce-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "15358f7d-4847-4efc-b8e9-828a992f5bce" (UID: "15358f7d-4847-4efc-b8e9-828a992f5bce"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:08:07 crc kubenswrapper[4762]: I1009 15:08:07.523837 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 09 15:08:07 crc kubenswrapper[4762]: I1009 15:08:07.569956 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/15358f7d-4847-4efc-b8e9-828a992f5bce-config-data" (OuterVolumeSpecName: "config-data") pod "15358f7d-4847-4efc-b8e9-828a992f5bce" (UID: "15358f7d-4847-4efc-b8e9-828a992f5bce"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:08:07 crc kubenswrapper[4762]: I1009 15:08:07.601667 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15358f7d-4847-4efc-b8e9-828a992f5bce-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 15:08:07 crc kubenswrapper[4762]: I1009 15:08:07.601698 4762 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/15358f7d-4847-4efc-b8e9-828a992f5bce-scripts\") on node \"crc\" DevicePath \"\"" Oct 09 15:08:07 crc kubenswrapper[4762]: I1009 15:08:07.601709 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b9r9s\" (UniqueName: \"kubernetes.io/projected/15358f7d-4847-4efc-b8e9-828a992f5bce-kube-api-access-b9r9s\") on node \"crc\" DevicePath \"\"" Oct 09 15:08:07 crc kubenswrapper[4762]: I1009 15:08:07.601721 4762 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/15358f7d-4847-4efc-b8e9-828a992f5bce-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 15:08:07 crc kubenswrapper[4762]: I1009 15:08:07.702999 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/4103bdae-6031-44fc-b303-057c8e736522-ceph\") pod \"4103bdae-6031-44fc-b303-057c8e736522\" (UID: \"4103bdae-6031-44fc-b303-057c8e736522\") " Oct 09 15:08:07 crc kubenswrapper[4762]: I1009 15:08:07.703050 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4103bdae-6031-44fc-b303-057c8e736522-config-data\") pod \"4103bdae-6031-44fc-b303-057c8e736522\" (UID: \"4103bdae-6031-44fc-b303-057c8e736522\") " Oct 09 15:08:07 crc kubenswrapper[4762]: I1009 15:08:07.703085 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4103bdae-6031-44fc-b303-057c8e736522-logs\") pod \"4103bdae-6031-44fc-b303-057c8e736522\" (UID: \"4103bdae-6031-44fc-b303-057c8e736522\") " Oct 09 15:08:07 crc kubenswrapper[4762]: I1009 15:08:07.703145 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4103bdae-6031-44fc-b303-057c8e736522-scripts\") pod \"4103bdae-6031-44fc-b303-057c8e736522\" (UID: \"4103bdae-6031-44fc-b303-057c8e736522\") " Oct 09 15:08:07 crc kubenswrapper[4762]: I1009 15:08:07.703187 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qrpmf\" (UniqueName: \"kubernetes.io/projected/4103bdae-6031-44fc-b303-057c8e736522-kube-api-access-qrpmf\") pod \"4103bdae-6031-44fc-b303-057c8e736522\" (UID: \"4103bdae-6031-44fc-b303-057c8e736522\") " Oct 09 15:08:07 crc kubenswrapper[4762]: I1009 15:08:07.703261 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4103bdae-6031-44fc-b303-057c8e736522-httpd-run\") pod \"4103bdae-6031-44fc-b303-057c8e736522\" (UID: \"4103bdae-6031-44fc-b303-057c8e736522\") " Oct 09 15:08:07 crc kubenswrapper[4762]: I1009 15:08:07.703362 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4103bdae-6031-44fc-b303-057c8e736522-combined-ca-bundle\") pod \"4103bdae-6031-44fc-b303-057c8e736522\" (UID: \"4103bdae-6031-44fc-b303-057c8e736522\") " Oct 09 15:08:07 
crc kubenswrapper[4762]: I1009 15:08:07.703694 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4103bdae-6031-44fc-b303-057c8e736522-logs" (OuterVolumeSpecName: "logs") pod "4103bdae-6031-44fc-b303-057c8e736522" (UID: "4103bdae-6031-44fc-b303-057c8e736522"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 15:08:07 crc kubenswrapper[4762]: I1009 15:08:07.704323 4762 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4103bdae-6031-44fc-b303-057c8e736522-logs\") on node \"crc\" DevicePath \"\"" Oct 09 15:08:07 crc kubenswrapper[4762]: I1009 15:08:07.704863 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4103bdae-6031-44fc-b303-057c8e736522-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "4103bdae-6031-44fc-b303-057c8e736522" (UID: "4103bdae-6031-44fc-b303-057c8e736522"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 15:08:07 crc kubenswrapper[4762]: I1009 15:08:07.706848 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4103bdae-6031-44fc-b303-057c8e736522-scripts" (OuterVolumeSpecName: "scripts") pod "4103bdae-6031-44fc-b303-057c8e736522" (UID: "4103bdae-6031-44fc-b303-057c8e736522"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:08:07 crc kubenswrapper[4762]: I1009 15:08:07.708397 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4103bdae-6031-44fc-b303-057c8e736522-ceph" (OuterVolumeSpecName: "ceph") pod "4103bdae-6031-44fc-b303-057c8e736522" (UID: "4103bdae-6031-44fc-b303-057c8e736522"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 15:08:07 crc kubenswrapper[4762]: I1009 15:08:07.708673 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4103bdae-6031-44fc-b303-057c8e736522-kube-api-access-qrpmf" (OuterVolumeSpecName: "kube-api-access-qrpmf") pod "4103bdae-6031-44fc-b303-057c8e736522" (UID: "4103bdae-6031-44fc-b303-057c8e736522"). InnerVolumeSpecName "kube-api-access-qrpmf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 15:08:07 crc kubenswrapper[4762]: I1009 15:08:07.737030 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4103bdae-6031-44fc-b303-057c8e736522-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4103bdae-6031-44fc-b303-057c8e736522" (UID: "4103bdae-6031-44fc-b303-057c8e736522"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:08:07 crc kubenswrapper[4762]: I1009 15:08:07.761143 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4103bdae-6031-44fc-b303-057c8e736522-config-data" (OuterVolumeSpecName: "config-data") pod "4103bdae-6031-44fc-b303-057c8e736522" (UID: "4103bdae-6031-44fc-b303-057c8e736522"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:08:07 crc kubenswrapper[4762]: I1009 15:08:07.807151 4762 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/4103bdae-6031-44fc-b303-057c8e736522-ceph\") on node \"crc\" DevicePath \"\"" Oct 09 15:08:07 crc kubenswrapper[4762]: I1009 15:08:07.807225 4762 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4103bdae-6031-44fc-b303-057c8e736522-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 15:08:07 crc kubenswrapper[4762]: I1009 15:08:07.807240 4762 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4103bdae-6031-44fc-b303-057c8e736522-scripts\") on node \"crc\" DevicePath \"\"" Oct 09 15:08:07 crc kubenswrapper[4762]: I1009 15:08:07.807255 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qrpmf\" (UniqueName: \"kubernetes.io/projected/4103bdae-6031-44fc-b303-057c8e736522-kube-api-access-qrpmf\") on node \"crc\" DevicePath \"\"" Oct 09 15:08:07 crc kubenswrapper[4762]: I1009 15:08:07.807271 4762 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4103bdae-6031-44fc-b303-057c8e736522-httpd-run\") on node \"crc\" DevicePath \"\"" Oct 09 15:08:07 crc kubenswrapper[4762]: I1009 15:08:07.807282 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4103bdae-6031-44fc-b303-057c8e736522-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 15:08:08 crc kubenswrapper[4762]: I1009 15:08:08.186943 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"15358f7d-4847-4efc-b8e9-828a992f5bce","Type":"ContainerDied","Data":"630e22431f1bb5ef1287dccc79fe96763f1cfb290504dc3b0d9de83a0f2bc6ae"} Oct 09 15:08:08 crc kubenswrapper[4762]: I1009 15:08:08.186995 4762 scope.go:117] "RemoveContainer" containerID="70304c6746eee214fd29bb7fa8f5a1bf48fff3539a97e9aa1c580bed630a4889" Oct 09 15:08:08 crc kubenswrapper[4762]: I1009 15:08:08.187151 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 09 15:08:08 crc kubenswrapper[4762]: I1009 15:08:08.195400 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"4103bdae-6031-44fc-b303-057c8e736522","Type":"ContainerDied","Data":"0e72b5e512ca5f269ce3796ac185a3a591f3863481fabc85fb5256946784c7a1"} Oct 09 15:08:08 crc kubenswrapper[4762]: I1009 15:08:08.195478 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 09 15:08:08 crc kubenswrapper[4762]: I1009 15:08:08.225793 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 09 15:08:08 crc kubenswrapper[4762]: I1009 15:08:08.231816 4762 scope.go:117] "RemoveContainer" containerID="62f47e82a0f87475769613f0d9ed3cf973df67d747a5a1ad8e91672e853f070b" Oct 09 15:08:08 crc kubenswrapper[4762]: I1009 15:08:08.251598 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 09 15:08:08 crc kubenswrapper[4762]: I1009 15:08:08.272934 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 09 15:08:08 crc kubenswrapper[4762]: I1009 15:08:08.292726 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 09 15:08:08 crc kubenswrapper[4762]: I1009 15:08:08.299586 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Oct 09 15:08:08 crc kubenswrapper[4762]: E1009 15:08:08.300134 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4103bdae-6031-44fc-b303-057c8e736522" containerName="glance-log" Oct 09 15:08:08 crc kubenswrapper[4762]: I1009 15:08:08.300148 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="4103bdae-6031-44fc-b303-057c8e736522" containerName="glance-log" Oct 09 15:08:08 crc kubenswrapper[4762]: E1009 15:08:08.300170 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="15358f7d-4847-4efc-b8e9-828a992f5bce" containerName="glance-log" Oct 09 15:08:08 crc kubenswrapper[4762]: I1009 15:08:08.300175 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="15358f7d-4847-4efc-b8e9-828a992f5bce" containerName="glance-log" Oct 09 15:08:08 crc kubenswrapper[4762]: E1009 15:08:08.300194 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4103bdae-6031-44fc-b303-057c8e736522" containerName="glance-httpd" Oct 09 15:08:08 crc kubenswrapper[4762]: I1009 15:08:08.300200 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="4103bdae-6031-44fc-b303-057c8e736522" containerName="glance-httpd" Oct 09 15:08:08 crc kubenswrapper[4762]: E1009 15:08:08.300214 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="15358f7d-4847-4efc-b8e9-828a992f5bce" containerName="glance-httpd" Oct 09 15:08:08 crc kubenswrapper[4762]: I1009 15:08:08.300219 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="15358f7d-4847-4efc-b8e9-828a992f5bce" containerName="glance-httpd" Oct 09 15:08:08 crc kubenswrapper[4762]: I1009 15:08:08.300413 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="15358f7d-4847-4efc-b8e9-828a992f5bce" containerName="glance-httpd" Oct 09 15:08:08 crc kubenswrapper[4762]: I1009 15:08:08.300428 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="4103bdae-6031-44fc-b303-057c8e736522" containerName="glance-log" Oct 09 15:08:08 crc kubenswrapper[4762]: I1009 15:08:08.300438 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="4103bdae-6031-44fc-b303-057c8e736522" containerName="glance-httpd" Oct 09 15:08:08 crc kubenswrapper[4762]: I1009 15:08:08.300454 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="15358f7d-4847-4efc-b8e9-828a992f5bce" containerName="glance-log" Oct 09 15:08:08 crc kubenswrapper[4762]: I1009 15:08:08.301627 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 09 15:08:08 crc kubenswrapper[4762]: I1009 15:08:08.303890 4762 scope.go:117] "RemoveContainer" containerID="fd1c0ea3a8cb306001de4697dbca9a41f4d87c20a678b5e27ebe877b47b21d44" Oct 09 15:08:08 crc kubenswrapper[4762]: I1009 15:08:08.304328 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-zpb7m" Oct 09 15:08:08 crc kubenswrapper[4762]: I1009 15:08:08.305418 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Oct 09 15:08:08 crc kubenswrapper[4762]: I1009 15:08:08.310000 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Oct 09 15:08:08 crc kubenswrapper[4762]: I1009 15:08:08.314089 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 09 15:08:08 crc kubenswrapper[4762]: I1009 15:08:08.316856 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 09 15:08:08 crc kubenswrapper[4762]: I1009 15:08:08.318382 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Oct 09 15:08:08 crc kubenswrapper[4762]: I1009 15:08:08.330762 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 09 15:08:08 crc kubenswrapper[4762]: I1009 15:08:08.342311 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 09 15:08:08 crc kubenswrapper[4762]: I1009 15:08:08.372372 4762 scope.go:117] "RemoveContainer" containerID="80c961758dba56cc8b42c9001e9c413045ee23035c3a73f19e76a8b6d9acdfe9" Oct 09 15:08:08 crc kubenswrapper[4762]: I1009 15:08:08.418303 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/66e2e5e7-f382-48a2-bea7-06768a36b7af-ceph\") pod \"glance-default-internal-api-0\" (UID: \"66e2e5e7-f382-48a2-bea7-06768a36b7af\") " pod="openstack/glance-default-internal-api-0" Oct 09 15:08:08 crc kubenswrapper[4762]: I1009 15:08:08.418397 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9eab9237-9f33-4b78-8416-7b0aef0b1a18-scripts\") pod \"glance-default-external-api-0\" (UID: \"9eab9237-9f33-4b78-8416-7b0aef0b1a18\") " pod="openstack/glance-default-external-api-0" Oct 09 15:08:08 crc kubenswrapper[4762]: I1009 15:08:08.418539 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9eab9237-9f33-4b78-8416-7b0aef0b1a18-config-data\") pod \"glance-default-external-api-0\" (UID: \"9eab9237-9f33-4b78-8416-7b0aef0b1a18\") " pod="openstack/glance-default-external-api-0" Oct 09 15:08:08 crc kubenswrapper[4762]: I1009 15:08:08.418654 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9eab9237-9f33-4b78-8416-7b0aef0b1a18-logs\") pod \"glance-default-external-api-0\" (UID: \"9eab9237-9f33-4b78-8416-7b0aef0b1a18\") " pod="openstack/glance-default-external-api-0" Oct 09 15:08:08 crc kubenswrapper[4762]: I1009 15:08:08.418728 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-ggtcc\" (UniqueName: \"kubernetes.io/projected/9eab9237-9f33-4b78-8416-7b0aef0b1a18-kube-api-access-ggtcc\") pod \"glance-default-external-api-0\" (UID: \"9eab9237-9f33-4b78-8416-7b0aef0b1a18\") " pod="openstack/glance-default-external-api-0" Oct 09 15:08:08 crc kubenswrapper[4762]: I1009 15:08:08.418830 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kr2cq\" (UniqueName: \"kubernetes.io/projected/66e2e5e7-f382-48a2-bea7-06768a36b7af-kube-api-access-kr2cq\") pod \"glance-default-internal-api-0\" (UID: \"66e2e5e7-f382-48a2-bea7-06768a36b7af\") " pod="openstack/glance-default-internal-api-0" Oct 09 15:08:08 crc kubenswrapper[4762]: I1009 15:08:08.418984 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/66e2e5e7-f382-48a2-bea7-06768a36b7af-config-data\") pod \"glance-default-internal-api-0\" (UID: \"66e2e5e7-f382-48a2-bea7-06768a36b7af\") " pod="openstack/glance-default-internal-api-0" Oct 09 15:08:08 crc kubenswrapper[4762]: I1009 15:08:08.419020 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9eab9237-9f33-4b78-8416-7b0aef0b1a18-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"9eab9237-9f33-4b78-8416-7b0aef0b1a18\") " pod="openstack/glance-default-external-api-0" Oct 09 15:08:08 crc kubenswrapper[4762]: I1009 15:08:08.419045 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/66e2e5e7-f382-48a2-bea7-06768a36b7af-scripts\") pod \"glance-default-internal-api-0\" (UID: \"66e2e5e7-f382-48a2-bea7-06768a36b7af\") " pod="openstack/glance-default-internal-api-0" Oct 09 15:08:08 crc kubenswrapper[4762]: I1009 15:08:08.419063 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/66e2e5e7-f382-48a2-bea7-06768a36b7af-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"66e2e5e7-f382-48a2-bea7-06768a36b7af\") " pod="openstack/glance-default-internal-api-0" Oct 09 15:08:08 crc kubenswrapper[4762]: I1009 15:08:08.419110 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/9eab9237-9f33-4b78-8416-7b0aef0b1a18-ceph\") pod \"glance-default-external-api-0\" (UID: \"9eab9237-9f33-4b78-8416-7b0aef0b1a18\") " pod="openstack/glance-default-external-api-0" Oct 09 15:08:08 crc kubenswrapper[4762]: I1009 15:08:08.419164 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9eab9237-9f33-4b78-8416-7b0aef0b1a18-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"9eab9237-9f33-4b78-8416-7b0aef0b1a18\") " pod="openstack/glance-default-external-api-0" Oct 09 15:08:08 crc kubenswrapper[4762]: I1009 15:08:08.419184 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/66e2e5e7-f382-48a2-bea7-06768a36b7af-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"66e2e5e7-f382-48a2-bea7-06768a36b7af\") " pod="openstack/glance-default-internal-api-0" Oct 09 15:08:08 crc kubenswrapper[4762]: I1009 
15:08:08.419341 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/66e2e5e7-f382-48a2-bea7-06768a36b7af-logs\") pod \"glance-default-internal-api-0\" (UID: \"66e2e5e7-f382-48a2-bea7-06768a36b7af\") " pod="openstack/glance-default-internal-api-0" Oct 09 15:08:08 crc kubenswrapper[4762]: I1009 15:08:08.524669 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kr2cq\" (UniqueName: \"kubernetes.io/projected/66e2e5e7-f382-48a2-bea7-06768a36b7af-kube-api-access-kr2cq\") pod \"glance-default-internal-api-0\" (UID: \"66e2e5e7-f382-48a2-bea7-06768a36b7af\") " pod="openstack/glance-default-internal-api-0" Oct 09 15:08:08 crc kubenswrapper[4762]: I1009 15:08:08.524751 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/66e2e5e7-f382-48a2-bea7-06768a36b7af-config-data\") pod \"glance-default-internal-api-0\" (UID: \"66e2e5e7-f382-48a2-bea7-06768a36b7af\") " pod="openstack/glance-default-internal-api-0" Oct 09 15:08:08 crc kubenswrapper[4762]: I1009 15:08:08.524773 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9eab9237-9f33-4b78-8416-7b0aef0b1a18-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"9eab9237-9f33-4b78-8416-7b0aef0b1a18\") " pod="openstack/glance-default-external-api-0" Oct 09 15:08:08 crc kubenswrapper[4762]: I1009 15:08:08.524790 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/66e2e5e7-f382-48a2-bea7-06768a36b7af-scripts\") pod \"glance-default-internal-api-0\" (UID: \"66e2e5e7-f382-48a2-bea7-06768a36b7af\") " pod="openstack/glance-default-internal-api-0" Oct 09 15:08:08 crc kubenswrapper[4762]: I1009 15:08:08.524805 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/66e2e5e7-f382-48a2-bea7-06768a36b7af-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"66e2e5e7-f382-48a2-bea7-06768a36b7af\") " pod="openstack/glance-default-internal-api-0" Oct 09 15:08:08 crc kubenswrapper[4762]: I1009 15:08:08.524851 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/9eab9237-9f33-4b78-8416-7b0aef0b1a18-ceph\") pod \"glance-default-external-api-0\" (UID: \"9eab9237-9f33-4b78-8416-7b0aef0b1a18\") " pod="openstack/glance-default-external-api-0" Oct 09 15:08:08 crc kubenswrapper[4762]: I1009 15:08:08.524880 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9eab9237-9f33-4b78-8416-7b0aef0b1a18-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"9eab9237-9f33-4b78-8416-7b0aef0b1a18\") " pod="openstack/glance-default-external-api-0" Oct 09 15:08:08 crc kubenswrapper[4762]: I1009 15:08:08.524897 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/66e2e5e7-f382-48a2-bea7-06768a36b7af-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"66e2e5e7-f382-48a2-bea7-06768a36b7af\") " pod="openstack/glance-default-internal-api-0" Oct 09 15:08:08 crc kubenswrapper[4762]: I1009 15:08:08.524916 4762 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/66e2e5e7-f382-48a2-bea7-06768a36b7af-logs\") pod \"glance-default-internal-api-0\" (UID: \"66e2e5e7-f382-48a2-bea7-06768a36b7af\") " pod="openstack/glance-default-internal-api-0" Oct 09 15:08:08 crc kubenswrapper[4762]: I1009 15:08:08.524974 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/66e2e5e7-f382-48a2-bea7-06768a36b7af-ceph\") pod \"glance-default-internal-api-0\" (UID: \"66e2e5e7-f382-48a2-bea7-06768a36b7af\") " pod="openstack/glance-default-internal-api-0" Oct 09 15:08:08 crc kubenswrapper[4762]: I1009 15:08:08.525514 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9eab9237-9f33-4b78-8416-7b0aef0b1a18-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"9eab9237-9f33-4b78-8416-7b0aef0b1a18\") " pod="openstack/glance-default-external-api-0" Oct 09 15:08:08 crc kubenswrapper[4762]: I1009 15:08:08.525000 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9eab9237-9f33-4b78-8416-7b0aef0b1a18-scripts\") pod \"glance-default-external-api-0\" (UID: \"9eab9237-9f33-4b78-8416-7b0aef0b1a18\") " pod="openstack/glance-default-external-api-0" Oct 09 15:08:08 crc kubenswrapper[4762]: I1009 15:08:08.525525 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/66e2e5e7-f382-48a2-bea7-06768a36b7af-logs\") pod \"glance-default-internal-api-0\" (UID: \"66e2e5e7-f382-48a2-bea7-06768a36b7af\") " pod="openstack/glance-default-internal-api-0" Oct 09 15:08:08 crc kubenswrapper[4762]: I1009 15:08:08.525512 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/66e2e5e7-f382-48a2-bea7-06768a36b7af-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"66e2e5e7-f382-48a2-bea7-06768a36b7af\") " pod="openstack/glance-default-internal-api-0" Oct 09 15:08:08 crc kubenswrapper[4762]: I1009 15:08:08.525699 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9eab9237-9f33-4b78-8416-7b0aef0b1a18-config-data\") pod \"glance-default-external-api-0\" (UID: \"9eab9237-9f33-4b78-8416-7b0aef0b1a18\") " pod="openstack/glance-default-external-api-0" Oct 09 15:08:08 crc kubenswrapper[4762]: I1009 15:08:08.525736 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9eab9237-9f33-4b78-8416-7b0aef0b1a18-logs\") pod \"glance-default-external-api-0\" (UID: \"9eab9237-9f33-4b78-8416-7b0aef0b1a18\") " pod="openstack/glance-default-external-api-0" Oct 09 15:08:08 crc kubenswrapper[4762]: I1009 15:08:08.525775 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ggtcc\" (UniqueName: \"kubernetes.io/projected/9eab9237-9f33-4b78-8416-7b0aef0b1a18-kube-api-access-ggtcc\") pod \"glance-default-external-api-0\" (UID: \"9eab9237-9f33-4b78-8416-7b0aef0b1a18\") " pod="openstack/glance-default-external-api-0" Oct 09 15:08:08 crc kubenswrapper[4762]: I1009 15:08:08.529078 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9eab9237-9f33-4b78-8416-7b0aef0b1a18-logs\") pod \"glance-default-external-api-0\" (UID: 
\"9eab9237-9f33-4b78-8416-7b0aef0b1a18\") " pod="openstack/glance-default-external-api-0" Oct 09 15:08:08 crc kubenswrapper[4762]: I1009 15:08:08.547914 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/66e2e5e7-f382-48a2-bea7-06768a36b7af-ceph\") pod \"glance-default-internal-api-0\" (UID: \"66e2e5e7-f382-48a2-bea7-06768a36b7af\") " pod="openstack/glance-default-internal-api-0" Oct 09 15:08:08 crc kubenswrapper[4762]: I1009 15:08:08.548452 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/66e2e5e7-f382-48a2-bea7-06768a36b7af-config-data\") pod \"glance-default-internal-api-0\" (UID: \"66e2e5e7-f382-48a2-bea7-06768a36b7af\") " pod="openstack/glance-default-internal-api-0" Oct 09 15:08:08 crc kubenswrapper[4762]: I1009 15:08:08.548514 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/66e2e5e7-f382-48a2-bea7-06768a36b7af-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"66e2e5e7-f382-48a2-bea7-06768a36b7af\") " pod="openstack/glance-default-internal-api-0" Oct 09 15:08:08 crc kubenswrapper[4762]: I1009 15:08:08.551243 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/66e2e5e7-f382-48a2-bea7-06768a36b7af-scripts\") pod \"glance-default-internal-api-0\" (UID: \"66e2e5e7-f382-48a2-bea7-06768a36b7af\") " pod="openstack/glance-default-internal-api-0" Oct 09 15:08:08 crc kubenswrapper[4762]: I1009 15:08:08.552670 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9eab9237-9f33-4b78-8416-7b0aef0b1a18-config-data\") pod \"glance-default-external-api-0\" (UID: \"9eab9237-9f33-4b78-8416-7b0aef0b1a18\") " pod="openstack/glance-default-external-api-0" Oct 09 15:08:08 crc kubenswrapper[4762]: I1009 15:08:08.553205 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9eab9237-9f33-4b78-8416-7b0aef0b1a18-scripts\") pod \"glance-default-external-api-0\" (UID: \"9eab9237-9f33-4b78-8416-7b0aef0b1a18\") " pod="openstack/glance-default-external-api-0" Oct 09 15:08:08 crc kubenswrapper[4762]: I1009 15:08:08.557207 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9eab9237-9f33-4b78-8416-7b0aef0b1a18-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"9eab9237-9f33-4b78-8416-7b0aef0b1a18\") " pod="openstack/glance-default-external-api-0" Oct 09 15:08:08 crc kubenswrapper[4762]: I1009 15:08:08.560013 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kr2cq\" (UniqueName: \"kubernetes.io/projected/66e2e5e7-f382-48a2-bea7-06768a36b7af-kube-api-access-kr2cq\") pod \"glance-default-internal-api-0\" (UID: \"66e2e5e7-f382-48a2-bea7-06768a36b7af\") " pod="openstack/glance-default-internal-api-0" Oct 09 15:08:08 crc kubenswrapper[4762]: I1009 15:08:08.563377 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ggtcc\" (UniqueName: \"kubernetes.io/projected/9eab9237-9f33-4b78-8416-7b0aef0b1a18-kube-api-access-ggtcc\") pod \"glance-default-external-api-0\" (UID: \"9eab9237-9f33-4b78-8416-7b0aef0b1a18\") " pod="openstack/glance-default-external-api-0" Oct 09 15:08:08 crc kubenswrapper[4762]: I1009 
Oct 09 15:08:08 crc kubenswrapper[4762]: I1009 15:08:08.564250 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/9eab9237-9f33-4b78-8416-7b0aef0b1a18-ceph\") pod \"glance-default-external-api-0\" (UID: \"9eab9237-9f33-4b78-8416-7b0aef0b1a18\") " pod="openstack/glance-default-external-api-0"
Oct 09 15:08:08 crc kubenswrapper[4762]: I1009 15:08:08.650204 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Oct 09 15:08:08 crc kubenswrapper[4762]: I1009 15:08:08.664687 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Oct 09 15:08:08 crc kubenswrapper[4762]: I1009 15:08:08.995782 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="15358f7d-4847-4efc-b8e9-828a992f5bce" path="/var/lib/kubelet/pods/15358f7d-4847-4efc-b8e9-828a992f5bce/volumes"
Oct 09 15:08:08 crc kubenswrapper[4762]: I1009 15:08:08.997866 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4103bdae-6031-44fc-b303-057c8e736522" path="/var/lib/kubelet/pods/4103bdae-6031-44fc-b303-057c8e736522/volumes"
Oct 09 15:08:14 crc kubenswrapper[4762]: I1009 15:08:14.282247 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-789b448649-k6n2g" event={"ID":"85ac6699-379e-419f-9ea6-7ea695694279","Type":"ContainerStarted","Data":"bac4754b7520d66c9513310eb838cd888699f008020fbbe299e9aeeb2def4984"}
Oct 09 15:08:14 crc kubenswrapper[4762]: I1009 15:08:14.287173 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6dbdd4f7ff-nxf7g" event={"ID":"1a94127b-6d6f-478e-a3ef-1ef9af954f1f","Type":"ContainerStarted","Data":"853340a5feffa1754da154fded3d5f59ffe84c1a27348039ccd3bfac0b232bbc"}
Oct 09 15:08:14 crc kubenswrapper[4762]: I1009 15:08:14.289537 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5bdb8689c-79w7r" event={"ID":"a961ac97-2010-40c4-9c5a-a68222cb2a8b","Type":"ContainerStarted","Data":"19ea4671d2a1a1677af28eb278547dc13fa36508facaed0d4fba6f23a7ba4da2"}
Oct 09 15:08:14 crc kubenswrapper[4762]: I1009 15:08:14.399211 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"]
Oct 09 15:08:14 crc kubenswrapper[4762]: W1009 15:08:14.402533 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9eab9237_9f33_4b78_8416_7b0aef0b1a18.slice/crio-9c184452425a50ff269eb87555d8aff5409ee7af68a798442bb337c541992314 WatchSource:0}: Error finding container 9c184452425a50ff269eb87555d8aff5409ee7af68a798442bb337c541992314: Status 404 returned error can't find the container with id 9c184452425a50ff269eb87555d8aff5409ee7af68a798442bb337c541992314
Oct 09 15:08:14 crc kubenswrapper[4762]: I1009 15:08:14.513028 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"]
Oct 09 15:08:15 crc kubenswrapper[4762]: I1009 15:08:15.310374 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"9eab9237-9f33-4b78-8416-7b0aef0b1a18","Type":"ContainerStarted","Data":"c740a9511c655b82ef5abdf4dd32b3966c4080cd14d90d6af697804939475d36"}
Oct 09 15:08:15 crc kubenswrapper[4762]: I1009 15:08:15.311013 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"9eab9237-9f33-4b78-8416-7b0aef0b1a18","Type":"ContainerStarted","Data":"9c184452425a50ff269eb87555d8aff5409ee7af68a798442bb337c541992314"}
Oct 09 15:08:15 crc kubenswrapper[4762]: I1009 15:08:15.315560 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-789b448649-k6n2g" event={"ID":"85ac6699-379e-419f-9ea6-7ea695694279","Type":"ContainerStarted","Data":"4de5ed046823a6fd5603590309ae6b0c7ce1c94661cc116d1c21e2e0b17ebbdc"}
Oct 09 15:08:15 crc kubenswrapper[4762]: I1009 15:08:15.320492 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6dbdd4f7ff-nxf7g" event={"ID":"1a94127b-6d6f-478e-a3ef-1ef9af954f1f","Type":"ContainerStarted","Data":"ecdca7a23b1fee924628b161cd62183f5fb60e94e705cf936fb29b2db147783f"}
Oct 09 15:08:15 crc kubenswrapper[4762]: I1009 15:08:15.320548 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-6dbdd4f7ff-nxf7g" podUID="1a94127b-6d6f-478e-a3ef-1ef9af954f1f" containerName="horizon-log" containerID="cri-o://853340a5feffa1754da154fded3d5f59ffe84c1a27348039ccd3bfac0b232bbc" gracePeriod=30
Oct 09 15:08:15 crc kubenswrapper[4762]: I1009 15:08:15.320560 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-6dbdd4f7ff-nxf7g" podUID="1a94127b-6d6f-478e-a3ef-1ef9af954f1f" containerName="horizon" containerID="cri-o://ecdca7a23b1fee924628b161cd62183f5fb60e94e705cf936fb29b2db147783f" gracePeriod=30
Oct 09 15:08:15 crc kubenswrapper[4762]: I1009 15:08:15.322372 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"66e2e5e7-f382-48a2-bea7-06768a36b7af","Type":"ContainerStarted","Data":"cd897e6ac1fb1d3263c544342e336d35d144dadb1dfb5664e5b1773b70059c7f"}
Oct 09 15:08:15 crc kubenswrapper[4762]: I1009 15:08:15.322405 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"66e2e5e7-f382-48a2-bea7-06768a36b7af","Type":"ContainerStarted","Data":"78c034beeb21ed0fc7b6d1f1adb4d4594f4ff6ba8ee704e11dc6747cf3f0dfcc"}
Oct 09 15:08:15 crc kubenswrapper[4762]: I1009 15:08:15.325414 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5bdb8689c-79w7r" event={"ID":"a961ac97-2010-40c4-9c5a-a68222cb2a8b","Type":"ContainerStarted","Data":"6d4dbd1e0a09ca72c413facf5c712a0015949e14ae8e4be917fe4b4632280d5c"}
Oct 09 15:08:15 crc kubenswrapper[4762]: I1009 15:08:15.347862 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-789b448649-k6n2g" podStartSLOduration=3.338843095 podStartE2EDuration="12.347840877s" podCreationTimestamp="2025-10-09 15:08:03 +0000 UTC" firstStartedPulling="2025-10-09 15:08:04.900598621 +0000 UTC m=+6160.674389660" lastFinishedPulling="2025-10-09 15:08:13.909596403 +0000 UTC m=+6169.683387442" observedRunningTime="2025-10-09 15:08:15.337137238 +0000 UTC m=+6171.110928277" watchObservedRunningTime="2025-10-09 15:08:15.347840877 +0000 UTC m=+6171.121631916"
+0000 UTC m=+6171.127709934" watchObservedRunningTime="2025-10-09 15:08:15.358157265 +0000 UTC m=+6171.131948304" Oct 09 15:08:15 crc kubenswrapper[4762]: I1009 15:08:15.372387 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-6dbdd4f7ff-nxf7g" podStartSLOduration=3.316211766 podStartE2EDuration="12.372367676s" podCreationTimestamp="2025-10-09 15:08:03 +0000 UTC" firstStartedPulling="2025-10-09 15:08:04.92744052 +0000 UTC m=+6160.701231559" lastFinishedPulling="2025-10-09 15:08:13.98359643 +0000 UTC m=+6169.757387469" observedRunningTime="2025-10-09 15:08:15.369887031 +0000 UTC m=+6171.143678080" watchObservedRunningTime="2025-10-09 15:08:15.372367676 +0000 UTC m=+6171.146158715" Oct 09 15:08:16 crc kubenswrapper[4762]: I1009 15:08:16.337376 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"9eab9237-9f33-4b78-8416-7b0aef0b1a18","Type":"ContainerStarted","Data":"0690c7bb759e7aea131aae660cb2f664d5fab40e65ce5b2b8cdc100d184129ec"} Oct 09 15:08:16 crc kubenswrapper[4762]: I1009 15:08:16.339900 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"66e2e5e7-f382-48a2-bea7-06768a36b7af","Type":"ContainerStarted","Data":"bf0e12df452fea4defd9323c8a50e8d622800a17def41eeb625a0a83c4a1ca88"} Oct 09 15:08:16 crc kubenswrapper[4762]: I1009 15:08:16.368411 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=8.368386946 podStartE2EDuration="8.368386946s" podCreationTimestamp="2025-10-09 15:08:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 15:08:16.367964795 +0000 UTC m=+6172.141755834" watchObservedRunningTime="2025-10-09 15:08:16.368386946 +0000 UTC m=+6172.142177985" Oct 09 15:08:16 crc kubenswrapper[4762]: I1009 15:08:16.395449 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=8.395422219 podStartE2EDuration="8.395422219s" podCreationTimestamp="2025-10-09 15:08:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 15:08:16.387322468 +0000 UTC m=+6172.161113527" watchObservedRunningTime="2025-10-09 15:08:16.395422219 +0000 UTC m=+6172.169213258" Oct 09 15:08:16 crc kubenswrapper[4762]: I1009 15:08:16.965281 4762 scope.go:117] "RemoveContainer" containerID="35c7aebddc7d19d2cc79200fbf40a9f94ad48013c10b612bd476e919aed06c38" Oct 09 15:08:16 crc kubenswrapper[4762]: E1009 15:08:16.965593 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:08:18 crc kubenswrapper[4762]: I1009 15:08:18.650499 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Oct 09 15:08:18 crc kubenswrapper[4762]: I1009 15:08:18.650833 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Oct 
09 15:08:18 crc kubenswrapper[4762]: I1009 15:08:18.666512 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Oct 09 15:08:18 crc kubenswrapper[4762]: I1009 15:08:18.666593 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Oct 09 15:08:18 crc kubenswrapper[4762]: I1009 15:08:18.685117 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Oct 09 15:08:18 crc kubenswrapper[4762]: I1009 15:08:18.700719 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Oct 09 15:08:18 crc kubenswrapper[4762]: I1009 15:08:18.723832 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Oct 09 15:08:18 crc kubenswrapper[4762]: I1009 15:08:18.729777 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Oct 09 15:08:19 crc kubenswrapper[4762]: I1009 15:08:19.383124 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Oct 09 15:08:19 crc kubenswrapper[4762]: I1009 15:08:19.383171 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Oct 09 15:08:19 crc kubenswrapper[4762]: I1009 15:08:19.384138 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Oct 09 15:08:19 crc kubenswrapper[4762]: I1009 15:08:19.384170 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Oct 09 15:08:21 crc kubenswrapper[4762]: I1009 15:08:21.065628 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-7cjp5"] Oct 09 15:08:21 crc kubenswrapper[4762]: I1009 15:08:21.077814 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-7cjp5"] Oct 09 15:08:22 crc kubenswrapper[4762]: I1009 15:08:22.426942 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Oct 09 15:08:22 crc kubenswrapper[4762]: I1009 15:08:22.547395 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Oct 09 15:08:22 crc kubenswrapper[4762]: I1009 15:08:22.739511 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Oct 09 15:08:22 crc kubenswrapper[4762]: I1009 15:08:22.740423 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Oct 09 15:08:22 crc kubenswrapper[4762]: I1009 15:08:22.980008 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5c97dad5-9737-43d5-9b06-6a6eab836b92" path="/var/lib/kubelet/pods/5c97dad5-9737-43d5-9b06-6a6eab836b92/volumes" Oct 09 15:08:23 crc kubenswrapper[4762]: I1009 15:08:23.736401 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-6dbdd4f7ff-nxf7g" Oct 09 15:08:23 crc kubenswrapper[4762]: I1009 15:08:23.886855 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-789b448649-k6n2g" Oct 09 15:08:23 crc kubenswrapper[4762]: I1009 15:08:23.886914 4762 kubelet.go:2542] "SyncLoop 
(probe)" probe="startup" status="unhealthy" pod="openstack/horizon-789b448649-k6n2g" Oct 09 15:08:24 crc kubenswrapper[4762]: I1009 15:08:24.658774 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-5bdb8689c-79w7r" Oct 09 15:08:24 crc kubenswrapper[4762]: I1009 15:08:24.659109 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-5bdb8689c-79w7r" Oct 09 15:08:24 crc kubenswrapper[4762]: I1009 15:08:24.660787 4762 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-5bdb8689c-79w7r" podUID="a961ac97-2010-40c4-9c5a-a68222cb2a8b" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.115:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.115:8080: connect: connection refused" Oct 09 15:08:30 crc kubenswrapper[4762]: I1009 15:08:30.965107 4762 scope.go:117] "RemoveContainer" containerID="35c7aebddc7d19d2cc79200fbf40a9f94ad48013c10b612bd476e919aed06c38" Oct 09 15:08:30 crc kubenswrapper[4762]: E1009 15:08:30.968442 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:08:31 crc kubenswrapper[4762]: I1009 15:08:31.029628 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-06b1-account-create-ccvjs"] Oct 09 15:08:31 crc kubenswrapper[4762]: I1009 15:08:31.039125 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-06b1-account-create-ccvjs"] Oct 09 15:08:32 crc kubenswrapper[4762]: I1009 15:08:32.980780 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="75969a22-cb73-4249-94b8-03e1fcbf4c4f" path="/var/lib/kubelet/pods/75969a22-cb73-4249-94b8-03e1fcbf4c4f/volumes" Oct 09 15:08:33 crc kubenswrapper[4762]: I1009 15:08:33.811162 4762 scope.go:117] "RemoveContainer" containerID="83c5f3091b6a9df7c98181c9d09065689902f7e29bbb06a7c418769f03b49ac1" Oct 09 15:08:33 crc kubenswrapper[4762]: I1009 15:08:33.835253 4762 scope.go:117] "RemoveContainer" containerID="eb6091a4bea7e6616ea57f3bd1c85cd2aaeba71fc29bbcd7bb42b1021350f1fa" Oct 09 15:08:33 crc kubenswrapper[4762]: I1009 15:08:33.862521 4762 scope.go:117] "RemoveContainer" containerID="ddac7de601de87ea8deb21d5dede13585863054b7eb48bb8418be06df0854e33" Oct 09 15:08:33 crc kubenswrapper[4762]: I1009 15:08:33.920723 4762 scope.go:117] "RemoveContainer" containerID="d7d381eb1e6f629f3b92ec8ae1dc756a6d5556f3da26e3fce416995e83b59a7f" Oct 09 15:08:33 crc kubenswrapper[4762]: I1009 15:08:33.982190 4762 scope.go:117] "RemoveContainer" containerID="9c5462953cd08ce2e9fa02aa83040fac8cf875c8a7ad10f5ba77f43c1c4a623a" Oct 09 15:08:36 crc kubenswrapper[4762]: I1009 15:08:36.069887 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-789b448649-k6n2g" Oct 09 15:08:36 crc kubenswrapper[4762]: I1009 15:08:36.528115 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-5bdb8689c-79w7r" Oct 09 15:08:37 crc kubenswrapper[4762]: I1009 15:08:37.739926 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-789b448649-k6n2g" Oct 09 15:08:38 crc kubenswrapper[4762]: 
I1009 15:08:38.169002 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-5bdb8689c-79w7r" Oct 09 15:08:38 crc kubenswrapper[4762]: I1009 15:08:38.232989 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-789b448649-k6n2g"] Oct 09 15:08:38 crc kubenswrapper[4762]: I1009 15:08:38.273703 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-8t5rh"] Oct 09 15:08:38 crc kubenswrapper[4762]: I1009 15:08:38.275950 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8t5rh" Oct 09 15:08:38 crc kubenswrapper[4762]: I1009 15:08:38.291465 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-8t5rh"] Oct 09 15:08:38 crc kubenswrapper[4762]: I1009 15:08:38.314518 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ece5f131-43b5-496f-8887-cd09f18b1c0c-catalog-content\") pod \"redhat-marketplace-8t5rh\" (UID: \"ece5f131-43b5-496f-8887-cd09f18b1c0c\") " pod="openshift-marketplace/redhat-marketplace-8t5rh" Oct 09 15:08:38 crc kubenswrapper[4762]: I1009 15:08:38.314683 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ffjw6\" (UniqueName: \"kubernetes.io/projected/ece5f131-43b5-496f-8887-cd09f18b1c0c-kube-api-access-ffjw6\") pod \"redhat-marketplace-8t5rh\" (UID: \"ece5f131-43b5-496f-8887-cd09f18b1c0c\") " pod="openshift-marketplace/redhat-marketplace-8t5rh" Oct 09 15:08:38 crc kubenswrapper[4762]: I1009 15:08:38.314707 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ece5f131-43b5-496f-8887-cd09f18b1c0c-utilities\") pod \"redhat-marketplace-8t5rh\" (UID: \"ece5f131-43b5-496f-8887-cd09f18b1c0c\") " pod="openshift-marketplace/redhat-marketplace-8t5rh" Oct 09 15:08:38 crc kubenswrapper[4762]: I1009 15:08:38.416951 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ffjw6\" (UniqueName: \"kubernetes.io/projected/ece5f131-43b5-496f-8887-cd09f18b1c0c-kube-api-access-ffjw6\") pod \"redhat-marketplace-8t5rh\" (UID: \"ece5f131-43b5-496f-8887-cd09f18b1c0c\") " pod="openshift-marketplace/redhat-marketplace-8t5rh" Oct 09 15:08:38 crc kubenswrapper[4762]: I1009 15:08:38.416998 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ece5f131-43b5-496f-8887-cd09f18b1c0c-utilities\") pod \"redhat-marketplace-8t5rh\" (UID: \"ece5f131-43b5-496f-8887-cd09f18b1c0c\") " pod="openshift-marketplace/redhat-marketplace-8t5rh" Oct 09 15:08:38 crc kubenswrapper[4762]: I1009 15:08:38.417083 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ece5f131-43b5-496f-8887-cd09f18b1c0c-catalog-content\") pod \"redhat-marketplace-8t5rh\" (UID: \"ece5f131-43b5-496f-8887-cd09f18b1c0c\") " pod="openshift-marketplace/redhat-marketplace-8t5rh" Oct 09 15:08:38 crc kubenswrapper[4762]: I1009 15:08:38.417548 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ece5f131-43b5-496f-8887-cd09f18b1c0c-utilities\") pod \"redhat-marketplace-8t5rh\" (UID: 
\"ece5f131-43b5-496f-8887-cd09f18b1c0c\") " pod="openshift-marketplace/redhat-marketplace-8t5rh" Oct 09 15:08:38 crc kubenswrapper[4762]: I1009 15:08:38.417580 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ece5f131-43b5-496f-8887-cd09f18b1c0c-catalog-content\") pod \"redhat-marketplace-8t5rh\" (UID: \"ece5f131-43b5-496f-8887-cd09f18b1c0c\") " pod="openshift-marketplace/redhat-marketplace-8t5rh" Oct 09 15:08:38 crc kubenswrapper[4762]: I1009 15:08:38.438090 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ffjw6\" (UniqueName: \"kubernetes.io/projected/ece5f131-43b5-496f-8887-cd09f18b1c0c-kube-api-access-ffjw6\") pod \"redhat-marketplace-8t5rh\" (UID: \"ece5f131-43b5-496f-8887-cd09f18b1c0c\") " pod="openshift-marketplace/redhat-marketplace-8t5rh" Oct 09 15:08:38 crc kubenswrapper[4762]: I1009 15:08:38.600083 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-789b448649-k6n2g" podUID="85ac6699-379e-419f-9ea6-7ea695694279" containerName="horizon" containerID="cri-o://4de5ed046823a6fd5603590309ae6b0c7ce1c94661cc116d1c21e2e0b17ebbdc" gracePeriod=30 Oct 09 15:08:38 crc kubenswrapper[4762]: I1009 15:08:38.599595 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-789b448649-k6n2g" podUID="85ac6699-379e-419f-9ea6-7ea695694279" containerName="horizon-log" containerID="cri-o://bac4754b7520d66c9513310eb838cd888699f008020fbbe299e9aeeb2def4984" gracePeriod=30 Oct 09 15:08:38 crc kubenswrapper[4762]: I1009 15:08:38.601569 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8t5rh" Oct 09 15:08:39 crc kubenswrapper[4762]: I1009 15:08:39.170940 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-8t5rh"] Oct 09 15:08:39 crc kubenswrapper[4762]: I1009 15:08:39.608220 4762 generic.go:334] "Generic (PLEG): container finished" podID="ece5f131-43b5-496f-8887-cd09f18b1c0c" containerID="43b6aaaab43873ab9d15c37b0e7c7435f9a806b59516448642b7beef9c6afa51" exitCode=0 Oct 09 15:08:39 crc kubenswrapper[4762]: I1009 15:08:39.608528 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8t5rh" event={"ID":"ece5f131-43b5-496f-8887-cd09f18b1c0c","Type":"ContainerDied","Data":"43b6aaaab43873ab9d15c37b0e7c7435f9a806b59516448642b7beef9c6afa51"} Oct 09 15:08:39 crc kubenswrapper[4762]: I1009 15:08:39.608552 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8t5rh" event={"ID":"ece5f131-43b5-496f-8887-cd09f18b1c0c","Type":"ContainerStarted","Data":"21262bc41f5a679e46055ba7d139f36d20fd940b0f1dee659f9d6ed2506ffea7"} Oct 09 15:08:40 crc kubenswrapper[4762]: I1009 15:08:40.029856 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-bm22n"] Oct 09 15:08:40 crc kubenswrapper[4762]: I1009 15:08:40.041068 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-bm22n"] Oct 09 15:08:40 crc kubenswrapper[4762]: I1009 15:08:40.977071 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d7419200-74d1-4b46-9d46-2d143ae77596" path="/var/lib/kubelet/pods/d7419200-74d1-4b46-9d46-2d143ae77596/volumes" Oct 09 15:08:41 crc kubenswrapper[4762]: I1009 15:08:41.632945 4762 generic.go:334] "Generic (PLEG): container finished" 
podID="ece5f131-43b5-496f-8887-cd09f18b1c0c" containerID="a08ce6fe2f7765462af76343683d86f480fab5ce58a9180ac97c49def7ccd795" exitCode=0 Oct 09 15:08:41 crc kubenswrapper[4762]: I1009 15:08:41.632994 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8t5rh" event={"ID":"ece5f131-43b5-496f-8887-cd09f18b1c0c","Type":"ContainerDied","Data":"a08ce6fe2f7765462af76343683d86f480fab5ce58a9180ac97c49def7ccd795"} Oct 09 15:08:42 crc kubenswrapper[4762]: I1009 15:08:42.644354 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8t5rh" event={"ID":"ece5f131-43b5-496f-8887-cd09f18b1c0c","Type":"ContainerStarted","Data":"0fd213b44bcba1ecba5b8f2e06f5e3fd6f979c5d6d4b26985151bd9436cd773c"} Oct 09 15:08:42 crc kubenswrapper[4762]: I1009 15:08:42.653879 4762 generic.go:334] "Generic (PLEG): container finished" podID="85ac6699-379e-419f-9ea6-7ea695694279" containerID="4de5ed046823a6fd5603590309ae6b0c7ce1c94661cc116d1c21e2e0b17ebbdc" exitCode=0 Oct 09 15:08:42 crc kubenswrapper[4762]: I1009 15:08:42.653927 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-789b448649-k6n2g" event={"ID":"85ac6699-379e-419f-9ea6-7ea695694279","Type":"ContainerDied","Data":"4de5ed046823a6fd5603590309ae6b0c7ce1c94661cc116d1c21e2e0b17ebbdc"} Oct 09 15:08:42 crc kubenswrapper[4762]: I1009 15:08:42.966283 4762 scope.go:117] "RemoveContainer" containerID="35c7aebddc7d19d2cc79200fbf40a9f94ad48013c10b612bd476e919aed06c38" Oct 09 15:08:42 crc kubenswrapper[4762]: E1009 15:08:42.967173 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:08:43 crc kubenswrapper[4762]: I1009 15:08:43.887156 4762 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-789b448649-k6n2g" podUID="85ac6699-379e-419f-9ea6-7ea695694279" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.114:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.114:8080: connect: connection refused" Oct 09 15:08:45 crc kubenswrapper[4762]: I1009 15:08:45.688952 4762 generic.go:334] "Generic (PLEG): container finished" podID="1a94127b-6d6f-478e-a3ef-1ef9af954f1f" containerID="ecdca7a23b1fee924628b161cd62183f5fb60e94e705cf936fb29b2db147783f" exitCode=137 Oct 09 15:08:45 crc kubenswrapper[4762]: I1009 15:08:45.689461 4762 generic.go:334] "Generic (PLEG): container finished" podID="1a94127b-6d6f-478e-a3ef-1ef9af954f1f" containerID="853340a5feffa1754da154fded3d5f59ffe84c1a27348039ccd3bfac0b232bbc" exitCode=137 Oct 09 15:08:45 crc kubenswrapper[4762]: I1009 15:08:45.689487 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6dbdd4f7ff-nxf7g" event={"ID":"1a94127b-6d6f-478e-a3ef-1ef9af954f1f","Type":"ContainerDied","Data":"ecdca7a23b1fee924628b161cd62183f5fb60e94e705cf936fb29b2db147783f"} Oct 09 15:08:45 crc kubenswrapper[4762]: I1009 15:08:45.689518 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6dbdd4f7ff-nxf7g" event={"ID":"1a94127b-6d6f-478e-a3ef-1ef9af954f1f","Type":"ContainerDied","Data":"853340a5feffa1754da154fded3d5f59ffe84c1a27348039ccd3bfac0b232bbc"} Oct 09 15:08:45 
Oct 09 15:08:45 crc kubenswrapper[4762]: I1009 15:08:45.840578 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-6dbdd4f7ff-nxf7g"
Oct 09 15:08:45 crc kubenswrapper[4762]: I1009 15:08:45.872062 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1a94127b-6d6f-478e-a3ef-1ef9af954f1f-config-data\") pod \"1a94127b-6d6f-478e-a3ef-1ef9af954f1f\" (UID: \"1a94127b-6d6f-478e-a3ef-1ef9af954f1f\") "
Oct 09 15:08:45 crc kubenswrapper[4762]: I1009 15:08:45.872168 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/1a94127b-6d6f-478e-a3ef-1ef9af954f1f-horizon-secret-key\") pod \"1a94127b-6d6f-478e-a3ef-1ef9af954f1f\" (UID: \"1a94127b-6d6f-478e-a3ef-1ef9af954f1f\") "
Oct 09 15:08:45 crc kubenswrapper[4762]: I1009 15:08:45.872188 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1a94127b-6d6f-478e-a3ef-1ef9af954f1f-scripts\") pod \"1a94127b-6d6f-478e-a3ef-1ef9af954f1f\" (UID: \"1a94127b-6d6f-478e-a3ef-1ef9af954f1f\") "
Oct 09 15:08:45 crc kubenswrapper[4762]: I1009 15:08:45.872224 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hthvb\" (UniqueName: \"kubernetes.io/projected/1a94127b-6d6f-478e-a3ef-1ef9af954f1f-kube-api-access-hthvb\") pod \"1a94127b-6d6f-478e-a3ef-1ef9af954f1f\" (UID: \"1a94127b-6d6f-478e-a3ef-1ef9af954f1f\") "
Oct 09 15:08:45 crc kubenswrapper[4762]: I1009 15:08:45.872317 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1a94127b-6d6f-478e-a3ef-1ef9af954f1f-logs\") pod \"1a94127b-6d6f-478e-a3ef-1ef9af954f1f\" (UID: \"1a94127b-6d6f-478e-a3ef-1ef9af954f1f\") "
Oct 09 15:08:45 crc kubenswrapper[4762]: I1009 15:08:45.873241 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1a94127b-6d6f-478e-a3ef-1ef9af954f1f-logs" (OuterVolumeSpecName: "logs") pod "1a94127b-6d6f-478e-a3ef-1ef9af954f1f" (UID: "1a94127b-6d6f-478e-a3ef-1ef9af954f1f"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 09 15:08:45 crc kubenswrapper[4762]: I1009 15:08:45.883888 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1a94127b-6d6f-478e-a3ef-1ef9af954f1f-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "1a94127b-6d6f-478e-a3ef-1ef9af954f1f" (UID: "1a94127b-6d6f-478e-a3ef-1ef9af954f1f"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 09 15:08:45 crc kubenswrapper[4762]: I1009 15:08:45.900927 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1a94127b-6d6f-478e-a3ef-1ef9af954f1f-kube-api-access-hthvb" (OuterVolumeSpecName: "kube-api-access-hthvb") pod "1a94127b-6d6f-478e-a3ef-1ef9af954f1f" (UID: "1a94127b-6d6f-478e-a3ef-1ef9af954f1f"). InnerVolumeSpecName "kube-api-access-hthvb". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 09 15:08:45 crc kubenswrapper[4762]: I1009 15:08:45.904731 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-8t5rh" podStartSLOduration=5.4187533 podStartE2EDuration="7.90471043s" podCreationTimestamp="2025-10-09 15:08:38 +0000 UTC" firstStartedPulling="2025-10-09 15:08:39.609859988 +0000 UTC m=+6195.383651027" lastFinishedPulling="2025-10-09 15:08:42.095817118 +0000 UTC m=+6197.869608157" observedRunningTime="2025-10-09 15:08:42.671393053 +0000 UTC m=+6198.445184112" watchObservedRunningTime="2025-10-09 15:08:45.90471043 +0000 UTC m=+6201.678501469"
Oct 09 15:08:45 crc kubenswrapper[4762]: I1009 15:08:45.912017 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1a94127b-6d6f-478e-a3ef-1ef9af954f1f-config-data" (OuterVolumeSpecName: "config-data") pod "1a94127b-6d6f-478e-a3ef-1ef9af954f1f" (UID: "1a94127b-6d6f-478e-a3ef-1ef9af954f1f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 09 15:08:45 crc kubenswrapper[4762]: I1009 15:08:45.919272 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1a94127b-6d6f-478e-a3ef-1ef9af954f1f-scripts" (OuterVolumeSpecName: "scripts") pod "1a94127b-6d6f-478e-a3ef-1ef9af954f1f" (UID: "1a94127b-6d6f-478e-a3ef-1ef9af954f1f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 09 15:08:45 crc kubenswrapper[4762]: I1009 15:08:45.975762 4762 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1a94127b-6d6f-478e-a3ef-1ef9af954f1f-config-data\") on node \"crc\" DevicePath \"\""
Oct 09 15:08:45 crc kubenswrapper[4762]: I1009 15:08:45.975799 4762 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/1a94127b-6d6f-478e-a3ef-1ef9af954f1f-horizon-secret-key\") on node \"crc\" DevicePath \"\""
Oct 09 15:08:45 crc kubenswrapper[4762]: I1009 15:08:45.975813 4762 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1a94127b-6d6f-478e-a3ef-1ef9af954f1f-scripts\") on node \"crc\" DevicePath \"\""
Oct 09 15:08:45 crc kubenswrapper[4762]: I1009 15:08:45.975823 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hthvb\" (UniqueName: \"kubernetes.io/projected/1a94127b-6d6f-478e-a3ef-1ef9af954f1f-kube-api-access-hthvb\") on node \"crc\" DevicePath \"\""
Oct 09 15:08:45 crc kubenswrapper[4762]: I1009 15:08:45.975835 4762 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1a94127b-6d6f-478e-a3ef-1ef9af954f1f-logs\") on node \"crc\" DevicePath \"\""
Oct 09 15:08:46 crc kubenswrapper[4762]: I1009 15:08:46.702023 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6dbdd4f7ff-nxf7g" event={"ID":"1a94127b-6d6f-478e-a3ef-1ef9af954f1f","Type":"ContainerDied","Data":"4f4413ec45f4dcb95f1b585f53998075e43d42b3a7390715f80b6a7a1a31edcb"}
Need to start a new one" pod="openstack/horizon-6dbdd4f7ff-nxf7g" Oct 09 15:08:46 crc kubenswrapper[4762]: I1009 15:08:46.702355 4762 scope.go:117] "RemoveContainer" containerID="ecdca7a23b1fee924628b161cd62183f5fb60e94e705cf936fb29b2db147783f" Oct 09 15:08:46 crc kubenswrapper[4762]: I1009 15:08:46.744678 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-6dbdd4f7ff-nxf7g"] Oct 09 15:08:46 crc kubenswrapper[4762]: I1009 15:08:46.759159 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-6dbdd4f7ff-nxf7g"] Oct 09 15:08:46 crc kubenswrapper[4762]: I1009 15:08:46.895210 4762 scope.go:117] "RemoveContainer" containerID="853340a5feffa1754da154fded3d5f59ffe84c1a27348039ccd3bfac0b232bbc" Oct 09 15:08:46 crc kubenswrapper[4762]: I1009 15:08:46.984982 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1a94127b-6d6f-478e-a3ef-1ef9af954f1f" path="/var/lib/kubelet/pods/1a94127b-6d6f-478e-a3ef-1ef9af954f1f/volumes" Oct 09 15:08:48 crc kubenswrapper[4762]: I1009 15:08:48.602655 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-8t5rh" Oct 09 15:08:48 crc kubenswrapper[4762]: I1009 15:08:48.603024 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-8t5rh" Oct 09 15:08:48 crc kubenswrapper[4762]: I1009 15:08:48.656435 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-8t5rh" Oct 09 15:08:48 crc kubenswrapper[4762]: I1009 15:08:48.772624 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-8t5rh" Oct 09 15:08:48 crc kubenswrapper[4762]: I1009 15:08:48.887790 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-8t5rh"] Oct 09 15:08:50 crc kubenswrapper[4762]: I1009 15:08:50.744008 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-8t5rh" podUID="ece5f131-43b5-496f-8887-cd09f18b1c0c" containerName="registry-server" containerID="cri-o://0fd213b44bcba1ecba5b8f2e06f5e3fd6f979c5d6d4b26985151bd9436cd773c" gracePeriod=2 Oct 09 15:08:51 crc kubenswrapper[4762]: I1009 15:08:51.232423 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8t5rh" Oct 09 15:08:51 crc kubenswrapper[4762]: I1009 15:08:51.325697 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ffjw6\" (UniqueName: \"kubernetes.io/projected/ece5f131-43b5-496f-8887-cd09f18b1c0c-kube-api-access-ffjw6\") pod \"ece5f131-43b5-496f-8887-cd09f18b1c0c\" (UID: \"ece5f131-43b5-496f-8887-cd09f18b1c0c\") " Oct 09 15:08:51 crc kubenswrapper[4762]: I1009 15:08:51.325802 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ece5f131-43b5-496f-8887-cd09f18b1c0c-utilities\") pod \"ece5f131-43b5-496f-8887-cd09f18b1c0c\" (UID: \"ece5f131-43b5-496f-8887-cd09f18b1c0c\") " Oct 09 15:08:51 crc kubenswrapper[4762]: I1009 15:08:51.325893 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ece5f131-43b5-496f-8887-cd09f18b1c0c-catalog-content\") pod \"ece5f131-43b5-496f-8887-cd09f18b1c0c\" (UID: \"ece5f131-43b5-496f-8887-cd09f18b1c0c\") " Oct 09 15:08:51 crc kubenswrapper[4762]: I1009 15:08:51.326826 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ece5f131-43b5-496f-8887-cd09f18b1c0c-utilities" (OuterVolumeSpecName: "utilities") pod "ece5f131-43b5-496f-8887-cd09f18b1c0c" (UID: "ece5f131-43b5-496f-8887-cd09f18b1c0c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 15:08:51 crc kubenswrapper[4762]: I1009 15:08:51.332909 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ece5f131-43b5-496f-8887-cd09f18b1c0c-kube-api-access-ffjw6" (OuterVolumeSpecName: "kube-api-access-ffjw6") pod "ece5f131-43b5-496f-8887-cd09f18b1c0c" (UID: "ece5f131-43b5-496f-8887-cd09f18b1c0c"). InnerVolumeSpecName "kube-api-access-ffjw6". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 15:08:51 crc kubenswrapper[4762]: I1009 15:08:51.340002 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ece5f131-43b5-496f-8887-cd09f18b1c0c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ece5f131-43b5-496f-8887-cd09f18b1c0c" (UID: "ece5f131-43b5-496f-8887-cd09f18b1c0c"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 15:08:51 crc kubenswrapper[4762]: I1009 15:08:51.427960 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ffjw6\" (UniqueName: \"kubernetes.io/projected/ece5f131-43b5-496f-8887-cd09f18b1c0c-kube-api-access-ffjw6\") on node \"crc\" DevicePath \"\"" Oct 09 15:08:51 crc kubenswrapper[4762]: I1009 15:08:51.427995 4762 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ece5f131-43b5-496f-8887-cd09f18b1c0c-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 15:08:51 crc kubenswrapper[4762]: I1009 15:08:51.428005 4762 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ece5f131-43b5-496f-8887-cd09f18b1c0c-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 15:08:51 crc kubenswrapper[4762]: I1009 15:08:51.758456 4762 generic.go:334] "Generic (PLEG): container finished" podID="ece5f131-43b5-496f-8887-cd09f18b1c0c" containerID="0fd213b44bcba1ecba5b8f2e06f5e3fd6f979c5d6d4b26985151bd9436cd773c" exitCode=0 Oct 09 15:08:51 crc kubenswrapper[4762]: I1009 15:08:51.758500 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8t5rh" event={"ID":"ece5f131-43b5-496f-8887-cd09f18b1c0c","Type":"ContainerDied","Data":"0fd213b44bcba1ecba5b8f2e06f5e3fd6f979c5d6d4b26985151bd9436cd773c"} Oct 09 15:08:51 crc kubenswrapper[4762]: I1009 15:08:51.758523 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8t5rh" Oct 09 15:08:51 crc kubenswrapper[4762]: I1009 15:08:51.758541 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8t5rh" event={"ID":"ece5f131-43b5-496f-8887-cd09f18b1c0c","Type":"ContainerDied","Data":"21262bc41f5a679e46055ba7d139f36d20fd940b0f1dee659f9d6ed2506ffea7"} Oct 09 15:08:51 crc kubenswrapper[4762]: I1009 15:08:51.758572 4762 scope.go:117] "RemoveContainer" containerID="0fd213b44bcba1ecba5b8f2e06f5e3fd6f979c5d6d4b26985151bd9436cd773c" Oct 09 15:08:51 crc kubenswrapper[4762]: I1009 15:08:51.779132 4762 scope.go:117] "RemoveContainer" containerID="a08ce6fe2f7765462af76343683d86f480fab5ce58a9180ac97c49def7ccd795" Oct 09 15:08:51 crc kubenswrapper[4762]: I1009 15:08:51.799162 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-8t5rh"] Oct 09 15:08:51 crc kubenswrapper[4762]: I1009 15:08:51.809719 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-8t5rh"] Oct 09 15:08:51 crc kubenswrapper[4762]: I1009 15:08:51.816922 4762 scope.go:117] "RemoveContainer" containerID="43b6aaaab43873ab9d15c37b0e7c7435f9a806b59516448642b7beef9c6afa51" Oct 09 15:08:51 crc kubenswrapper[4762]: I1009 15:08:51.857295 4762 scope.go:117] "RemoveContainer" containerID="0fd213b44bcba1ecba5b8f2e06f5e3fd6f979c5d6d4b26985151bd9436cd773c" Oct 09 15:08:51 crc kubenswrapper[4762]: E1009 15:08:51.859496 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0fd213b44bcba1ecba5b8f2e06f5e3fd6f979c5d6d4b26985151bd9436cd773c\": container with ID starting with 0fd213b44bcba1ecba5b8f2e06f5e3fd6f979c5d6d4b26985151bd9436cd773c not found: ID does not exist" containerID="0fd213b44bcba1ecba5b8f2e06f5e3fd6f979c5d6d4b26985151bd9436cd773c" Oct 09 15:08:51 crc kubenswrapper[4762]: I1009 15:08:51.859529 4762 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0fd213b44bcba1ecba5b8f2e06f5e3fd6f979c5d6d4b26985151bd9436cd773c"} err="failed to get container status \"0fd213b44bcba1ecba5b8f2e06f5e3fd6f979c5d6d4b26985151bd9436cd773c\": rpc error: code = NotFound desc = could not find container \"0fd213b44bcba1ecba5b8f2e06f5e3fd6f979c5d6d4b26985151bd9436cd773c\": container with ID starting with 0fd213b44bcba1ecba5b8f2e06f5e3fd6f979c5d6d4b26985151bd9436cd773c not found: ID does not exist" Oct 09 15:08:51 crc kubenswrapper[4762]: I1009 15:08:51.859568 4762 scope.go:117] "RemoveContainer" containerID="a08ce6fe2f7765462af76343683d86f480fab5ce58a9180ac97c49def7ccd795" Oct 09 15:08:51 crc kubenswrapper[4762]: E1009 15:08:51.861199 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a08ce6fe2f7765462af76343683d86f480fab5ce58a9180ac97c49def7ccd795\": container with ID starting with a08ce6fe2f7765462af76343683d86f480fab5ce58a9180ac97c49def7ccd795 not found: ID does not exist" containerID="a08ce6fe2f7765462af76343683d86f480fab5ce58a9180ac97c49def7ccd795" Oct 09 15:08:51 crc kubenswrapper[4762]: I1009 15:08:51.861249 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a08ce6fe2f7765462af76343683d86f480fab5ce58a9180ac97c49def7ccd795"} err="failed to get container status \"a08ce6fe2f7765462af76343683d86f480fab5ce58a9180ac97c49def7ccd795\": rpc error: code = NotFound desc = could not find container \"a08ce6fe2f7765462af76343683d86f480fab5ce58a9180ac97c49def7ccd795\": container with ID starting with a08ce6fe2f7765462af76343683d86f480fab5ce58a9180ac97c49def7ccd795 not found: ID does not exist" Oct 09 15:08:51 crc kubenswrapper[4762]: I1009 15:08:51.861278 4762 scope.go:117] "RemoveContainer" containerID="43b6aaaab43873ab9d15c37b0e7c7435f9a806b59516448642b7beef9c6afa51" Oct 09 15:08:51 crc kubenswrapper[4762]: E1009 15:08:51.861778 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"43b6aaaab43873ab9d15c37b0e7c7435f9a806b59516448642b7beef9c6afa51\": container with ID starting with 43b6aaaab43873ab9d15c37b0e7c7435f9a806b59516448642b7beef9c6afa51 not found: ID does not exist" containerID="43b6aaaab43873ab9d15c37b0e7c7435f9a806b59516448642b7beef9c6afa51" Oct 09 15:08:51 crc kubenswrapper[4762]: I1009 15:08:51.861834 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"43b6aaaab43873ab9d15c37b0e7c7435f9a806b59516448642b7beef9c6afa51"} err="failed to get container status \"43b6aaaab43873ab9d15c37b0e7c7435f9a806b59516448642b7beef9c6afa51\": rpc error: code = NotFound desc = could not find container \"43b6aaaab43873ab9d15c37b0e7c7435f9a806b59516448642b7beef9c6afa51\": container with ID starting with 43b6aaaab43873ab9d15c37b0e7c7435f9a806b59516448642b7beef9c6afa51 not found: ID does not exist" Oct 09 15:08:52 crc kubenswrapper[4762]: I1009 15:08:52.983840 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ece5f131-43b5-496f-8887-cd09f18b1c0c" path="/var/lib/kubelet/pods/ece5f131-43b5-496f-8887-cd09f18b1c0c/volumes" Oct 09 15:08:53 crc kubenswrapper[4762]: I1009 15:08:53.887293 4762 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-789b448649-k6n2g" podUID="85ac6699-379e-419f-9ea6-7ea695694279" containerName="horizon" probeResult="failure" output="Get 
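
The kubenswrapper entries above (and throughout this log) share the klog text layout: a severity letter fused to the month/day (I1009, E1009, W1009), a microsecond wall-clock time, a thread id, the source file and line that emitted the message, and then a message that is usually a quoted string followed by key="value" pairs. A minimal parsing sketch for lines in this format (Python; the regex and field names are mine, not part of any kubelet tooling):

    import re

    # Severity letter fused with MMDD (e.g. I1009), then time, thread id,
    # "file.go:line]" source location, and the remainder of the message.
    KLOG_RE = re.compile(
        r'(?P<sev>[IWE])(?P<mmdd>\d{4}) '
        r'(?P<time>\d{2}:\d{2}:\d{2}\.\d{6}) +'
        r'(?P<tid>\d+) '
        r'(?P<src>[\w./-]+:\d+)\] '
        r'(?P<msg>.*)$'
    )

    def parse(line):
        """Parse one journal line in the kubenswrapper/klog format; None if no match."""
        m = KLOG_RE.search(line)
        return m.groupdict() if m else None

    sample = ('Oct 09 15:08:51 crc kubenswrapper[4762]: I1009 15:08:51.758456 '
              '4762 generic.go:334] "Generic (PLEG): container finished" exitCode=0')
    print(parse(sample)["src"])  # -> generic.go:334

The severity letter tracks klog's Info/Error/Warning levels, which is why each failed ContainerStatus lookup above pairs an E line from log.go:32 with an I line from pod_container_deletor.go:53.
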
\"http://10.217.1.114:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.114:8080: connect: connection refused" Oct 09 15:08:55 crc kubenswrapper[4762]: I1009 15:08:55.965816 4762 scope.go:117] "RemoveContainer" containerID="35c7aebddc7d19d2cc79200fbf40a9f94ad48013c10b612bd476e919aed06c38" Oct 09 15:08:55 crc kubenswrapper[4762]: E1009 15:08:55.966381 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:08:57 crc kubenswrapper[4762]: I1009 15:08:57.033229 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-47l2p"] Oct 09 15:08:57 crc kubenswrapper[4762]: E1009 15:08:57.033674 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ece5f131-43b5-496f-8887-cd09f18b1c0c" containerName="extract-content" Oct 09 15:08:57 crc kubenswrapper[4762]: I1009 15:08:57.033692 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="ece5f131-43b5-496f-8887-cd09f18b1c0c" containerName="extract-content" Oct 09 15:08:57 crc kubenswrapper[4762]: E1009 15:08:57.033716 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1a94127b-6d6f-478e-a3ef-1ef9af954f1f" containerName="horizon" Oct 09 15:08:57 crc kubenswrapper[4762]: I1009 15:08:57.033725 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="1a94127b-6d6f-478e-a3ef-1ef9af954f1f" containerName="horizon" Oct 09 15:08:57 crc kubenswrapper[4762]: E1009 15:08:57.033737 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1a94127b-6d6f-478e-a3ef-1ef9af954f1f" containerName="horizon-log" Oct 09 15:08:57 crc kubenswrapper[4762]: I1009 15:08:57.033744 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="1a94127b-6d6f-478e-a3ef-1ef9af954f1f" containerName="horizon-log" Oct 09 15:08:57 crc kubenswrapper[4762]: E1009 15:08:57.033772 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ece5f131-43b5-496f-8887-cd09f18b1c0c" containerName="registry-server" Oct 09 15:08:57 crc kubenswrapper[4762]: I1009 15:08:57.033780 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="ece5f131-43b5-496f-8887-cd09f18b1c0c" containerName="registry-server" Oct 09 15:08:57 crc kubenswrapper[4762]: E1009 15:08:57.033799 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ece5f131-43b5-496f-8887-cd09f18b1c0c" containerName="extract-utilities" Oct 09 15:08:57 crc kubenswrapper[4762]: I1009 15:08:57.033807 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="ece5f131-43b5-496f-8887-cd09f18b1c0c" containerName="extract-utilities" Oct 09 15:08:57 crc kubenswrapper[4762]: I1009 15:08:57.034061 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="ece5f131-43b5-496f-8887-cd09f18b1c0c" containerName="registry-server" Oct 09 15:08:57 crc kubenswrapper[4762]: I1009 15:08:57.034076 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="1a94127b-6d6f-478e-a3ef-1ef9af954f1f" containerName="horizon" Oct 09 15:08:57 crc kubenswrapper[4762]: I1009 15:08:57.034096 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="1a94127b-6d6f-478e-a3ef-1ef9af954f1f" containerName="horizon-log" Oct 09 15:08:57 crc 
kubenswrapper[4762]: I1009 15:08:57.037244 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-47l2p" Oct 09 15:08:57 crc kubenswrapper[4762]: I1009 15:08:57.044931 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-47l2p"] Oct 09 15:08:57 crc kubenswrapper[4762]: I1009 15:08:57.153944 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-msd7h\" (UniqueName: \"kubernetes.io/projected/d75f10a2-3bb9-45c9-819e-ab38392b4ad0-kube-api-access-msd7h\") pod \"certified-operators-47l2p\" (UID: \"d75f10a2-3bb9-45c9-819e-ab38392b4ad0\") " pod="openshift-marketplace/certified-operators-47l2p" Oct 09 15:08:57 crc kubenswrapper[4762]: I1009 15:08:57.154360 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d75f10a2-3bb9-45c9-819e-ab38392b4ad0-catalog-content\") pod \"certified-operators-47l2p\" (UID: \"d75f10a2-3bb9-45c9-819e-ab38392b4ad0\") " pod="openshift-marketplace/certified-operators-47l2p" Oct 09 15:08:57 crc kubenswrapper[4762]: I1009 15:08:57.155204 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d75f10a2-3bb9-45c9-819e-ab38392b4ad0-utilities\") pod \"certified-operators-47l2p\" (UID: \"d75f10a2-3bb9-45c9-819e-ab38392b4ad0\") " pod="openshift-marketplace/certified-operators-47l2p" Oct 09 15:08:57 crc kubenswrapper[4762]: I1009 15:08:57.256690 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-msd7h\" (UniqueName: \"kubernetes.io/projected/d75f10a2-3bb9-45c9-819e-ab38392b4ad0-kube-api-access-msd7h\") pod \"certified-operators-47l2p\" (UID: \"d75f10a2-3bb9-45c9-819e-ab38392b4ad0\") " pod="openshift-marketplace/certified-operators-47l2p" Oct 09 15:08:57 crc kubenswrapper[4762]: I1009 15:08:57.256815 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d75f10a2-3bb9-45c9-819e-ab38392b4ad0-catalog-content\") pod \"certified-operators-47l2p\" (UID: \"d75f10a2-3bb9-45c9-819e-ab38392b4ad0\") " pod="openshift-marketplace/certified-operators-47l2p" Oct 09 15:08:57 crc kubenswrapper[4762]: I1009 15:08:57.256869 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d75f10a2-3bb9-45c9-819e-ab38392b4ad0-utilities\") pod \"certified-operators-47l2p\" (UID: \"d75f10a2-3bb9-45c9-819e-ab38392b4ad0\") " pod="openshift-marketplace/certified-operators-47l2p" Oct 09 15:08:57 crc kubenswrapper[4762]: I1009 15:08:57.257338 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d75f10a2-3bb9-45c9-819e-ab38392b4ad0-catalog-content\") pod \"certified-operators-47l2p\" (UID: \"d75f10a2-3bb9-45c9-819e-ab38392b4ad0\") " pod="openshift-marketplace/certified-operators-47l2p" Oct 09 15:08:57 crc kubenswrapper[4762]: I1009 15:08:57.257441 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d75f10a2-3bb9-45c9-819e-ab38392b4ad0-utilities\") pod \"certified-operators-47l2p\" (UID: \"d75f10a2-3bb9-45c9-819e-ab38392b4ad0\") " pod="openshift-marketplace/certified-operators-47l2p" Oct 
09 15:08:57 crc kubenswrapper[4762]: I1009 15:08:57.282367 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-msd7h\" (UniqueName: \"kubernetes.io/projected/d75f10a2-3bb9-45c9-819e-ab38392b4ad0-kube-api-access-msd7h\") pod \"certified-operators-47l2p\" (UID: \"d75f10a2-3bb9-45c9-819e-ab38392b4ad0\") " pod="openshift-marketplace/certified-operators-47l2p" Oct 09 15:08:57 crc kubenswrapper[4762]: I1009 15:08:57.369703 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-47l2p" Oct 09 15:08:57 crc kubenswrapper[4762]: I1009 15:08:57.939301 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-47l2p"] Oct 09 15:08:58 crc kubenswrapper[4762]: I1009 15:08:58.872158 4762 generic.go:334] "Generic (PLEG): container finished" podID="d75f10a2-3bb9-45c9-819e-ab38392b4ad0" containerID="7a506683f28cfa457bd2a3b99b76494f95c6f1e530f3b520c8910b6c4a11e8dc" exitCode=0 Oct 09 15:08:58 crc kubenswrapper[4762]: I1009 15:08:58.872202 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-47l2p" event={"ID":"d75f10a2-3bb9-45c9-819e-ab38392b4ad0","Type":"ContainerDied","Data":"7a506683f28cfa457bd2a3b99b76494f95c6f1e530f3b520c8910b6c4a11e8dc"} Oct 09 15:08:58 crc kubenswrapper[4762]: I1009 15:08:58.872447 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-47l2p" event={"ID":"d75f10a2-3bb9-45c9-819e-ab38392b4ad0","Type":"ContainerStarted","Data":"fec510c37b013763e08f76483cd8118312e3bccd0850008c9f38f08f388a6acb"} Oct 09 15:08:59 crc kubenswrapper[4762]: I1009 15:08:59.883986 4762 generic.go:334] "Generic (PLEG): container finished" podID="d75f10a2-3bb9-45c9-819e-ab38392b4ad0" containerID="859bf68e1cb8672f1febae33fc91603e0575ef4ee21e5c7cfc9a566b27542bf5" exitCode=0 Oct 09 15:08:59 crc kubenswrapper[4762]: I1009 15:08:59.884067 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-47l2p" event={"ID":"d75f10a2-3bb9-45c9-819e-ab38392b4ad0","Type":"ContainerDied","Data":"859bf68e1cb8672f1febae33fc91603e0575ef4ee21e5c7cfc9a566b27542bf5"} Oct 09 15:09:00 crc kubenswrapper[4762]: I1009 15:09:00.904983 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-47l2p" event={"ID":"d75f10a2-3bb9-45c9-819e-ab38392b4ad0","Type":"ContainerStarted","Data":"46bf67f2b1da91c4fd486926cfa5ba31df910afcc00de61d856d21faca662da9"} Oct 09 15:09:03 crc kubenswrapper[4762]: I1009 15:09:03.888720 4762 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-789b448649-k6n2g" podUID="85ac6699-379e-419f-9ea6-7ea695694279" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.114:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.114:8080: connect: connection refused" Oct 09 15:09:03 crc kubenswrapper[4762]: I1009 15:09:03.889942 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-789b448649-k6n2g" Oct 09 15:09:03 crc kubenswrapper[4762]: I1009 15:09:03.931261 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-47l2p" podStartSLOduration=6.367200888 podStartE2EDuration="7.931243877s" podCreationTimestamp="2025-10-09 15:08:56 +0000 UTC" firstStartedPulling="2025-10-09 15:08:58.875577986 +0000 UTC m=+6214.649369015" lastFinishedPulling="2025-10-09 
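
The pod_startup_latency_tracker entry above encodes a checkable relationship: podStartSLOduration is the end-to-end startup duration minus the image-pull window, and the monotonic m=+ offsets in the entry reproduce it exactly. A small sketch of that arithmetic (Python; the variable names are mine, and the tracker's real bookkeeping has more cases than this):

    # Monotonic m=+ offsets and durations copied from the entry above
    # (pod openshift-marketplace/certified-operators-47l2p).
    first_started_pulling = 6214.649369015
    last_finished_pulling = 6216.213412004
    e2e_duration = 7.931243877             # podStartE2EDuration

    pull_window = last_finished_pulling - first_started_pulling
    slo_duration = e2e_duration - pull_window
    print(f"{slo_duration:.9f}")            # 6.367200888 = podStartSLOduration

The community-operators-s4gpx entry later in this log satisfies the same identity: 5.158301031 - (6234.352473490 - 6231.867714382) = 2.673541923.
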
Oct 09 15:09:06 crc kubenswrapper[4762]: I1009 15:09:06.967805 4762 scope.go:117] "RemoveContainer" containerID="35c7aebddc7d19d2cc79200fbf40a9f94ad48013c10b612bd476e919aed06c38"
Oct 09 15:09:06 crc kubenswrapper[4762]: E1009 15:09:06.970261 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14"
Oct 09 15:09:07 crc kubenswrapper[4762]: I1009 15:09:07.371271 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-47l2p"
Oct 09 15:09:07 crc kubenswrapper[4762]: I1009 15:09:07.371349 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-47l2p"
Oct 09 15:09:07 crc kubenswrapper[4762]: I1009 15:09:07.446455 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-47l2p"
Oct 09 15:09:08 crc kubenswrapper[4762]: I1009 15:09:08.032589 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-47l2p"
Oct 09 15:09:08 crc kubenswrapper[4762]: I1009 15:09:08.080263 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-47l2p"]
Oct 09 15:09:08 crc kubenswrapper[4762]: I1009 15:09:08.999167 4762 generic.go:334] "Generic (PLEG): container finished" podID="85ac6699-379e-419f-9ea6-7ea695694279" containerID="bac4754b7520d66c9513310eb838cd888699f008020fbbe299e9aeeb2def4984" exitCode=137
Oct 09 15:09:09 crc kubenswrapper[4762]: I1009 15:09:08.999369 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-789b448649-k6n2g" event={"ID":"85ac6699-379e-419f-9ea6-7ea695694279","Type":"ContainerDied","Data":"bac4754b7520d66c9513310eb838cd888699f008020fbbe299e9aeeb2def4984"}
Oct 09 15:09:09 crc kubenswrapper[4762]: I1009 15:09:09.000172 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-789b448649-k6n2g" event={"ID":"85ac6699-379e-419f-9ea6-7ea695694279","Type":"ContainerDied","Data":"c8fe5b744f6aa7da62b6ea2d9b34e72276dc48f5fad40981d575bbafd15f25d9"}
Oct 09 15:09:09 crc kubenswrapper[4762]: I1009 15:09:09.000188 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c8fe5b744f6aa7da62b6ea2d9b34e72276dc48f5fad40981d575bbafd15f25d9"
Oct 09 15:09:09 crc kubenswrapper[4762]: I1009 15:09:09.029706 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-789b448649-k6n2g"
Oct 09 15:09:09 crc kubenswrapper[4762]: I1009 15:09:09.197357 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ft42n\" (UniqueName: \"kubernetes.io/projected/85ac6699-379e-419f-9ea6-7ea695694279-kube-api-access-ft42n\") pod \"85ac6699-379e-419f-9ea6-7ea695694279\" (UID: \"85ac6699-379e-419f-9ea6-7ea695694279\") "
Oct 09 15:09:09 crc kubenswrapper[4762]: I1009 15:09:09.197438 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/85ac6699-379e-419f-9ea6-7ea695694279-horizon-secret-key\") pod \"85ac6699-379e-419f-9ea6-7ea695694279\" (UID: \"85ac6699-379e-419f-9ea6-7ea695694279\") "
Oct 09 15:09:09 crc kubenswrapper[4762]: I1009 15:09:09.197502 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/85ac6699-379e-419f-9ea6-7ea695694279-config-data\") pod \"85ac6699-379e-419f-9ea6-7ea695694279\" (UID: \"85ac6699-379e-419f-9ea6-7ea695694279\") "
Oct 09 15:09:09 crc kubenswrapper[4762]: I1009 15:09:09.197550 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/85ac6699-379e-419f-9ea6-7ea695694279-scripts\") pod \"85ac6699-379e-419f-9ea6-7ea695694279\" (UID: \"85ac6699-379e-419f-9ea6-7ea695694279\") "
Oct 09 15:09:09 crc kubenswrapper[4762]: I1009 15:09:09.197569 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/85ac6699-379e-419f-9ea6-7ea695694279-logs\") pod \"85ac6699-379e-419f-9ea6-7ea695694279\" (UID: \"85ac6699-379e-419f-9ea6-7ea695694279\") "
Oct 09 15:09:09 crc kubenswrapper[4762]: I1009 15:09:09.200409 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/85ac6699-379e-419f-9ea6-7ea695694279-logs" (OuterVolumeSpecName: "logs") pod "85ac6699-379e-419f-9ea6-7ea695694279" (UID: "85ac6699-379e-419f-9ea6-7ea695694279"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 09 15:09:09 crc kubenswrapper[4762]: I1009 15:09:09.208886 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/85ac6699-379e-419f-9ea6-7ea695694279-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "85ac6699-379e-419f-9ea6-7ea695694279" (UID: "85ac6699-379e-419f-9ea6-7ea695694279"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 09 15:09:09 crc kubenswrapper[4762]: I1009 15:09:09.209069 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/85ac6699-379e-419f-9ea6-7ea695694279-kube-api-access-ft42n" (OuterVolumeSpecName: "kube-api-access-ft42n") pod "85ac6699-379e-419f-9ea6-7ea695694279" (UID: "85ac6699-379e-419f-9ea6-7ea695694279"). InnerVolumeSpecName "kube-api-access-ft42n". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 09 15:09:09 crc kubenswrapper[4762]: I1009 15:09:09.224602 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/85ac6699-379e-419f-9ea6-7ea695694279-config-data" (OuterVolumeSpecName: "config-data") pod "85ac6699-379e-419f-9ea6-7ea695694279" (UID: "85ac6699-379e-419f-9ea6-7ea695694279"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 09 15:09:09 crc kubenswrapper[4762]: I1009 15:09:09.225012 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/85ac6699-379e-419f-9ea6-7ea695694279-scripts" (OuterVolumeSpecName: "scripts") pod "85ac6699-379e-419f-9ea6-7ea695694279" (UID: "85ac6699-379e-419f-9ea6-7ea695694279"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 09 15:09:09 crc kubenswrapper[4762]: I1009 15:09:09.299824 4762 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/85ac6699-379e-419f-9ea6-7ea695694279-config-data\") on node \"crc\" DevicePath \"\""
Oct 09 15:09:09 crc kubenswrapper[4762]: I1009 15:09:09.300120 4762 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/85ac6699-379e-419f-9ea6-7ea695694279-scripts\") on node \"crc\" DevicePath \"\""
Oct 09 15:09:09 crc kubenswrapper[4762]: I1009 15:09:09.300129 4762 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/85ac6699-379e-419f-9ea6-7ea695694279-logs\") on node \"crc\" DevicePath \"\""
Oct 09 15:09:09 crc kubenswrapper[4762]: I1009 15:09:09.300138 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ft42n\" (UniqueName: \"kubernetes.io/projected/85ac6699-379e-419f-9ea6-7ea695694279-kube-api-access-ft42n\") on node \"crc\" DevicePath \"\""
Oct 09 15:09:09 crc kubenswrapper[4762]: I1009 15:09:09.300149 4762 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/85ac6699-379e-419f-9ea6-7ea695694279-horizon-secret-key\") on node \"crc\" DevicePath \"\""
Oct 09 15:09:10 crc kubenswrapper[4762]: I1009 15:09:10.020789 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-789b448649-k6n2g"
Oct 09 15:09:10 crc kubenswrapper[4762]: I1009 15:09:10.021446 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-47l2p" podUID="d75f10a2-3bb9-45c9-819e-ab38392b4ad0" containerName="registry-server" containerID="cri-o://46bf67f2b1da91c4fd486926cfa5ba31df910afcc00de61d856d21faca662da9" gracePeriod=2
Oct 09 15:09:10 crc kubenswrapper[4762]: I1009 15:09:10.063287 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-bbvxh"]
Oct 09 15:09:10 crc kubenswrapper[4762]: I1009 15:09:10.075488 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-bbvxh"]
Oct 09 15:09:10 crc kubenswrapper[4762]: I1009 15:09:10.086315 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-789b448649-k6n2g"]
Oct 09 15:09:10 crc kubenswrapper[4762]: I1009 15:09:10.097769 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-789b448649-k6n2g"]
Oct 09 15:09:10 crc kubenswrapper[4762]: I1009 15:09:10.480592 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-47l2p"
Need to start a new one" pod="openshift-marketplace/certified-operators-47l2p" Oct 09 15:09:10 crc kubenswrapper[4762]: I1009 15:09:10.639675 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d75f10a2-3bb9-45c9-819e-ab38392b4ad0-catalog-content\") pod \"d75f10a2-3bb9-45c9-819e-ab38392b4ad0\" (UID: \"d75f10a2-3bb9-45c9-819e-ab38392b4ad0\") " Oct 09 15:09:10 crc kubenswrapper[4762]: I1009 15:09:10.639783 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d75f10a2-3bb9-45c9-819e-ab38392b4ad0-utilities\") pod \"d75f10a2-3bb9-45c9-819e-ab38392b4ad0\" (UID: \"d75f10a2-3bb9-45c9-819e-ab38392b4ad0\") " Oct 09 15:09:10 crc kubenswrapper[4762]: I1009 15:09:10.639849 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-msd7h\" (UniqueName: \"kubernetes.io/projected/d75f10a2-3bb9-45c9-819e-ab38392b4ad0-kube-api-access-msd7h\") pod \"d75f10a2-3bb9-45c9-819e-ab38392b4ad0\" (UID: \"d75f10a2-3bb9-45c9-819e-ab38392b4ad0\") " Oct 09 15:09:10 crc kubenswrapper[4762]: I1009 15:09:10.640798 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d75f10a2-3bb9-45c9-819e-ab38392b4ad0-utilities" (OuterVolumeSpecName: "utilities") pod "d75f10a2-3bb9-45c9-819e-ab38392b4ad0" (UID: "d75f10a2-3bb9-45c9-819e-ab38392b4ad0"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 15:09:10 crc kubenswrapper[4762]: I1009 15:09:10.645581 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d75f10a2-3bb9-45c9-819e-ab38392b4ad0-kube-api-access-msd7h" (OuterVolumeSpecName: "kube-api-access-msd7h") pod "d75f10a2-3bb9-45c9-819e-ab38392b4ad0" (UID: "d75f10a2-3bb9-45c9-819e-ab38392b4ad0"). InnerVolumeSpecName "kube-api-access-msd7h". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 15:09:10 crc kubenswrapper[4762]: I1009 15:09:10.686446 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d75f10a2-3bb9-45c9-819e-ab38392b4ad0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d75f10a2-3bb9-45c9-819e-ab38392b4ad0" (UID: "d75f10a2-3bb9-45c9-819e-ab38392b4ad0"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 15:09:10 crc kubenswrapper[4762]: I1009 15:09:10.742054 4762 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d75f10a2-3bb9-45c9-819e-ab38392b4ad0-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 15:09:10 crc kubenswrapper[4762]: I1009 15:09:10.742081 4762 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d75f10a2-3bb9-45c9-819e-ab38392b4ad0-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 15:09:10 crc kubenswrapper[4762]: I1009 15:09:10.742090 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-msd7h\" (UniqueName: \"kubernetes.io/projected/d75f10a2-3bb9-45c9-819e-ab38392b4ad0-kube-api-access-msd7h\") on node \"crc\" DevicePath \"\"" Oct 09 15:09:10 crc kubenswrapper[4762]: I1009 15:09:10.976479 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="85ac6699-379e-419f-9ea6-7ea695694279" path="/var/lib/kubelet/pods/85ac6699-379e-419f-9ea6-7ea695694279/volumes" Oct 09 15:09:10 crc kubenswrapper[4762]: I1009 15:09:10.977287 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e86ddeaa-60ac-49e4-8ca4-39985b6689ed" path="/var/lib/kubelet/pods/e86ddeaa-60ac-49e4-8ca4-39985b6689ed/volumes" Oct 09 15:09:11 crc kubenswrapper[4762]: I1009 15:09:11.034044 4762 generic.go:334] "Generic (PLEG): container finished" podID="d75f10a2-3bb9-45c9-819e-ab38392b4ad0" containerID="46bf67f2b1da91c4fd486926cfa5ba31df910afcc00de61d856d21faca662da9" exitCode=0 Oct 09 15:09:11 crc kubenswrapper[4762]: I1009 15:09:11.034102 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-47l2p" event={"ID":"d75f10a2-3bb9-45c9-819e-ab38392b4ad0","Type":"ContainerDied","Data":"46bf67f2b1da91c4fd486926cfa5ba31df910afcc00de61d856d21faca662da9"} Oct 09 15:09:11 crc kubenswrapper[4762]: I1009 15:09:11.034138 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-47l2p" event={"ID":"d75f10a2-3bb9-45c9-819e-ab38392b4ad0","Type":"ContainerDied","Data":"fec510c37b013763e08f76483cd8118312e3bccd0850008c9f38f08f388a6acb"} Oct 09 15:09:11 crc kubenswrapper[4762]: I1009 15:09:11.034154 4762 scope.go:117] "RemoveContainer" containerID="46bf67f2b1da91c4fd486926cfa5ba31df910afcc00de61d856d21faca662da9" Oct 09 15:09:11 crc kubenswrapper[4762]: I1009 15:09:11.034177 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-47l2p" Oct 09 15:09:11 crc kubenswrapper[4762]: I1009 15:09:11.060353 4762 scope.go:117] "RemoveContainer" containerID="859bf68e1cb8672f1febae33fc91603e0575ef4ee21e5c7cfc9a566b27542bf5" Oct 09 15:09:11 crc kubenswrapper[4762]: I1009 15:09:11.070362 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-47l2p"] Oct 09 15:09:11 crc kubenswrapper[4762]: I1009 15:09:11.077306 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-47l2p"] Oct 09 15:09:11 crc kubenswrapper[4762]: I1009 15:09:11.084829 4762 scope.go:117] "RemoveContainer" containerID="7a506683f28cfa457bd2a3b99b76494f95c6f1e530f3b520c8910b6c4a11e8dc" Oct 09 15:09:11 crc kubenswrapper[4762]: I1009 15:09:11.124829 4762 scope.go:117] "RemoveContainer" containerID="46bf67f2b1da91c4fd486926cfa5ba31df910afcc00de61d856d21faca662da9" Oct 09 15:09:11 crc kubenswrapper[4762]: E1009 15:09:11.125442 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"46bf67f2b1da91c4fd486926cfa5ba31df910afcc00de61d856d21faca662da9\": container with ID starting with 46bf67f2b1da91c4fd486926cfa5ba31df910afcc00de61d856d21faca662da9 not found: ID does not exist" containerID="46bf67f2b1da91c4fd486926cfa5ba31df910afcc00de61d856d21faca662da9" Oct 09 15:09:11 crc kubenswrapper[4762]: I1009 15:09:11.125509 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"46bf67f2b1da91c4fd486926cfa5ba31df910afcc00de61d856d21faca662da9"} err="failed to get container status \"46bf67f2b1da91c4fd486926cfa5ba31df910afcc00de61d856d21faca662da9\": rpc error: code = NotFound desc = could not find container \"46bf67f2b1da91c4fd486926cfa5ba31df910afcc00de61d856d21faca662da9\": container with ID starting with 46bf67f2b1da91c4fd486926cfa5ba31df910afcc00de61d856d21faca662da9 not found: ID does not exist" Oct 09 15:09:11 crc kubenswrapper[4762]: I1009 15:09:11.125537 4762 scope.go:117] "RemoveContainer" containerID="859bf68e1cb8672f1febae33fc91603e0575ef4ee21e5c7cfc9a566b27542bf5" Oct 09 15:09:11 crc kubenswrapper[4762]: E1009 15:09:11.125997 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"859bf68e1cb8672f1febae33fc91603e0575ef4ee21e5c7cfc9a566b27542bf5\": container with ID starting with 859bf68e1cb8672f1febae33fc91603e0575ef4ee21e5c7cfc9a566b27542bf5 not found: ID does not exist" containerID="859bf68e1cb8672f1febae33fc91603e0575ef4ee21e5c7cfc9a566b27542bf5" Oct 09 15:09:11 crc kubenswrapper[4762]: I1009 15:09:11.126031 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"859bf68e1cb8672f1febae33fc91603e0575ef4ee21e5c7cfc9a566b27542bf5"} err="failed to get container status \"859bf68e1cb8672f1febae33fc91603e0575ef4ee21e5c7cfc9a566b27542bf5\": rpc error: code = NotFound desc = could not find container \"859bf68e1cb8672f1febae33fc91603e0575ef4ee21e5c7cfc9a566b27542bf5\": container with ID starting with 859bf68e1cb8672f1febae33fc91603e0575ef4ee21e5c7cfc9a566b27542bf5 not found: ID does not exist" Oct 09 15:09:11 crc kubenswrapper[4762]: I1009 15:09:11.126057 4762 scope.go:117] "RemoveContainer" containerID="7a506683f28cfa457bd2a3b99b76494f95c6f1e530f3b520c8910b6c4a11e8dc" Oct 09 15:09:11 crc kubenswrapper[4762]: E1009 15:09:11.126431 4762 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"7a506683f28cfa457bd2a3b99b76494f95c6f1e530f3b520c8910b6c4a11e8dc\": container with ID starting with 7a506683f28cfa457bd2a3b99b76494f95c6f1e530f3b520c8910b6c4a11e8dc not found: ID does not exist" containerID="7a506683f28cfa457bd2a3b99b76494f95c6f1e530f3b520c8910b6c4a11e8dc" Oct 09 15:09:11 crc kubenswrapper[4762]: I1009 15:09:11.126470 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7a506683f28cfa457bd2a3b99b76494f95c6f1e530f3b520c8910b6c4a11e8dc"} err="failed to get container status \"7a506683f28cfa457bd2a3b99b76494f95c6f1e530f3b520c8910b6c4a11e8dc\": rpc error: code = NotFound desc = could not find container \"7a506683f28cfa457bd2a3b99b76494f95c6f1e530f3b520c8910b6c4a11e8dc\": container with ID starting with 7a506683f28cfa457bd2a3b99b76494f95c6f1e530f3b520c8910b6c4a11e8dc not found: ID does not exist" Oct 09 15:09:12 crc kubenswrapper[4762]: I1009 15:09:12.984945 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d75f10a2-3bb9-45c9-819e-ab38392b4ad0" path="/var/lib/kubelet/pods/d75f10a2-3bb9-45c9-819e-ab38392b4ad0/volumes" Oct 09 15:09:14 crc kubenswrapper[4762]: I1009 15:09:14.115373 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-s4gpx"] Oct 09 15:09:14 crc kubenswrapper[4762]: E1009 15:09:14.115858 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d75f10a2-3bb9-45c9-819e-ab38392b4ad0" containerName="extract-content" Oct 09 15:09:14 crc kubenswrapper[4762]: I1009 15:09:14.115875 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="d75f10a2-3bb9-45c9-819e-ab38392b4ad0" containerName="extract-content" Oct 09 15:09:14 crc kubenswrapper[4762]: E1009 15:09:14.115905 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85ac6699-379e-419f-9ea6-7ea695694279" containerName="horizon-log" Oct 09 15:09:14 crc kubenswrapper[4762]: I1009 15:09:14.115913 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="85ac6699-379e-419f-9ea6-7ea695694279" containerName="horizon-log" Oct 09 15:09:14 crc kubenswrapper[4762]: E1009 15:09:14.115927 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d75f10a2-3bb9-45c9-819e-ab38392b4ad0" containerName="extract-utilities" Oct 09 15:09:14 crc kubenswrapper[4762]: I1009 15:09:14.115937 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="d75f10a2-3bb9-45c9-819e-ab38392b4ad0" containerName="extract-utilities" Oct 09 15:09:14 crc kubenswrapper[4762]: E1009 15:09:14.115966 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85ac6699-379e-419f-9ea6-7ea695694279" containerName="horizon" Oct 09 15:09:14 crc kubenswrapper[4762]: I1009 15:09:14.115975 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="85ac6699-379e-419f-9ea6-7ea695694279" containerName="horizon" Oct 09 15:09:14 crc kubenswrapper[4762]: E1009 15:09:14.115989 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d75f10a2-3bb9-45c9-819e-ab38392b4ad0" containerName="registry-server" Oct 09 15:09:14 crc kubenswrapper[4762]: I1009 15:09:14.115997 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="d75f10a2-3bb9-45c9-819e-ab38392b4ad0" containerName="registry-server" Oct 09 15:09:14 crc kubenswrapper[4762]: I1009 15:09:14.116240 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="85ac6699-379e-419f-9ea6-7ea695694279" containerName="horizon-log" Oct 09 15:09:14 crc 
Oct 09 15:09:14 crc kubenswrapper[4762]: I1009 15:09:14.116267 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="d75f10a2-3bb9-45c9-819e-ab38392b4ad0" containerName="registry-server"
Oct 09 15:09:14 crc kubenswrapper[4762]: I1009 15:09:14.116287 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="85ac6699-379e-419f-9ea6-7ea695694279" containerName="horizon"
Oct 09 15:09:14 crc kubenswrapper[4762]: I1009 15:09:14.118202 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-s4gpx"
Oct 09 15:09:14 crc kubenswrapper[4762]: I1009 15:09:14.128162 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-s4gpx"]
Oct 09 15:09:14 crc kubenswrapper[4762]: I1009 15:09:14.214065 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1b32f5b2-26c0-402f-b334-f155dd37b645-catalog-content\") pod \"community-operators-s4gpx\" (UID: \"1b32f5b2-26c0-402f-b334-f155dd37b645\") " pod="openshift-marketplace/community-operators-s4gpx"
Oct 09 15:09:14 crc kubenswrapper[4762]: I1009 15:09:14.214572 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5fjmr\" (UniqueName: \"kubernetes.io/projected/1b32f5b2-26c0-402f-b334-f155dd37b645-kube-api-access-5fjmr\") pod \"community-operators-s4gpx\" (UID: \"1b32f5b2-26c0-402f-b334-f155dd37b645\") " pod="openshift-marketplace/community-operators-s4gpx"
Oct 09 15:09:14 crc kubenswrapper[4762]: I1009 15:09:14.214700 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1b32f5b2-26c0-402f-b334-f155dd37b645-utilities\") pod \"community-operators-s4gpx\" (UID: \"1b32f5b2-26c0-402f-b334-f155dd37b645\") " pod="openshift-marketplace/community-operators-s4gpx"
Oct 09 15:09:14 crc kubenswrapper[4762]: I1009 15:09:14.316973 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1b32f5b2-26c0-402f-b334-f155dd37b645-catalog-content\") pod \"community-operators-s4gpx\" (UID: \"1b32f5b2-26c0-402f-b334-f155dd37b645\") " pod="openshift-marketplace/community-operators-s4gpx"
Oct 09 15:09:14 crc kubenswrapper[4762]: I1009 15:09:14.317058 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5fjmr\" (UniqueName: \"kubernetes.io/projected/1b32f5b2-26c0-402f-b334-f155dd37b645-kube-api-access-5fjmr\") pod \"community-operators-s4gpx\" (UID: \"1b32f5b2-26c0-402f-b334-f155dd37b645\") " pod="openshift-marketplace/community-operators-s4gpx"
Oct 09 15:09:14 crc kubenswrapper[4762]: I1009 15:09:14.317116 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1b32f5b2-26c0-402f-b334-f155dd37b645-utilities\") pod \"community-operators-s4gpx\" (UID: \"1b32f5b2-26c0-402f-b334-f155dd37b645\") " pod="openshift-marketplace/community-operators-s4gpx"
Oct 09 15:09:14 crc kubenswrapper[4762]: I1009 15:09:14.317399 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1b32f5b2-26c0-402f-b334-f155dd37b645-catalog-content\") pod \"community-operators-s4gpx\" (UID: \"1b32f5b2-26c0-402f-b334-f155dd37b645\") " pod="openshift-marketplace/community-operators-s4gpx"
Oct 09 15:09:14 crc kubenswrapper[4762]: I1009 15:09:14.317753 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1b32f5b2-26c0-402f-b334-f155dd37b645-utilities\") pod \"community-operators-s4gpx\" (UID: \"1b32f5b2-26c0-402f-b334-f155dd37b645\") " pod="openshift-marketplace/community-operators-s4gpx"
Oct 09 15:09:14 crc kubenswrapper[4762]: I1009 15:09:14.341856 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5fjmr\" (UniqueName: \"kubernetes.io/projected/1b32f5b2-26c0-402f-b334-f155dd37b645-kube-api-access-5fjmr\") pod \"community-operators-s4gpx\" (UID: \"1b32f5b2-26c0-402f-b334-f155dd37b645\") " pod="openshift-marketplace/community-operators-s4gpx"
Oct 09 15:09:14 crc kubenswrapper[4762]: I1009 15:09:14.454134 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-s4gpx"
Oct 09 15:09:14 crc kubenswrapper[4762]: I1009 15:09:14.942017 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-s4gpx"]
Oct 09 15:09:15 crc kubenswrapper[4762]: I1009 15:09:15.079722 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-s4gpx" event={"ID":"1b32f5b2-26c0-402f-b334-f155dd37b645","Type":"ContainerStarted","Data":"952f879e50375528c9f1f52bafb1fb9c8eab289a4e9441933cc27ffdbade4277"}
Oct 09 15:09:16 crc kubenswrapper[4762]: I1009 15:09:16.091312 4762 generic.go:334] "Generic (PLEG): container finished" podID="1b32f5b2-26c0-402f-b334-f155dd37b645" containerID="79a68631797d39a6dabc1a2d4b27c941e978e93a2739d70214c37261632d2d2c" exitCode=0
Oct 09 15:09:16 crc kubenswrapper[4762]: I1009 15:09:16.091421 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-s4gpx" event={"ID":"1b32f5b2-26c0-402f-b334-f155dd37b645","Type":"ContainerDied","Data":"79a68631797d39a6dabc1a2d4b27c941e978e93a2739d70214c37261632d2d2c"}
Oct 09 15:09:17 crc kubenswrapper[4762]: I1009 15:09:17.106578 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-s4gpx" event={"ID":"1b32f5b2-26c0-402f-b334-f155dd37b645","Type":"ContainerStarted","Data":"307302de87f75d967e3f04fb2a3a2fa77d601f674376a3168b4d82656a6570a8"}
Oct 09 15:09:18 crc kubenswrapper[4762]: I1009 15:09:18.115517 4762 generic.go:334] "Generic (PLEG): container finished" podID="1b32f5b2-26c0-402f-b334-f155dd37b645" containerID="307302de87f75d967e3f04fb2a3a2fa77d601f674376a3168b4d82656a6570a8" exitCode=0
Oct 09 15:09:18 crc kubenswrapper[4762]: I1009 15:09:18.115559 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-s4gpx" event={"ID":"1b32f5b2-26c0-402f-b334-f155dd37b645","Type":"ContainerDied","Data":"307302de87f75d967e3f04fb2a3a2fa77d601f674376a3168b4d82656a6570a8"}
Oct 09 15:09:18 crc kubenswrapper[4762]: I1009 15:09:18.966133 4762 scope.go:117] "RemoveContainer" containerID="35c7aebddc7d19d2cc79200fbf40a9f94ad48013c10b612bd476e919aed06c38"
Oct 09 15:09:18 crc kubenswrapper[4762]: E1009 15:09:18.966834 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14"
Oct 09 15:09:19 crc kubenswrapper[4762]: I1009 15:09:19.126221 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-s4gpx" event={"ID":"1b32f5b2-26c0-402f-b334-f155dd37b645","Type":"ContainerStarted","Data":"10371e02e7d11b2c3e8c0fe2ca4cdd72226f3df5ac8cb8610e00bd2d6a3014da"}
Oct 09 15:09:19 crc kubenswrapper[4762]: I1009 15:09:19.158322 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-s4gpx" podStartSLOduration=2.673541923 podStartE2EDuration="5.158301031s" podCreationTimestamp="2025-10-09 15:09:14 +0000 UTC" firstStartedPulling="2025-10-09 15:09:16.093923343 +0000 UTC m=+6231.867714382" lastFinishedPulling="2025-10-09 15:09:18.578682451 +0000 UTC m=+6234.352473490" observedRunningTime="2025-10-09 15:09:19.14632669 +0000 UTC m=+6234.920117769" watchObservedRunningTime="2025-10-09 15:09:19.158301031 +0000 UTC m=+6234.932092080"
Oct 09 15:09:20 crc kubenswrapper[4762]: I1009 15:09:20.037785 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-a3a2-account-create-hsvrr"]
Oct 09 15:09:20 crc kubenswrapper[4762]: I1009 15:09:20.048769 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-a3a2-account-create-hsvrr"]
Oct 09 15:09:20 crc kubenswrapper[4762]: I1009 15:09:20.943967 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-6df886d647-szb9q"]
Oct 09 15:09:20 crc kubenswrapper[4762]: I1009 15:09:20.950829 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-6df886d647-szb9q"
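
The machine-config-daemon pair above is the third repeat in this window (15:08:55, 15:09:06, 15:09:18): each pod sync asks to restart the container, and the restart is refused because the container's crash-loop backoff has reached its cap, reported as "back-off 5m0s". A sketch of the backoff schedule (assuming the upstream kubelet defaults of a 10s initial delay doubling to a 5m cap; those constants are not recorded in the log itself):

    # Assumed kubelet defaults: 10s initial backoff, doubling per restart,
    # capped at 5 minutes (the "back-off 5m0s" plateau seen in this log).
    def crashloop_backoff(restarts, base=10.0, cap=300.0):
        """Seconds waited before restart attempt `restarts` (0-based)."""
        return min(base * 2 ** restarts, cap)

    print([crashloop_backoff(n) for n in range(7)])
    # [10.0, 20.0, 40.0, 80.0, 160.0, 300.0, 300.0]

The roughly 11-second spacing of the "Error syncing pod, skipping" lines is the sync loop re-evaluating the pod, not the backoff itself; the restart is only attempted once the 5m window has elapsed.
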
Need to start a new one" pod="openstack/horizon-6df886d647-szb9q" Oct 09 15:09:20 crc kubenswrapper[4762]: I1009 15:09:20.963801 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-6df886d647-szb9q"] Oct 09 15:09:20 crc kubenswrapper[4762]: I1009 15:09:20.979470 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aa9f5502-22d1-4a17-bf73-b1514be38584" path="/var/lib/kubelet/pods/aa9f5502-22d1-4a17-bf73-b1514be38584/volumes" Oct 09 15:09:21 crc kubenswrapper[4762]: I1009 15:09:21.054814 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/d418fd45-a3dc-46ca-8d2b-3e82ba33d483-horizon-secret-key\") pod \"horizon-6df886d647-szb9q\" (UID: \"d418fd45-a3dc-46ca-8d2b-3e82ba33d483\") " pod="openstack/horizon-6df886d647-szb9q" Oct 09 15:09:21 crc kubenswrapper[4762]: I1009 15:09:21.054910 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d418fd45-a3dc-46ca-8d2b-3e82ba33d483-logs\") pod \"horizon-6df886d647-szb9q\" (UID: \"d418fd45-a3dc-46ca-8d2b-3e82ba33d483\") " pod="openstack/horizon-6df886d647-szb9q" Oct 09 15:09:21 crc kubenswrapper[4762]: I1009 15:09:21.055175 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cbbql\" (UniqueName: \"kubernetes.io/projected/d418fd45-a3dc-46ca-8d2b-3e82ba33d483-kube-api-access-cbbql\") pod \"horizon-6df886d647-szb9q\" (UID: \"d418fd45-a3dc-46ca-8d2b-3e82ba33d483\") " pod="openstack/horizon-6df886d647-szb9q" Oct 09 15:09:21 crc kubenswrapper[4762]: I1009 15:09:21.055242 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d418fd45-a3dc-46ca-8d2b-3e82ba33d483-config-data\") pod \"horizon-6df886d647-szb9q\" (UID: \"d418fd45-a3dc-46ca-8d2b-3e82ba33d483\") " pod="openstack/horizon-6df886d647-szb9q" Oct 09 15:09:21 crc kubenswrapper[4762]: I1009 15:09:21.055375 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d418fd45-a3dc-46ca-8d2b-3e82ba33d483-scripts\") pod \"horizon-6df886d647-szb9q\" (UID: \"d418fd45-a3dc-46ca-8d2b-3e82ba33d483\") " pod="openstack/horizon-6df886d647-szb9q" Oct 09 15:09:21 crc kubenswrapper[4762]: I1009 15:09:21.157715 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/d418fd45-a3dc-46ca-8d2b-3e82ba33d483-horizon-secret-key\") pod \"horizon-6df886d647-szb9q\" (UID: \"d418fd45-a3dc-46ca-8d2b-3e82ba33d483\") " pod="openstack/horizon-6df886d647-szb9q" Oct 09 15:09:21 crc kubenswrapper[4762]: I1009 15:09:21.157829 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d418fd45-a3dc-46ca-8d2b-3e82ba33d483-logs\") pod \"horizon-6df886d647-szb9q\" (UID: \"d418fd45-a3dc-46ca-8d2b-3e82ba33d483\") " pod="openstack/horizon-6df886d647-szb9q" Oct 09 15:09:21 crc kubenswrapper[4762]: I1009 15:09:21.157888 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cbbql\" (UniqueName: \"kubernetes.io/projected/d418fd45-a3dc-46ca-8d2b-3e82ba33d483-kube-api-access-cbbql\") pod \"horizon-6df886d647-szb9q\" (UID: \"d418fd45-a3dc-46ca-8d2b-3e82ba33d483\") 
" pod="openstack/horizon-6df886d647-szb9q" Oct 09 15:09:21 crc kubenswrapper[4762]: I1009 15:09:21.157913 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d418fd45-a3dc-46ca-8d2b-3e82ba33d483-config-data\") pod \"horizon-6df886d647-szb9q\" (UID: \"d418fd45-a3dc-46ca-8d2b-3e82ba33d483\") " pod="openstack/horizon-6df886d647-szb9q" Oct 09 15:09:21 crc kubenswrapper[4762]: I1009 15:09:21.158033 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d418fd45-a3dc-46ca-8d2b-3e82ba33d483-scripts\") pod \"horizon-6df886d647-szb9q\" (UID: \"d418fd45-a3dc-46ca-8d2b-3e82ba33d483\") " pod="openstack/horizon-6df886d647-szb9q" Oct 09 15:09:21 crc kubenswrapper[4762]: I1009 15:09:21.158859 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d418fd45-a3dc-46ca-8d2b-3e82ba33d483-scripts\") pod \"horizon-6df886d647-szb9q\" (UID: \"d418fd45-a3dc-46ca-8d2b-3e82ba33d483\") " pod="openstack/horizon-6df886d647-szb9q" Oct 09 15:09:21 crc kubenswrapper[4762]: I1009 15:09:21.160105 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d418fd45-a3dc-46ca-8d2b-3e82ba33d483-logs\") pod \"horizon-6df886d647-szb9q\" (UID: \"d418fd45-a3dc-46ca-8d2b-3e82ba33d483\") " pod="openstack/horizon-6df886d647-szb9q" Oct 09 15:09:21 crc kubenswrapper[4762]: I1009 15:09:21.167015 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d418fd45-a3dc-46ca-8d2b-3e82ba33d483-config-data\") pod \"horizon-6df886d647-szb9q\" (UID: \"d418fd45-a3dc-46ca-8d2b-3e82ba33d483\") " pod="openstack/horizon-6df886d647-szb9q" Oct 09 15:09:21 crc kubenswrapper[4762]: I1009 15:09:21.170362 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/d418fd45-a3dc-46ca-8d2b-3e82ba33d483-horizon-secret-key\") pod \"horizon-6df886d647-szb9q\" (UID: \"d418fd45-a3dc-46ca-8d2b-3e82ba33d483\") " pod="openstack/horizon-6df886d647-szb9q" Oct 09 15:09:21 crc kubenswrapper[4762]: I1009 15:09:21.184693 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cbbql\" (UniqueName: \"kubernetes.io/projected/d418fd45-a3dc-46ca-8d2b-3e82ba33d483-kube-api-access-cbbql\") pod \"horizon-6df886d647-szb9q\" (UID: \"d418fd45-a3dc-46ca-8d2b-3e82ba33d483\") " pod="openstack/horizon-6df886d647-szb9q" Oct 09 15:09:21 crc kubenswrapper[4762]: I1009 15:09:21.279425 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-6df886d647-szb9q" Oct 09 15:09:21 crc kubenswrapper[4762]: I1009 15:09:21.735816 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-6df886d647-szb9q"] Oct 09 15:09:22 crc kubenswrapper[4762]: I1009 15:09:22.164535 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6df886d647-szb9q" event={"ID":"d418fd45-a3dc-46ca-8d2b-3e82ba33d483","Type":"ContainerStarted","Data":"f0a6142ba8ecc4f3244570cafdad2ff28f5f59ac5d19f2ea4e00816fc8271643"} Oct 09 15:09:22 crc kubenswrapper[4762]: I1009 15:09:22.164926 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6df886d647-szb9q" event={"ID":"d418fd45-a3dc-46ca-8d2b-3e82ba33d483","Type":"ContainerStarted","Data":"092c6090d76d795685b7bcdc78624d553523c435c59e76e5d37512af59221640"} Oct 09 15:09:22 crc kubenswrapper[4762]: I1009 15:09:22.164943 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6df886d647-szb9q" event={"ID":"d418fd45-a3dc-46ca-8d2b-3e82ba33d483","Type":"ContainerStarted","Data":"b786fa473166abdf48b6d753db7f56e4b355266ce140b702fb782862d3ded6ec"} Oct 09 15:09:22 crc kubenswrapper[4762]: I1009 15:09:22.190867 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-6df886d647-szb9q" podStartSLOduration=2.190846081 podStartE2EDuration="2.190846081s" podCreationTimestamp="2025-10-09 15:09:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 15:09:22.183688255 +0000 UTC m=+6237.957479314" watchObservedRunningTime="2025-10-09 15:09:22.190846081 +0000 UTC m=+6237.964637140" Oct 09 15:09:22 crc kubenswrapper[4762]: I1009 15:09:22.435690 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-db-create-wg5pg"] Oct 09 15:09:22 crc kubenswrapper[4762]: I1009 15:09:22.441356 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-create-wg5pg" Oct 09 15:09:22 crc kubenswrapper[4762]: I1009 15:09:22.447784 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-db-create-wg5pg"] Oct 09 15:09:22 crc kubenswrapper[4762]: I1009 15:09:22.585950 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qj784\" (UniqueName: \"kubernetes.io/projected/b354ad3f-4a44-45e4-b3ff-f3efce2b0640-kube-api-access-qj784\") pod \"heat-db-create-wg5pg\" (UID: \"b354ad3f-4a44-45e4-b3ff-f3efce2b0640\") " pod="openstack/heat-db-create-wg5pg" Oct 09 15:09:22 crc kubenswrapper[4762]: I1009 15:09:22.687627 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qj784\" (UniqueName: \"kubernetes.io/projected/b354ad3f-4a44-45e4-b3ff-f3efce2b0640-kube-api-access-qj784\") pod \"heat-db-create-wg5pg\" (UID: \"b354ad3f-4a44-45e4-b3ff-f3efce2b0640\") " pod="openstack/heat-db-create-wg5pg" Oct 09 15:09:22 crc kubenswrapper[4762]: I1009 15:09:22.711659 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qj784\" (UniqueName: \"kubernetes.io/projected/b354ad3f-4a44-45e4-b3ff-f3efce2b0640-kube-api-access-qj784\") pod \"heat-db-create-wg5pg\" (UID: \"b354ad3f-4a44-45e4-b3ff-f3efce2b0640\") " pod="openstack/heat-db-create-wg5pg" Oct 09 15:09:22 crc kubenswrapper[4762]: I1009 15:09:22.775214 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-db-create-wg5pg" Oct 09 15:09:23 crc kubenswrapper[4762]: W1009 15:09:23.268451 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb354ad3f_4a44_45e4_b3ff_f3efce2b0640.slice/crio-162ddd9f39e0451311bf9e6ff4b145eb6b97b016e00ca6e39125cacce390a448 WatchSource:0}: Error finding container 162ddd9f39e0451311bf9e6ff4b145eb6b97b016e00ca6e39125cacce390a448: Status 404 returned error can't find the container with id 162ddd9f39e0451311bf9e6ff4b145eb6b97b016e00ca6e39125cacce390a448 Oct 09 15:09:23 crc kubenswrapper[4762]: I1009 15:09:23.272579 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-db-create-wg5pg"] Oct 09 15:09:24 crc kubenswrapper[4762]: I1009 15:09:24.187042 4762 generic.go:334] "Generic (PLEG): container finished" podID="b354ad3f-4a44-45e4-b3ff-f3efce2b0640" containerID="7906566976dd453da44b4542e334422f39955f4f84d26f42f9f99afb44cd5a1c" exitCode=0 Oct 09 15:09:24 crc kubenswrapper[4762]: I1009 15:09:24.187552 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-create-wg5pg" event={"ID":"b354ad3f-4a44-45e4-b3ff-f3efce2b0640","Type":"ContainerDied","Data":"7906566976dd453da44b4542e334422f39955f4f84d26f42f9f99afb44cd5a1c"} Oct 09 15:09:24 crc kubenswrapper[4762]: I1009 15:09:24.187577 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-create-wg5pg" event={"ID":"b354ad3f-4a44-45e4-b3ff-f3efce2b0640","Type":"ContainerStarted","Data":"162ddd9f39e0451311bf9e6ff4b145eb6b97b016e00ca6e39125cacce390a448"} Oct 09 15:09:24 crc kubenswrapper[4762]: I1009 15:09:24.454724 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-s4gpx" Oct 09 15:09:24 crc kubenswrapper[4762]: I1009 15:09:24.454791 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-s4gpx" Oct 09 15:09:24 crc kubenswrapper[4762]: I1009 15:09:24.510583 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-s4gpx" Oct 09 15:09:25 crc kubenswrapper[4762]: I1009 15:09:25.245126 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-s4gpx" Oct 09 15:09:25 crc kubenswrapper[4762]: I1009 15:09:25.309867 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-s4gpx"] Oct 09 15:09:25 crc kubenswrapper[4762]: I1009 15:09:25.593052 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-create-wg5pg" Oct 09 15:09:25 crc kubenswrapper[4762]: I1009 15:09:25.652365 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qj784\" (UniqueName: \"kubernetes.io/projected/b354ad3f-4a44-45e4-b3ff-f3efce2b0640-kube-api-access-qj784\") pod \"b354ad3f-4a44-45e4-b3ff-f3efce2b0640\" (UID: \"b354ad3f-4a44-45e4-b3ff-f3efce2b0640\") " Oct 09 15:09:25 crc kubenswrapper[4762]: I1009 15:09:25.656662 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b354ad3f-4a44-45e4-b3ff-f3efce2b0640-kube-api-access-qj784" (OuterVolumeSpecName: "kube-api-access-qj784") pod "b354ad3f-4a44-45e4-b3ff-f3efce2b0640" (UID: "b354ad3f-4a44-45e4-b3ff-f3efce2b0640"). InnerVolumeSpecName "kube-api-access-qj784". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 15:09:25 crc kubenswrapper[4762]: I1009 15:09:25.754406 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qj784\" (UniqueName: \"kubernetes.io/projected/b354ad3f-4a44-45e4-b3ff-f3efce2b0640-kube-api-access-qj784\") on node \"crc\" DevicePath \"\"" Oct 09 15:09:26 crc kubenswrapper[4762]: I1009 15:09:26.026897 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-c66sk"] Oct 09 15:09:26 crc kubenswrapper[4762]: I1009 15:09:26.040607 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-c66sk"] Oct 09 15:09:26 crc kubenswrapper[4762]: I1009 15:09:26.225118 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-create-wg5pg" Oct 09 15:09:26 crc kubenswrapper[4762]: I1009 15:09:26.225169 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-create-wg5pg" event={"ID":"b354ad3f-4a44-45e4-b3ff-f3efce2b0640","Type":"ContainerDied","Data":"162ddd9f39e0451311bf9e6ff4b145eb6b97b016e00ca6e39125cacce390a448"} Oct 09 15:09:26 crc kubenswrapper[4762]: I1009 15:09:26.225219 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="162ddd9f39e0451311bf9e6ff4b145eb6b97b016e00ca6e39125cacce390a448" Oct 09 15:09:26 crc kubenswrapper[4762]: I1009 15:09:26.975115 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="91f63920-a550-4f27-b780-f213efb1ca79" path="/var/lib/kubelet/pods/91f63920-a550-4f27-b780-f213efb1ca79/volumes" Oct 09 15:09:27 crc kubenswrapper[4762]: I1009 15:09:27.233051 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-s4gpx" podUID="1b32f5b2-26c0-402f-b334-f155dd37b645" containerName="registry-server" containerID="cri-o://10371e02e7d11b2c3e8c0fe2ca4cdd72226f3df5ac8cb8610e00bd2d6a3014da" gracePeriod=2 Oct 09 15:09:27 crc kubenswrapper[4762]: I1009 15:09:27.701304 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-s4gpx" Oct 09 15:09:27 crc kubenswrapper[4762]: I1009 15:09:27.793557 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1b32f5b2-26c0-402f-b334-f155dd37b645-catalog-content\") pod \"1b32f5b2-26c0-402f-b334-f155dd37b645\" (UID: \"1b32f5b2-26c0-402f-b334-f155dd37b645\") " Oct 09 15:09:27 crc kubenswrapper[4762]: I1009 15:09:27.793682 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1b32f5b2-26c0-402f-b334-f155dd37b645-utilities\") pod \"1b32f5b2-26c0-402f-b334-f155dd37b645\" (UID: \"1b32f5b2-26c0-402f-b334-f155dd37b645\") " Oct 09 15:09:27 crc kubenswrapper[4762]: I1009 15:09:27.793866 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5fjmr\" (UniqueName: \"kubernetes.io/projected/1b32f5b2-26c0-402f-b334-f155dd37b645-kube-api-access-5fjmr\") pod \"1b32f5b2-26c0-402f-b334-f155dd37b645\" (UID: \"1b32f5b2-26c0-402f-b334-f155dd37b645\") " Oct 09 15:09:27 crc kubenswrapper[4762]: I1009 15:09:27.794468 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1b32f5b2-26c0-402f-b334-f155dd37b645-utilities" (OuterVolumeSpecName: "utilities") pod "1b32f5b2-26c0-402f-b334-f155dd37b645" (UID: "1b32f5b2-26c0-402f-b334-f155dd37b645"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 15:09:27 crc kubenswrapper[4762]: I1009 15:09:27.795989 4762 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1b32f5b2-26c0-402f-b334-f155dd37b645-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 15:09:27 crc kubenswrapper[4762]: I1009 15:09:27.799245 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1b32f5b2-26c0-402f-b334-f155dd37b645-kube-api-access-5fjmr" (OuterVolumeSpecName: "kube-api-access-5fjmr") pod "1b32f5b2-26c0-402f-b334-f155dd37b645" (UID: "1b32f5b2-26c0-402f-b334-f155dd37b645"). InnerVolumeSpecName "kube-api-access-5fjmr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 15:09:27 crc kubenswrapper[4762]: I1009 15:09:27.841215 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1b32f5b2-26c0-402f-b334-f155dd37b645-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1b32f5b2-26c0-402f-b334-f155dd37b645" (UID: "1b32f5b2-26c0-402f-b334-f155dd37b645"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 15:09:27 crc kubenswrapper[4762]: I1009 15:09:27.898787 4762 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1b32f5b2-26c0-402f-b334-f155dd37b645-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 15:09:27 crc kubenswrapper[4762]: I1009 15:09:27.899163 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5fjmr\" (UniqueName: \"kubernetes.io/projected/1b32f5b2-26c0-402f-b334-f155dd37b645-kube-api-access-5fjmr\") on node \"crc\" DevicePath \"\"" Oct 09 15:09:28 crc kubenswrapper[4762]: I1009 15:09:28.246389 4762 generic.go:334] "Generic (PLEG): container finished" podID="1b32f5b2-26c0-402f-b334-f155dd37b645" containerID="10371e02e7d11b2c3e8c0fe2ca4cdd72226f3df5ac8cb8610e00bd2d6a3014da" exitCode=0 Oct 09 15:09:28 crc kubenswrapper[4762]: I1009 15:09:28.246441 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-s4gpx" event={"ID":"1b32f5b2-26c0-402f-b334-f155dd37b645","Type":"ContainerDied","Data":"10371e02e7d11b2c3e8c0fe2ca4cdd72226f3df5ac8cb8610e00bd2d6a3014da"} Oct 09 15:09:28 crc kubenswrapper[4762]: I1009 15:09:28.246470 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-s4gpx" event={"ID":"1b32f5b2-26c0-402f-b334-f155dd37b645","Type":"ContainerDied","Data":"952f879e50375528c9f1f52bafb1fb9c8eab289a4e9441933cc27ffdbade4277"} Oct 09 15:09:28 crc kubenswrapper[4762]: I1009 15:09:28.246490 4762 scope.go:117] "RemoveContainer" containerID="10371e02e7d11b2c3e8c0fe2ca4cdd72226f3df5ac8cb8610e00bd2d6a3014da" Oct 09 15:09:28 crc kubenswrapper[4762]: I1009 15:09:28.246673 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-s4gpx" Oct 09 15:09:28 crc kubenswrapper[4762]: I1009 15:09:28.271457 4762 scope.go:117] "RemoveContainer" containerID="307302de87f75d967e3f04fb2a3a2fa77d601f674376a3168b4d82656a6570a8" Oct 09 15:09:28 crc kubenswrapper[4762]: I1009 15:09:28.295394 4762 scope.go:117] "RemoveContainer" containerID="79a68631797d39a6dabc1a2d4b27c941e978e93a2739d70214c37261632d2d2c" Oct 09 15:09:28 crc kubenswrapper[4762]: I1009 15:09:28.341863 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-s4gpx"] Oct 09 15:09:28 crc kubenswrapper[4762]: I1009 15:09:28.345235 4762 scope.go:117] "RemoveContainer" containerID="10371e02e7d11b2c3e8c0fe2ca4cdd72226f3df5ac8cb8610e00bd2d6a3014da" Oct 09 15:09:28 crc kubenswrapper[4762]: E1009 15:09:28.345623 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"10371e02e7d11b2c3e8c0fe2ca4cdd72226f3df5ac8cb8610e00bd2d6a3014da\": container with ID starting with 10371e02e7d11b2c3e8c0fe2ca4cdd72226f3df5ac8cb8610e00bd2d6a3014da not found: ID does not exist" containerID="10371e02e7d11b2c3e8c0fe2ca4cdd72226f3df5ac8cb8610e00bd2d6a3014da" Oct 09 15:09:28 crc kubenswrapper[4762]: I1009 15:09:28.345837 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"10371e02e7d11b2c3e8c0fe2ca4cdd72226f3df5ac8cb8610e00bd2d6a3014da"} err="failed to get container status \"10371e02e7d11b2c3e8c0fe2ca4cdd72226f3df5ac8cb8610e00bd2d6a3014da\": rpc error: code = NotFound desc = could not find container \"10371e02e7d11b2c3e8c0fe2ca4cdd72226f3df5ac8cb8610e00bd2d6a3014da\": container with ID starting with 10371e02e7d11b2c3e8c0fe2ca4cdd72226f3df5ac8cb8610e00bd2d6a3014da not found: ID does not exist" Oct 09 15:09:28 crc kubenswrapper[4762]: I1009 15:09:28.345867 4762 scope.go:117] "RemoveContainer" containerID="307302de87f75d967e3f04fb2a3a2fa77d601f674376a3168b4d82656a6570a8" Oct 09 15:09:28 crc kubenswrapper[4762]: E1009 15:09:28.347028 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"307302de87f75d967e3f04fb2a3a2fa77d601f674376a3168b4d82656a6570a8\": container with ID starting with 307302de87f75d967e3f04fb2a3a2fa77d601f674376a3168b4d82656a6570a8 not found: ID does not exist" containerID="307302de87f75d967e3f04fb2a3a2fa77d601f674376a3168b4d82656a6570a8" Oct 09 15:09:28 crc kubenswrapper[4762]: I1009 15:09:28.347062 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"307302de87f75d967e3f04fb2a3a2fa77d601f674376a3168b4d82656a6570a8"} err="failed to get container status \"307302de87f75d967e3f04fb2a3a2fa77d601f674376a3168b4d82656a6570a8\": rpc error: code = NotFound desc = could not find container \"307302de87f75d967e3f04fb2a3a2fa77d601f674376a3168b4d82656a6570a8\": container with ID starting with 307302de87f75d967e3f04fb2a3a2fa77d601f674376a3168b4d82656a6570a8 not found: ID does not exist" Oct 09 15:09:28 crc kubenswrapper[4762]: I1009 15:09:28.347083 4762 scope.go:117] "RemoveContainer" containerID="79a68631797d39a6dabc1a2d4b27c941e978e93a2739d70214c37261632d2d2c" Oct 09 15:09:28 crc kubenswrapper[4762]: E1009 15:09:28.347463 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"79a68631797d39a6dabc1a2d4b27c941e978e93a2739d70214c37261632d2d2c\": container with ID starting with 
79a68631797d39a6dabc1a2d4b27c941e978e93a2739d70214c37261632d2d2c not found: ID does not exist" containerID="79a68631797d39a6dabc1a2d4b27c941e978e93a2739d70214c37261632d2d2c" Oct 09 15:09:28 crc kubenswrapper[4762]: I1009 15:09:28.347501 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"79a68631797d39a6dabc1a2d4b27c941e978e93a2739d70214c37261632d2d2c"} err="failed to get container status \"79a68631797d39a6dabc1a2d4b27c941e978e93a2739d70214c37261632d2d2c\": rpc error: code = NotFound desc = could not find container \"79a68631797d39a6dabc1a2d4b27c941e978e93a2739d70214c37261632d2d2c\": container with ID starting with 79a68631797d39a6dabc1a2d4b27c941e978e93a2739d70214c37261632d2d2c not found: ID does not exist" Oct 09 15:09:28 crc kubenswrapper[4762]: I1009 15:09:28.350622 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-s4gpx"] Oct 09 15:09:28 crc kubenswrapper[4762]: I1009 15:09:28.976945 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1b32f5b2-26c0-402f-b334-f155dd37b645" path="/var/lib/kubelet/pods/1b32f5b2-26c0-402f-b334-f155dd37b645/volumes" Oct 09 15:09:31 crc kubenswrapper[4762]: I1009 15:09:31.280413 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-6df886d647-szb9q" Oct 09 15:09:31 crc kubenswrapper[4762]: I1009 15:09:31.282806 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-6df886d647-szb9q" Oct 09 15:09:31 crc kubenswrapper[4762]: I1009 15:09:31.966246 4762 scope.go:117] "RemoveContainer" containerID="35c7aebddc7d19d2cc79200fbf40a9f94ad48013c10b612bd476e919aed06c38" Oct 09 15:09:31 crc kubenswrapper[4762]: E1009 15:09:31.966614 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:09:32 crc kubenswrapper[4762]: I1009 15:09:32.513027 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-a150-account-create-k9n99"] Oct 09 15:09:32 crc kubenswrapper[4762]: E1009 15:09:32.513669 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b354ad3f-4a44-45e4-b3ff-f3efce2b0640" containerName="mariadb-database-create" Oct 09 15:09:32 crc kubenswrapper[4762]: I1009 15:09:32.513721 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="b354ad3f-4a44-45e4-b3ff-f3efce2b0640" containerName="mariadb-database-create" Oct 09 15:09:32 crc kubenswrapper[4762]: E1009 15:09:32.513757 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1b32f5b2-26c0-402f-b334-f155dd37b645" containerName="registry-server" Oct 09 15:09:32 crc kubenswrapper[4762]: I1009 15:09:32.513767 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="1b32f5b2-26c0-402f-b334-f155dd37b645" containerName="registry-server" Oct 09 15:09:32 crc kubenswrapper[4762]: E1009 15:09:32.513782 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1b32f5b2-26c0-402f-b334-f155dd37b645" containerName="extract-utilities" Oct 09 15:09:32 crc kubenswrapper[4762]: I1009 15:09:32.513790 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="1b32f5b2-26c0-402f-b334-f155dd37b645" 
containerName="extract-utilities" Oct 09 15:09:32 crc kubenswrapper[4762]: E1009 15:09:32.513820 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1b32f5b2-26c0-402f-b334-f155dd37b645" containerName="extract-content" Oct 09 15:09:32 crc kubenswrapper[4762]: I1009 15:09:32.513827 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="1b32f5b2-26c0-402f-b334-f155dd37b645" containerName="extract-content" Oct 09 15:09:32 crc kubenswrapper[4762]: I1009 15:09:32.514095 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="1b32f5b2-26c0-402f-b334-f155dd37b645" containerName="registry-server" Oct 09 15:09:32 crc kubenswrapper[4762]: I1009 15:09:32.514122 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="b354ad3f-4a44-45e4-b3ff-f3efce2b0640" containerName="mariadb-database-create" Oct 09 15:09:32 crc kubenswrapper[4762]: I1009 15:09:32.514997 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-a150-account-create-k9n99" Oct 09 15:09:32 crc kubenswrapper[4762]: I1009 15:09:32.517434 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-db-secret" Oct 09 15:09:32 crc kubenswrapper[4762]: I1009 15:09:32.524692 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-a150-account-create-k9n99"] Oct 09 15:09:32 crc kubenswrapper[4762]: I1009 15:09:32.626928 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-226d2\" (UniqueName: \"kubernetes.io/projected/6e41efec-866f-49b8-b274-9c76764b4833-kube-api-access-226d2\") pod \"heat-a150-account-create-k9n99\" (UID: \"6e41efec-866f-49b8-b274-9c76764b4833\") " pod="openstack/heat-a150-account-create-k9n99" Oct 09 15:09:32 crc kubenswrapper[4762]: I1009 15:09:32.728581 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-226d2\" (UniqueName: \"kubernetes.io/projected/6e41efec-866f-49b8-b274-9c76764b4833-kube-api-access-226d2\") pod \"heat-a150-account-create-k9n99\" (UID: \"6e41efec-866f-49b8-b274-9c76764b4833\") " pod="openstack/heat-a150-account-create-k9n99" Oct 09 15:09:32 crc kubenswrapper[4762]: I1009 15:09:32.755330 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-226d2\" (UniqueName: \"kubernetes.io/projected/6e41efec-866f-49b8-b274-9c76764b4833-kube-api-access-226d2\") pod \"heat-a150-account-create-k9n99\" (UID: \"6e41efec-866f-49b8-b274-9c76764b4833\") " pod="openstack/heat-a150-account-create-k9n99" Oct 09 15:09:32 crc kubenswrapper[4762]: I1009 15:09:32.847574 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-a150-account-create-k9n99" Oct 09 15:09:33 crc kubenswrapper[4762]: I1009 15:09:33.327985 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-a150-account-create-k9n99"] Oct 09 15:09:34 crc kubenswrapper[4762]: I1009 15:09:34.165434 4762 scope.go:117] "RemoveContainer" containerID="89770725c7eb395315bb62ef60eb314a9245b790ff0d1a72a970bf6e598fd56e" Oct 09 15:09:34 crc kubenswrapper[4762]: I1009 15:09:34.213665 4762 scope.go:117] "RemoveContainer" containerID="ea6ed944f07582bbe022cce81110914fdb47c15a632919a2a48e4a6bd9487343" Oct 09 15:09:34 crc kubenswrapper[4762]: I1009 15:09:34.263734 4762 scope.go:117] "RemoveContainer" containerID="fb98d1fd0e827f298ba45d81a9c2b4bdc163710db54f80ba5f293757ffcad614" Oct 09 15:09:34 crc kubenswrapper[4762]: I1009 15:09:34.332203 4762 generic.go:334] "Generic (PLEG): container finished" podID="6e41efec-866f-49b8-b274-9c76764b4833" containerID="d7afa02855ceee3406230000672f1dc9089ed504b4220214955d5e6b409aec5e" exitCode=0 Oct 09 15:09:34 crc kubenswrapper[4762]: I1009 15:09:34.332373 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-a150-account-create-k9n99" event={"ID":"6e41efec-866f-49b8-b274-9c76764b4833","Type":"ContainerDied","Data":"d7afa02855ceee3406230000672f1dc9089ed504b4220214955d5e6b409aec5e"} Oct 09 15:09:34 crc kubenswrapper[4762]: I1009 15:09:34.332794 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-a150-account-create-k9n99" event={"ID":"6e41efec-866f-49b8-b274-9c76764b4833","Type":"ContainerStarted","Data":"4ef5c050defaa8bada30e8e223c191295e2cdd471fc10953e2048e9c61f1c932"} Oct 09 15:09:34 crc kubenswrapper[4762]: I1009 15:09:34.345025 4762 scope.go:117] "RemoveContainer" containerID="2dfba6d76801e0075d34d7853c0c6081fce89e07b4709ddaf1234b73b404c0b0" Oct 09 15:09:35 crc kubenswrapper[4762]: I1009 15:09:35.674496 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-a150-account-create-k9n99" Oct 09 15:09:35 crc kubenswrapper[4762]: I1009 15:09:35.805727 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-226d2\" (UniqueName: \"kubernetes.io/projected/6e41efec-866f-49b8-b274-9c76764b4833-kube-api-access-226d2\") pod \"6e41efec-866f-49b8-b274-9c76764b4833\" (UID: \"6e41efec-866f-49b8-b274-9c76764b4833\") " Oct 09 15:09:35 crc kubenswrapper[4762]: I1009 15:09:35.812089 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6e41efec-866f-49b8-b274-9c76764b4833-kube-api-access-226d2" (OuterVolumeSpecName: "kube-api-access-226d2") pod "6e41efec-866f-49b8-b274-9c76764b4833" (UID: "6e41efec-866f-49b8-b274-9c76764b4833"). InnerVolumeSpecName "kube-api-access-226d2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 15:09:35 crc kubenswrapper[4762]: I1009 15:09:35.908264 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-226d2\" (UniqueName: \"kubernetes.io/projected/6e41efec-866f-49b8-b274-9c76764b4833-kube-api-access-226d2\") on node \"crc\" DevicePath \"\"" Oct 09 15:09:36 crc kubenswrapper[4762]: I1009 15:09:36.354073 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-a150-account-create-k9n99" event={"ID":"6e41efec-866f-49b8-b274-9c76764b4833","Type":"ContainerDied","Data":"4ef5c050defaa8bada30e8e223c191295e2cdd471fc10953e2048e9c61f1c932"} Oct 09 15:09:36 crc kubenswrapper[4762]: I1009 15:09:36.354115 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4ef5c050defaa8bada30e8e223c191295e2cdd471fc10953e2048e9c61f1c932" Oct 09 15:09:36 crc kubenswrapper[4762]: I1009 15:09:36.354167 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-a150-account-create-k9n99" Oct 09 15:09:37 crc kubenswrapper[4762]: I1009 15:09:37.575860 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-db-sync-b8d28"] Oct 09 15:09:37 crc kubenswrapper[4762]: E1009 15:09:37.576590 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e41efec-866f-49b8-b274-9c76764b4833" containerName="mariadb-account-create" Oct 09 15:09:37 crc kubenswrapper[4762]: I1009 15:09:37.576607 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e41efec-866f-49b8-b274-9c76764b4833" containerName="mariadb-account-create" Oct 09 15:09:37 crc kubenswrapper[4762]: I1009 15:09:37.576944 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="6e41efec-866f-49b8-b274-9c76764b4833" containerName="mariadb-account-create" Oct 09 15:09:37 crc kubenswrapper[4762]: I1009 15:09:37.577780 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-db-sync-b8d28" Oct 09 15:09:37 crc kubenswrapper[4762]: I1009 15:09:37.580661 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-heat-dockercfg-vn8s6" Oct 09 15:09:37 crc kubenswrapper[4762]: I1009 15:09:37.580660 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-config-data" Oct 09 15:09:37 crc kubenswrapper[4762]: I1009 15:09:37.594608 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-db-sync-b8d28"] Oct 09 15:09:37 crc kubenswrapper[4762]: I1009 15:09:37.642572 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vkh89\" (UniqueName: \"kubernetes.io/projected/f5480a9a-8108-4a6a-84ee-41811ec3ae31-kube-api-access-vkh89\") pod \"heat-db-sync-b8d28\" (UID: \"f5480a9a-8108-4a6a-84ee-41811ec3ae31\") " pod="openstack/heat-db-sync-b8d28" Oct 09 15:09:37 crc kubenswrapper[4762]: I1009 15:09:37.642803 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f5480a9a-8108-4a6a-84ee-41811ec3ae31-config-data\") pod \"heat-db-sync-b8d28\" (UID: \"f5480a9a-8108-4a6a-84ee-41811ec3ae31\") " pod="openstack/heat-db-sync-b8d28" Oct 09 15:09:37 crc kubenswrapper[4762]: I1009 15:09:37.642847 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5480a9a-8108-4a6a-84ee-41811ec3ae31-combined-ca-bundle\") pod \"heat-db-sync-b8d28\" (UID: \"f5480a9a-8108-4a6a-84ee-41811ec3ae31\") " pod="openstack/heat-db-sync-b8d28" Oct 09 15:09:37 crc kubenswrapper[4762]: I1009 15:09:37.745035 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vkh89\" (UniqueName: \"kubernetes.io/projected/f5480a9a-8108-4a6a-84ee-41811ec3ae31-kube-api-access-vkh89\") pod \"heat-db-sync-b8d28\" (UID: \"f5480a9a-8108-4a6a-84ee-41811ec3ae31\") " pod="openstack/heat-db-sync-b8d28" Oct 09 15:09:37 crc kubenswrapper[4762]: I1009 15:09:37.745156 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f5480a9a-8108-4a6a-84ee-41811ec3ae31-config-data\") pod \"heat-db-sync-b8d28\" (UID: \"f5480a9a-8108-4a6a-84ee-41811ec3ae31\") " pod="openstack/heat-db-sync-b8d28" Oct 09 15:09:37 crc kubenswrapper[4762]: I1009 15:09:37.746201 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5480a9a-8108-4a6a-84ee-41811ec3ae31-combined-ca-bundle\") pod \"heat-db-sync-b8d28\" (UID: \"f5480a9a-8108-4a6a-84ee-41811ec3ae31\") " pod="openstack/heat-db-sync-b8d28" Oct 09 15:09:37 crc kubenswrapper[4762]: I1009 15:09:37.751492 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5480a9a-8108-4a6a-84ee-41811ec3ae31-combined-ca-bundle\") pod \"heat-db-sync-b8d28\" (UID: \"f5480a9a-8108-4a6a-84ee-41811ec3ae31\") " pod="openstack/heat-db-sync-b8d28" Oct 09 15:09:37 crc kubenswrapper[4762]: I1009 15:09:37.764829 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f5480a9a-8108-4a6a-84ee-41811ec3ae31-config-data\") pod \"heat-db-sync-b8d28\" (UID: \"f5480a9a-8108-4a6a-84ee-41811ec3ae31\") " pod="openstack/heat-db-sync-b8d28" 
Oct 09 15:09:37 crc kubenswrapper[4762]: I1009 15:09:37.766049 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vkh89\" (UniqueName: \"kubernetes.io/projected/f5480a9a-8108-4a6a-84ee-41811ec3ae31-kube-api-access-vkh89\") pod \"heat-db-sync-b8d28\" (UID: \"f5480a9a-8108-4a6a-84ee-41811ec3ae31\") " pod="openstack/heat-db-sync-b8d28" Oct 09 15:09:37 crc kubenswrapper[4762]: I1009 15:09:37.909861 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-sync-b8d28" Oct 09 15:09:38 crc kubenswrapper[4762]: I1009 15:09:38.415372 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-db-sync-b8d28"] Oct 09 15:09:38 crc kubenswrapper[4762]: W1009 15:09:38.422767 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf5480a9a_8108_4a6a_84ee_41811ec3ae31.slice/crio-7ac24fd3d2718b3ca4ef142cd2495a53b2c11b003eb40bc2bb15c45e96dcaf11 WatchSource:0}: Error finding container 7ac24fd3d2718b3ca4ef142cd2495a53b2c11b003eb40bc2bb15c45e96dcaf11: Status 404 returned error can't find the container with id 7ac24fd3d2718b3ca4ef142cd2495a53b2c11b003eb40bc2bb15c45e96dcaf11 Oct 09 15:09:39 crc kubenswrapper[4762]: I1009 15:09:39.388409 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-sync-b8d28" event={"ID":"f5480a9a-8108-4a6a-84ee-41811ec3ae31","Type":"ContainerStarted","Data":"7ac24fd3d2718b3ca4ef142cd2495a53b2c11b003eb40bc2bb15c45e96dcaf11"} Oct 09 15:09:41 crc kubenswrapper[4762]: I1009 15:09:41.283368 4762 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-6df886d647-szb9q" podUID="d418fd45-a3dc-46ca-8d2b-3e82ba33d483" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.121:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.121:8080: connect: connection refused" Oct 09 15:09:42 crc kubenswrapper[4762]: I1009 15:09:42.965631 4762 scope.go:117] "RemoveContainer" containerID="35c7aebddc7d19d2cc79200fbf40a9f94ad48013c10b612bd476e919aed06c38" Oct 09 15:09:42 crc kubenswrapper[4762]: E1009 15:09:42.966550 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:09:45 crc kubenswrapper[4762]: I1009 15:09:45.447819 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-sync-b8d28" event={"ID":"f5480a9a-8108-4a6a-84ee-41811ec3ae31","Type":"ContainerStarted","Data":"da6659d3acaebd20bdd87d5d466e5ffc912ceadf589a349c1dccaf5de26ca303"} Oct 09 15:09:45 crc kubenswrapper[4762]: I1009 15:09:45.475142 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-db-sync-b8d28" podStartSLOduration=1.783689664 podStartE2EDuration="8.47512416s" podCreationTimestamp="2025-10-09 15:09:37 +0000 UTC" firstStartedPulling="2025-10-09 15:09:38.424926454 +0000 UTC m=+6254.198717483" lastFinishedPulling="2025-10-09 15:09:45.11636092 +0000 UTC m=+6260.890151979" observedRunningTime="2025-10-09 15:09:45.464783581 +0000 UTC m=+6261.238574620" watchObservedRunningTime="2025-10-09 15:09:45.47512416 +0000 UTC m=+6261.248915199" Oct 09 15:09:47 crc 
kubenswrapper[4762]: I1009 15:09:47.467113 4762 generic.go:334] "Generic (PLEG): container finished" podID="f5480a9a-8108-4a6a-84ee-41811ec3ae31" containerID="da6659d3acaebd20bdd87d5d466e5ffc912ceadf589a349c1dccaf5de26ca303" exitCode=0 Oct 09 15:09:47 crc kubenswrapper[4762]: I1009 15:09:47.467361 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-sync-b8d28" event={"ID":"f5480a9a-8108-4a6a-84ee-41811ec3ae31","Type":"ContainerDied","Data":"da6659d3acaebd20bdd87d5d466e5ffc912ceadf589a349c1dccaf5de26ca303"} Oct 09 15:09:48 crc kubenswrapper[4762]: I1009 15:09:48.848032 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-sync-b8d28" Oct 09 15:09:48 crc kubenswrapper[4762]: I1009 15:09:48.986359 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f5480a9a-8108-4a6a-84ee-41811ec3ae31-config-data\") pod \"f5480a9a-8108-4a6a-84ee-41811ec3ae31\" (UID: \"f5480a9a-8108-4a6a-84ee-41811ec3ae31\") " Oct 09 15:09:48 crc kubenswrapper[4762]: I1009 15:09:48.986485 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5480a9a-8108-4a6a-84ee-41811ec3ae31-combined-ca-bundle\") pod \"f5480a9a-8108-4a6a-84ee-41811ec3ae31\" (UID: \"f5480a9a-8108-4a6a-84ee-41811ec3ae31\") " Oct 09 15:09:48 crc kubenswrapper[4762]: I1009 15:09:48.986529 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vkh89\" (UniqueName: \"kubernetes.io/projected/f5480a9a-8108-4a6a-84ee-41811ec3ae31-kube-api-access-vkh89\") pod \"f5480a9a-8108-4a6a-84ee-41811ec3ae31\" (UID: \"f5480a9a-8108-4a6a-84ee-41811ec3ae31\") " Oct 09 15:09:48 crc kubenswrapper[4762]: I1009 15:09:48.991842 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f5480a9a-8108-4a6a-84ee-41811ec3ae31-kube-api-access-vkh89" (OuterVolumeSpecName: "kube-api-access-vkh89") pod "f5480a9a-8108-4a6a-84ee-41811ec3ae31" (UID: "f5480a9a-8108-4a6a-84ee-41811ec3ae31"). InnerVolumeSpecName "kube-api-access-vkh89". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 15:09:49 crc kubenswrapper[4762]: I1009 15:09:49.013430 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5480a9a-8108-4a6a-84ee-41811ec3ae31-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f5480a9a-8108-4a6a-84ee-41811ec3ae31" (UID: "f5480a9a-8108-4a6a-84ee-41811ec3ae31"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:09:49 crc kubenswrapper[4762]: I1009 15:09:49.058767 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5480a9a-8108-4a6a-84ee-41811ec3ae31-config-data" (OuterVolumeSpecName: "config-data") pod "f5480a9a-8108-4a6a-84ee-41811ec3ae31" (UID: "f5480a9a-8108-4a6a-84ee-41811ec3ae31"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:09:49 crc kubenswrapper[4762]: I1009 15:09:49.089533 4762 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f5480a9a-8108-4a6a-84ee-41811ec3ae31-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 15:09:49 crc kubenswrapper[4762]: I1009 15:09:49.089583 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5480a9a-8108-4a6a-84ee-41811ec3ae31-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 15:09:49 crc kubenswrapper[4762]: I1009 15:09:49.089594 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vkh89\" (UniqueName: \"kubernetes.io/projected/f5480a9a-8108-4a6a-84ee-41811ec3ae31-kube-api-access-vkh89\") on node \"crc\" DevicePath \"\"" Oct 09 15:09:49 crc kubenswrapper[4762]: I1009 15:09:49.483368 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-sync-b8d28" event={"ID":"f5480a9a-8108-4a6a-84ee-41811ec3ae31","Type":"ContainerDied","Data":"7ac24fd3d2718b3ca4ef142cd2495a53b2c11b003eb40bc2bb15c45e96dcaf11"} Oct 09 15:09:49 crc kubenswrapper[4762]: I1009 15:09:49.483414 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7ac24fd3d2718b3ca4ef142cd2495a53b2c11b003eb40bc2bb15c45e96dcaf11" Oct 09 15:09:49 crc kubenswrapper[4762]: I1009 15:09:49.483420 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-sync-b8d28" Oct 09 15:09:50 crc kubenswrapper[4762]: I1009 15:09:50.902261 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-engine-64d559cb64-b8slk"] Oct 09 15:09:50 crc kubenswrapper[4762]: E1009 15:09:50.902961 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5480a9a-8108-4a6a-84ee-41811ec3ae31" containerName="heat-db-sync" Oct 09 15:09:50 crc kubenswrapper[4762]: I1009 15:09:50.902974 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5480a9a-8108-4a6a-84ee-41811ec3ae31" containerName="heat-db-sync" Oct 09 15:09:50 crc kubenswrapper[4762]: I1009 15:09:50.903141 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="f5480a9a-8108-4a6a-84ee-41811ec3ae31" containerName="heat-db-sync" Oct 09 15:09:50 crc kubenswrapper[4762]: I1009 15:09:50.903880 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-engine-64d559cb64-b8slk" Oct 09 15:09:50 crc kubenswrapper[4762]: I1009 15:09:50.912523 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-engine-64d559cb64-b8slk"] Oct 09 15:09:50 crc kubenswrapper[4762]: I1009 15:09:50.913686 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-heat-dockercfg-vn8s6" Oct 09 15:09:50 crc kubenswrapper[4762]: I1009 15:09:50.913863 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-engine-config-data" Oct 09 15:09:50 crc kubenswrapper[4762]: I1009 15:09:50.913885 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-config-data" Oct 09 15:09:51 crc kubenswrapper[4762]: I1009 15:09:51.029583 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7f131e3f-02b4-400e-82f3-2b7c22a93c6d-config-data\") pod \"heat-engine-64d559cb64-b8slk\" (UID: \"7f131e3f-02b4-400e-82f3-2b7c22a93c6d\") " pod="openstack/heat-engine-64d559cb64-b8slk" Oct 09 15:09:51 crc kubenswrapper[4762]: I1009 15:09:51.029682 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7f131e3f-02b4-400e-82f3-2b7c22a93c6d-combined-ca-bundle\") pod \"heat-engine-64d559cb64-b8slk\" (UID: \"7f131e3f-02b4-400e-82f3-2b7c22a93c6d\") " pod="openstack/heat-engine-64d559cb64-b8slk" Oct 09 15:09:51 crc kubenswrapper[4762]: I1009 15:09:51.029808 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ggb4q\" (UniqueName: \"kubernetes.io/projected/7f131e3f-02b4-400e-82f3-2b7c22a93c6d-kube-api-access-ggb4q\") pod \"heat-engine-64d559cb64-b8slk\" (UID: \"7f131e3f-02b4-400e-82f3-2b7c22a93c6d\") " pod="openstack/heat-engine-64d559cb64-b8slk" Oct 09 15:09:51 crc kubenswrapper[4762]: I1009 15:09:51.029855 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7f131e3f-02b4-400e-82f3-2b7c22a93c6d-config-data-custom\") pod \"heat-engine-64d559cb64-b8slk\" (UID: \"7f131e3f-02b4-400e-82f3-2b7c22a93c6d\") " pod="openstack/heat-engine-64d559cb64-b8slk" Oct 09 15:09:51 crc kubenswrapper[4762]: I1009 15:09:51.086168 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-cfnapi-575dbc5777-btq8r"] Oct 09 15:09:51 crc kubenswrapper[4762]: I1009 15:09:51.093542 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-cfnapi-575dbc5777-btq8r" Oct 09 15:09:51 crc kubenswrapper[4762]: I1009 15:09:51.095231 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-cfnapi-config-data" Oct 09 15:09:51 crc kubenswrapper[4762]: I1009 15:09:51.129132 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-api-dfb6f84ff-tqggt"] Oct 09 15:09:51 crc kubenswrapper[4762]: I1009 15:09:51.130652 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-api-dfb6f84ff-tqggt" Oct 09 15:09:51 crc kubenswrapper[4762]: I1009 15:09:51.135480 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-api-config-data" Oct 09 15:09:51 crc kubenswrapper[4762]: I1009 15:09:51.137099 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ggb4q\" (UniqueName: \"kubernetes.io/projected/7f131e3f-02b4-400e-82f3-2b7c22a93c6d-kube-api-access-ggb4q\") pod \"heat-engine-64d559cb64-b8slk\" (UID: \"7f131e3f-02b4-400e-82f3-2b7c22a93c6d\") " pod="openstack/heat-engine-64d559cb64-b8slk" Oct 09 15:09:51 crc kubenswrapper[4762]: I1009 15:09:51.137182 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7f131e3f-02b4-400e-82f3-2b7c22a93c6d-config-data-custom\") pod \"heat-engine-64d559cb64-b8slk\" (UID: \"7f131e3f-02b4-400e-82f3-2b7c22a93c6d\") " pod="openstack/heat-engine-64d559cb64-b8slk" Oct 09 15:09:51 crc kubenswrapper[4762]: I1009 15:09:51.137221 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7f131e3f-02b4-400e-82f3-2b7c22a93c6d-config-data\") pod \"heat-engine-64d559cb64-b8slk\" (UID: \"7f131e3f-02b4-400e-82f3-2b7c22a93c6d\") " pod="openstack/heat-engine-64d559cb64-b8slk" Oct 09 15:09:51 crc kubenswrapper[4762]: I1009 15:09:51.137253 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7f131e3f-02b4-400e-82f3-2b7c22a93c6d-combined-ca-bundle\") pod \"heat-engine-64d559cb64-b8slk\" (UID: \"7f131e3f-02b4-400e-82f3-2b7c22a93c6d\") " pod="openstack/heat-engine-64d559cb64-b8slk" Oct 09 15:09:51 crc kubenswrapper[4762]: I1009 15:09:51.143918 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7f131e3f-02b4-400e-82f3-2b7c22a93c6d-config-data\") pod \"heat-engine-64d559cb64-b8slk\" (UID: \"7f131e3f-02b4-400e-82f3-2b7c22a93c6d\") " pod="openstack/heat-engine-64d559cb64-b8slk" Oct 09 15:09:51 crc kubenswrapper[4762]: I1009 15:09:51.145893 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7f131e3f-02b4-400e-82f3-2b7c22a93c6d-config-data-custom\") pod \"heat-engine-64d559cb64-b8slk\" (UID: \"7f131e3f-02b4-400e-82f3-2b7c22a93c6d\") " pod="openstack/heat-engine-64d559cb64-b8slk" Oct 09 15:09:51 crc kubenswrapper[4762]: I1009 15:09:51.151315 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-cfnapi-575dbc5777-btq8r"] Oct 09 15:09:51 crc kubenswrapper[4762]: I1009 15:09:51.158730 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7f131e3f-02b4-400e-82f3-2b7c22a93c6d-combined-ca-bundle\") pod \"heat-engine-64d559cb64-b8slk\" (UID: \"7f131e3f-02b4-400e-82f3-2b7c22a93c6d\") " pod="openstack/heat-engine-64d559cb64-b8slk" Oct 09 15:09:51 crc kubenswrapper[4762]: I1009 15:09:51.159302 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ggb4q\" (UniqueName: \"kubernetes.io/projected/7f131e3f-02b4-400e-82f3-2b7c22a93c6d-kube-api-access-ggb4q\") pod \"heat-engine-64d559cb64-b8slk\" (UID: \"7f131e3f-02b4-400e-82f3-2b7c22a93c6d\") " pod="openstack/heat-engine-64d559cb64-b8slk" Oct 09 15:09:51 crc kubenswrapper[4762]: I1009 
15:09:51.163100 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-api-dfb6f84ff-tqggt"] Oct 09 15:09:51 crc kubenswrapper[4762]: I1009 15:09:51.229646 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-engine-64d559cb64-b8slk" Oct 09 15:09:51 crc kubenswrapper[4762]: I1009 15:09:51.241739 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/81effce6-22e8-4a42-9087-decb53950618-combined-ca-bundle\") pod \"heat-cfnapi-575dbc5777-btq8r\" (UID: \"81effce6-22e8-4a42-9087-decb53950618\") " pod="openstack/heat-cfnapi-575dbc5777-btq8r" Oct 09 15:09:51 crc kubenswrapper[4762]: I1009 15:09:51.241788 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b6sbj\" (UniqueName: \"kubernetes.io/projected/84222e59-350f-4965-9269-c7d697837f75-kube-api-access-b6sbj\") pod \"heat-api-dfb6f84ff-tqggt\" (UID: \"84222e59-350f-4965-9269-c7d697837f75\") " pod="openstack/heat-api-dfb6f84ff-tqggt" Oct 09 15:09:51 crc kubenswrapper[4762]: I1009 15:09:51.241822 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/81effce6-22e8-4a42-9087-decb53950618-config-data\") pod \"heat-cfnapi-575dbc5777-btq8r\" (UID: \"81effce6-22e8-4a42-9087-decb53950618\") " pod="openstack/heat-cfnapi-575dbc5777-btq8r" Oct 09 15:09:51 crc kubenswrapper[4762]: I1009 15:09:51.241854 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/84222e59-350f-4965-9269-c7d697837f75-config-data\") pod \"heat-api-dfb6f84ff-tqggt\" (UID: \"84222e59-350f-4965-9269-c7d697837f75\") " pod="openstack/heat-api-dfb6f84ff-tqggt" Oct 09 15:09:51 crc kubenswrapper[4762]: I1009 15:09:51.241890 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/84222e59-350f-4965-9269-c7d697837f75-combined-ca-bundle\") pod \"heat-api-dfb6f84ff-tqggt\" (UID: \"84222e59-350f-4965-9269-c7d697837f75\") " pod="openstack/heat-api-dfb6f84ff-tqggt" Oct 09 15:09:51 crc kubenswrapper[4762]: I1009 15:09:51.241906 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tqqrm\" (UniqueName: \"kubernetes.io/projected/81effce6-22e8-4a42-9087-decb53950618-kube-api-access-tqqrm\") pod \"heat-cfnapi-575dbc5777-btq8r\" (UID: \"81effce6-22e8-4a42-9087-decb53950618\") " pod="openstack/heat-cfnapi-575dbc5777-btq8r" Oct 09 15:09:51 crc kubenswrapper[4762]: I1009 15:09:51.241980 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/84222e59-350f-4965-9269-c7d697837f75-config-data-custom\") pod \"heat-api-dfb6f84ff-tqggt\" (UID: \"84222e59-350f-4965-9269-c7d697837f75\") " pod="openstack/heat-api-dfb6f84ff-tqggt" Oct 09 15:09:51 crc kubenswrapper[4762]: I1009 15:09:51.242055 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/81effce6-22e8-4a42-9087-decb53950618-config-data-custom\") pod \"heat-cfnapi-575dbc5777-btq8r\" (UID: \"81effce6-22e8-4a42-9087-decb53950618\") " 
pod="openstack/heat-cfnapi-575dbc5777-btq8r" Oct 09 15:09:51 crc kubenswrapper[4762]: I1009 15:09:51.343449 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/84222e59-350f-4965-9269-c7d697837f75-config-data-custom\") pod \"heat-api-dfb6f84ff-tqggt\" (UID: \"84222e59-350f-4965-9269-c7d697837f75\") " pod="openstack/heat-api-dfb6f84ff-tqggt" Oct 09 15:09:51 crc kubenswrapper[4762]: I1009 15:09:51.343503 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/81effce6-22e8-4a42-9087-decb53950618-config-data-custom\") pod \"heat-cfnapi-575dbc5777-btq8r\" (UID: \"81effce6-22e8-4a42-9087-decb53950618\") " pod="openstack/heat-cfnapi-575dbc5777-btq8r" Oct 09 15:09:51 crc kubenswrapper[4762]: I1009 15:09:51.343549 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/81effce6-22e8-4a42-9087-decb53950618-combined-ca-bundle\") pod \"heat-cfnapi-575dbc5777-btq8r\" (UID: \"81effce6-22e8-4a42-9087-decb53950618\") " pod="openstack/heat-cfnapi-575dbc5777-btq8r" Oct 09 15:09:51 crc kubenswrapper[4762]: I1009 15:09:51.343585 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b6sbj\" (UniqueName: \"kubernetes.io/projected/84222e59-350f-4965-9269-c7d697837f75-kube-api-access-b6sbj\") pod \"heat-api-dfb6f84ff-tqggt\" (UID: \"84222e59-350f-4965-9269-c7d697837f75\") " pod="openstack/heat-api-dfb6f84ff-tqggt" Oct 09 15:09:51 crc kubenswrapper[4762]: I1009 15:09:51.343619 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/81effce6-22e8-4a42-9087-decb53950618-config-data\") pod \"heat-cfnapi-575dbc5777-btq8r\" (UID: \"81effce6-22e8-4a42-9087-decb53950618\") " pod="openstack/heat-cfnapi-575dbc5777-btq8r" Oct 09 15:09:51 crc kubenswrapper[4762]: I1009 15:09:51.344436 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/84222e59-350f-4965-9269-c7d697837f75-config-data\") pod \"heat-api-dfb6f84ff-tqggt\" (UID: \"84222e59-350f-4965-9269-c7d697837f75\") " pod="openstack/heat-api-dfb6f84ff-tqggt" Oct 09 15:09:51 crc kubenswrapper[4762]: I1009 15:09:51.344560 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/84222e59-350f-4965-9269-c7d697837f75-combined-ca-bundle\") pod \"heat-api-dfb6f84ff-tqggt\" (UID: \"84222e59-350f-4965-9269-c7d697837f75\") " pod="openstack/heat-api-dfb6f84ff-tqggt" Oct 09 15:09:51 crc kubenswrapper[4762]: I1009 15:09:51.344592 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tqqrm\" (UniqueName: \"kubernetes.io/projected/81effce6-22e8-4a42-9087-decb53950618-kube-api-access-tqqrm\") pod \"heat-cfnapi-575dbc5777-btq8r\" (UID: \"81effce6-22e8-4a42-9087-decb53950618\") " pod="openstack/heat-cfnapi-575dbc5777-btq8r" Oct 09 15:09:51 crc kubenswrapper[4762]: I1009 15:09:51.348499 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/81effce6-22e8-4a42-9087-decb53950618-combined-ca-bundle\") pod \"heat-cfnapi-575dbc5777-btq8r\" (UID: \"81effce6-22e8-4a42-9087-decb53950618\") " pod="openstack/heat-cfnapi-575dbc5777-btq8r" Oct 09 
15:09:51 crc kubenswrapper[4762]: I1009 15:09:51.348820 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/84222e59-350f-4965-9269-c7d697837f75-config-data-custom\") pod \"heat-api-dfb6f84ff-tqggt\" (UID: \"84222e59-350f-4965-9269-c7d697837f75\") " pod="openstack/heat-api-dfb6f84ff-tqggt" Oct 09 15:09:51 crc kubenswrapper[4762]: I1009 15:09:51.350356 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/81effce6-22e8-4a42-9087-decb53950618-config-data\") pod \"heat-cfnapi-575dbc5777-btq8r\" (UID: \"81effce6-22e8-4a42-9087-decb53950618\") " pod="openstack/heat-cfnapi-575dbc5777-btq8r" Oct 09 15:09:51 crc kubenswrapper[4762]: I1009 15:09:51.354483 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/81effce6-22e8-4a42-9087-decb53950618-config-data-custom\") pod \"heat-cfnapi-575dbc5777-btq8r\" (UID: \"81effce6-22e8-4a42-9087-decb53950618\") " pod="openstack/heat-cfnapi-575dbc5777-btq8r" Oct 09 15:09:51 crc kubenswrapper[4762]: I1009 15:09:51.356347 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/84222e59-350f-4965-9269-c7d697837f75-combined-ca-bundle\") pod \"heat-api-dfb6f84ff-tqggt\" (UID: \"84222e59-350f-4965-9269-c7d697837f75\") " pod="openstack/heat-api-dfb6f84ff-tqggt" Oct 09 15:09:51 crc kubenswrapper[4762]: I1009 15:09:51.360972 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/84222e59-350f-4965-9269-c7d697837f75-config-data\") pod \"heat-api-dfb6f84ff-tqggt\" (UID: \"84222e59-350f-4965-9269-c7d697837f75\") " pod="openstack/heat-api-dfb6f84ff-tqggt" Oct 09 15:09:51 crc kubenswrapper[4762]: I1009 15:09:51.365267 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b6sbj\" (UniqueName: \"kubernetes.io/projected/84222e59-350f-4965-9269-c7d697837f75-kube-api-access-b6sbj\") pod \"heat-api-dfb6f84ff-tqggt\" (UID: \"84222e59-350f-4965-9269-c7d697837f75\") " pod="openstack/heat-api-dfb6f84ff-tqggt" Oct 09 15:09:51 crc kubenswrapper[4762]: I1009 15:09:51.370406 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tqqrm\" (UniqueName: \"kubernetes.io/projected/81effce6-22e8-4a42-9087-decb53950618-kube-api-access-tqqrm\") pod \"heat-cfnapi-575dbc5777-btq8r\" (UID: \"81effce6-22e8-4a42-9087-decb53950618\") " pod="openstack/heat-cfnapi-575dbc5777-btq8r" Oct 09 15:09:51 crc kubenswrapper[4762]: I1009 15:09:51.425579 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-cfnapi-575dbc5777-btq8r" Oct 09 15:09:51 crc kubenswrapper[4762]: I1009 15:09:51.534352 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-api-dfb6f84ff-tqggt" Oct 09 15:09:51 crc kubenswrapper[4762]: I1009 15:09:51.857554 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-engine-64d559cb64-b8slk"] Oct 09 15:09:51 crc kubenswrapper[4762]: I1009 15:09:51.950059 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-cfnapi-575dbc5777-btq8r"] Oct 09 15:09:51 crc kubenswrapper[4762]: W1009 15:09:51.952328 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod81effce6_22e8_4a42_9087_decb53950618.slice/crio-ddb2ff17fc2154bc5853bfc5e364e756a63ff7ea7de09e2181dc3e21f54ae230 WatchSource:0}: Error finding container ddb2ff17fc2154bc5853bfc5e364e756a63ff7ea7de09e2181dc3e21f54ae230: Status 404 returned error can't find the container with id ddb2ff17fc2154bc5853bfc5e364e756a63ff7ea7de09e2181dc3e21f54ae230 Oct 09 15:09:52 crc kubenswrapper[4762]: I1009 15:09:52.109706 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-api-dfb6f84ff-tqggt"] Oct 09 15:09:52 crc kubenswrapper[4762]: W1009 15:09:52.117755 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod84222e59_350f_4965_9269_c7d697837f75.slice/crio-33db186a066697c4dd17f18ca30de7d849a5f1a86de12a0718342b32bf29ef5a WatchSource:0}: Error finding container 33db186a066697c4dd17f18ca30de7d849a5f1a86de12a0718342b32bf29ef5a: Status 404 returned error can't find the container with id 33db186a066697c4dd17f18ca30de7d849a5f1a86de12a0718342b32bf29ef5a Oct 09 15:09:52 crc kubenswrapper[4762]: I1009 15:09:52.512863 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-engine-64d559cb64-b8slk" event={"ID":"7f131e3f-02b4-400e-82f3-2b7c22a93c6d","Type":"ContainerStarted","Data":"41df8d5eab7135ed92e2c902837c473c980a2ff651d26f8295bfffa321b1ce88"} Oct 09 15:09:52 crc kubenswrapper[4762]: I1009 15:09:52.514569 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-api-dfb6f84ff-tqggt" event={"ID":"84222e59-350f-4965-9269-c7d697837f75","Type":"ContainerStarted","Data":"33db186a066697c4dd17f18ca30de7d849a5f1a86de12a0718342b32bf29ef5a"} Oct 09 15:09:52 crc kubenswrapper[4762]: I1009 15:09:52.515711 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cfnapi-575dbc5777-btq8r" event={"ID":"81effce6-22e8-4a42-9087-decb53950618","Type":"ContainerStarted","Data":"ddb2ff17fc2154bc5853bfc5e364e756a63ff7ea7de09e2181dc3e21f54ae230"} Oct 09 15:09:53 crc kubenswrapper[4762]: I1009 15:09:53.530699 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-engine-64d559cb64-b8slk" event={"ID":"7f131e3f-02b4-400e-82f3-2b7c22a93c6d","Type":"ContainerStarted","Data":"0dba969dc7052a0e1b49d87fb82914a08743c71c010aceba3d781e0c9478cc7a"} Oct 09 15:09:53 crc kubenswrapper[4762]: I1009 15:09:53.530958 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/heat-engine-64d559cb64-b8slk" Oct 09 15:09:53 crc kubenswrapper[4762]: I1009 15:09:53.549369 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-engine-64d559cb64-b8slk" podStartSLOduration=3.549351495 podStartE2EDuration="3.549351495s" podCreationTimestamp="2025-10-09 15:09:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 15:09:53.548235606 +0000 UTC m=+6269.322026645" 
watchObservedRunningTime="2025-10-09 15:09:53.549351495 +0000 UTC m=+6269.323142534" Oct 09 15:09:54 crc kubenswrapper[4762]: I1009 15:09:54.123439 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-6df886d647-szb9q" Oct 09 15:09:54 crc kubenswrapper[4762]: I1009 15:09:54.991959 4762 scope.go:117] "RemoveContainer" containerID="35c7aebddc7d19d2cc79200fbf40a9f94ad48013c10b612bd476e919aed06c38" Oct 09 15:09:54 crc kubenswrapper[4762]: E1009 15:09:54.992494 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:09:56 crc kubenswrapper[4762]: I1009 15:09:56.028316 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-6df886d647-szb9q" Oct 09 15:09:56 crc kubenswrapper[4762]: I1009 15:09:56.077845 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-5bdb8689c-79w7r"] Oct 09 15:09:56 crc kubenswrapper[4762]: I1009 15:09:56.078052 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-5bdb8689c-79w7r" podUID="a961ac97-2010-40c4-9c5a-a68222cb2a8b" containerName="horizon-log" containerID="cri-o://19ea4671d2a1a1677af28eb278547dc13fa36508facaed0d4fba6f23a7ba4da2" gracePeriod=30 Oct 09 15:09:56 crc kubenswrapper[4762]: I1009 15:09:56.078533 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-5bdb8689c-79w7r" podUID="a961ac97-2010-40c4-9c5a-a68222cb2a8b" containerName="horizon" containerID="cri-o://6d4dbd1e0a09ca72c413facf5c712a0015949e14ae8e4be917fe4b4632280d5c" gracePeriod=30 Oct 09 15:09:56 crc kubenswrapper[4762]: I1009 15:09:56.562556 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cfnapi-575dbc5777-btq8r" event={"ID":"81effce6-22e8-4a42-9087-decb53950618","Type":"ContainerStarted","Data":"47109e21c4fe442eeb64944c248a983c1e9a770922c2d8487b2a190dbec1b4b8"} Oct 09 15:09:56 crc kubenswrapper[4762]: I1009 15:09:56.562693 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/heat-cfnapi-575dbc5777-btq8r" Oct 09 15:09:56 crc kubenswrapper[4762]: I1009 15:09:56.564189 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-api-dfb6f84ff-tqggt" event={"ID":"84222e59-350f-4965-9269-c7d697837f75","Type":"ContainerStarted","Data":"532e307de214c16f5af594bd67f2f077e5d2aa0260adfdc0117b4a4c617f033d"} Oct 09 15:09:56 crc kubenswrapper[4762]: I1009 15:09:56.564351 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/heat-api-dfb6f84ff-tqggt" Oct 09 15:09:56 crc kubenswrapper[4762]: I1009 15:09:56.584840 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-cfnapi-575dbc5777-btq8r" podStartSLOduration=2.351804412 podStartE2EDuration="5.584815361s" podCreationTimestamp="2025-10-09 15:09:51 +0000 UTC" firstStartedPulling="2025-10-09 15:09:51.959174926 +0000 UTC m=+6267.732965965" lastFinishedPulling="2025-10-09 15:09:55.192185875 +0000 UTC m=+6270.965976914" observedRunningTime="2025-10-09 15:09:56.579516973 +0000 UTC m=+6272.353308012" watchObservedRunningTime="2025-10-09 15:09:56.584815361 +0000 
UTC m=+6272.358606400" Oct 09 15:09:56 crc kubenswrapper[4762]: I1009 15:09:56.617255 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-api-dfb6f84ff-tqggt" podStartSLOduration=2.5491023090000002 podStartE2EDuration="5.617232045s" podCreationTimestamp="2025-10-09 15:09:51 +0000 UTC" firstStartedPulling="2025-10-09 15:09:52.121492622 +0000 UTC m=+6267.895283661" lastFinishedPulling="2025-10-09 15:09:55.189622358 +0000 UTC m=+6270.963413397" observedRunningTime="2025-10-09 15:09:56.60205779 +0000 UTC m=+6272.375848849" watchObservedRunningTime="2025-10-09 15:09:56.617232045 +0000 UTC m=+6272.391023094" Oct 09 15:09:59 crc kubenswrapper[4762]: I1009 15:09:59.602178 4762 generic.go:334] "Generic (PLEG): container finished" podID="a961ac97-2010-40c4-9c5a-a68222cb2a8b" containerID="6d4dbd1e0a09ca72c413facf5c712a0015949e14ae8e4be917fe4b4632280d5c" exitCode=0 Oct 09 15:09:59 crc kubenswrapper[4762]: I1009 15:09:59.602280 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5bdb8689c-79w7r" event={"ID":"a961ac97-2010-40c4-9c5a-a68222cb2a8b","Type":"ContainerDied","Data":"6d4dbd1e0a09ca72c413facf5c712a0015949e14ae8e4be917fe4b4632280d5c"} Oct 09 15:10:02 crc kubenswrapper[4762]: I1009 15:10:02.912016 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/heat-cfnapi-575dbc5777-btq8r" Oct 09 15:10:03 crc kubenswrapper[4762]: I1009 15:10:03.120053 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/heat-api-dfb6f84ff-tqggt" Oct 09 15:10:04 crc kubenswrapper[4762]: I1009 15:10:04.659336 4762 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-5bdb8689c-79w7r" podUID="a961ac97-2010-40c4-9c5a-a68222cb2a8b" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.115:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.115:8080: connect: connection refused" Oct 09 15:10:07 crc kubenswrapper[4762]: I1009 15:10:07.965295 4762 scope.go:117] "RemoveContainer" containerID="35c7aebddc7d19d2cc79200fbf40a9f94ad48013c10b612bd476e919aed06c38" Oct 09 15:10:07 crc kubenswrapper[4762]: E1009 15:10:07.966128 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:10:11 crc kubenswrapper[4762]: I1009 15:10:11.266519 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/heat-engine-64d559cb64-b8slk" Oct 09 15:10:14 crc kubenswrapper[4762]: I1009 15:10:14.658843 4762 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-5bdb8689c-79w7r" podUID="a961ac97-2010-40c4-9c5a-a68222cb2a8b" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.115:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.115:8080: connect: connection refused" Oct 09 15:10:20 crc kubenswrapper[4762]: I1009 15:10:20.965775 4762 scope.go:117] "RemoveContainer" containerID="35c7aebddc7d19d2cc79200fbf40a9f94ad48013c10b612bd476e919aed06c38" Oct 09 15:10:20 crc kubenswrapper[4762]: E1009 15:10:20.966524 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:10:21 crc kubenswrapper[4762]: I1009 15:10:21.147100 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dccnsj"] Oct 09 15:10:21 crc kubenswrapper[4762]: I1009 15:10:21.149344 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dccnsj" Oct 09 15:10:21 crc kubenswrapper[4762]: I1009 15:10:21.153991 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Oct 09 15:10:21 crc kubenswrapper[4762]: I1009 15:10:21.157835 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dccnsj"] Oct 09 15:10:21 crc kubenswrapper[4762]: I1009 15:10:21.245629 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/611c58bf-4757-4af3-adfb-6e13935f07ae-bundle\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dccnsj\" (UID: \"611c58bf-4757-4af3-adfb-6e13935f07ae\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dccnsj" Oct 09 15:10:21 crc kubenswrapper[4762]: I1009 15:10:21.245701 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ppjzt\" (UniqueName: \"kubernetes.io/projected/611c58bf-4757-4af3-adfb-6e13935f07ae-kube-api-access-ppjzt\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dccnsj\" (UID: \"611c58bf-4757-4af3-adfb-6e13935f07ae\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dccnsj" Oct 09 15:10:21 crc kubenswrapper[4762]: I1009 15:10:21.245858 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/611c58bf-4757-4af3-adfb-6e13935f07ae-util\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dccnsj\" (UID: \"611c58bf-4757-4af3-adfb-6e13935f07ae\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dccnsj" Oct 09 15:10:21 crc kubenswrapper[4762]: I1009 15:10:21.347242 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/611c58bf-4757-4af3-adfb-6e13935f07ae-bundle\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dccnsj\" (UID: \"611c58bf-4757-4af3-adfb-6e13935f07ae\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dccnsj" Oct 09 15:10:21 crc kubenswrapper[4762]: I1009 15:10:21.347306 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ppjzt\" (UniqueName: \"kubernetes.io/projected/611c58bf-4757-4af3-adfb-6e13935f07ae-kube-api-access-ppjzt\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dccnsj\" (UID: \"611c58bf-4757-4af3-adfb-6e13935f07ae\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dccnsj" 
Oct 09 15:10:21 crc kubenswrapper[4762]: I1009 15:10:21.347439 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/611c58bf-4757-4af3-adfb-6e13935f07ae-util\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dccnsj\" (UID: \"611c58bf-4757-4af3-adfb-6e13935f07ae\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dccnsj"
Oct 09 15:10:21 crc kubenswrapper[4762]: I1009 15:10:21.347889 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/611c58bf-4757-4af3-adfb-6e13935f07ae-bundle\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dccnsj\" (UID: \"611c58bf-4757-4af3-adfb-6e13935f07ae\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dccnsj"
Oct 09 15:10:21 crc kubenswrapper[4762]: I1009 15:10:21.348136 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/611c58bf-4757-4af3-adfb-6e13935f07ae-util\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dccnsj\" (UID: \"611c58bf-4757-4af3-adfb-6e13935f07ae\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dccnsj"
Oct 09 15:10:21 crc kubenswrapper[4762]: I1009 15:10:21.373576 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ppjzt\" (UniqueName: \"kubernetes.io/projected/611c58bf-4757-4af3-adfb-6e13935f07ae-kube-api-access-ppjzt\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dccnsj\" (UID: \"611c58bf-4757-4af3-adfb-6e13935f07ae\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dccnsj"
Oct 09 15:10:21 crc kubenswrapper[4762]: I1009 15:10:21.475740 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dccnsj"
Oct 09 15:10:21 crc kubenswrapper[4762]: I1009 15:10:21.921385 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dccnsj"]
Oct 09 15:10:22 crc kubenswrapper[4762]: I1009 15:10:22.826629 4762 generic.go:334] "Generic (PLEG): container finished" podID="611c58bf-4757-4af3-adfb-6e13935f07ae" containerID="d1b016edf238c4fd0b754569fccbb45ad7547008cd2251a3db0c2a0bb19f1b29" exitCode=0
Oct 09 15:10:22 crc kubenswrapper[4762]: I1009 15:10:22.826837 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dccnsj" event={"ID":"611c58bf-4757-4af3-adfb-6e13935f07ae","Type":"ContainerDied","Data":"d1b016edf238c4fd0b754569fccbb45ad7547008cd2251a3db0c2a0bb19f1b29"}
Oct 09 15:10:22 crc kubenswrapper[4762]: I1009 15:10:22.828380 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dccnsj" event={"ID":"611c58bf-4757-4af3-adfb-6e13935f07ae","Type":"ContainerStarted","Data":"a436466ea2c680d992f002b57e663c54779600a18d7ae2bb55fff4f05f81d5c3"}
Oct 09 15:10:24 crc kubenswrapper[4762]: I1009 15:10:24.071836 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-bwsbz"]
Oct 09 15:10:24 crc kubenswrapper[4762]: I1009 15:10:24.087784 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-bwsbz"]
Oct 09 15:10:24 crc kubenswrapper[4762]: I1009 15:10:24.096531 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-jxttd"]
Oct 09 15:10:24 crc kubenswrapper[4762]: I1009 15:10:24.105432 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-jxttd"]
Oct 09 15:10:24 crc kubenswrapper[4762]: I1009 15:10:24.659243 4762 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-5bdb8689c-79w7r" podUID="a961ac97-2010-40c4-9c5a-a68222cb2a8b" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.115:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.115:8080: connect: connection refused"
Oct 09 15:10:24 crc kubenswrapper[4762]: I1009 15:10:24.659921 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-5bdb8689c-79w7r"
Oct 09 15:10:24 crc kubenswrapper[4762]: I1009 15:10:24.852480 4762 generic.go:334] "Generic (PLEG): container finished" podID="611c58bf-4757-4af3-adfb-6e13935f07ae" containerID="8c718802be9a0d6e146e61207b95eaf71f01c1e1f257cd2eb56833fbedd9270e" exitCode=0
Oct 09 15:10:24 crc kubenswrapper[4762]: I1009 15:10:24.852536 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dccnsj" event={"ID":"611c58bf-4757-4af3-adfb-6e13935f07ae","Type":"ContainerDied","Data":"8c718802be9a0d6e146e61207b95eaf71f01c1e1f257cd2eb56833fbedd9270e"}
Oct 09 15:10:24 crc kubenswrapper[4762]: I1009 15:10:24.986820 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1c0662f2-1074-482c-98b7-7bfe6d91961f" path="/var/lib/kubelet/pods/1c0662f2-1074-482c-98b7-7bfe6d91961f/volumes"
Oct 09 15:10:24 crc kubenswrapper[4762]: I1009 15:10:24.988187 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f1c03653-3bb2-4dd8-bdfa-2800ea5be520" path="/var/lib/kubelet/pods/f1c03653-3bb2-4dd8-bdfa-2800ea5be520/volumes"
Oct 09 15:10:25 crc kubenswrapper[4762]: I1009 15:10:25.032591 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-pqqvf"]
Oct 09 15:10:25 crc kubenswrapper[4762]: I1009 15:10:25.040921 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-pqqvf"]
Oct 09 15:10:25 crc kubenswrapper[4762]: I1009 15:10:25.863795 4762 generic.go:334] "Generic (PLEG): container finished" podID="611c58bf-4757-4af3-adfb-6e13935f07ae" containerID="dfbec1717687f7f5f04fcc08e886f055c6f282aa09f1060dbde935cc5f06099d" exitCode=0
Oct 09 15:10:25 crc kubenswrapper[4762]: I1009 15:10:25.863849 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dccnsj" event={"ID":"611c58bf-4757-4af3-adfb-6e13935f07ae","Type":"ContainerDied","Data":"dfbec1717687f7f5f04fcc08e886f055c6f282aa09f1060dbde935cc5f06099d"}
Oct 09 15:10:26 crc kubenswrapper[4762]: I1009 15:10:26.727712 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-5bdb8689c-79w7r"
Oct 09 15:10:26 crc kubenswrapper[4762]: I1009 15:10:26.864457 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a961ac97-2010-40c4-9c5a-a68222cb2a8b-logs\") pod \"a961ac97-2010-40c4-9c5a-a68222cb2a8b\" (UID: \"a961ac97-2010-40c4-9c5a-a68222cb2a8b\") "
Oct 09 15:10:26 crc kubenswrapper[4762]: I1009 15:10:26.864540 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a961ac97-2010-40c4-9c5a-a68222cb2a8b-scripts\") pod \"a961ac97-2010-40c4-9c5a-a68222cb2a8b\" (UID: \"a961ac97-2010-40c4-9c5a-a68222cb2a8b\") "
Oct 09 15:10:26 crc kubenswrapper[4762]: I1009 15:10:26.864560 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a961ac97-2010-40c4-9c5a-a68222cb2a8b-config-data\") pod \"a961ac97-2010-40c4-9c5a-a68222cb2a8b\" (UID: \"a961ac97-2010-40c4-9c5a-a68222cb2a8b\") "
Oct 09 15:10:26 crc kubenswrapper[4762]: I1009 15:10:26.864701 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cvwwc\" (UniqueName: \"kubernetes.io/projected/a961ac97-2010-40c4-9c5a-a68222cb2a8b-kube-api-access-cvwwc\") pod \"a961ac97-2010-40c4-9c5a-a68222cb2a8b\" (UID: \"a961ac97-2010-40c4-9c5a-a68222cb2a8b\") "
Oct 09 15:10:26 crc kubenswrapper[4762]: I1009 15:10:26.864747 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/a961ac97-2010-40c4-9c5a-a68222cb2a8b-horizon-secret-key\") pod \"a961ac97-2010-40c4-9c5a-a68222cb2a8b\" (UID: \"a961ac97-2010-40c4-9c5a-a68222cb2a8b\") "
Oct 09 15:10:26 crc kubenswrapper[4762]: I1009 15:10:26.865602 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a961ac97-2010-40c4-9c5a-a68222cb2a8b-logs" (OuterVolumeSpecName: "logs") pod "a961ac97-2010-40c4-9c5a-a68222cb2a8b" (UID: "a961ac97-2010-40c4-9c5a-a68222cb2a8b"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 09 15:10:26 crc kubenswrapper[4762]: I1009 15:10:26.871843 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a961ac97-2010-40c4-9c5a-a68222cb2a8b-kube-api-access-cvwwc" (OuterVolumeSpecName: "kube-api-access-cvwwc") pod "a961ac97-2010-40c4-9c5a-a68222cb2a8b" (UID: "a961ac97-2010-40c4-9c5a-a68222cb2a8b"). InnerVolumeSpecName "kube-api-access-cvwwc". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 09 15:10:26 crc kubenswrapper[4762]: I1009 15:10:26.871962 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a961ac97-2010-40c4-9c5a-a68222cb2a8b-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "a961ac97-2010-40c4-9c5a-a68222cb2a8b" (UID: "a961ac97-2010-40c4-9c5a-a68222cb2a8b"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 09 15:10:26 crc kubenswrapper[4762]: I1009 15:10:26.877683 4762 generic.go:334] "Generic (PLEG): container finished" podID="a961ac97-2010-40c4-9c5a-a68222cb2a8b" containerID="19ea4671d2a1a1677af28eb278547dc13fa36508facaed0d4fba6f23a7ba4da2" exitCode=137
Oct 09 15:10:26 crc kubenswrapper[4762]: I1009 15:10:26.877770 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5bdb8689c-79w7r" event={"ID":"a961ac97-2010-40c4-9c5a-a68222cb2a8b","Type":"ContainerDied","Data":"19ea4671d2a1a1677af28eb278547dc13fa36508facaed0d4fba6f23a7ba4da2"}
Oct 09 15:10:26 crc kubenswrapper[4762]: I1009 15:10:26.877807 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5bdb8689c-79w7r" event={"ID":"a961ac97-2010-40c4-9c5a-a68222cb2a8b","Type":"ContainerDied","Data":"f39c3f602da3062baf07677c917a0a61b5c2c9dfe3f3e870c4db858979eadf34"}
Oct 09 15:10:26 crc kubenswrapper[4762]: I1009 15:10:26.877823 4762 scope.go:117] "RemoveContainer" containerID="6d4dbd1e0a09ca72c413facf5c712a0015949e14ae8e4be917fe4b4632280d5c"
Oct 09 15:10:26 crc kubenswrapper[4762]: I1009 15:10:26.878036 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-5bdb8689c-79w7r"
Oct 09 15:10:26 crc kubenswrapper[4762]: I1009 15:10:26.895100 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a961ac97-2010-40c4-9c5a-a68222cb2a8b-scripts" (OuterVolumeSpecName: "scripts") pod "a961ac97-2010-40c4-9c5a-a68222cb2a8b" (UID: "a961ac97-2010-40c4-9c5a-a68222cb2a8b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 09 15:10:26 crc kubenswrapper[4762]: I1009 15:10:26.895141 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a961ac97-2010-40c4-9c5a-a68222cb2a8b-config-data" (OuterVolumeSpecName: "config-data") pod "a961ac97-2010-40c4-9c5a-a68222cb2a8b" (UID: "a961ac97-2010-40c4-9c5a-a68222cb2a8b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 09 15:10:26 crc kubenswrapper[4762]: I1009 15:10:26.971443 4762 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a961ac97-2010-40c4-9c5a-a68222cb2a8b-logs\") on node \"crc\" DevicePath \"\""
Oct 09 15:10:26 crc kubenswrapper[4762]: I1009 15:10:26.971578 4762 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a961ac97-2010-40c4-9c5a-a68222cb2a8b-scripts\") on node \"crc\" DevicePath \"\""
Oct 09 15:10:26 crc kubenswrapper[4762]: I1009 15:10:26.971593 4762 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a961ac97-2010-40c4-9c5a-a68222cb2a8b-config-data\") on node \"crc\" DevicePath \"\""
Oct 09 15:10:26 crc kubenswrapper[4762]: I1009 15:10:26.971605 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cvwwc\" (UniqueName: \"kubernetes.io/projected/a961ac97-2010-40c4-9c5a-a68222cb2a8b-kube-api-access-cvwwc\") on node \"crc\" DevicePath \"\""
Oct 09 15:10:26 crc kubenswrapper[4762]: I1009 15:10:26.971615 4762 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/a961ac97-2010-40c4-9c5a-a68222cb2a8b-horizon-secret-key\") on node \"crc\" DevicePath \"\""
Oct 09 15:10:26 crc kubenswrapper[4762]: I1009 15:10:26.978294 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="27b71b3c-8972-4eb3-9bba-b917c7c51d4f" path="/var/lib/kubelet/pods/27b71b3c-8972-4eb3-9bba-b917c7c51d4f/volumes"
Oct 09 15:10:27 crc kubenswrapper[4762]: I1009 15:10:27.169727 4762 scope.go:117] "RemoveContainer" containerID="19ea4671d2a1a1677af28eb278547dc13fa36508facaed0d4fba6f23a7ba4da2"
Oct 09 15:10:27 crc kubenswrapper[4762]: I1009 15:10:27.193785 4762 scope.go:117] "RemoveContainer" containerID="6d4dbd1e0a09ca72c413facf5c712a0015949e14ae8e4be917fe4b4632280d5c"
Oct 09 15:10:27 crc kubenswrapper[4762]: E1009 15:10:27.194146 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6d4dbd1e0a09ca72c413facf5c712a0015949e14ae8e4be917fe4b4632280d5c\": container with ID starting with 6d4dbd1e0a09ca72c413facf5c712a0015949e14ae8e4be917fe4b4632280d5c not found: ID does not exist" containerID="6d4dbd1e0a09ca72c413facf5c712a0015949e14ae8e4be917fe4b4632280d5c"
Oct 09 15:10:27 crc kubenswrapper[4762]: I1009 15:10:27.194187 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6d4dbd1e0a09ca72c413facf5c712a0015949e14ae8e4be917fe4b4632280d5c"} err="failed to get container status \"6d4dbd1e0a09ca72c413facf5c712a0015949e14ae8e4be917fe4b4632280d5c\": rpc error: code = NotFound desc = could not find container \"6d4dbd1e0a09ca72c413facf5c712a0015949e14ae8e4be917fe4b4632280d5c\": container with ID starting with 6d4dbd1e0a09ca72c413facf5c712a0015949e14ae8e4be917fe4b4632280d5c not found: ID does not exist"
Oct 09 15:10:27 crc kubenswrapper[4762]: I1009 15:10:27.194213 4762 scope.go:117] "RemoveContainer" containerID="19ea4671d2a1a1677af28eb278547dc13fa36508facaed0d4fba6f23a7ba4da2"
Oct 09 15:10:27 crc kubenswrapper[4762]: E1009 15:10:27.194389 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"19ea4671d2a1a1677af28eb278547dc13fa36508facaed0d4fba6f23a7ba4da2\": container with ID starting with 19ea4671d2a1a1677af28eb278547dc13fa36508facaed0d4fba6f23a7ba4da2 not found: ID does not exist" containerID="19ea4671d2a1a1677af28eb278547dc13fa36508facaed0d4fba6f23a7ba4da2"
Oct 09 15:10:27 crc kubenswrapper[4762]: I1009 15:10:27.194415 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"19ea4671d2a1a1677af28eb278547dc13fa36508facaed0d4fba6f23a7ba4da2"} err="failed to get container status \"19ea4671d2a1a1677af28eb278547dc13fa36508facaed0d4fba6f23a7ba4da2\": rpc error: code = NotFound desc = could not find container \"19ea4671d2a1a1677af28eb278547dc13fa36508facaed0d4fba6f23a7ba4da2\": container with ID starting with 19ea4671d2a1a1677af28eb278547dc13fa36508facaed0d4fba6f23a7ba4da2 not found: ID does not exist"
Oct 09 15:10:27 crc kubenswrapper[4762]: I1009 15:10:27.239262 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dccnsj"
Oct 09 15:10:27 crc kubenswrapper[4762]: I1009 15:10:27.250811 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-5bdb8689c-79w7r"]
Oct 09 15:10:27 crc kubenswrapper[4762]: I1009 15:10:27.258280 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-5bdb8689c-79w7r"]
Oct 09 15:10:27 crc kubenswrapper[4762]: I1009 15:10:27.380865 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ppjzt\" (UniqueName: \"kubernetes.io/projected/611c58bf-4757-4af3-adfb-6e13935f07ae-kube-api-access-ppjzt\") pod \"611c58bf-4757-4af3-adfb-6e13935f07ae\" (UID: \"611c58bf-4757-4af3-adfb-6e13935f07ae\") "
Oct 09 15:10:27 crc kubenswrapper[4762]: I1009 15:10:27.380964 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/611c58bf-4757-4af3-adfb-6e13935f07ae-util\") pod \"611c58bf-4757-4af3-adfb-6e13935f07ae\" (UID: \"611c58bf-4757-4af3-adfb-6e13935f07ae\") "
Oct 09 15:10:27 crc kubenswrapper[4762]: I1009 15:10:27.380996 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/611c58bf-4757-4af3-adfb-6e13935f07ae-bundle\") pod \"611c58bf-4757-4af3-adfb-6e13935f07ae\" (UID: \"611c58bf-4757-4af3-adfb-6e13935f07ae\") "
Oct 09 15:10:27 crc kubenswrapper[4762]: I1009 15:10:27.382747 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/611c58bf-4757-4af3-adfb-6e13935f07ae-bundle" (OuterVolumeSpecName: "bundle") pod "611c58bf-4757-4af3-adfb-6e13935f07ae" (UID: "611c58bf-4757-4af3-adfb-6e13935f07ae"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 09 15:10:27 crc kubenswrapper[4762]: I1009 15:10:27.385338 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/611c58bf-4757-4af3-adfb-6e13935f07ae-kube-api-access-ppjzt" (OuterVolumeSpecName: "kube-api-access-ppjzt") pod "611c58bf-4757-4af3-adfb-6e13935f07ae" (UID: "611c58bf-4757-4af3-adfb-6e13935f07ae"). InnerVolumeSpecName "kube-api-access-ppjzt". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 09 15:10:27 crc kubenswrapper[4762]: I1009 15:10:27.394420 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/611c58bf-4757-4af3-adfb-6e13935f07ae-util" (OuterVolumeSpecName: "util") pod "611c58bf-4757-4af3-adfb-6e13935f07ae" (UID: "611c58bf-4757-4af3-adfb-6e13935f07ae"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 09 15:10:27 crc kubenswrapper[4762]: I1009 15:10:27.483898 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ppjzt\" (UniqueName: \"kubernetes.io/projected/611c58bf-4757-4af3-adfb-6e13935f07ae-kube-api-access-ppjzt\") on node \"crc\" DevicePath \"\""
Oct 09 15:10:27 crc kubenswrapper[4762]: I1009 15:10:27.483966 4762 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/611c58bf-4757-4af3-adfb-6e13935f07ae-util\") on node \"crc\" DevicePath \"\""
Oct 09 15:10:27 crc kubenswrapper[4762]: I1009 15:10:27.483986 4762 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/611c58bf-4757-4af3-adfb-6e13935f07ae-bundle\") on node \"crc\" DevicePath \"\""
Oct 09 15:10:27 crc kubenswrapper[4762]: I1009 15:10:27.899491 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dccnsj" event={"ID":"611c58bf-4757-4af3-adfb-6e13935f07ae","Type":"ContainerDied","Data":"a436466ea2c680d992f002b57e663c54779600a18d7ae2bb55fff4f05f81d5c3"}
Oct 09 15:10:27 crc kubenswrapper[4762]: I1009 15:10:27.899560 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a436466ea2c680d992f002b57e663c54779600a18d7ae2bb55fff4f05f81d5c3"
Oct 09 15:10:27 crc kubenswrapper[4762]: I1009 15:10:27.899628 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dccnsj"
Oct 09 15:10:28 crc kubenswrapper[4762]: E1009 15:10:28.093540 4762 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod611c58bf_4757_4af3_adfb_6e13935f07ae.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod611c58bf_4757_4af3_adfb_6e13935f07ae.slice/crio-a436466ea2c680d992f002b57e663c54779600a18d7ae2bb55fff4f05f81d5c3\": RecentStats: unable to find data in memory cache]"
Oct 09 15:10:28 crc kubenswrapper[4762]: I1009 15:10:28.976417 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a961ac97-2010-40c4-9c5a-a68222cb2a8b" path="/var/lib/kubelet/pods/a961ac97-2010-40c4-9c5a-a68222cb2a8b/volumes"
Oct 09 15:10:32 crc kubenswrapper[4762]: I1009 15:10:32.965344 4762 scope.go:117] "RemoveContainer" containerID="35c7aebddc7d19d2cc79200fbf40a9f94ad48013c10b612bd476e919aed06c38"
Oct 09 15:10:32 crc kubenswrapper[4762]: E1009 15:10:32.966142 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14"
Oct 09 15:10:34 crc kubenswrapper[4762]: I1009 15:10:34.561235 4762 scope.go:117] "RemoveContainer" containerID="4dad820a499fe65d39497bd860c7110995e2ba06529baa8471443227aa357cc7"
Oct 09 15:10:34 crc kubenswrapper[4762]: I1009 15:10:34.777041 4762 scope.go:117] "RemoveContainer" containerID="0af15b739bea7fbf1278e62dd74199e5d1d5d6f6de0139edae7d29d154b367bb"
Oct 09 15:10:34 crc kubenswrapper[4762]: I1009 15:10:34.922020 4762 scope.go:117] "RemoveContainer" containerID="57bb9f86bf01e6a04c9dba87a6c7b5752afc5d0ab4dc43fd94d8f3b71c315bb0"
Oct 09 15:10:35 crc kubenswrapper[4762]: I1009 15:10:35.064891 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-bf89-account-create-xx7sk"]
Oct 09 15:10:35 crc kubenswrapper[4762]: I1009 15:10:35.074134 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-bf89-account-create-xx7sk"]
Oct 09 15:10:36 crc kubenswrapper[4762]: I1009 15:10:36.084888 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-9a0b-account-create-4fpch"]
Oct 09 15:10:36 crc kubenswrapper[4762]: I1009 15:10:36.105973 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-93aa-account-create-pfzbf"]
Oct 09 15:10:36 crc kubenswrapper[4762]: I1009 15:10:36.115710 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-9a0b-account-create-4fpch"]
Oct 09 15:10:36 crc kubenswrapper[4762]: I1009 15:10:36.138197 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-93aa-account-create-pfzbf"]
Oct 09 15:10:37 crc kubenswrapper[4762]: I1009 15:10:37.018396 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="300c4244-27ca-4d76-b193-06b7fe2b551e" path="/var/lib/kubelet/pods/300c4244-27ca-4d76-b193-06b7fe2b551e/volumes"
Oct 09 15:10:37 crc kubenswrapper[4762]: I1009 15:10:37.019965 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4cc4bbe7-eeac-43a8-91f3-5549b4429ac9" path="/var/lib/kubelet/pods/4cc4bbe7-eeac-43a8-91f3-5549b4429ac9/volumes"
Oct 09 15:10:37 crc kubenswrapper[4762]: I1009 15:10:37.023401 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="891a5bab-9068-4d0d-8d35-4892e7d6d48a" path="/var/lib/kubelet/pods/891a5bab-9068-4d0d-8d35-4892e7d6d48a/volumes"
Oct 09 15:10:38 crc kubenswrapper[4762]: I1009 15:10:38.250014 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-7c8cf85677-5nq4g"]
Oct 09 15:10:38 crc kubenswrapper[4762]: E1009 15:10:38.250890 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="611c58bf-4757-4af3-adfb-6e13935f07ae" containerName="extract"
Oct 09 15:10:38 crc kubenswrapper[4762]: I1009 15:10:38.250979 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="611c58bf-4757-4af3-adfb-6e13935f07ae" containerName="extract"
Oct 09 15:10:38 crc kubenswrapper[4762]: E1009 15:10:38.250993 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="611c58bf-4757-4af3-adfb-6e13935f07ae" containerName="pull"
Oct 09 15:10:38 crc kubenswrapper[4762]: I1009 15:10:38.251000 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="611c58bf-4757-4af3-adfb-6e13935f07ae" containerName="pull"
Oct 09 15:10:38 crc kubenswrapper[4762]: E1009 15:10:38.253267 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="611c58bf-4757-4af3-adfb-6e13935f07ae" containerName="util"
Oct 09 15:10:38 crc kubenswrapper[4762]: I1009 15:10:38.253295 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="611c58bf-4757-4af3-adfb-6e13935f07ae" containerName="util"
Oct 09 15:10:38 crc kubenswrapper[4762]: E1009 15:10:38.253319 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a961ac97-2010-40c4-9c5a-a68222cb2a8b" containerName="horizon"
Oct 09 15:10:38 crc kubenswrapper[4762]: I1009 15:10:38.253326 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="a961ac97-2010-40c4-9c5a-a68222cb2a8b" containerName="horizon"
Oct 09 15:10:38 crc kubenswrapper[4762]: E1009 15:10:38.253357 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a961ac97-2010-40c4-9c5a-a68222cb2a8b" containerName="horizon-log"
Oct 09 15:10:38 crc kubenswrapper[4762]: I1009 15:10:38.253363 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="a961ac97-2010-40c4-9c5a-a68222cb2a8b" containerName="horizon-log"
Oct 09 15:10:38 crc kubenswrapper[4762]: I1009 15:10:38.253664 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="a961ac97-2010-40c4-9c5a-a68222cb2a8b" containerName="horizon-log"
Oct 09 15:10:38 crc kubenswrapper[4762]: I1009 15:10:38.253683 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="611c58bf-4757-4af3-adfb-6e13935f07ae" containerName="extract"
Oct 09 15:10:38 crc kubenswrapper[4762]: I1009 15:10:38.253695 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="a961ac97-2010-40c4-9c5a-a68222cb2a8b" containerName="horizon"
Oct 09 15:10:38 crc kubenswrapper[4762]: I1009 15:10:38.255010 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-7c8cf85677-5nq4g"
Oct 09 15:10:38 crc kubenswrapper[4762]: I1009 15:10:38.261004 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"openshift-service-ca.crt"
Oct 09 15:10:38 crc kubenswrapper[4762]: I1009 15:10:38.261164 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"kube-root-ca.crt"
Oct 09 15:10:38 crc kubenswrapper[4762]: I1009 15:10:38.261292 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-dockercfg-7bc8q"
Oct 09 15:10:38 crc kubenswrapper[4762]: I1009 15:10:38.278427 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-7c8cf85677-5nq4g"]
Oct 09 15:10:38 crc kubenswrapper[4762]: I1009 15:10:38.384747 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-5d9645b546-kwvq7"]
Oct 09 15:10:38 crc kubenswrapper[4762]: I1009 15:10:38.388084 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5d9645b546-kwvq7"
Oct 09 15:10:38 crc kubenswrapper[4762]: I1009 15:10:38.396895 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-dockercfg-5w2mm"
Oct 09 15:10:38 crc kubenswrapper[4762]: I1009 15:10:38.397348 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-service-cert"
Oct 09 15:10:38 crc kubenswrapper[4762]: I1009 15:10:38.416153 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q59sc\" (UniqueName: \"kubernetes.io/projected/b162ca85-dc74-4946-b748-31ff9dec85ba-kube-api-access-q59sc\") pod \"obo-prometheus-operator-7c8cf85677-5nq4g\" (UID: \"b162ca85-dc74-4946-b748-31ff9dec85ba\") " pod="openshift-operators/obo-prometheus-operator-7c8cf85677-5nq4g"
Oct 09 15:10:38 crc kubenswrapper[4762]: I1009 15:10:38.429924 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-5d9645b546-5qnkj"]
Oct 09 15:10:38 crc kubenswrapper[4762]: I1009 15:10:38.431582 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5d9645b546-5qnkj"
Oct 09 15:10:38 crc kubenswrapper[4762]: I1009 15:10:38.441843 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-5d9645b546-kwvq7"]
Oct 09 15:10:38 crc kubenswrapper[4762]: I1009 15:10:38.453075 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-5d9645b546-5qnkj"]
Oct 09 15:10:38 crc kubenswrapper[4762]: I1009 15:10:38.518381 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/5fe38dbb-430f-436e-a456-13a213f99aba-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-5d9645b546-kwvq7\" (UID: \"5fe38dbb-430f-436e-a456-13a213f99aba\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-5d9645b546-kwvq7"
Oct 09 15:10:38 crc kubenswrapper[4762]: I1009 15:10:38.518449 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/5fe38dbb-430f-436e-a456-13a213f99aba-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-5d9645b546-kwvq7\" (UID: \"5fe38dbb-430f-436e-a456-13a213f99aba\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-5d9645b546-kwvq7"
Oct 09 15:10:38 crc kubenswrapper[4762]: I1009 15:10:38.518485 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q59sc\" (UniqueName: \"kubernetes.io/projected/b162ca85-dc74-4946-b748-31ff9dec85ba-kube-api-access-q59sc\") pod \"obo-prometheus-operator-7c8cf85677-5nq4g\" (UID: \"b162ca85-dc74-4946-b748-31ff9dec85ba\") " pod="openshift-operators/obo-prometheus-operator-7c8cf85677-5nq4g"
Oct 09 15:10:38 crc kubenswrapper[4762]: I1009 15:10:38.518517 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/70db938f-6f48-4cb0-b6dd-6d890a018e09-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-5d9645b546-5qnkj\" (UID: \"70db938f-6f48-4cb0-b6dd-6d890a018e09\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-5d9645b546-5qnkj"
Oct 09 15:10:38 crc kubenswrapper[4762]: I1009 15:10:38.518584 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/70db938f-6f48-4cb0-b6dd-6d890a018e09-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-5d9645b546-5qnkj\" (UID: \"70db938f-6f48-4cb0-b6dd-6d890a018e09\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-5d9645b546-5qnkj"
Oct 09 15:10:38 crc kubenswrapper[4762]: I1009 15:10:38.544315 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q59sc\" (UniqueName: \"kubernetes.io/projected/b162ca85-dc74-4946-b748-31ff9dec85ba-kube-api-access-q59sc\") pod \"obo-prometheus-operator-7c8cf85677-5nq4g\" (UID: \"b162ca85-dc74-4946-b748-31ff9dec85ba\") " pod="openshift-operators/obo-prometheus-operator-7c8cf85677-5nq4g"
Oct 09 15:10:38 crc kubenswrapper[4762]: I1009 15:10:38.613077 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-7c8cf85677-5nq4g"
Oct 09 15:10:38 crc kubenswrapper[4762]: I1009 15:10:38.615721 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/observability-operator-cc5f78dfc-9fms8"]
Oct 09 15:10:38 crc kubenswrapper[4762]: I1009 15:10:38.617085 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-cc5f78dfc-9fms8"
Oct 09 15:10:38 crc kubenswrapper[4762]: I1009 15:10:38.619012 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-sa-dockercfg-bgqfl"
Oct 09 15:10:38 crc kubenswrapper[4762]: I1009 15:10:38.619135 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-tls"
Oct 09 15:10:38 crc kubenswrapper[4762]: I1009 15:10:38.620368 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/5fe38dbb-430f-436e-a456-13a213f99aba-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-5d9645b546-kwvq7\" (UID: \"5fe38dbb-430f-436e-a456-13a213f99aba\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-5d9645b546-kwvq7"
Oct 09 15:10:38 crc kubenswrapper[4762]: I1009 15:10:38.620407 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/5fe38dbb-430f-436e-a456-13a213f99aba-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-5d9645b546-kwvq7\" (UID: \"5fe38dbb-430f-436e-a456-13a213f99aba\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-5d9645b546-kwvq7"
Oct 09 15:10:38 crc kubenswrapper[4762]: I1009 15:10:38.620439 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/70db938f-6f48-4cb0-b6dd-6d890a018e09-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-5d9645b546-5qnkj\" (UID: \"70db938f-6f48-4cb0-b6dd-6d890a018e09\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-5d9645b546-5qnkj"
Oct 09 15:10:38 crc kubenswrapper[4762]: I1009 15:10:38.620472 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/70db938f-6f48-4cb0-b6dd-6d890a018e09-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-5d9645b546-5qnkj\" (UID: \"70db938f-6f48-4cb0-b6dd-6d890a018e09\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-5d9645b546-5qnkj"
Oct 09 15:10:38 crc kubenswrapper[4762]: I1009 15:10:38.632794 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/70db938f-6f48-4cb0-b6dd-6d890a018e09-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-5d9645b546-5qnkj\" (UID: \"70db938f-6f48-4cb0-b6dd-6d890a018e09\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-5d9645b546-5qnkj"
Oct 09 15:10:38 crc kubenswrapper[4762]: I1009 15:10:38.638228 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/5fe38dbb-430f-436e-a456-13a213f99aba-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-5d9645b546-kwvq7\" (UID: \"5fe38dbb-430f-436e-a456-13a213f99aba\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-5d9645b546-kwvq7"
Oct 09 15:10:38 crc kubenswrapper[4762]: I1009 15:10:38.648284 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/5fe38dbb-430f-436e-a456-13a213f99aba-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-5d9645b546-kwvq7\" (UID: \"5fe38dbb-430f-436e-a456-13a213f99aba\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-5d9645b546-kwvq7"
Oct 09 15:10:38 crc kubenswrapper[4762]: I1009 15:10:38.648509 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/70db938f-6f48-4cb0-b6dd-6d890a018e09-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-5d9645b546-5qnkj\" (UID: \"70db938f-6f48-4cb0-b6dd-6d890a018e09\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-5d9645b546-5qnkj"
Oct 09 15:10:38 crc kubenswrapper[4762]: I1009 15:10:38.675863 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-cc5f78dfc-9fms8"]
Oct 09 15:10:38 crc kubenswrapper[4762]: I1009 15:10:38.714132 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5d9645b546-kwvq7"
Oct 09 15:10:38 crc kubenswrapper[4762]: I1009 15:10:38.722710 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-58vn6\" (UniqueName: \"kubernetes.io/projected/7b19a695-02c5-421b-bc49-695416a04bfe-kube-api-access-58vn6\") pod \"observability-operator-cc5f78dfc-9fms8\" (UID: \"7b19a695-02c5-421b-bc49-695416a04bfe\") " pod="openshift-operators/observability-operator-cc5f78dfc-9fms8"
Oct 09 15:10:38 crc kubenswrapper[4762]: I1009 15:10:38.722822 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/7b19a695-02c5-421b-bc49-695416a04bfe-observability-operator-tls\") pod \"observability-operator-cc5f78dfc-9fms8\" (UID: \"7b19a695-02c5-421b-bc49-695416a04bfe\") " pod="openshift-operators/observability-operator-cc5f78dfc-9fms8"
Oct 09 15:10:38 crc kubenswrapper[4762]: I1009 15:10:38.753610 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5d9645b546-5qnkj"
Oct 09 15:10:38 crc kubenswrapper[4762]: I1009 15:10:38.831845 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/7b19a695-02c5-421b-bc49-695416a04bfe-observability-operator-tls\") pod \"observability-operator-cc5f78dfc-9fms8\" (UID: \"7b19a695-02c5-421b-bc49-695416a04bfe\") " pod="openshift-operators/observability-operator-cc5f78dfc-9fms8"
Oct 09 15:10:38 crc kubenswrapper[4762]: I1009 15:10:38.832159 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-58vn6\" (UniqueName: \"kubernetes.io/projected/7b19a695-02c5-421b-bc49-695416a04bfe-kube-api-access-58vn6\") pod \"observability-operator-cc5f78dfc-9fms8\" (UID: \"7b19a695-02c5-421b-bc49-695416a04bfe\") " pod="openshift-operators/observability-operator-cc5f78dfc-9fms8"
Oct 09 15:10:38 crc kubenswrapper[4762]: I1009 15:10:38.856720 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/7b19a695-02c5-421b-bc49-695416a04bfe-observability-operator-tls\") pod \"observability-operator-cc5f78dfc-9fms8\" (UID: \"7b19a695-02c5-421b-bc49-695416a04bfe\") " pod="openshift-operators/observability-operator-cc5f78dfc-9fms8"
Oct 09 15:10:38 crc kubenswrapper[4762]: I1009 15:10:38.860452 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-58vn6\" (UniqueName: \"kubernetes.io/projected/7b19a695-02c5-421b-bc49-695416a04bfe-kube-api-access-58vn6\") pod \"observability-operator-cc5f78dfc-9fms8\" (UID: \"7b19a695-02c5-421b-bc49-695416a04bfe\") " pod="openshift-operators/observability-operator-cc5f78dfc-9fms8"
Oct 09 15:10:38 crc kubenswrapper[4762]: I1009 15:10:38.907184 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/perses-operator-54bc95c9fb-nvcqk"]
Oct 09 15:10:38 crc kubenswrapper[4762]: I1009 15:10:38.908685 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-54bc95c9fb-nvcqk"
Oct 09 15:10:38 crc kubenswrapper[4762]: I1009 15:10:38.920938 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"perses-operator-dockercfg-wk7nc"
Oct 09 15:10:38 crc kubenswrapper[4762]: I1009 15:10:38.928236 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-54bc95c9fb-nvcqk"]
Oct 09 15:10:39 crc kubenswrapper[4762]: I1009 15:10:39.056194 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/b118f530-9c1b-4e99-b633-6759db10e9cb-openshift-service-ca\") pod \"perses-operator-54bc95c9fb-nvcqk\" (UID: \"b118f530-9c1b-4e99-b633-6759db10e9cb\") " pod="openshift-operators/perses-operator-54bc95c9fb-nvcqk"
Oct 09 15:10:39 crc kubenswrapper[4762]: I1009 15:10:39.056536 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wg6qb\" (UniqueName: \"kubernetes.io/projected/b118f530-9c1b-4e99-b633-6759db10e9cb-kube-api-access-wg6qb\") pod \"perses-operator-54bc95c9fb-nvcqk\" (UID: \"b118f530-9c1b-4e99-b633-6759db10e9cb\") " pod="openshift-operators/perses-operator-54bc95c9fb-nvcqk"
Oct 09 15:10:39 crc kubenswrapper[4762]: I1009 15:10:39.155446 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-cc5f78dfc-9fms8"
Oct 09 15:10:39 crc kubenswrapper[4762]: I1009 15:10:39.158677 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/b118f530-9c1b-4e99-b633-6759db10e9cb-openshift-service-ca\") pod \"perses-operator-54bc95c9fb-nvcqk\" (UID: \"b118f530-9c1b-4e99-b633-6759db10e9cb\") " pod="openshift-operators/perses-operator-54bc95c9fb-nvcqk"
Oct 09 15:10:39 crc kubenswrapper[4762]: I1009 15:10:39.158750 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wg6qb\" (UniqueName: \"kubernetes.io/projected/b118f530-9c1b-4e99-b633-6759db10e9cb-kube-api-access-wg6qb\") pod \"perses-operator-54bc95c9fb-nvcqk\" (UID: \"b118f530-9c1b-4e99-b633-6759db10e9cb\") " pod="openshift-operators/perses-operator-54bc95c9fb-nvcqk"
Oct 09 15:10:39 crc kubenswrapper[4762]: I1009 15:10:39.160270 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/b118f530-9c1b-4e99-b633-6759db10e9cb-openshift-service-ca\") pod \"perses-operator-54bc95c9fb-nvcqk\" (UID: \"b118f530-9c1b-4e99-b633-6759db10e9cb\") " pod="openshift-operators/perses-operator-54bc95c9fb-nvcqk"
Oct 09 15:10:39 crc kubenswrapper[4762]: I1009 15:10:39.184268 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wg6qb\" (UniqueName: \"kubernetes.io/projected/b118f530-9c1b-4e99-b633-6759db10e9cb-kube-api-access-wg6qb\") pod \"perses-operator-54bc95c9fb-nvcqk\" (UID: \"b118f530-9c1b-4e99-b633-6759db10e9cb\") " pod="openshift-operators/perses-operator-54bc95c9fb-nvcqk"
Oct 09 15:10:39 crc kubenswrapper[4762]: I1009 15:10:39.301806 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-54bc95c9fb-nvcqk"
Oct 09 15:10:39 crc kubenswrapper[4762]: I1009 15:10:39.393104 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-5d9645b546-kwvq7"]
Oct 09 15:10:39 crc kubenswrapper[4762]: W1009 15:10:39.485507 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod70db938f_6f48_4cb0_b6dd_6d890a018e09.slice/crio-dbb85815e156fd4a9343b5363de88cb62a8b6c150dce15b9016be12dbc092827 WatchSource:0}: Error finding container dbb85815e156fd4a9343b5363de88cb62a8b6c150dce15b9016be12dbc092827: Status 404 returned error can't find the container with id dbb85815e156fd4a9343b5363de88cb62a8b6c150dce15b9016be12dbc092827
Oct 09 15:10:39 crc kubenswrapper[4762]: I1009 15:10:39.486918 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-5d9645b546-5qnkj"]
Oct 09 15:10:39 crc kubenswrapper[4762]: W1009 15:10:39.505948 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb162ca85_dc74_4946_b748_31ff9dec85ba.slice/crio-cb485a3be1279a5bbd9858a583a51562355a91fe1542c8989c9154b5cdd11d02 WatchSource:0}: Error finding container cb485a3be1279a5bbd9858a583a51562355a91fe1542c8989c9154b5cdd11d02: Status 404 returned error can't find the container with id cb485a3be1279a5bbd9858a583a51562355a91fe1542c8989c9154b5cdd11d02
Oct 09 15:10:39 crc kubenswrapper[4762]: I1009 15:10:39.519401 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-7c8cf85677-5nq4g"]
Oct 09 15:10:39 crc kubenswrapper[4762]: I1009 15:10:39.753574 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-cc5f78dfc-9fms8"]
Oct 09 15:10:39 crc kubenswrapper[4762]: W1009 15:10:39.756866 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7b19a695_02c5_421b_bc49_695416a04bfe.slice/crio-3589255ee3a3b0b0dab67cc8595e4ed0da6e9b21dad2591a3f4a2957ee422514 WatchSource:0}: Error finding container 3589255ee3a3b0b0dab67cc8595e4ed0da6e9b21dad2591a3f4a2957ee422514: Status 404 returned error can't find the container with id 3589255ee3a3b0b0dab67cc8595e4ed0da6e9b21dad2591a3f4a2957ee422514
Oct 09 15:10:39 crc kubenswrapper[4762]: I1009 15:10:39.882529 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-54bc95c9fb-nvcqk"]
Oct 09 15:10:39 crc kubenswrapper[4762]: W1009 15:10:39.886342 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb118f530_9c1b_4e99_b633_6759db10e9cb.slice/crio-3e0c85d3b09a24622e8acd06aa68d105b0b7e24454f1df8a261c78139a24d585 WatchSource:0}: Error finding container 3e0c85d3b09a24622e8acd06aa68d105b0b7e24454f1df8a261c78139a24d585: Status 404 returned error can't find the container with id 3e0c85d3b09a24622e8acd06aa68d105b0b7e24454f1df8a261c78139a24d585
Oct 09 15:10:40 crc kubenswrapper[4762]: I1009 15:10:40.105857 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5d9645b546-kwvq7" event={"ID":"5fe38dbb-430f-436e-a456-13a213f99aba","Type":"ContainerStarted","Data":"19753db137f41db8237985faa5907865dacc212081d336fe1fb231aaa98621f7"}
Oct 09 15:10:40 crc kubenswrapper[4762]: I1009 15:10:40.117844 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-cc5f78dfc-9fms8" event={"ID":"7b19a695-02c5-421b-bc49-695416a04bfe","Type":"ContainerStarted","Data":"3589255ee3a3b0b0dab67cc8595e4ed0da6e9b21dad2591a3f4a2957ee422514"}
Oct 09 15:10:40 crc kubenswrapper[4762]: I1009 15:10:40.144200 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-7c8cf85677-5nq4g" event={"ID":"b162ca85-dc74-4946-b748-31ff9dec85ba","Type":"ContainerStarted","Data":"cb485a3be1279a5bbd9858a583a51562355a91fe1542c8989c9154b5cdd11d02"}
Oct 09 15:10:40 crc kubenswrapper[4762]: I1009 15:10:40.146364 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-54bc95c9fb-nvcqk" event={"ID":"b118f530-9c1b-4e99-b633-6759db10e9cb","Type":"ContainerStarted","Data":"3e0c85d3b09a24622e8acd06aa68d105b0b7e24454f1df8a261c78139a24d585"}
Oct 09 15:10:40 crc kubenswrapper[4762]: I1009 15:10:40.153826 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5d9645b546-5qnkj" event={"ID":"70db938f-6f48-4cb0-b6dd-6d890a018e09","Type":"ContainerStarted","Data":"dbb85815e156fd4a9343b5363de88cb62a8b6c150dce15b9016be12dbc092827"}
Oct 09 15:10:45 crc kubenswrapper[4762]: I1009 15:10:45.082465 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-b62n4"]
Oct 09 15:10:45 crc kubenswrapper[4762]: I1009 15:10:45.094248 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-b62n4"]
Oct 09 15:10:46 crc kubenswrapper[4762]: I1009 15:10:46.977523 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="72b788ee-f766-41de-82e6-95d527473f2d" path="/var/lib/kubelet/pods/72b788ee-f766-41de-82e6-95d527473f2d/volumes"
Oct 09 15:10:47 crc kubenswrapper[4762]: I1009 15:10:47.965758 4762 scope.go:117] "RemoveContainer" containerID="35c7aebddc7d19d2cc79200fbf40a9f94ad48013c10b612bd476e919aed06c38"
Oct 09 15:10:50 crc kubenswrapper[4762]: I1009 15:10:50.265244 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" event={"ID":"366049a3-acf6-488c-9f93-4557528d6d14","Type":"ContainerStarted","Data":"6dfc6618e94878981ab72abe0029ff9f6296087febe811d4f6b3bbf2f5ecbf05"}
Oct 09 15:10:50 crc kubenswrapper[4762]: I1009 15:10:50.268421 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5d9645b546-kwvq7" event={"ID":"5fe38dbb-430f-436e-a456-13a213f99aba","Type":"ContainerStarted","Data":"a83e99c17506b3bddb03b017d24a357def3ae9d19ce5183b48f70d4b3280621c"}
Oct 09 15:10:50 crc kubenswrapper[4762]: I1009 15:10:50.270913 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-cc5f78dfc-9fms8" event={"ID":"7b19a695-02c5-421b-bc49-695416a04bfe","Type":"ContainerStarted","Data":"e167191e0d34888255390060513f29a788cca635f912628f23a26a7f6b05a056"}
Oct 09 15:10:50 crc kubenswrapper[4762]: I1009 15:10:50.272294 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/observability-operator-cc5f78dfc-9fms8"
Oct 09 15:10:50 crc kubenswrapper[4762]: I1009 15:10:50.274868 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-7c8cf85677-5nq4g" event={"ID":"b162ca85-dc74-4946-b748-31ff9dec85ba","Type":"ContainerStarted","Data":"644f8326496230a4ff1ace3b55f38827c8fe5864e4ed43055697be6176e2c48e"}
Oct 09 15:10:50 crc kubenswrapper[4762]: I1009 15:10:50.283809 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-54bc95c9fb-nvcqk" event={"ID":"b118f530-9c1b-4e99-b633-6759db10e9cb","Type":"ContainerStarted","Data":"24c3447a43ae76f53aad538d5dc69645799b24a7adaae3366b11a766aa3ec632"}
Oct 09 15:10:50 crc kubenswrapper[4762]: I1009 15:10:50.285900 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5d9645b546-5qnkj" event={"ID":"70db938f-6f48-4cb0-b6dd-6d890a018e09","Type":"ContainerStarted","Data":"a6ffda855890ec5433d79cf5e7249ae6b6543f6a2bfdb5d3d8d843fe596bd3c9"}
Oct 09 15:10:50 crc kubenswrapper[4762]: I1009 15:10:50.300571 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/observability-operator-cc5f78dfc-9fms8"
Oct 09 15:10:50 crc kubenswrapper[4762]: I1009 15:10:50.321478 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5d9645b546-kwvq7" podStartSLOduration=2.515521321 podStartE2EDuration="12.321455473s" podCreationTimestamp="2025-10-09 15:10:38 +0000 UTC" firstStartedPulling="2025-10-09 15:10:39.422968487 +0000 UTC m=+6315.196759516" lastFinishedPulling="2025-10-09 15:10:49.228902629 +0000 UTC m=+6325.002693668" observedRunningTime="2025-10-09 15:10:50.315412366 +0000 UTC m=+6326.089203405" watchObservedRunningTime="2025-10-09 15:10:50.321455473 +0000 UTC m=+6326.095246512"
Oct 09 15:10:50 crc kubenswrapper[4762]: I1009 15:10:50.358898 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/perses-operator-54bc95c9fb-nvcqk" podStartSLOduration=2.994498602 podStartE2EDuration="12.358875148s" podCreationTimestamp="2025-10-09 15:10:38 +0000 UTC" firstStartedPulling="2025-10-09 15:10:39.888524768 +0000 UTC m=+6315.662315807" lastFinishedPulling="2025-10-09 15:10:49.252901314 +0000 UTC m=+6325.026692353" observedRunningTime="2025-10-09 15:10:50.356933017 +0000 UTC m=+6326.130724046" watchObservedRunningTime="2025-10-09 15:10:50.358875148 +0000 UTC m=+6326.132666187"
Oct 09 15:10:50 crc kubenswrapper[4762]: I1009 15:10:50.386894 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5d9645b546-5qnkj" podStartSLOduration=2.662031777 podStartE2EDuration="12.386876067s" podCreationTimestamp="2025-10-09 15:10:38 +0000 UTC" firstStartedPulling="2025-10-09 15:10:39.489444738 +0000 UTC m=+6315.263235777" lastFinishedPulling="2025-10-09 15:10:49.214289028 +0000 UTC m=+6324.988080067" observedRunningTime="2025-10-09 15:10:50.38124518 +0000 UTC m=+6326.155036219" watchObservedRunningTime="2025-10-09 15:10:50.386876067 +0000 UTC m=+6326.160667106"
Oct 09 15:10:50 crc kubenswrapper[4762]: I1009 15:10:50.443387 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/observability-operator-cc5f78dfc-9fms8" podStartSLOduration=2.927953429 podStartE2EDuration="12.443362797s" podCreationTimestamp="2025-10-09 15:10:38 +0000 UTC" firstStartedPulling="2025-10-09 15:10:39.771084481 +0000 UTC m=+6315.544875520" lastFinishedPulling="2025-10-09 15:10:49.286493849 +0000 UTC m=+6325.060284888" observedRunningTime="2025-10-09 15:10:50.436666392 +0000 UTC
m=+6326.210457431" watchObservedRunningTime="2025-10-09 15:10:50.443362797 +0000 UTC m=+6326.217153836" Oct 09 15:10:50 crc kubenswrapper[4762]: I1009 15:10:50.497865 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-7c8cf85677-5nq4g" podStartSLOduration=2.805949612 podStartE2EDuration="12.497840685s" podCreationTimestamp="2025-10-09 15:10:38 +0000 UTC" firstStartedPulling="2025-10-09 15:10:39.537666163 +0000 UTC m=+6315.311457202" lastFinishedPulling="2025-10-09 15:10:49.229557216 +0000 UTC m=+6325.003348275" observedRunningTime="2025-10-09 15:10:50.466759206 +0000 UTC m=+6326.240550235" watchObservedRunningTime="2025-10-09 15:10:50.497840685 +0000 UTC m=+6326.271631724" Oct 09 15:10:51 crc kubenswrapper[4762]: I1009 15:10:51.297869 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/perses-operator-54bc95c9fb-nvcqk" Oct 09 15:10:59 crc kubenswrapper[4762]: I1009 15:10:59.309163 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/perses-operator-54bc95c9fb-nvcqk" Oct 09 15:11:00 crc kubenswrapper[4762]: I1009 15:11:00.073406 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-dlwwg"] Oct 09 15:11:00 crc kubenswrapper[4762]: I1009 15:11:00.084768 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-xvkwg"] Oct 09 15:11:00 crc kubenswrapper[4762]: I1009 15:11:00.092776 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-xvkwg"] Oct 09 15:11:00 crc kubenswrapper[4762]: I1009 15:11:00.100211 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-dlwwg"] Oct 09 15:11:00 crc kubenswrapper[4762]: I1009 15:11:00.980259 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="38c6c050-8822-475a-b959-3ea5306a1d21" path="/var/lib/kubelet/pods/38c6c050-8822-475a-b959-3ea5306a1d21/volumes" Oct 09 15:11:00 crc kubenswrapper[4762]: I1009 15:11:00.981560 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="facc7c9e-6fec-46d2-946a-14be514a7699" path="/var/lib/kubelet/pods/facc7c9e-6fec-46d2-946a-14be514a7699/volumes" Oct 09 15:11:01 crc kubenswrapper[4762]: I1009 15:11:01.886083 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstackclient"] Oct 09 15:11:01 crc kubenswrapper[4762]: I1009 15:11:01.886548 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/openstackclient" podUID="8ccbdaa6-45c2-47c8-94aa-2f0a49e14fb5" containerName="openstackclient" containerID="cri-o://576e3d47d7e70c632cd8c49f5a52afacab0d69c3ef73368f5e6c0c2c366e78d9" gracePeriod=2 Oct 09 15:11:01 crc kubenswrapper[4762]: I1009 15:11:01.904854 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstackclient"] Oct 09 15:11:01 crc kubenswrapper[4762]: I1009 15:11:01.956302 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Oct 09 15:11:01 crc kubenswrapper[4762]: E1009 15:11:01.956756 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ccbdaa6-45c2-47c8-94aa-2f0a49e14fb5" containerName="openstackclient" Oct 09 15:11:01 crc kubenswrapper[4762]: I1009 15:11:01.956772 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ccbdaa6-45c2-47c8-94aa-2f0a49e14fb5" containerName="openstackclient" Oct 09 15:11:01 crc kubenswrapper[4762]: I1009 
15:11:01.956955 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="8ccbdaa6-45c2-47c8-94aa-2f0a49e14fb5" containerName="openstackclient" Oct 09 15:11:01 crc kubenswrapper[4762]: I1009 15:11:01.957894 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Oct 09 15:11:01 crc kubenswrapper[4762]: I1009 15:11:01.961049 4762 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="8ccbdaa6-45c2-47c8-94aa-2f0a49e14fb5" podUID="ab0657ec-dc49-40ca-b47f-5b17b550744e" Oct 09 15:11:01 crc kubenswrapper[4762]: I1009 15:11:01.979094 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Oct 09 15:11:02 crc kubenswrapper[4762]: I1009 15:11:02.093607 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qrd6c\" (UniqueName: \"kubernetes.io/projected/ab0657ec-dc49-40ca-b47f-5b17b550744e-kube-api-access-qrd6c\") pod \"openstackclient\" (UID: \"ab0657ec-dc49-40ca-b47f-5b17b550744e\") " pod="openstack/openstackclient" Oct 09 15:11:02 crc kubenswrapper[4762]: I1009 15:11:02.093722 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/ab0657ec-dc49-40ca-b47f-5b17b550744e-openstack-config-secret\") pod \"openstackclient\" (UID: \"ab0657ec-dc49-40ca-b47f-5b17b550744e\") " pod="openstack/openstackclient" Oct 09 15:11:02 crc kubenswrapper[4762]: I1009 15:11:02.093769 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/ab0657ec-dc49-40ca-b47f-5b17b550744e-openstack-config\") pod \"openstackclient\" (UID: \"ab0657ec-dc49-40ca-b47f-5b17b550744e\") " pod="openstack/openstackclient" Oct 09 15:11:02 crc kubenswrapper[4762]: I1009 15:11:02.167244 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Oct 09 15:11:02 crc kubenswrapper[4762]: I1009 15:11:02.171625 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 09 15:11:02 crc kubenswrapper[4762]: I1009 15:11:02.181048 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-tfdrb" Oct 09 15:11:02 crc kubenswrapper[4762]: I1009 15:11:02.187956 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 09 15:11:02 crc kubenswrapper[4762]: I1009 15:11:02.195642 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qrd6c\" (UniqueName: \"kubernetes.io/projected/ab0657ec-dc49-40ca-b47f-5b17b550744e-kube-api-access-qrd6c\") pod \"openstackclient\" (UID: \"ab0657ec-dc49-40ca-b47f-5b17b550744e\") " pod="openstack/openstackclient" Oct 09 15:11:02 crc kubenswrapper[4762]: I1009 15:11:02.195920 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/ab0657ec-dc49-40ca-b47f-5b17b550744e-openstack-config-secret\") pod \"openstackclient\" (UID: \"ab0657ec-dc49-40ca-b47f-5b17b550744e\") " pod="openstack/openstackclient" Oct 09 15:11:02 crc kubenswrapper[4762]: I1009 15:11:02.196028 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/ab0657ec-dc49-40ca-b47f-5b17b550744e-openstack-config\") pod \"openstackclient\" (UID: \"ab0657ec-dc49-40ca-b47f-5b17b550744e\") " pod="openstack/openstackclient" Oct 09 15:11:02 crc kubenswrapper[4762]: I1009 15:11:02.196969 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/ab0657ec-dc49-40ca-b47f-5b17b550744e-openstack-config\") pod \"openstackclient\" (UID: \"ab0657ec-dc49-40ca-b47f-5b17b550744e\") " pod="openstack/openstackclient" Oct 09 15:11:02 crc kubenswrapper[4762]: I1009 15:11:02.207264 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/ab0657ec-dc49-40ca-b47f-5b17b550744e-openstack-config-secret\") pod \"openstackclient\" (UID: \"ab0657ec-dc49-40ca-b47f-5b17b550744e\") " pod="openstack/openstackclient" Oct 09 15:11:02 crc kubenswrapper[4762]: I1009 15:11:02.249304 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qrd6c\" (UniqueName: \"kubernetes.io/projected/ab0657ec-dc49-40ca-b47f-5b17b550744e-kube-api-access-qrd6c\") pod \"openstackclient\" (UID: \"ab0657ec-dc49-40ca-b47f-5b17b550744e\") " pod="openstack/openstackclient" Oct 09 15:11:02 crc kubenswrapper[4762]: I1009 15:11:02.277593 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Oct 09 15:11:02 crc kubenswrapper[4762]: I1009 15:11:02.309699 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lwdrx\" (UniqueName: \"kubernetes.io/projected/4095c002-3425-4536-be08-4adb623d6b61-kube-api-access-lwdrx\") pod \"kube-state-metrics-0\" (UID: \"4095c002-3425-4536-be08-4adb623d6b61\") " pod="openstack/kube-state-metrics-0" Oct 09 15:11:02 crc kubenswrapper[4762]: I1009 15:11:02.416169 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lwdrx\" (UniqueName: \"kubernetes.io/projected/4095c002-3425-4536-be08-4adb623d6b61-kube-api-access-lwdrx\") pod \"kube-state-metrics-0\" (UID: \"4095c002-3425-4536-be08-4adb623d6b61\") " pod="openstack/kube-state-metrics-0" Oct 09 15:11:02 crc kubenswrapper[4762]: I1009 15:11:02.462818 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lwdrx\" (UniqueName: \"kubernetes.io/projected/4095c002-3425-4536-be08-4adb623d6b61-kube-api-access-lwdrx\") pod \"kube-state-metrics-0\" (UID: \"4095c002-3425-4536-be08-4adb623d6b61\") " pod="openstack/kube-state-metrics-0" Oct 09 15:11:02 crc kubenswrapper[4762]: I1009 15:11:02.625926 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 09 15:11:03 crc kubenswrapper[4762]: I1009 15:11:03.049945 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/alertmanager-metric-storage-0"] Oct 09 15:11:03 crc kubenswrapper[4762]: I1009 15:11:03.053504 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/alertmanager-metric-storage-0" Oct 09 15:11:03 crc kubenswrapper[4762]: I1009 15:11:03.060126 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"alertmanager-metric-storage-tls-assets-0" Oct 09 15:11:03 crc kubenswrapper[4762]: I1009 15:11:03.060299 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"alertmanager-metric-storage-web-config" Oct 09 15:11:03 crc kubenswrapper[4762]: I1009 15:11:03.067272 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"alertmanager-metric-storage-generated" Oct 09 15:11:03 crc kubenswrapper[4762]: I1009 15:11:03.067435 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"metric-storage-alertmanager-dockercfg-7g7xg" Oct 09 15:11:03 crc kubenswrapper[4762]: I1009 15:11:03.105780 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/alertmanager-metric-storage-0"] Oct 09 15:11:03 crc kubenswrapper[4762]: I1009 15:11:03.152422 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/87c8f7e7-8d9d-49aa-aa9f-9702e83e9331-tls-assets\") pod \"alertmanager-metric-storage-0\" (UID: \"87c8f7e7-8d9d-49aa-aa9f-9702e83e9331\") " pod="openstack/alertmanager-metric-storage-0" Oct 09 15:11:03 crc kubenswrapper[4762]: I1009 15:11:03.152455 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-64j5j\" (UniqueName: \"kubernetes.io/projected/87c8f7e7-8d9d-49aa-aa9f-9702e83e9331-kube-api-access-64j5j\") pod \"alertmanager-metric-storage-0\" (UID: \"87c8f7e7-8d9d-49aa-aa9f-9702e83e9331\") " pod="openstack/alertmanager-metric-storage-0" Oct 09 15:11:03 crc kubenswrapper[4762]: I1009 15:11:03.152570 4762 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"alertmanager-metric-storage-db\" (UniqueName: \"kubernetes.io/empty-dir/87c8f7e7-8d9d-49aa-aa9f-9702e83e9331-alertmanager-metric-storage-db\") pod \"alertmanager-metric-storage-0\" (UID: \"87c8f7e7-8d9d-49aa-aa9f-9702e83e9331\") " pod="openstack/alertmanager-metric-storage-0" Oct 09 15:11:03 crc kubenswrapper[4762]: I1009 15:11:03.152701 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/87c8f7e7-8d9d-49aa-aa9f-9702e83e9331-web-config\") pod \"alertmanager-metric-storage-0\" (UID: \"87c8f7e7-8d9d-49aa-aa9f-9702e83e9331\") " pod="openstack/alertmanager-metric-storage-0" Oct 09 15:11:03 crc kubenswrapper[4762]: I1009 15:11:03.152730 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/87c8f7e7-8d9d-49aa-aa9f-9702e83e9331-config-out\") pod \"alertmanager-metric-storage-0\" (UID: \"87c8f7e7-8d9d-49aa-aa9f-9702e83e9331\") " pod="openstack/alertmanager-metric-storage-0" Oct 09 15:11:03 crc kubenswrapper[4762]: I1009 15:11:03.152779 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/87c8f7e7-8d9d-49aa-aa9f-9702e83e9331-config-volume\") pod \"alertmanager-metric-storage-0\" (UID: \"87c8f7e7-8d9d-49aa-aa9f-9702e83e9331\") " pod="openstack/alertmanager-metric-storage-0" Oct 09 15:11:03 crc kubenswrapper[4762]: I1009 15:11:03.262534 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/87c8f7e7-8d9d-49aa-aa9f-9702e83e9331-config-volume\") pod \"alertmanager-metric-storage-0\" (UID: \"87c8f7e7-8d9d-49aa-aa9f-9702e83e9331\") " pod="openstack/alertmanager-metric-storage-0" Oct 09 15:11:03 crc kubenswrapper[4762]: I1009 15:11:03.262630 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/87c8f7e7-8d9d-49aa-aa9f-9702e83e9331-tls-assets\") pod \"alertmanager-metric-storage-0\" (UID: \"87c8f7e7-8d9d-49aa-aa9f-9702e83e9331\") " pod="openstack/alertmanager-metric-storage-0" Oct 09 15:11:03 crc kubenswrapper[4762]: I1009 15:11:03.262674 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-64j5j\" (UniqueName: \"kubernetes.io/projected/87c8f7e7-8d9d-49aa-aa9f-9702e83e9331-kube-api-access-64j5j\") pod \"alertmanager-metric-storage-0\" (UID: \"87c8f7e7-8d9d-49aa-aa9f-9702e83e9331\") " pod="openstack/alertmanager-metric-storage-0" Oct 09 15:11:03 crc kubenswrapper[4762]: I1009 15:11:03.262741 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"alertmanager-metric-storage-db\" (UniqueName: \"kubernetes.io/empty-dir/87c8f7e7-8d9d-49aa-aa9f-9702e83e9331-alertmanager-metric-storage-db\") pod \"alertmanager-metric-storage-0\" (UID: \"87c8f7e7-8d9d-49aa-aa9f-9702e83e9331\") " pod="openstack/alertmanager-metric-storage-0" Oct 09 15:11:03 crc kubenswrapper[4762]: I1009 15:11:03.262795 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/87c8f7e7-8d9d-49aa-aa9f-9702e83e9331-web-config\") pod \"alertmanager-metric-storage-0\" (UID: \"87c8f7e7-8d9d-49aa-aa9f-9702e83e9331\") " 
pod="openstack/alertmanager-metric-storage-0" Oct 09 15:11:03 crc kubenswrapper[4762]: I1009 15:11:03.262815 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/87c8f7e7-8d9d-49aa-aa9f-9702e83e9331-config-out\") pod \"alertmanager-metric-storage-0\" (UID: \"87c8f7e7-8d9d-49aa-aa9f-9702e83e9331\") " pod="openstack/alertmanager-metric-storage-0" Oct 09 15:11:03 crc kubenswrapper[4762]: I1009 15:11:03.275164 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"alertmanager-metric-storage-db\" (UniqueName: \"kubernetes.io/empty-dir/87c8f7e7-8d9d-49aa-aa9f-9702e83e9331-alertmanager-metric-storage-db\") pod \"alertmanager-metric-storage-0\" (UID: \"87c8f7e7-8d9d-49aa-aa9f-9702e83e9331\") " pod="openstack/alertmanager-metric-storage-0" Oct 09 15:11:03 crc kubenswrapper[4762]: I1009 15:11:03.284378 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/87c8f7e7-8d9d-49aa-aa9f-9702e83e9331-config-volume\") pod \"alertmanager-metric-storage-0\" (UID: \"87c8f7e7-8d9d-49aa-aa9f-9702e83e9331\") " pod="openstack/alertmanager-metric-storage-0" Oct 09 15:11:03 crc kubenswrapper[4762]: I1009 15:11:03.298579 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/87c8f7e7-8d9d-49aa-aa9f-9702e83e9331-config-out\") pod \"alertmanager-metric-storage-0\" (UID: \"87c8f7e7-8d9d-49aa-aa9f-9702e83e9331\") " pod="openstack/alertmanager-metric-storage-0" Oct 09 15:11:03 crc kubenswrapper[4762]: I1009 15:11:03.299692 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/87c8f7e7-8d9d-49aa-aa9f-9702e83e9331-web-config\") pod \"alertmanager-metric-storage-0\" (UID: \"87c8f7e7-8d9d-49aa-aa9f-9702e83e9331\") " pod="openstack/alertmanager-metric-storage-0" Oct 09 15:11:03 crc kubenswrapper[4762]: I1009 15:11:03.303283 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/87c8f7e7-8d9d-49aa-aa9f-9702e83e9331-tls-assets\") pod \"alertmanager-metric-storage-0\" (UID: \"87c8f7e7-8d9d-49aa-aa9f-9702e83e9331\") " pod="openstack/alertmanager-metric-storage-0" Oct 09 15:11:03 crc kubenswrapper[4762]: I1009 15:11:03.314403 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-64j5j\" (UniqueName: \"kubernetes.io/projected/87c8f7e7-8d9d-49aa-aa9f-9702e83e9331-kube-api-access-64j5j\") pod \"alertmanager-metric-storage-0\" (UID: \"87c8f7e7-8d9d-49aa-aa9f-9702e83e9331\") " pod="openstack/alertmanager-metric-storage-0" Oct 09 15:11:03 crc kubenswrapper[4762]: I1009 15:11:03.383348 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Oct 09 15:11:03 crc kubenswrapper[4762]: I1009 15:11:03.383880 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/alertmanager-metric-storage-0" Oct 09 15:11:03 crc kubenswrapper[4762]: I1009 15:11:03.629091 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/prometheus-metric-storage-0"] Oct 09 15:11:03 crc kubenswrapper[4762]: I1009 15:11:03.631740 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/prometheus-metric-storage-0" Oct 09 15:11:03 crc kubenswrapper[4762]: I1009 15:11:03.631998 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"ab0657ec-dc49-40ca-b47f-5b17b550744e","Type":"ContainerStarted","Data":"b78e1df3428e60e2339c68006f1232351d5f7b05c9b5f56dcb28b6938688be12"} Oct 09 15:11:03 crc kubenswrapper[4762]: I1009 15:11:03.644534 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-web-config" Oct 09 15:11:03 crc kubenswrapper[4762]: I1009 15:11:03.644695 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"prometheus-metric-storage-rulefiles-0" Oct 09 15:11:03 crc kubenswrapper[4762]: I1009 15:11:03.644620 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-thanos-prometheus-http-client-file" Oct 09 15:11:03 crc kubenswrapper[4762]: I1009 15:11:03.649853 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-tls-assets-0" Oct 09 15:11:03 crc kubenswrapper[4762]: I1009 15:11:03.650130 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage" Oct 09 15:11:03 crc kubenswrapper[4762]: I1009 15:11:03.650470 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"metric-storage-prometheus-dockercfg-pp5m9" Oct 09 15:11:03 crc kubenswrapper[4762]: I1009 15:11:03.655167 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 09 15:11:03 crc kubenswrapper[4762]: I1009 15:11:03.676213 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"] Oct 09 15:11:03 crc kubenswrapper[4762]: I1009 15:11:03.708464 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/d526887b-e7a2-4ad7-a1f1-1c4f376dac12-config\") pod \"prometheus-metric-storage-0\" (UID: \"d526887b-e7a2-4ad7-a1f1-1c4f376dac12\") " pod="openstack/prometheus-metric-storage-0" Oct 09 15:11:03 crc kubenswrapper[4762]: I1009 15:11:03.708549 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-1372ec08-407d-4855-8c90-ce70fa70bcc1\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-1372ec08-407d-4855-8c90-ce70fa70bcc1\") pod \"prometheus-metric-storage-0\" (UID: \"d526887b-e7a2-4ad7-a1f1-1c4f376dac12\") " pod="openstack/prometheus-metric-storage-0" Oct 09 15:11:03 crc kubenswrapper[4762]: I1009 15:11:03.708670 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xqvkn\" (UniqueName: \"kubernetes.io/projected/d526887b-e7a2-4ad7-a1f1-1c4f376dac12-kube-api-access-xqvkn\") pod \"prometheus-metric-storage-0\" (UID: \"d526887b-e7a2-4ad7-a1f1-1c4f376dac12\") " pod="openstack/prometheus-metric-storage-0" Oct 09 15:11:03 crc kubenswrapper[4762]: I1009 15:11:03.708693 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/d526887b-e7a2-4ad7-a1f1-1c4f376dac12-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"d526887b-e7a2-4ad7-a1f1-1c4f376dac12\") " pod="openstack/prometheus-metric-storage-0" Oct 09 15:11:03 crc kubenswrapper[4762]: I1009 15:11:03.708713 4762 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/d526887b-e7a2-4ad7-a1f1-1c4f376dac12-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"d526887b-e7a2-4ad7-a1f1-1c4f376dac12\") " pod="openstack/prometheus-metric-storage-0" Oct 09 15:11:03 crc kubenswrapper[4762]: I1009 15:11:03.708741 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/d526887b-e7a2-4ad7-a1f1-1c4f376dac12-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"d526887b-e7a2-4ad7-a1f1-1c4f376dac12\") " pod="openstack/prometheus-metric-storage-0" Oct 09 15:11:03 crc kubenswrapper[4762]: I1009 15:11:03.708772 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/d526887b-e7a2-4ad7-a1f1-1c4f376dac12-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"d526887b-e7a2-4ad7-a1f1-1c4f376dac12\") " pod="openstack/prometheus-metric-storage-0" Oct 09 15:11:03 crc kubenswrapper[4762]: I1009 15:11:03.708802 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/d526887b-e7a2-4ad7-a1f1-1c4f376dac12-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"d526887b-e7a2-4ad7-a1f1-1c4f376dac12\") " pod="openstack/prometheus-metric-storage-0" Oct 09 15:11:03 crc kubenswrapper[4762]: I1009 15:11:03.810436 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/d526887b-e7a2-4ad7-a1f1-1c4f376dac12-config\") pod \"prometheus-metric-storage-0\" (UID: \"d526887b-e7a2-4ad7-a1f1-1c4f376dac12\") " pod="openstack/prometheus-metric-storage-0" Oct 09 15:11:03 crc kubenswrapper[4762]: I1009 15:11:03.810540 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-1372ec08-407d-4855-8c90-ce70fa70bcc1\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-1372ec08-407d-4855-8c90-ce70fa70bcc1\") pod \"prometheus-metric-storage-0\" (UID: \"d526887b-e7a2-4ad7-a1f1-1c4f376dac12\") " pod="openstack/prometheus-metric-storage-0" Oct 09 15:11:03 crc kubenswrapper[4762]: I1009 15:11:03.810691 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xqvkn\" (UniqueName: \"kubernetes.io/projected/d526887b-e7a2-4ad7-a1f1-1c4f376dac12-kube-api-access-xqvkn\") pod \"prometheus-metric-storage-0\" (UID: \"d526887b-e7a2-4ad7-a1f1-1c4f376dac12\") " pod="openstack/prometheus-metric-storage-0" Oct 09 15:11:03 crc kubenswrapper[4762]: I1009 15:11:03.810738 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/d526887b-e7a2-4ad7-a1f1-1c4f376dac12-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"d526887b-e7a2-4ad7-a1f1-1c4f376dac12\") " pod="openstack/prometheus-metric-storage-0" Oct 09 15:11:03 crc kubenswrapper[4762]: I1009 15:11:03.810761 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/d526887b-e7a2-4ad7-a1f1-1c4f376dac12-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"d526887b-e7a2-4ad7-a1f1-1c4f376dac12\") " pod="openstack/prometheus-metric-storage-0" Oct 
09 15:11:03 crc kubenswrapper[4762]: I1009 15:11:03.810820 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/d526887b-e7a2-4ad7-a1f1-1c4f376dac12-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"d526887b-e7a2-4ad7-a1f1-1c4f376dac12\") " pod="openstack/prometheus-metric-storage-0" Oct 09 15:11:03 crc kubenswrapper[4762]: I1009 15:11:03.810856 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/d526887b-e7a2-4ad7-a1f1-1c4f376dac12-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"d526887b-e7a2-4ad7-a1f1-1c4f376dac12\") " pod="openstack/prometheus-metric-storage-0" Oct 09 15:11:03 crc kubenswrapper[4762]: I1009 15:11:03.810899 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/d526887b-e7a2-4ad7-a1f1-1c4f376dac12-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"d526887b-e7a2-4ad7-a1f1-1c4f376dac12\") " pod="openstack/prometheus-metric-storage-0" Oct 09 15:11:03 crc kubenswrapper[4762]: I1009 15:11:03.811829 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/d526887b-e7a2-4ad7-a1f1-1c4f376dac12-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"d526887b-e7a2-4ad7-a1f1-1c4f376dac12\") " pod="openstack/prometheus-metric-storage-0" Oct 09 15:11:03 crc kubenswrapper[4762]: I1009 15:11:03.820624 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/d526887b-e7a2-4ad7-a1f1-1c4f376dac12-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"d526887b-e7a2-4ad7-a1f1-1c4f376dac12\") " pod="openstack/prometheus-metric-storage-0" Oct 09 15:11:03 crc kubenswrapper[4762]: I1009 15:11:03.825549 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/d526887b-e7a2-4ad7-a1f1-1c4f376dac12-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"d526887b-e7a2-4ad7-a1f1-1c4f376dac12\") " pod="openstack/prometheus-metric-storage-0" Oct 09 15:11:03 crc kubenswrapper[4762]: I1009 15:11:03.827800 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/d526887b-e7a2-4ad7-a1f1-1c4f376dac12-config\") pod \"prometheus-metric-storage-0\" (UID: \"d526887b-e7a2-4ad7-a1f1-1c4f376dac12\") " pod="openstack/prometheus-metric-storage-0" Oct 09 15:11:03 crc kubenswrapper[4762]: I1009 15:11:03.830020 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/d526887b-e7a2-4ad7-a1f1-1c4f376dac12-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"d526887b-e7a2-4ad7-a1f1-1c4f376dac12\") " pod="openstack/prometheus-metric-storage-0" Oct 09 15:11:03 crc kubenswrapper[4762]: I1009 15:11:03.830818 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/d526887b-e7a2-4ad7-a1f1-1c4f376dac12-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"d526887b-e7a2-4ad7-a1f1-1c4f376dac12\") " pod="openstack/prometheus-metric-storage-0" Oct 09 
15:11:03 crc kubenswrapper[4762]: I1009 15:11:03.839153 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xqvkn\" (UniqueName: \"kubernetes.io/projected/d526887b-e7a2-4ad7-a1f1-1c4f376dac12-kube-api-access-xqvkn\") pod \"prometheus-metric-storage-0\" (UID: \"d526887b-e7a2-4ad7-a1f1-1c4f376dac12\") " pod="openstack/prometheus-metric-storage-0" Oct 09 15:11:03 crc kubenswrapper[4762]: I1009 15:11:03.845842 4762 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Oct 09 15:11:03 crc kubenswrapper[4762]: I1009 15:11:03.845997 4762 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-1372ec08-407d-4855-8c90-ce70fa70bcc1\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-1372ec08-407d-4855-8c90-ce70fa70bcc1\") pod \"prometheus-metric-storage-0\" (UID: \"d526887b-e7a2-4ad7-a1f1-1c4f376dac12\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/bd07a1b4d1292cff030eb8ccefcef569b3d5d9ffb37167c57db2a51166c74af8/globalmount\"" pod="openstack/prometheus-metric-storage-0" Oct 09 15:11:04 crc kubenswrapper[4762]: I1009 15:11:04.025708 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-1372ec08-407d-4855-8c90-ce70fa70bcc1\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-1372ec08-407d-4855-8c90-ce70fa70bcc1\") pod \"prometheus-metric-storage-0\" (UID: \"d526887b-e7a2-4ad7-a1f1-1c4f376dac12\") " pod="openstack/prometheus-metric-storage-0" Oct 09 15:11:04 crc kubenswrapper[4762]: I1009 15:11:04.260989 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0" Oct 09 15:11:04 crc kubenswrapper[4762]: I1009 15:11:04.276300 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/alertmanager-metric-storage-0"] Oct 09 15:11:04 crc kubenswrapper[4762]: I1009 15:11:04.440901 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Oct 09 15:11:04 crc kubenswrapper[4762]: I1009 15:11:04.543207 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nt472\" (UniqueName: \"kubernetes.io/projected/8ccbdaa6-45c2-47c8-94aa-2f0a49e14fb5-kube-api-access-nt472\") pod \"8ccbdaa6-45c2-47c8-94aa-2f0a49e14fb5\" (UID: \"8ccbdaa6-45c2-47c8-94aa-2f0a49e14fb5\") " Oct 09 15:11:04 crc kubenswrapper[4762]: I1009 15:11:04.543376 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/8ccbdaa6-45c2-47c8-94aa-2f0a49e14fb5-openstack-config\") pod \"8ccbdaa6-45c2-47c8-94aa-2f0a49e14fb5\" (UID: \"8ccbdaa6-45c2-47c8-94aa-2f0a49e14fb5\") " Oct 09 15:11:04 crc kubenswrapper[4762]: I1009 15:11:04.543575 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/8ccbdaa6-45c2-47c8-94aa-2f0a49e14fb5-openstack-config-secret\") pod \"8ccbdaa6-45c2-47c8-94aa-2f0a49e14fb5\" (UID: \"8ccbdaa6-45c2-47c8-94aa-2f0a49e14fb5\") " Oct 09 15:11:04 crc kubenswrapper[4762]: I1009 15:11:04.554520 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8ccbdaa6-45c2-47c8-94aa-2f0a49e14fb5-kube-api-access-nt472" (OuterVolumeSpecName: "kube-api-access-nt472") pod "8ccbdaa6-45c2-47c8-94aa-2f0a49e14fb5" (UID: "8ccbdaa6-45c2-47c8-94aa-2f0a49e14fb5"). InnerVolumeSpecName "kube-api-access-nt472". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 15:11:04 crc kubenswrapper[4762]: I1009 15:11:04.577344 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8ccbdaa6-45c2-47c8-94aa-2f0a49e14fb5-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "8ccbdaa6-45c2-47c8-94aa-2f0a49e14fb5" (UID: "8ccbdaa6-45c2-47c8-94aa-2f0a49e14fb5"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 15:11:04 crc kubenswrapper[4762]: I1009 15:11:04.601868 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8ccbdaa6-45c2-47c8-94aa-2f0a49e14fb5-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "8ccbdaa6-45c2-47c8-94aa-2f0a49e14fb5" (UID: "8ccbdaa6-45c2-47c8-94aa-2f0a49e14fb5"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:11:04 crc kubenswrapper[4762]: I1009 15:11:04.641103 4762 generic.go:334] "Generic (PLEG): container finished" podID="8ccbdaa6-45c2-47c8-94aa-2f0a49e14fb5" containerID="576e3d47d7e70c632cd8c49f5a52afacab0d69c3ef73368f5e6c0c2c366e78d9" exitCode=137 Oct 09 15:11:04 crc kubenswrapper[4762]: I1009 15:11:04.641385 4762 scope.go:117] "RemoveContainer" containerID="576e3d47d7e70c632cd8c49f5a52afacab0d69c3ef73368f5e6c0c2c366e78d9" Oct 09 15:11:04 crc kubenswrapper[4762]: I1009 15:11:04.641552 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Oct 09 15:11:04 crc kubenswrapper[4762]: I1009 15:11:04.645508 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nt472\" (UniqueName: \"kubernetes.io/projected/8ccbdaa6-45c2-47c8-94aa-2f0a49e14fb5-kube-api-access-nt472\") on node \"crc\" DevicePath \"\"" Oct 09 15:11:04 crc kubenswrapper[4762]: I1009 15:11:04.645542 4762 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/8ccbdaa6-45c2-47c8-94aa-2f0a49e14fb5-openstack-config\") on node \"crc\" DevicePath \"\"" Oct 09 15:11:04 crc kubenswrapper[4762]: I1009 15:11:04.645554 4762 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/8ccbdaa6-45c2-47c8-94aa-2f0a49e14fb5-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Oct 09 15:11:04 crc kubenswrapper[4762]: I1009 15:11:04.645695 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"87c8f7e7-8d9d-49aa-aa9f-9702e83e9331","Type":"ContainerStarted","Data":"70a7fae3de788b51d3c076a04c23635b5e4e46dfef2e8dc76f30098275d1895a"} Oct 09 15:11:04 crc kubenswrapper[4762]: I1009 15:11:04.648559 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"4095c002-3425-4536-be08-4adb623d6b61","Type":"ContainerStarted","Data":"37f2032e98f4fc8400dce16953dbef8f7b91eb4bbceb674a23b23e733f91ccc4"} Oct 09 15:11:04 crc kubenswrapper[4762]: I1009 15:11:04.648593 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"4095c002-3425-4536-be08-4adb623d6b61","Type":"ContainerStarted","Data":"a4697f067669cf1556f5b568317dcec913129cb652965b60de456f3d44cb779c"} Oct 09 15:11:04 crc kubenswrapper[4762]: I1009 15:11:04.650990 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Oct 09 15:11:04 crc kubenswrapper[4762]: I1009 15:11:04.651869 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"ab0657ec-dc49-40ca-b47f-5b17b550744e","Type":"ContainerStarted","Data":"539af94591718bd5b00c0c73c779b90ce250ea198d6d88465caa3418b07c32c6"} Oct 09 15:11:04 crc kubenswrapper[4762]: I1009 15:11:04.673149 4762 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="8ccbdaa6-45c2-47c8-94aa-2f0a49e14fb5" podUID="ab0657ec-dc49-40ca-b47f-5b17b550744e" Oct 09 15:11:04 crc kubenswrapper[4762]: I1009 15:11:04.690241 4762 scope.go:117] "RemoveContainer" containerID="576e3d47d7e70c632cd8c49f5a52afacab0d69c3ef73368f5e6c0c2c366e78d9" Oct 09 15:11:04 crc kubenswrapper[4762]: E1009 15:11:04.690825 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"576e3d47d7e70c632cd8c49f5a52afacab0d69c3ef73368f5e6c0c2c366e78d9\": container with ID starting with 576e3d47d7e70c632cd8c49f5a52afacab0d69c3ef73368f5e6c0c2c366e78d9 not found: ID does not exist" containerID="576e3d47d7e70c632cd8c49f5a52afacab0d69c3ef73368f5e6c0c2c366e78d9" Oct 09 15:11:04 crc kubenswrapper[4762]: I1009 15:11:04.690862 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"576e3d47d7e70c632cd8c49f5a52afacab0d69c3ef73368f5e6c0c2c366e78d9"} err="failed to get container status \"576e3d47d7e70c632cd8c49f5a52afacab0d69c3ef73368f5e6c0c2c366e78d9\": 
rpc error: code = NotFound desc = could not find container \"576e3d47d7e70c632cd8c49f5a52afacab0d69c3ef73368f5e6c0c2c366e78d9\": container with ID starting with 576e3d47d7e70c632cd8c49f5a52afacab0d69c3ef73368f5e6c0c2c366e78d9 not found: ID does not exist" Oct 09 15:11:04 crc kubenswrapper[4762]: I1009 15:11:04.703085 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=2.247482825 podStartE2EDuration="2.703064677s" podCreationTimestamp="2025-10-09 15:11:02 +0000 UTC" firstStartedPulling="2025-10-09 15:11:03.68876684 +0000 UTC m=+6339.462557879" lastFinishedPulling="2025-10-09 15:11:04.144348692 +0000 UTC m=+6339.918139731" observedRunningTime="2025-10-09 15:11:04.668096147 +0000 UTC m=+6340.441887186" watchObservedRunningTime="2025-10-09 15:11:04.703064677 +0000 UTC m=+6340.476855716" Oct 09 15:11:04 crc kubenswrapper[4762]: I1009 15:11:04.704190 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=3.704183336 podStartE2EDuration="3.704183336s" podCreationTimestamp="2025-10-09 15:11:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 15:11:04.69550169 +0000 UTC m=+6340.469292749" watchObservedRunningTime="2025-10-09 15:11:04.704183336 +0000 UTC m=+6340.477974375" Oct 09 15:11:04 crc kubenswrapper[4762]: I1009 15:11:04.939806 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"] Oct 09 15:11:04 crc kubenswrapper[4762]: W1009 15:11:04.946837 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd526887b_e7a2_4ad7_a1f1_1c4f376dac12.slice/crio-0cd16378d7945d9e391ace28a7a82edeb9126976b731925f2f85472318a48335 WatchSource:0}: Error finding container 0cd16378d7945d9e391ace28a7a82edeb9126976b731925f2f85472318a48335: Status 404 returned error can't find the container with id 0cd16378d7945d9e391ace28a7a82edeb9126976b731925f2f85472318a48335 Oct 09 15:11:04 crc kubenswrapper[4762]: I1009 15:11:04.977382 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8ccbdaa6-45c2-47c8-94aa-2f0a49e14fb5" path="/var/lib/kubelet/pods/8ccbdaa6-45c2-47c8-94aa-2f0a49e14fb5/volumes" Oct 09 15:11:05 crc kubenswrapper[4762]: I1009 15:11:05.664927 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"d526887b-e7a2-4ad7-a1f1-1c4f376dac12","Type":"ContainerStarted","Data":"0cd16378d7945d9e391ace28a7a82edeb9126976b731925f2f85472318a48335"} Oct 09 15:11:11 crc kubenswrapper[4762]: I1009 15:11:11.739082 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"d526887b-e7a2-4ad7-a1f1-1c4f376dac12","Type":"ContainerStarted","Data":"c57c457887970e6de58095bd6c6536a6bfecbb20d782ca07ca008e424f211f9f"} Oct 09 15:11:11 crc kubenswrapper[4762]: I1009 15:11:11.740951 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"87c8f7e7-8d9d-49aa-aa9f-9702e83e9331","Type":"ContainerStarted","Data":"a5aeab736c3f74c46fdb1212551ffab08f2402abeacfc5d5aceb336b1c73d706"} Oct 09 15:11:12 crc kubenswrapper[4762]: I1009 15:11:12.632294 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Oct 09 15:11:17 crc kubenswrapper[4762]: I1009 
15:11:17.823020 4762 generic.go:334] "Generic (PLEG): container finished" podID="87c8f7e7-8d9d-49aa-aa9f-9702e83e9331" containerID="a5aeab736c3f74c46fdb1212551ffab08f2402abeacfc5d5aceb336b1c73d706" exitCode=0 Oct 09 15:11:17 crc kubenswrapper[4762]: I1009 15:11:17.823095 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"87c8f7e7-8d9d-49aa-aa9f-9702e83e9331","Type":"ContainerDied","Data":"a5aeab736c3f74c46fdb1212551ffab08f2402abeacfc5d5aceb336b1c73d706"} Oct 09 15:11:18 crc kubenswrapper[4762]: I1009 15:11:18.838888 4762 generic.go:334] "Generic (PLEG): container finished" podID="d526887b-e7a2-4ad7-a1f1-1c4f376dac12" containerID="c57c457887970e6de58095bd6c6536a6bfecbb20d782ca07ca008e424f211f9f" exitCode=0 Oct 09 15:11:18 crc kubenswrapper[4762]: I1009 15:11:18.838982 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"d526887b-e7a2-4ad7-a1f1-1c4f376dac12","Type":"ContainerDied","Data":"c57c457887970e6de58095bd6c6536a6bfecbb20d782ca07ca008e424f211f9f"} Oct 09 15:11:19 crc kubenswrapper[4762]: I1009 15:11:19.045157 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-gzfmt"] Oct 09 15:11:19 crc kubenswrapper[4762]: I1009 15:11:19.054731 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-gzfmt"] Oct 09 15:11:20 crc kubenswrapper[4762]: I1009 15:11:20.859757 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"87c8f7e7-8d9d-49aa-aa9f-9702e83e9331","Type":"ContainerStarted","Data":"a2c5ea5a633a01a2327bdea47e7564aeaa7c6152b581e79931458583725cb829"} Oct 09 15:11:20 crc kubenswrapper[4762]: I1009 15:11:20.978330 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b2422116-f72c-42ae-83b2-084763842592" path="/var/lib/kubelet/pods/b2422116-f72c-42ae-83b2-084763842592/volumes" Oct 09 15:11:25 crc kubenswrapper[4762]: I1009 15:11:25.928272 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"87c8f7e7-8d9d-49aa-aa9f-9702e83e9331","Type":"ContainerStarted","Data":"0370f32bdeb4bd9db9c437d21bb411076303fdd551053a23bae324669901a7fc"} Oct 09 15:11:25 crc kubenswrapper[4762]: I1009 15:11:25.951912 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/alertmanager-metric-storage-0" podStartSLOduration=8.59418371 podStartE2EDuration="23.951888364s" podCreationTimestamp="2025-10-09 15:11:02 +0000 UTC" firstStartedPulling="2025-10-09 15:11:04.302732425 +0000 UTC m=+6340.076523464" lastFinishedPulling="2025-10-09 15:11:19.660437069 +0000 UTC m=+6355.434228118" observedRunningTime="2025-10-09 15:11:25.94828001 +0000 UTC m=+6361.722071069" watchObservedRunningTime="2025-10-09 15:11:25.951888364 +0000 UTC m=+6361.725679403" Oct 09 15:11:26 crc kubenswrapper[4762]: I1009 15:11:26.938521 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"d526887b-e7a2-4ad7-a1f1-1c4f376dac12","Type":"ContainerStarted","Data":"5c3a83d078466626e0e091b143cd00b09a531d318c9c3a8f21cf52a780e8deec"} Oct 09 15:11:26 crc kubenswrapper[4762]: I1009 15:11:26.940725 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/alertmanager-metric-storage-0" Oct 09 15:11:26 crc kubenswrapper[4762]: I1009 15:11:26.941293 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="ready" pod="openstack/alertmanager-metric-storage-0" Oct 09 15:11:31 crc kubenswrapper[4762]: I1009 15:11:31.003944 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"d526887b-e7a2-4ad7-a1f1-1c4f376dac12","Type":"ContainerStarted","Data":"c4e953143f7d65cd6c055080abefa0820da05326a679a453eec015633f04fedd"} Oct 09 15:11:34 crc kubenswrapper[4762]: I1009 15:11:34.032833 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"d526887b-e7a2-4ad7-a1f1-1c4f376dac12","Type":"ContainerStarted","Data":"d33b49da7c0c70cf2d022df2da9a7fdac2fd4bfdd4704f05157b95e69d4ba052"} Oct 09 15:11:34 crc kubenswrapper[4762]: I1009 15:11:34.059259 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/prometheus-metric-storage-0" podStartSLOduration=3.756859422 podStartE2EDuration="32.059240334s" podCreationTimestamp="2025-10-09 15:11:02 +0000 UTC" firstStartedPulling="2025-10-09 15:11:04.948787554 +0000 UTC m=+6340.722578593" lastFinishedPulling="2025-10-09 15:11:33.251168466 +0000 UTC m=+6369.024959505" observedRunningTime="2025-10-09 15:11:34.054519491 +0000 UTC m=+6369.828310530" watchObservedRunningTime="2025-10-09 15:11:34.059240334 +0000 UTC m=+6369.833031373" Oct 09 15:11:34 crc kubenswrapper[4762]: I1009 15:11:34.262987 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/prometheus-metric-storage-0" Oct 09 15:11:34 crc kubenswrapper[4762]: I1009 15:11:34.263083 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/prometheus-metric-storage-0" Oct 09 15:11:34 crc kubenswrapper[4762]: I1009 15:11:34.266455 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/prometheus-metric-storage-0" Oct 09 15:11:35 crc kubenswrapper[4762]: I1009 15:11:35.041873 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/prometheus-metric-storage-0" Oct 09 15:11:35 crc kubenswrapper[4762]: I1009 15:11:35.204149 4762 scope.go:117] "RemoveContainer" containerID="3a79cb904b5e0ed0cae14c8432257ce531fe9a31c281860d08e2d41291e2d99f" Oct 09 15:11:35 crc kubenswrapper[4762]: I1009 15:11:35.229017 4762 scope.go:117] "RemoveContainer" containerID="5bdcd51332ca5a6e7ca72c220021aecd3622b5dafc76a33f224fca303f4cc673" Oct 09 15:11:35 crc kubenswrapper[4762]: I1009 15:11:35.302019 4762 scope.go:117] "RemoveContainer" containerID="2978feb96559cfc45dc0e290c0d2243b5ba1cd4945be26e3c592f451540755e5" Oct 09 15:11:35 crc kubenswrapper[4762]: I1009 15:11:35.359301 4762 scope.go:117] "RemoveContainer" containerID="f29f9ad0f1d37d93ffeab4ac6759cec89c36c1a4d5d98cb8254d3eb747786fe4" Oct 09 15:11:35 crc kubenswrapper[4762]: I1009 15:11:35.399585 4762 scope.go:117] "RemoveContainer" containerID="c6cd31d9581e951d64f5fefcb58c1bd8dbc0109ae37cdf49d5e929f69962296f" Oct 09 15:11:35 crc kubenswrapper[4762]: I1009 15:11:35.437545 4762 scope.go:117] "RemoveContainer" containerID="92e92be78fea06552eb6b46a13415dfcb98451a1b057a30429a01f52f0b2915c" Oct 09 15:11:35 crc kubenswrapper[4762]: I1009 15:11:35.472804 4762 scope.go:117] "RemoveContainer" containerID="948ad3f235cf70edcf61eebcfc21a1726ff6620bb4364fa31c4ca44714c931fe" Oct 09 15:11:35 crc kubenswrapper[4762]: I1009 15:11:35.505161 4762 scope.go:117] "RemoveContainer" containerID="af17c606dfd5e635986fc171df74b00b82ed945d433aa6ab1252781b3db646ec" Oct 09 15:11:41 crc kubenswrapper[4762]: I1009 15:11:41.276116 4762 kubelet.go:2421] 
"SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 09 15:11:41 crc kubenswrapper[4762]: I1009 15:11:41.282947 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 09 15:11:41 crc kubenswrapper[4762]: I1009 15:11:41.295138 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 09 15:11:41 crc kubenswrapper[4762]: I1009 15:11:41.295333 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 09 15:11:41 crc kubenswrapper[4762]: I1009 15:11:41.313552 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 09 15:11:41 crc kubenswrapper[4762]: I1009 15:11:41.454077 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/91dcf017-2016-47ff-8b2f-2af30588ced3-scripts\") pod \"ceilometer-0\" (UID: \"91dcf017-2016-47ff-8b2f-2af30588ced3\") " pod="openstack/ceilometer-0" Oct 09 15:11:41 crc kubenswrapper[4762]: I1009 15:11:41.454219 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tqp2p\" (UniqueName: \"kubernetes.io/projected/91dcf017-2016-47ff-8b2f-2af30588ced3-kube-api-access-tqp2p\") pod \"ceilometer-0\" (UID: \"91dcf017-2016-47ff-8b2f-2af30588ced3\") " pod="openstack/ceilometer-0" Oct 09 15:11:41 crc kubenswrapper[4762]: I1009 15:11:41.454253 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/91dcf017-2016-47ff-8b2f-2af30588ced3-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"91dcf017-2016-47ff-8b2f-2af30588ced3\") " pod="openstack/ceilometer-0" Oct 09 15:11:41 crc kubenswrapper[4762]: I1009 15:11:41.454294 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91dcf017-2016-47ff-8b2f-2af30588ced3-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"91dcf017-2016-47ff-8b2f-2af30588ced3\") " pod="openstack/ceilometer-0" Oct 09 15:11:41 crc kubenswrapper[4762]: I1009 15:11:41.454434 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/91dcf017-2016-47ff-8b2f-2af30588ced3-run-httpd\") pod \"ceilometer-0\" (UID: \"91dcf017-2016-47ff-8b2f-2af30588ced3\") " pod="openstack/ceilometer-0" Oct 09 15:11:41 crc kubenswrapper[4762]: I1009 15:11:41.454468 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/91dcf017-2016-47ff-8b2f-2af30588ced3-config-data\") pod \"ceilometer-0\" (UID: \"91dcf017-2016-47ff-8b2f-2af30588ced3\") " pod="openstack/ceilometer-0" Oct 09 15:11:41 crc kubenswrapper[4762]: I1009 15:11:41.454506 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/91dcf017-2016-47ff-8b2f-2af30588ced3-log-httpd\") pod \"ceilometer-0\" (UID: \"91dcf017-2016-47ff-8b2f-2af30588ced3\") " pod="openstack/ceilometer-0" Oct 09 15:11:41 crc kubenswrapper[4762]: I1009 15:11:41.560897 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: 
\"kubernetes.io/secret/91dcf017-2016-47ff-8b2f-2af30588ced3-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"91dcf017-2016-47ff-8b2f-2af30588ced3\") " pod="openstack/ceilometer-0" Oct 09 15:11:41 crc kubenswrapper[4762]: I1009 15:11:41.560968 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91dcf017-2016-47ff-8b2f-2af30588ced3-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"91dcf017-2016-47ff-8b2f-2af30588ced3\") " pod="openstack/ceilometer-0" Oct 09 15:11:41 crc kubenswrapper[4762]: I1009 15:11:41.561084 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/91dcf017-2016-47ff-8b2f-2af30588ced3-run-httpd\") pod \"ceilometer-0\" (UID: \"91dcf017-2016-47ff-8b2f-2af30588ced3\") " pod="openstack/ceilometer-0" Oct 09 15:11:41 crc kubenswrapper[4762]: I1009 15:11:41.561114 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/91dcf017-2016-47ff-8b2f-2af30588ced3-config-data\") pod \"ceilometer-0\" (UID: \"91dcf017-2016-47ff-8b2f-2af30588ced3\") " pod="openstack/ceilometer-0" Oct 09 15:11:41 crc kubenswrapper[4762]: I1009 15:11:41.561148 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/91dcf017-2016-47ff-8b2f-2af30588ced3-log-httpd\") pod \"ceilometer-0\" (UID: \"91dcf017-2016-47ff-8b2f-2af30588ced3\") " pod="openstack/ceilometer-0" Oct 09 15:11:41 crc kubenswrapper[4762]: I1009 15:11:41.561199 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/91dcf017-2016-47ff-8b2f-2af30588ced3-scripts\") pod \"ceilometer-0\" (UID: \"91dcf017-2016-47ff-8b2f-2af30588ced3\") " pod="openstack/ceilometer-0" Oct 09 15:11:41 crc kubenswrapper[4762]: I1009 15:11:41.561295 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tqp2p\" (UniqueName: \"kubernetes.io/projected/91dcf017-2016-47ff-8b2f-2af30588ced3-kube-api-access-tqp2p\") pod \"ceilometer-0\" (UID: \"91dcf017-2016-47ff-8b2f-2af30588ced3\") " pod="openstack/ceilometer-0" Oct 09 15:11:41 crc kubenswrapper[4762]: I1009 15:11:41.563850 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/91dcf017-2016-47ff-8b2f-2af30588ced3-log-httpd\") pod \"ceilometer-0\" (UID: \"91dcf017-2016-47ff-8b2f-2af30588ced3\") " pod="openstack/ceilometer-0" Oct 09 15:11:41 crc kubenswrapper[4762]: I1009 15:11:41.566380 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/91dcf017-2016-47ff-8b2f-2af30588ced3-run-httpd\") pod \"ceilometer-0\" (UID: \"91dcf017-2016-47ff-8b2f-2af30588ced3\") " pod="openstack/ceilometer-0" Oct 09 15:11:41 crc kubenswrapper[4762]: I1009 15:11:41.597318 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/91dcf017-2016-47ff-8b2f-2af30588ced3-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"91dcf017-2016-47ff-8b2f-2af30588ced3\") " pod="openstack/ceilometer-0" Oct 09 15:11:41 crc kubenswrapper[4762]: I1009 15:11:41.597679 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/91dcf017-2016-47ff-8b2f-2af30588ced3-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"91dcf017-2016-47ff-8b2f-2af30588ced3\") " pod="openstack/ceilometer-0" Oct 09 15:11:41 crc kubenswrapper[4762]: I1009 15:11:41.602261 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tqp2p\" (UniqueName: \"kubernetes.io/projected/91dcf017-2016-47ff-8b2f-2af30588ced3-kube-api-access-tqp2p\") pod \"ceilometer-0\" (UID: \"91dcf017-2016-47ff-8b2f-2af30588ced3\") " pod="openstack/ceilometer-0" Oct 09 15:11:41 crc kubenswrapper[4762]: I1009 15:11:41.602681 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/91dcf017-2016-47ff-8b2f-2af30588ced3-config-data\") pod \"ceilometer-0\" (UID: \"91dcf017-2016-47ff-8b2f-2af30588ced3\") " pod="openstack/ceilometer-0" Oct 09 15:11:41 crc kubenswrapper[4762]: I1009 15:11:41.607920 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/91dcf017-2016-47ff-8b2f-2af30588ced3-scripts\") pod \"ceilometer-0\" (UID: \"91dcf017-2016-47ff-8b2f-2af30588ced3\") " pod="openstack/ceilometer-0" Oct 09 15:11:41 crc kubenswrapper[4762]: I1009 15:11:41.613242 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 09 15:11:42 crc kubenswrapper[4762]: I1009 15:11:42.259647 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 09 15:11:43 crc kubenswrapper[4762]: I1009 15:11:43.148084 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"91dcf017-2016-47ff-8b2f-2af30588ced3","Type":"ContainerStarted","Data":"3b6dcd55dde85d9a1a8b6250b52c910ab9a06ee861ddf4201468afebd8627561"} Oct 09 15:11:44 crc kubenswrapper[4762]: I1009 15:11:44.161550 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"91dcf017-2016-47ff-8b2f-2af30588ced3","Type":"ContainerStarted","Data":"1896754f9eeb90c23c6be063023898d3d8bb984e7ce98d45e6b4e1776487a80a"} Oct 09 15:11:45 crc kubenswrapper[4762]: I1009 15:11:45.181079 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"91dcf017-2016-47ff-8b2f-2af30588ced3","Type":"ContainerStarted","Data":"6a954bfbdc82d4b8a2a4e97bacb61aa65f975bba29bd716f7655f290fa7cc4c8"} Oct 09 15:11:46 crc kubenswrapper[4762]: I1009 15:11:46.195696 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"91dcf017-2016-47ff-8b2f-2af30588ced3","Type":"ContainerStarted","Data":"dbb2ecc3a8be64446db23d2728ea1189fce6d2b05cf13417797547a674c728f0"} Oct 09 15:11:48 crc kubenswrapper[4762]: I1009 15:11:48.217898 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"91dcf017-2016-47ff-8b2f-2af30588ced3","Type":"ContainerStarted","Data":"cff43f46cbd8c31b63ba9ece9575e75280db90b64417b966ddbed0cf8acd1875"} Oct 09 15:11:48 crc kubenswrapper[4762]: I1009 15:11:48.219745 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 09 15:11:52 crc kubenswrapper[4762]: I1009 15:11:52.696502 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=6.691392208 podStartE2EDuration="11.69648136s" podCreationTimestamp="2025-10-09 15:11:41 +0000 UTC" firstStartedPulling="2025-10-09 15:11:42.273150778 +0000 UTC m=+6378.046941817" 
lastFinishedPulling="2025-10-09 15:11:47.27823993 +0000 UTC m=+6383.052030969" observedRunningTime="2025-10-09 15:11:48.242567676 +0000 UTC m=+6384.016358725" watchObservedRunningTime="2025-10-09 15:11:52.69648136 +0000 UTC m=+6388.470272399" Oct 09 15:11:52 crc kubenswrapper[4762]: I1009 15:11:52.706872 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/aodh-db-create-nfz4s"] Oct 09 15:11:52 crc kubenswrapper[4762]: I1009 15:11:52.708611 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-db-create-nfz4s" Oct 09 15:11:52 crc kubenswrapper[4762]: I1009 15:11:52.732896 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-db-create-nfz4s"] Oct 09 15:11:52 crc kubenswrapper[4762]: I1009 15:11:52.820966 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j6jww\" (UniqueName: \"kubernetes.io/projected/0be367a0-7dc6-4755-82c0-79787634bd41-kube-api-access-j6jww\") pod \"aodh-db-create-nfz4s\" (UID: \"0be367a0-7dc6-4755-82c0-79787634bd41\") " pod="openstack/aodh-db-create-nfz4s" Oct 09 15:11:52 crc kubenswrapper[4762]: I1009 15:11:52.923325 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j6jww\" (UniqueName: \"kubernetes.io/projected/0be367a0-7dc6-4755-82c0-79787634bd41-kube-api-access-j6jww\") pod \"aodh-db-create-nfz4s\" (UID: \"0be367a0-7dc6-4755-82c0-79787634bd41\") " pod="openstack/aodh-db-create-nfz4s" Oct 09 15:11:52 crc kubenswrapper[4762]: I1009 15:11:52.942320 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j6jww\" (UniqueName: \"kubernetes.io/projected/0be367a0-7dc6-4755-82c0-79787634bd41-kube-api-access-j6jww\") pod \"aodh-db-create-nfz4s\" (UID: \"0be367a0-7dc6-4755-82c0-79787634bd41\") " pod="openstack/aodh-db-create-nfz4s" Oct 09 15:11:53 crc kubenswrapper[4762]: I1009 15:11:53.034084 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-db-create-nfz4s" Oct 09 15:11:53 crc kubenswrapper[4762]: I1009 15:11:53.795169 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-db-create-nfz4s"] Oct 09 15:11:54 crc kubenswrapper[4762]: I1009 15:11:54.283293 4762 generic.go:334] "Generic (PLEG): container finished" podID="0be367a0-7dc6-4755-82c0-79787634bd41" containerID="2580f2a71cb097c72e814fb8735375837aadd8a5b3bafabad554ca365ee0e926" exitCode=0 Oct 09 15:11:54 crc kubenswrapper[4762]: I1009 15:11:54.283337 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-create-nfz4s" event={"ID":"0be367a0-7dc6-4755-82c0-79787634bd41","Type":"ContainerDied","Data":"2580f2a71cb097c72e814fb8735375837aadd8a5b3bafabad554ca365ee0e926"} Oct 09 15:11:54 crc kubenswrapper[4762]: I1009 15:11:54.283363 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-create-nfz4s" event={"ID":"0be367a0-7dc6-4755-82c0-79787634bd41","Type":"ContainerStarted","Data":"d82452033fb8e01b16269985011757d8bd27ad0a3d85a0ac22aa722bf58c0579"} Oct 09 15:11:55 crc kubenswrapper[4762]: I1009 15:11:55.690433 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-db-create-nfz4s" Oct 09 15:11:55 crc kubenswrapper[4762]: I1009 15:11:55.794105 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j6jww\" (UniqueName: \"kubernetes.io/projected/0be367a0-7dc6-4755-82c0-79787634bd41-kube-api-access-j6jww\") pod \"0be367a0-7dc6-4755-82c0-79787634bd41\" (UID: \"0be367a0-7dc6-4755-82c0-79787634bd41\") " Oct 09 15:11:55 crc kubenswrapper[4762]: I1009 15:11:55.801369 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0be367a0-7dc6-4755-82c0-79787634bd41-kube-api-access-j6jww" (OuterVolumeSpecName: "kube-api-access-j6jww") pod "0be367a0-7dc6-4755-82c0-79787634bd41" (UID: "0be367a0-7dc6-4755-82c0-79787634bd41"). InnerVolumeSpecName "kube-api-access-j6jww". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 15:11:55 crc kubenswrapper[4762]: I1009 15:11:55.895870 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j6jww\" (UniqueName: \"kubernetes.io/projected/0be367a0-7dc6-4755-82c0-79787634bd41-kube-api-access-j6jww\") on node \"crc\" DevicePath \"\"" Oct 09 15:11:56 crc kubenswrapper[4762]: I1009 15:11:56.303591 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-db-create-nfz4s" Oct 09 15:11:56 crc kubenswrapper[4762]: I1009 15:11:56.303755 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-create-nfz4s" event={"ID":"0be367a0-7dc6-4755-82c0-79787634bd41","Type":"ContainerDied","Data":"d82452033fb8e01b16269985011757d8bd27ad0a3d85a0ac22aa722bf58c0579"} Oct 09 15:11:56 crc kubenswrapper[4762]: I1009 15:11:56.303795 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d82452033fb8e01b16269985011757d8bd27ad0a3d85a0ac22aa722bf58c0579" Oct 09 15:12:02 crc kubenswrapper[4762]: I1009 15:12:02.023844 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-mstc5"] Oct 09 15:12:02 crc kubenswrapper[4762]: I1009 15:12:02.032655 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-mstc5"] Oct 09 15:12:02 crc kubenswrapper[4762]: I1009 15:12:02.683367 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/aodh-2e2d-account-create-487jb"] Oct 09 15:12:02 crc kubenswrapper[4762]: E1009 15:12:02.684147 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0be367a0-7dc6-4755-82c0-79787634bd41" containerName="mariadb-database-create" Oct 09 15:12:02 crc kubenswrapper[4762]: I1009 15:12:02.684164 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="0be367a0-7dc6-4755-82c0-79787634bd41" containerName="mariadb-database-create" Oct 09 15:12:02 crc kubenswrapper[4762]: I1009 15:12:02.684395 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="0be367a0-7dc6-4755-82c0-79787634bd41" containerName="mariadb-database-create" Oct 09 15:12:02 crc kubenswrapper[4762]: I1009 15:12:02.685357 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-2e2d-account-create-487jb" Oct 09 15:12:02 crc kubenswrapper[4762]: I1009 15:12:02.689979 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-db-secret" Oct 09 15:12:02 crc kubenswrapper[4762]: I1009 15:12:02.694410 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-2e2d-account-create-487jb"] Oct 09 15:12:02 crc kubenswrapper[4762]: I1009 15:12:02.754961 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2dg2s\" (UniqueName: \"kubernetes.io/projected/3e349088-1b04-404e-ad25-3f0bb4117810-kube-api-access-2dg2s\") pod \"aodh-2e2d-account-create-487jb\" (UID: \"3e349088-1b04-404e-ad25-3f0bb4117810\") " pod="openstack/aodh-2e2d-account-create-487jb" Oct 09 15:12:02 crc kubenswrapper[4762]: I1009 15:12:02.856838 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2dg2s\" (UniqueName: \"kubernetes.io/projected/3e349088-1b04-404e-ad25-3f0bb4117810-kube-api-access-2dg2s\") pod \"aodh-2e2d-account-create-487jb\" (UID: \"3e349088-1b04-404e-ad25-3f0bb4117810\") " pod="openstack/aodh-2e2d-account-create-487jb" Oct 09 15:12:02 crc kubenswrapper[4762]: I1009 15:12:02.880831 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2dg2s\" (UniqueName: \"kubernetes.io/projected/3e349088-1b04-404e-ad25-3f0bb4117810-kube-api-access-2dg2s\") pod \"aodh-2e2d-account-create-487jb\" (UID: \"3e349088-1b04-404e-ad25-3f0bb4117810\") " pod="openstack/aodh-2e2d-account-create-487jb" Oct 09 15:12:02 crc kubenswrapper[4762]: I1009 15:12:02.986862 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1b169a81-9531-49df-aa7c-3009e7e9c41a" path="/var/lib/kubelet/pods/1b169a81-9531-49df-aa7c-3009e7e9c41a/volumes" Oct 09 15:12:03 crc kubenswrapper[4762]: I1009 15:12:03.007907 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-2e2d-account-create-487jb" Oct 09 15:12:03 crc kubenswrapper[4762]: I1009 15:12:03.512601 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-2e2d-account-create-487jb"] Oct 09 15:12:04 crc kubenswrapper[4762]: I1009 15:12:04.382420 4762 generic.go:334] "Generic (PLEG): container finished" podID="3e349088-1b04-404e-ad25-3f0bb4117810" containerID="5b1621bc12e5f6d588e66e567c310e485fea85a69f987292d36d8aab7d8c34ea" exitCode=0 Oct 09 15:12:04 crc kubenswrapper[4762]: I1009 15:12:04.382521 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-2e2d-account-create-487jb" event={"ID":"3e349088-1b04-404e-ad25-3f0bb4117810","Type":"ContainerDied","Data":"5b1621bc12e5f6d588e66e567c310e485fea85a69f987292d36d8aab7d8c34ea"} Oct 09 15:12:04 crc kubenswrapper[4762]: I1009 15:12:04.382741 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-2e2d-account-create-487jb" event={"ID":"3e349088-1b04-404e-ad25-3f0bb4117810","Type":"ContainerStarted","Data":"15d65b557d908cf98ddef75a040cd98afdb406c4ec42e1bfa43b9eee3a11ea1a"} Oct 09 15:12:05 crc kubenswrapper[4762]: I1009 15:12:05.837146 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-2e2d-account-create-487jb" Oct 09 15:12:05 crc kubenswrapper[4762]: I1009 15:12:05.943903 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2dg2s\" (UniqueName: \"kubernetes.io/projected/3e349088-1b04-404e-ad25-3f0bb4117810-kube-api-access-2dg2s\") pod \"3e349088-1b04-404e-ad25-3f0bb4117810\" (UID: \"3e349088-1b04-404e-ad25-3f0bb4117810\") " Oct 09 15:12:05 crc kubenswrapper[4762]: I1009 15:12:05.949136 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3e349088-1b04-404e-ad25-3f0bb4117810-kube-api-access-2dg2s" (OuterVolumeSpecName: "kube-api-access-2dg2s") pod "3e349088-1b04-404e-ad25-3f0bb4117810" (UID: "3e349088-1b04-404e-ad25-3f0bb4117810"). InnerVolumeSpecName "kube-api-access-2dg2s". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 15:12:06 crc kubenswrapper[4762]: I1009 15:12:06.060328 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2dg2s\" (UniqueName: \"kubernetes.io/projected/3e349088-1b04-404e-ad25-3f0bb4117810-kube-api-access-2dg2s\") on node \"crc\" DevicePath \"\"" Oct 09 15:12:06 crc kubenswrapper[4762]: I1009 15:12:06.402335 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-2e2d-account-create-487jb" event={"ID":"3e349088-1b04-404e-ad25-3f0bb4117810","Type":"ContainerDied","Data":"15d65b557d908cf98ddef75a040cd98afdb406c4ec42e1bfa43b9eee3a11ea1a"} Oct 09 15:12:06 crc kubenswrapper[4762]: I1009 15:12:06.402378 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-2e2d-account-create-487jb" Oct 09 15:12:06 crc kubenswrapper[4762]: I1009 15:12:06.402380 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="15d65b557d908cf98ddef75a040cd98afdb406c4ec42e1bfa43b9eee3a11ea1a" Oct 09 15:12:07 crc kubenswrapper[4762]: I1009 15:12:07.926980 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/aodh-db-sync-68lbb"] Oct 09 15:12:07 crc kubenswrapper[4762]: E1009 15:12:07.927896 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e349088-1b04-404e-ad25-3f0bb4117810" containerName="mariadb-account-create" Oct 09 15:12:07 crc kubenswrapper[4762]: I1009 15:12:07.927920 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e349088-1b04-404e-ad25-3f0bb4117810" containerName="mariadb-account-create" Oct 09 15:12:07 crc kubenswrapper[4762]: I1009 15:12:07.928188 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="3e349088-1b04-404e-ad25-3f0bb4117810" containerName="mariadb-account-create" Oct 09 15:12:07 crc kubenswrapper[4762]: I1009 15:12:07.929176 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-db-sync-68lbb" Oct 09 15:12:07 crc kubenswrapper[4762]: I1009 15:12:07.932258 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-config-data" Oct 09 15:12:07 crc kubenswrapper[4762]: I1009 15:12:07.932281 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-scripts" Oct 09 15:12:07 crc kubenswrapper[4762]: I1009 15:12:07.932393 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-autoscaling-dockercfg-8xjvj" Oct 09 15:12:07 crc kubenswrapper[4762]: I1009 15:12:07.939924 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-db-sync-68lbb"] Oct 09 15:12:08 crc kubenswrapper[4762]: I1009 15:12:08.104146 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b82bad80-25e4-41ef-a414-1dd5a1c9c577-config-data\") pod \"aodh-db-sync-68lbb\" (UID: \"b82bad80-25e4-41ef-a414-1dd5a1c9c577\") " pod="openstack/aodh-db-sync-68lbb" Oct 09 15:12:08 crc kubenswrapper[4762]: I1009 15:12:08.104255 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b82bad80-25e4-41ef-a414-1dd5a1c9c577-combined-ca-bundle\") pod \"aodh-db-sync-68lbb\" (UID: \"b82bad80-25e4-41ef-a414-1dd5a1c9c577\") " pod="openstack/aodh-db-sync-68lbb" Oct 09 15:12:08 crc kubenswrapper[4762]: I1009 15:12:08.104397 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b82bad80-25e4-41ef-a414-1dd5a1c9c577-scripts\") pod \"aodh-db-sync-68lbb\" (UID: \"b82bad80-25e4-41ef-a414-1dd5a1c9c577\") " pod="openstack/aodh-db-sync-68lbb" Oct 09 15:12:08 crc kubenswrapper[4762]: I1009 15:12:08.104474 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f4j9m\" (UniqueName: \"kubernetes.io/projected/b82bad80-25e4-41ef-a414-1dd5a1c9c577-kube-api-access-f4j9m\") pod \"aodh-db-sync-68lbb\" (UID: \"b82bad80-25e4-41ef-a414-1dd5a1c9c577\") " pod="openstack/aodh-db-sync-68lbb" Oct 09 15:12:08 crc kubenswrapper[4762]: I1009 15:12:08.207261 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b82bad80-25e4-41ef-a414-1dd5a1c9c577-config-data\") pod \"aodh-db-sync-68lbb\" (UID: \"b82bad80-25e4-41ef-a414-1dd5a1c9c577\") " pod="openstack/aodh-db-sync-68lbb" Oct 09 15:12:08 crc kubenswrapper[4762]: I1009 15:12:08.207348 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b82bad80-25e4-41ef-a414-1dd5a1c9c577-combined-ca-bundle\") pod \"aodh-db-sync-68lbb\" (UID: \"b82bad80-25e4-41ef-a414-1dd5a1c9c577\") " pod="openstack/aodh-db-sync-68lbb" Oct 09 15:12:08 crc kubenswrapper[4762]: I1009 15:12:08.207373 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b82bad80-25e4-41ef-a414-1dd5a1c9c577-scripts\") pod \"aodh-db-sync-68lbb\" (UID: \"b82bad80-25e4-41ef-a414-1dd5a1c9c577\") " pod="openstack/aodh-db-sync-68lbb" Oct 09 15:12:08 crc kubenswrapper[4762]: I1009 15:12:08.207391 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f4j9m\" (UniqueName: 
\"kubernetes.io/projected/b82bad80-25e4-41ef-a414-1dd5a1c9c577-kube-api-access-f4j9m\") pod \"aodh-db-sync-68lbb\" (UID: \"b82bad80-25e4-41ef-a414-1dd5a1c9c577\") " pod="openstack/aodh-db-sync-68lbb" Oct 09 15:12:08 crc kubenswrapper[4762]: I1009 15:12:08.216506 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b82bad80-25e4-41ef-a414-1dd5a1c9c577-scripts\") pod \"aodh-db-sync-68lbb\" (UID: \"b82bad80-25e4-41ef-a414-1dd5a1c9c577\") " pod="openstack/aodh-db-sync-68lbb" Oct 09 15:12:08 crc kubenswrapper[4762]: I1009 15:12:08.217279 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b82bad80-25e4-41ef-a414-1dd5a1c9c577-config-data\") pod \"aodh-db-sync-68lbb\" (UID: \"b82bad80-25e4-41ef-a414-1dd5a1c9c577\") " pod="openstack/aodh-db-sync-68lbb" Oct 09 15:12:08 crc kubenswrapper[4762]: I1009 15:12:08.227786 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b82bad80-25e4-41ef-a414-1dd5a1c9c577-combined-ca-bundle\") pod \"aodh-db-sync-68lbb\" (UID: \"b82bad80-25e4-41ef-a414-1dd5a1c9c577\") " pod="openstack/aodh-db-sync-68lbb" Oct 09 15:12:08 crc kubenswrapper[4762]: I1009 15:12:08.231592 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f4j9m\" (UniqueName: \"kubernetes.io/projected/b82bad80-25e4-41ef-a414-1dd5a1c9c577-kube-api-access-f4j9m\") pod \"aodh-db-sync-68lbb\" (UID: \"b82bad80-25e4-41ef-a414-1dd5a1c9c577\") " pod="openstack/aodh-db-sync-68lbb" Oct 09 15:12:08 crc kubenswrapper[4762]: I1009 15:12:08.251031 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-db-sync-68lbb" Oct 09 15:12:08 crc kubenswrapper[4762]: I1009 15:12:08.764301 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-db-sync-68lbb"] Oct 09 15:12:09 crc kubenswrapper[4762]: I1009 15:12:09.431412 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-sync-68lbb" event={"ID":"b82bad80-25e4-41ef-a414-1dd5a1c9c577","Type":"ContainerStarted","Data":"7fe57b146deba3d08ebe30104fb40ab042f55c4677996bb95c2b55f30e6095a2"} Oct 09 15:12:11 crc kubenswrapper[4762]: I1009 15:12:11.661113 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Oct 09 15:12:12 crc kubenswrapper[4762]: I1009 15:12:12.035211 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-cd9d-account-create-9x98d"] Oct 09 15:12:12 crc kubenswrapper[4762]: I1009 15:12:12.044083 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-cd9d-account-create-9x98d"] Oct 09 15:12:12 crc kubenswrapper[4762]: I1009 15:12:12.980817 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dfc1d3b0-b2d3-4b87-81bb-6d9bf69df68b" path="/var/lib/kubelet/pods/dfc1d3b0-b2d3-4b87-81bb-6d9bf69df68b/volumes" Oct 09 15:12:14 crc kubenswrapper[4762]: I1009 15:12:14.485053 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-sync-68lbb" event={"ID":"b82bad80-25e4-41ef-a414-1dd5a1c9c577","Type":"ContainerStarted","Data":"b7b2283e25a389853dabd89197c281a01c494836b17a9650c3793b4c70bcfe2e"} Oct 09 15:12:14 crc kubenswrapper[4762]: I1009 15:12:14.517382 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/aodh-db-sync-68lbb" podStartSLOduration=2.8507851889999998 
podStartE2EDuration="7.51736144s" podCreationTimestamp="2025-10-09 15:12:07 +0000 UTC" firstStartedPulling="2025-10-09 15:12:08.773852872 +0000 UTC m=+6404.547643921" lastFinishedPulling="2025-10-09 15:12:13.440429113 +0000 UTC m=+6409.214220172" observedRunningTime="2025-10-09 15:12:14.500125532 +0000 UTC m=+6410.273916581" watchObservedRunningTime="2025-10-09 15:12:14.51736144 +0000 UTC m=+6410.291152479" Oct 09 15:12:16 crc kubenswrapper[4762]: I1009 15:12:16.511435 4762 generic.go:334] "Generic (PLEG): container finished" podID="b82bad80-25e4-41ef-a414-1dd5a1c9c577" containerID="b7b2283e25a389853dabd89197c281a01c494836b17a9650c3793b4c70bcfe2e" exitCode=0 Oct 09 15:12:16 crc kubenswrapper[4762]: I1009 15:12:16.511686 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-sync-68lbb" event={"ID":"b82bad80-25e4-41ef-a414-1dd5a1c9c577","Type":"ContainerDied","Data":"b7b2283e25a389853dabd89197c281a01c494836b17a9650c3793b4c70bcfe2e"} Oct 09 15:12:17 crc kubenswrapper[4762]: I1009 15:12:17.961921 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-db-sync-68lbb" Oct 09 15:12:18 crc kubenswrapper[4762]: I1009 15:12:18.056724 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f4j9m\" (UniqueName: \"kubernetes.io/projected/b82bad80-25e4-41ef-a414-1dd5a1c9c577-kube-api-access-f4j9m\") pod \"b82bad80-25e4-41ef-a414-1dd5a1c9c577\" (UID: \"b82bad80-25e4-41ef-a414-1dd5a1c9c577\") " Oct 09 15:12:18 crc kubenswrapper[4762]: I1009 15:12:18.056833 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b82bad80-25e4-41ef-a414-1dd5a1c9c577-config-data\") pod \"b82bad80-25e4-41ef-a414-1dd5a1c9c577\" (UID: \"b82bad80-25e4-41ef-a414-1dd5a1c9c577\") " Oct 09 15:12:18 crc kubenswrapper[4762]: I1009 15:12:18.056921 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b82bad80-25e4-41ef-a414-1dd5a1c9c577-scripts\") pod \"b82bad80-25e4-41ef-a414-1dd5a1c9c577\" (UID: \"b82bad80-25e4-41ef-a414-1dd5a1c9c577\") " Oct 09 15:12:18 crc kubenswrapper[4762]: I1009 15:12:18.057023 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b82bad80-25e4-41ef-a414-1dd5a1c9c577-combined-ca-bundle\") pod \"b82bad80-25e4-41ef-a414-1dd5a1c9c577\" (UID: \"b82bad80-25e4-41ef-a414-1dd5a1c9c577\") " Oct 09 15:12:18 crc kubenswrapper[4762]: I1009 15:12:18.062881 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b82bad80-25e4-41ef-a414-1dd5a1c9c577-kube-api-access-f4j9m" (OuterVolumeSpecName: "kube-api-access-f4j9m") pod "b82bad80-25e4-41ef-a414-1dd5a1c9c577" (UID: "b82bad80-25e4-41ef-a414-1dd5a1c9c577"). InnerVolumeSpecName "kube-api-access-f4j9m". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 15:12:18 crc kubenswrapper[4762]: I1009 15:12:18.063263 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b82bad80-25e4-41ef-a414-1dd5a1c9c577-scripts" (OuterVolumeSpecName: "scripts") pod "b82bad80-25e4-41ef-a414-1dd5a1c9c577" (UID: "b82bad80-25e4-41ef-a414-1dd5a1c9c577"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:12:18 crc kubenswrapper[4762]: I1009 15:12:18.096032 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b82bad80-25e4-41ef-a414-1dd5a1c9c577-config-data" (OuterVolumeSpecName: "config-data") pod "b82bad80-25e4-41ef-a414-1dd5a1c9c577" (UID: "b82bad80-25e4-41ef-a414-1dd5a1c9c577"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:12:18 crc kubenswrapper[4762]: I1009 15:12:18.110292 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b82bad80-25e4-41ef-a414-1dd5a1c9c577-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b82bad80-25e4-41ef-a414-1dd5a1c9c577" (UID: "b82bad80-25e4-41ef-a414-1dd5a1c9c577"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:12:18 crc kubenswrapper[4762]: I1009 15:12:18.160394 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f4j9m\" (UniqueName: \"kubernetes.io/projected/b82bad80-25e4-41ef-a414-1dd5a1c9c577-kube-api-access-f4j9m\") on node \"crc\" DevicePath \"\"" Oct 09 15:12:18 crc kubenswrapper[4762]: I1009 15:12:18.160438 4762 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b82bad80-25e4-41ef-a414-1dd5a1c9c577-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 15:12:18 crc kubenswrapper[4762]: I1009 15:12:18.160450 4762 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b82bad80-25e4-41ef-a414-1dd5a1c9c577-scripts\") on node \"crc\" DevicePath \"\"" Oct 09 15:12:18 crc kubenswrapper[4762]: I1009 15:12:18.160463 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b82bad80-25e4-41ef-a414-1dd5a1c9c577-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 15:12:18 crc kubenswrapper[4762]: I1009 15:12:18.538718 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-sync-68lbb" event={"ID":"b82bad80-25e4-41ef-a414-1dd5a1c9c577","Type":"ContainerDied","Data":"7fe57b146deba3d08ebe30104fb40ab042f55c4677996bb95c2b55f30e6095a2"} Oct 09 15:12:18 crc kubenswrapper[4762]: I1009 15:12:18.538759 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7fe57b146deba3d08ebe30104fb40ab042f55c4677996bb95c2b55f30e6095a2" Oct 09 15:12:18 crc kubenswrapper[4762]: I1009 15:12:18.539272 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-db-sync-68lbb" Oct 09 15:12:20 crc kubenswrapper[4762]: I1009 15:12:20.030594 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-2pr7h"] Oct 09 15:12:20 crc kubenswrapper[4762]: I1009 15:12:20.041199 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-2pr7h"] Oct 09 15:12:20 crc kubenswrapper[4762]: I1009 15:12:20.986406 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dc9a1cfc-3f68-493e-b65e-e3eef9864e9f" path="/var/lib/kubelet/pods/dc9a1cfc-3f68-493e-b65e-e3eef9864e9f/volumes" Oct 09 15:12:23 crc kubenswrapper[4762]: I1009 15:12:23.012536 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/aodh-0"] Oct 09 15:12:23 crc kubenswrapper[4762]: E1009 15:12:23.013129 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b82bad80-25e4-41ef-a414-1dd5a1c9c577" containerName="aodh-db-sync" Oct 09 15:12:23 crc kubenswrapper[4762]: I1009 15:12:23.013148 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="b82bad80-25e4-41ef-a414-1dd5a1c9c577" containerName="aodh-db-sync" Oct 09 15:12:23 crc kubenswrapper[4762]: I1009 15:12:23.013404 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="b82bad80-25e4-41ef-a414-1dd5a1c9c577" containerName="aodh-db-sync" Oct 09 15:12:23 crc kubenswrapper[4762]: I1009 15:12:23.015715 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-0" Oct 09 15:12:23 crc kubenswrapper[4762]: I1009 15:12:23.019568 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-scripts" Oct 09 15:12:23 crc kubenswrapper[4762]: I1009 15:12:23.019852 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-autoscaling-dockercfg-8xjvj" Oct 09 15:12:23 crc kubenswrapper[4762]: I1009 15:12:23.019977 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-config-data" Oct 09 15:12:23 crc kubenswrapper[4762]: I1009 15:12:23.026437 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-0"] Oct 09 15:12:23 crc kubenswrapper[4762]: I1009 15:12:23.168721 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ca316e06-1d0d-4b25-88ee-ca4c74f0f48c-scripts\") pod \"aodh-0\" (UID: \"ca316e06-1d0d-4b25-88ee-ca4c74f0f48c\") " pod="openstack/aodh-0" Oct 09 15:12:23 crc kubenswrapper[4762]: I1009 15:12:23.168957 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca316e06-1d0d-4b25-88ee-ca4c74f0f48c-combined-ca-bundle\") pod \"aodh-0\" (UID: \"ca316e06-1d0d-4b25-88ee-ca4c74f0f48c\") " pod="openstack/aodh-0" Oct 09 15:12:23 crc kubenswrapper[4762]: I1009 15:12:23.169087 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ca316e06-1d0d-4b25-88ee-ca4c74f0f48c-config-data\") pod \"aodh-0\" (UID: \"ca316e06-1d0d-4b25-88ee-ca4c74f0f48c\") " pod="openstack/aodh-0" Oct 09 15:12:23 crc kubenswrapper[4762]: I1009 15:12:23.169238 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2s2sf\" (UniqueName: \"kubernetes.io/projected/ca316e06-1d0d-4b25-88ee-ca4c74f0f48c-kube-api-access-2s2sf\") pod \"aodh-0\" (UID: 
\"ca316e06-1d0d-4b25-88ee-ca4c74f0f48c\") " pod="openstack/aodh-0" Oct 09 15:12:23 crc kubenswrapper[4762]: I1009 15:12:23.271001 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ca316e06-1d0d-4b25-88ee-ca4c74f0f48c-scripts\") pod \"aodh-0\" (UID: \"ca316e06-1d0d-4b25-88ee-ca4c74f0f48c\") " pod="openstack/aodh-0" Oct 09 15:12:23 crc kubenswrapper[4762]: I1009 15:12:23.271063 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca316e06-1d0d-4b25-88ee-ca4c74f0f48c-combined-ca-bundle\") pod \"aodh-0\" (UID: \"ca316e06-1d0d-4b25-88ee-ca4c74f0f48c\") " pod="openstack/aodh-0" Oct 09 15:12:23 crc kubenswrapper[4762]: I1009 15:12:23.271137 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ca316e06-1d0d-4b25-88ee-ca4c74f0f48c-config-data\") pod \"aodh-0\" (UID: \"ca316e06-1d0d-4b25-88ee-ca4c74f0f48c\") " pod="openstack/aodh-0" Oct 09 15:12:23 crc kubenswrapper[4762]: I1009 15:12:23.271228 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2s2sf\" (UniqueName: \"kubernetes.io/projected/ca316e06-1d0d-4b25-88ee-ca4c74f0f48c-kube-api-access-2s2sf\") pod \"aodh-0\" (UID: \"ca316e06-1d0d-4b25-88ee-ca4c74f0f48c\") " pod="openstack/aodh-0" Oct 09 15:12:23 crc kubenswrapper[4762]: I1009 15:12:23.276884 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca316e06-1d0d-4b25-88ee-ca4c74f0f48c-combined-ca-bundle\") pod \"aodh-0\" (UID: \"ca316e06-1d0d-4b25-88ee-ca4c74f0f48c\") " pod="openstack/aodh-0" Oct 09 15:12:23 crc kubenswrapper[4762]: I1009 15:12:23.286727 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ca316e06-1d0d-4b25-88ee-ca4c74f0f48c-scripts\") pod \"aodh-0\" (UID: \"ca316e06-1d0d-4b25-88ee-ca4c74f0f48c\") " pod="openstack/aodh-0" Oct 09 15:12:23 crc kubenswrapper[4762]: I1009 15:12:23.287537 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ca316e06-1d0d-4b25-88ee-ca4c74f0f48c-config-data\") pod \"aodh-0\" (UID: \"ca316e06-1d0d-4b25-88ee-ca4c74f0f48c\") " pod="openstack/aodh-0" Oct 09 15:12:23 crc kubenswrapper[4762]: I1009 15:12:23.289750 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2s2sf\" (UniqueName: \"kubernetes.io/projected/ca316e06-1d0d-4b25-88ee-ca4c74f0f48c-kube-api-access-2s2sf\") pod \"aodh-0\" (UID: \"ca316e06-1d0d-4b25-88ee-ca4c74f0f48c\") " pod="openstack/aodh-0" Oct 09 15:12:23 crc kubenswrapper[4762]: I1009 15:12:23.341415 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-0" Oct 09 15:12:23 crc kubenswrapper[4762]: I1009 15:12:23.903650 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-0"] Oct 09 15:12:24 crc kubenswrapper[4762]: I1009 15:12:24.602845 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"ca316e06-1d0d-4b25-88ee-ca4c74f0f48c","Type":"ContainerStarted","Data":"62ad7d5a4beed09db3c18d96e3e1ede719604fd284c8602eb8957ff41f882ffe"} Oct 09 15:12:25 crc kubenswrapper[4762]: I1009 15:12:25.540158 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 09 15:12:25 crc kubenswrapper[4762]: I1009 15:12:25.540805 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="91dcf017-2016-47ff-8b2f-2af30588ced3" containerName="ceilometer-central-agent" containerID="cri-o://1896754f9eeb90c23c6be063023898d3d8bb984e7ce98d45e6b4e1776487a80a" gracePeriod=30 Oct 09 15:12:25 crc kubenswrapper[4762]: I1009 15:12:25.541010 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="91dcf017-2016-47ff-8b2f-2af30588ced3" containerName="proxy-httpd" containerID="cri-o://cff43f46cbd8c31b63ba9ece9575e75280db90b64417b966ddbed0cf8acd1875" gracePeriod=30 Oct 09 15:12:25 crc kubenswrapper[4762]: I1009 15:12:25.541144 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="91dcf017-2016-47ff-8b2f-2af30588ced3" containerName="sg-core" containerID="cri-o://dbb2ecc3a8be64446db23d2728ea1189fce6d2b05cf13417797547a674c728f0" gracePeriod=30 Oct 09 15:12:25 crc kubenswrapper[4762]: I1009 15:12:25.541381 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="91dcf017-2016-47ff-8b2f-2af30588ced3" containerName="ceilometer-notification-agent" containerID="cri-o://6a954bfbdc82d4b8a2a4e97bacb61aa65f975bba29bd716f7655f290fa7cc4c8" gracePeriod=30 Oct 09 15:12:25 crc kubenswrapper[4762]: I1009 15:12:25.612690 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"ca316e06-1d0d-4b25-88ee-ca4c74f0f48c","Type":"ContainerStarted","Data":"d386923bf03e39f8e9046627505910728d43d2a01af30fbb0c4ebb5b8a87c008"} Oct 09 15:12:26 crc kubenswrapper[4762]: I1009 15:12:26.622290 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"ca316e06-1d0d-4b25-88ee-ca4c74f0f48c","Type":"ContainerStarted","Data":"e46306bba9f8bfd0e523eec1f085deee13aebd66ef009cb7f426943ad0332089"} Oct 09 15:12:26 crc kubenswrapper[4762]: I1009 15:12:26.624760 4762 generic.go:334] "Generic (PLEG): container finished" podID="91dcf017-2016-47ff-8b2f-2af30588ced3" containerID="cff43f46cbd8c31b63ba9ece9575e75280db90b64417b966ddbed0cf8acd1875" exitCode=0 Oct 09 15:12:26 crc kubenswrapper[4762]: I1009 15:12:26.624793 4762 generic.go:334] "Generic (PLEG): container finished" podID="91dcf017-2016-47ff-8b2f-2af30588ced3" containerID="dbb2ecc3a8be64446db23d2728ea1189fce6d2b05cf13417797547a674c728f0" exitCode=2 Oct 09 15:12:26 crc kubenswrapper[4762]: I1009 15:12:26.624800 4762 generic.go:334] "Generic (PLEG): container finished" podID="91dcf017-2016-47ff-8b2f-2af30588ced3" containerID="1896754f9eeb90c23c6be063023898d3d8bb984e7ce98d45e6b4e1776487a80a" exitCode=0 Oct 09 15:12:26 crc kubenswrapper[4762]: I1009 15:12:26.624819 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"91dcf017-2016-47ff-8b2f-2af30588ced3","Type":"ContainerDied","Data":"cff43f46cbd8c31b63ba9ece9575e75280db90b64417b966ddbed0cf8acd1875"} Oct 09 15:12:26 crc kubenswrapper[4762]: I1009 15:12:26.624843 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"91dcf017-2016-47ff-8b2f-2af30588ced3","Type":"ContainerDied","Data":"dbb2ecc3a8be64446db23d2728ea1189fce6d2b05cf13417797547a674c728f0"} Oct 09 15:12:26 crc kubenswrapper[4762]: I1009 15:12:26.624853 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"91dcf017-2016-47ff-8b2f-2af30588ced3","Type":"ContainerDied","Data":"1896754f9eeb90c23c6be063023898d3d8bb984e7ce98d45e6b4e1776487a80a"} Oct 09 15:12:28 crc kubenswrapper[4762]: I1009 15:12:28.434740 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 09 15:12:28 crc kubenswrapper[4762]: I1009 15:12:28.596254 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/91dcf017-2016-47ff-8b2f-2af30588ced3-log-httpd\") pod \"91dcf017-2016-47ff-8b2f-2af30588ced3\" (UID: \"91dcf017-2016-47ff-8b2f-2af30588ced3\") " Oct 09 15:12:28 crc kubenswrapper[4762]: I1009 15:12:28.596308 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91dcf017-2016-47ff-8b2f-2af30588ced3-combined-ca-bundle\") pod \"91dcf017-2016-47ff-8b2f-2af30588ced3\" (UID: \"91dcf017-2016-47ff-8b2f-2af30588ced3\") " Oct 09 15:12:28 crc kubenswrapper[4762]: I1009 15:12:28.596348 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/91dcf017-2016-47ff-8b2f-2af30588ced3-scripts\") pod \"91dcf017-2016-47ff-8b2f-2af30588ced3\" (UID: \"91dcf017-2016-47ff-8b2f-2af30588ced3\") " Oct 09 15:12:28 crc kubenswrapper[4762]: I1009 15:12:28.596417 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tqp2p\" (UniqueName: \"kubernetes.io/projected/91dcf017-2016-47ff-8b2f-2af30588ced3-kube-api-access-tqp2p\") pod \"91dcf017-2016-47ff-8b2f-2af30588ced3\" (UID: \"91dcf017-2016-47ff-8b2f-2af30588ced3\") " Oct 09 15:12:28 crc kubenswrapper[4762]: I1009 15:12:28.596496 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/91dcf017-2016-47ff-8b2f-2af30588ced3-sg-core-conf-yaml\") pod \"91dcf017-2016-47ff-8b2f-2af30588ced3\" (UID: \"91dcf017-2016-47ff-8b2f-2af30588ced3\") " Oct 09 15:12:28 crc kubenswrapper[4762]: I1009 15:12:28.596674 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/91dcf017-2016-47ff-8b2f-2af30588ced3-config-data\") pod \"91dcf017-2016-47ff-8b2f-2af30588ced3\" (UID: \"91dcf017-2016-47ff-8b2f-2af30588ced3\") " Oct 09 15:12:28 crc kubenswrapper[4762]: I1009 15:12:28.596697 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/91dcf017-2016-47ff-8b2f-2af30588ced3-run-httpd\") pod \"91dcf017-2016-47ff-8b2f-2af30588ced3\" (UID: \"91dcf017-2016-47ff-8b2f-2af30588ced3\") " Oct 09 15:12:28 crc kubenswrapper[4762]: I1009 15:12:28.596961 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/empty-dir/91dcf017-2016-47ff-8b2f-2af30588ced3-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "91dcf017-2016-47ff-8b2f-2af30588ced3" (UID: "91dcf017-2016-47ff-8b2f-2af30588ced3"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 15:12:28 crc kubenswrapper[4762]: I1009 15:12:28.597387 4762 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/91dcf017-2016-47ff-8b2f-2af30588ced3-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 09 15:12:28 crc kubenswrapper[4762]: I1009 15:12:28.597774 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/91dcf017-2016-47ff-8b2f-2af30588ced3-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "91dcf017-2016-47ff-8b2f-2af30588ced3" (UID: "91dcf017-2016-47ff-8b2f-2af30588ced3"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 15:12:28 crc kubenswrapper[4762]: I1009 15:12:28.601656 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/91dcf017-2016-47ff-8b2f-2af30588ced3-kube-api-access-tqp2p" (OuterVolumeSpecName: "kube-api-access-tqp2p") pod "91dcf017-2016-47ff-8b2f-2af30588ced3" (UID: "91dcf017-2016-47ff-8b2f-2af30588ced3"). InnerVolumeSpecName "kube-api-access-tqp2p". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 15:12:28 crc kubenswrapper[4762]: I1009 15:12:28.602434 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/91dcf017-2016-47ff-8b2f-2af30588ced3-scripts" (OuterVolumeSpecName: "scripts") pod "91dcf017-2016-47ff-8b2f-2af30588ced3" (UID: "91dcf017-2016-47ff-8b2f-2af30588ced3"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:12:28 crc kubenswrapper[4762]: I1009 15:12:28.628203 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/91dcf017-2016-47ff-8b2f-2af30588ced3-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "91dcf017-2016-47ff-8b2f-2af30588ced3" (UID: "91dcf017-2016-47ff-8b2f-2af30588ced3"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:12:28 crc kubenswrapper[4762]: I1009 15:12:28.658019 4762 generic.go:334] "Generic (PLEG): container finished" podID="91dcf017-2016-47ff-8b2f-2af30588ced3" containerID="6a954bfbdc82d4b8a2a4e97bacb61aa65f975bba29bd716f7655f290fa7cc4c8" exitCode=0 Oct 09 15:12:28 crc kubenswrapper[4762]: I1009 15:12:28.658079 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"91dcf017-2016-47ff-8b2f-2af30588ced3","Type":"ContainerDied","Data":"6a954bfbdc82d4b8a2a4e97bacb61aa65f975bba29bd716f7655f290fa7cc4c8"} Oct 09 15:12:28 crc kubenswrapper[4762]: I1009 15:12:28.658105 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"91dcf017-2016-47ff-8b2f-2af30588ced3","Type":"ContainerDied","Data":"3b6dcd55dde85d9a1a8b6250b52c910ab9a06ee861ddf4201468afebd8627561"} Oct 09 15:12:28 crc kubenswrapper[4762]: I1009 15:12:28.658122 4762 scope.go:117] "RemoveContainer" containerID="cff43f46cbd8c31b63ba9ece9575e75280db90b64417b966ddbed0cf8acd1875" Oct 09 15:12:28 crc kubenswrapper[4762]: I1009 15:12:28.658232 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 09 15:12:28 crc kubenswrapper[4762]: I1009 15:12:28.661931 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"ca316e06-1d0d-4b25-88ee-ca4c74f0f48c","Type":"ContainerStarted","Data":"b7443f21ed8adf3560e6223fb7679e1987547cc3db084e8adbb808612cd2773d"} Oct 09 15:12:28 crc kubenswrapper[4762]: I1009 15:12:28.685744 4762 scope.go:117] "RemoveContainer" containerID="dbb2ecc3a8be64446db23d2728ea1189fce6d2b05cf13417797547a674c728f0" Oct 09 15:12:28 crc kubenswrapper[4762]: I1009 15:12:28.690431 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/91dcf017-2016-47ff-8b2f-2af30588ced3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "91dcf017-2016-47ff-8b2f-2af30588ced3" (UID: "91dcf017-2016-47ff-8b2f-2af30588ced3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:12:28 crc kubenswrapper[4762]: I1009 15:12:28.699911 4762 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/91dcf017-2016-47ff-8b2f-2af30588ced3-scripts\") on node \"crc\" DevicePath \"\"" Oct 09 15:12:28 crc kubenswrapper[4762]: I1009 15:12:28.699944 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tqp2p\" (UniqueName: \"kubernetes.io/projected/91dcf017-2016-47ff-8b2f-2af30588ced3-kube-api-access-tqp2p\") on node \"crc\" DevicePath \"\"" Oct 09 15:12:28 crc kubenswrapper[4762]: I1009 15:12:28.699957 4762 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/91dcf017-2016-47ff-8b2f-2af30588ced3-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 09 15:12:28 crc kubenswrapper[4762]: I1009 15:12:28.699967 4762 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/91dcf017-2016-47ff-8b2f-2af30588ced3-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 09 15:12:28 crc kubenswrapper[4762]: I1009 15:12:28.699977 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91dcf017-2016-47ff-8b2f-2af30588ced3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 15:12:28 crc kubenswrapper[4762]: I1009 15:12:28.710430 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/91dcf017-2016-47ff-8b2f-2af30588ced3-config-data" (OuterVolumeSpecName: "config-data") pod "91dcf017-2016-47ff-8b2f-2af30588ced3" (UID: "91dcf017-2016-47ff-8b2f-2af30588ced3"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:12:28 crc kubenswrapper[4762]: I1009 15:12:28.801662 4762 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/91dcf017-2016-47ff-8b2f-2af30588ced3-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 15:12:28 crc kubenswrapper[4762]: I1009 15:12:28.811000 4762 scope.go:117] "RemoveContainer" containerID="6a954bfbdc82d4b8a2a4e97bacb61aa65f975bba29bd716f7655f290fa7cc4c8" Oct 09 15:12:28 crc kubenswrapper[4762]: I1009 15:12:28.837622 4762 scope.go:117] "RemoveContainer" containerID="1896754f9eeb90c23c6be063023898d3d8bb984e7ce98d45e6b4e1776487a80a" Oct 09 15:12:28 crc kubenswrapper[4762]: I1009 15:12:28.862661 4762 scope.go:117] "RemoveContainer" containerID="cff43f46cbd8c31b63ba9ece9575e75280db90b64417b966ddbed0cf8acd1875" Oct 09 15:12:28 crc kubenswrapper[4762]: E1009 15:12:28.863044 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cff43f46cbd8c31b63ba9ece9575e75280db90b64417b966ddbed0cf8acd1875\": container with ID starting with cff43f46cbd8c31b63ba9ece9575e75280db90b64417b966ddbed0cf8acd1875 not found: ID does not exist" containerID="cff43f46cbd8c31b63ba9ece9575e75280db90b64417b966ddbed0cf8acd1875" Oct 09 15:12:28 crc kubenswrapper[4762]: I1009 15:12:28.863070 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cff43f46cbd8c31b63ba9ece9575e75280db90b64417b966ddbed0cf8acd1875"} err="failed to get container status \"cff43f46cbd8c31b63ba9ece9575e75280db90b64417b966ddbed0cf8acd1875\": rpc error: code = NotFound desc = could not find container \"cff43f46cbd8c31b63ba9ece9575e75280db90b64417b966ddbed0cf8acd1875\": container with ID starting with cff43f46cbd8c31b63ba9ece9575e75280db90b64417b966ddbed0cf8acd1875 not found: ID does not exist" Oct 09 15:12:28 crc kubenswrapper[4762]: I1009 15:12:28.863089 4762 scope.go:117] "RemoveContainer" containerID="dbb2ecc3a8be64446db23d2728ea1189fce6d2b05cf13417797547a674c728f0" Oct 09 15:12:28 crc kubenswrapper[4762]: E1009 15:12:28.863297 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dbb2ecc3a8be64446db23d2728ea1189fce6d2b05cf13417797547a674c728f0\": container with ID starting with dbb2ecc3a8be64446db23d2728ea1189fce6d2b05cf13417797547a674c728f0 not found: ID does not exist" containerID="dbb2ecc3a8be64446db23d2728ea1189fce6d2b05cf13417797547a674c728f0" Oct 09 15:12:28 crc kubenswrapper[4762]: I1009 15:12:28.863318 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dbb2ecc3a8be64446db23d2728ea1189fce6d2b05cf13417797547a674c728f0"} err="failed to get container status \"dbb2ecc3a8be64446db23d2728ea1189fce6d2b05cf13417797547a674c728f0\": rpc error: code = NotFound desc = could not find container \"dbb2ecc3a8be64446db23d2728ea1189fce6d2b05cf13417797547a674c728f0\": container with ID starting with dbb2ecc3a8be64446db23d2728ea1189fce6d2b05cf13417797547a674c728f0 not found: ID does not exist" Oct 09 15:12:28 crc kubenswrapper[4762]: I1009 15:12:28.863330 4762 scope.go:117] "RemoveContainer" containerID="6a954bfbdc82d4b8a2a4e97bacb61aa65f975bba29bd716f7655f290fa7cc4c8" Oct 09 15:12:28 crc kubenswrapper[4762]: E1009 15:12:28.863509 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"6a954bfbdc82d4b8a2a4e97bacb61aa65f975bba29bd716f7655f290fa7cc4c8\": container with ID starting with 6a954bfbdc82d4b8a2a4e97bacb61aa65f975bba29bd716f7655f290fa7cc4c8 not found: ID does not exist" containerID="6a954bfbdc82d4b8a2a4e97bacb61aa65f975bba29bd716f7655f290fa7cc4c8" Oct 09 15:12:28 crc kubenswrapper[4762]: I1009 15:12:28.863553 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6a954bfbdc82d4b8a2a4e97bacb61aa65f975bba29bd716f7655f290fa7cc4c8"} err="failed to get container status \"6a954bfbdc82d4b8a2a4e97bacb61aa65f975bba29bd716f7655f290fa7cc4c8\": rpc error: code = NotFound desc = could not find container \"6a954bfbdc82d4b8a2a4e97bacb61aa65f975bba29bd716f7655f290fa7cc4c8\": container with ID starting with 6a954bfbdc82d4b8a2a4e97bacb61aa65f975bba29bd716f7655f290fa7cc4c8 not found: ID does not exist" Oct 09 15:12:28 crc kubenswrapper[4762]: I1009 15:12:28.863574 4762 scope.go:117] "RemoveContainer" containerID="1896754f9eeb90c23c6be063023898d3d8bb984e7ce98d45e6b4e1776487a80a" Oct 09 15:12:28 crc kubenswrapper[4762]: E1009 15:12:28.864074 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1896754f9eeb90c23c6be063023898d3d8bb984e7ce98d45e6b4e1776487a80a\": container with ID starting with 1896754f9eeb90c23c6be063023898d3d8bb984e7ce98d45e6b4e1776487a80a not found: ID does not exist" containerID="1896754f9eeb90c23c6be063023898d3d8bb984e7ce98d45e6b4e1776487a80a" Oct 09 15:12:28 crc kubenswrapper[4762]: I1009 15:12:28.864101 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1896754f9eeb90c23c6be063023898d3d8bb984e7ce98d45e6b4e1776487a80a"} err="failed to get container status \"1896754f9eeb90c23c6be063023898d3d8bb984e7ce98d45e6b4e1776487a80a\": rpc error: code = NotFound desc = could not find container \"1896754f9eeb90c23c6be063023898d3d8bb984e7ce98d45e6b4e1776487a80a\": container with ID starting with 1896754f9eeb90c23c6be063023898d3d8bb984e7ce98d45e6b4e1776487a80a not found: ID does not exist" Oct 09 15:12:29 crc kubenswrapper[4762]: I1009 15:12:29.007929 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 09 15:12:29 crc kubenswrapper[4762]: I1009 15:12:29.016927 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 09 15:12:29 crc kubenswrapper[4762]: I1009 15:12:29.027544 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 09 15:12:29 crc kubenswrapper[4762]: E1009 15:12:29.028164 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="91dcf017-2016-47ff-8b2f-2af30588ced3" containerName="sg-core" Oct 09 15:12:29 crc kubenswrapper[4762]: I1009 15:12:29.028188 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="91dcf017-2016-47ff-8b2f-2af30588ced3" containerName="sg-core" Oct 09 15:12:29 crc kubenswrapper[4762]: E1009 15:12:29.028213 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="91dcf017-2016-47ff-8b2f-2af30588ced3" containerName="proxy-httpd" Oct 09 15:12:29 crc kubenswrapper[4762]: I1009 15:12:29.028222 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="91dcf017-2016-47ff-8b2f-2af30588ced3" containerName="proxy-httpd" Oct 09 15:12:29 crc kubenswrapper[4762]: E1009 15:12:29.028260 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="91dcf017-2016-47ff-8b2f-2af30588ced3" containerName="ceilometer-central-agent" Oct 09 15:12:29 crc 
kubenswrapper[4762]: I1009 15:12:29.028268 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="91dcf017-2016-47ff-8b2f-2af30588ced3" containerName="ceilometer-central-agent" Oct 09 15:12:29 crc kubenswrapper[4762]: E1009 15:12:29.028286 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="91dcf017-2016-47ff-8b2f-2af30588ced3" containerName="ceilometer-notification-agent" Oct 09 15:12:29 crc kubenswrapper[4762]: I1009 15:12:29.028296 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="91dcf017-2016-47ff-8b2f-2af30588ced3" containerName="ceilometer-notification-agent" Oct 09 15:12:29 crc kubenswrapper[4762]: I1009 15:12:29.028568 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="91dcf017-2016-47ff-8b2f-2af30588ced3" containerName="sg-core" Oct 09 15:12:29 crc kubenswrapper[4762]: I1009 15:12:29.028600 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="91dcf017-2016-47ff-8b2f-2af30588ced3" containerName="ceilometer-notification-agent" Oct 09 15:12:29 crc kubenswrapper[4762]: I1009 15:12:29.028616 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="91dcf017-2016-47ff-8b2f-2af30588ced3" containerName="ceilometer-central-agent" Oct 09 15:12:29 crc kubenswrapper[4762]: I1009 15:12:29.028651 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="91dcf017-2016-47ff-8b2f-2af30588ced3" containerName="proxy-httpd" Oct 09 15:12:29 crc kubenswrapper[4762]: I1009 15:12:29.031228 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 09 15:12:29 crc kubenswrapper[4762]: I1009 15:12:29.037246 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 09 15:12:29 crc kubenswrapper[4762]: I1009 15:12:29.037442 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 09 15:12:29 crc kubenswrapper[4762]: I1009 15:12:29.043889 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 09 15:12:29 crc kubenswrapper[4762]: I1009 15:12:29.208857 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fxqp4\" (UniqueName: \"kubernetes.io/projected/d2aace88-df6f-4158-8062-66bfb82a54e5-kube-api-access-fxqp4\") pod \"ceilometer-0\" (UID: \"d2aace88-df6f-4158-8062-66bfb82a54e5\") " pod="openstack/ceilometer-0" Oct 09 15:12:29 crc kubenswrapper[4762]: I1009 15:12:29.209138 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2aace88-df6f-4158-8062-66bfb82a54e5-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"d2aace88-df6f-4158-8062-66bfb82a54e5\") " pod="openstack/ceilometer-0" Oct 09 15:12:29 crc kubenswrapper[4762]: I1009 15:12:29.209470 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d2aace88-df6f-4158-8062-66bfb82a54e5-log-httpd\") pod \"ceilometer-0\" (UID: \"d2aace88-df6f-4158-8062-66bfb82a54e5\") " pod="openstack/ceilometer-0" Oct 09 15:12:29 crc kubenswrapper[4762]: I1009 15:12:29.209916 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d2aace88-df6f-4158-8062-66bfb82a54e5-config-data\") pod \"ceilometer-0\" (UID: \"d2aace88-df6f-4158-8062-66bfb82a54e5\") " 
pod="openstack/ceilometer-0" Oct 09 15:12:29 crc kubenswrapper[4762]: I1009 15:12:29.209955 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d2aace88-df6f-4158-8062-66bfb82a54e5-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"d2aace88-df6f-4158-8062-66bfb82a54e5\") " pod="openstack/ceilometer-0" Oct 09 15:12:29 crc kubenswrapper[4762]: I1009 15:12:29.210041 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d2aace88-df6f-4158-8062-66bfb82a54e5-run-httpd\") pod \"ceilometer-0\" (UID: \"d2aace88-df6f-4158-8062-66bfb82a54e5\") " pod="openstack/ceilometer-0" Oct 09 15:12:29 crc kubenswrapper[4762]: I1009 15:12:29.210123 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d2aace88-df6f-4158-8062-66bfb82a54e5-scripts\") pod \"ceilometer-0\" (UID: \"d2aace88-df6f-4158-8062-66bfb82a54e5\") " pod="openstack/ceilometer-0" Oct 09 15:12:29 crc kubenswrapper[4762]: I1009 15:12:29.312490 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d2aace88-df6f-4158-8062-66bfb82a54e5-config-data\") pod \"ceilometer-0\" (UID: \"d2aace88-df6f-4158-8062-66bfb82a54e5\") " pod="openstack/ceilometer-0" Oct 09 15:12:29 crc kubenswrapper[4762]: I1009 15:12:29.312531 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d2aace88-df6f-4158-8062-66bfb82a54e5-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"d2aace88-df6f-4158-8062-66bfb82a54e5\") " pod="openstack/ceilometer-0" Oct 09 15:12:29 crc kubenswrapper[4762]: I1009 15:12:29.312571 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d2aace88-df6f-4158-8062-66bfb82a54e5-run-httpd\") pod \"ceilometer-0\" (UID: \"d2aace88-df6f-4158-8062-66bfb82a54e5\") " pod="openstack/ceilometer-0" Oct 09 15:12:29 crc kubenswrapper[4762]: I1009 15:12:29.312609 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d2aace88-df6f-4158-8062-66bfb82a54e5-scripts\") pod \"ceilometer-0\" (UID: \"d2aace88-df6f-4158-8062-66bfb82a54e5\") " pod="openstack/ceilometer-0" Oct 09 15:12:29 crc kubenswrapper[4762]: I1009 15:12:29.312683 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fxqp4\" (UniqueName: \"kubernetes.io/projected/d2aace88-df6f-4158-8062-66bfb82a54e5-kube-api-access-fxqp4\") pod \"ceilometer-0\" (UID: \"d2aace88-df6f-4158-8062-66bfb82a54e5\") " pod="openstack/ceilometer-0" Oct 09 15:12:29 crc kubenswrapper[4762]: I1009 15:12:29.312712 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2aace88-df6f-4158-8062-66bfb82a54e5-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"d2aace88-df6f-4158-8062-66bfb82a54e5\") " pod="openstack/ceilometer-0" Oct 09 15:12:29 crc kubenswrapper[4762]: I1009 15:12:29.312756 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d2aace88-df6f-4158-8062-66bfb82a54e5-log-httpd\") pod \"ceilometer-0\" (UID: 
\"d2aace88-df6f-4158-8062-66bfb82a54e5\") " pod="openstack/ceilometer-0" Oct 09 15:12:29 crc kubenswrapper[4762]: I1009 15:12:29.315369 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d2aace88-df6f-4158-8062-66bfb82a54e5-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"d2aace88-df6f-4158-8062-66bfb82a54e5\") " pod="openstack/ceilometer-0" Oct 09 15:12:29 crc kubenswrapper[4762]: I1009 15:12:29.316228 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d2aace88-df6f-4158-8062-66bfb82a54e5-config-data\") pod \"ceilometer-0\" (UID: \"d2aace88-df6f-4158-8062-66bfb82a54e5\") " pod="openstack/ceilometer-0" Oct 09 15:12:29 crc kubenswrapper[4762]: I1009 15:12:29.316927 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d2aace88-df6f-4158-8062-66bfb82a54e5-run-httpd\") pod \"ceilometer-0\" (UID: \"d2aace88-df6f-4158-8062-66bfb82a54e5\") " pod="openstack/ceilometer-0" Oct 09 15:12:29 crc kubenswrapper[4762]: I1009 15:12:29.316976 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d2aace88-df6f-4158-8062-66bfb82a54e5-log-httpd\") pod \"ceilometer-0\" (UID: \"d2aace88-df6f-4158-8062-66bfb82a54e5\") " pod="openstack/ceilometer-0" Oct 09 15:12:29 crc kubenswrapper[4762]: I1009 15:12:29.317527 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2aace88-df6f-4158-8062-66bfb82a54e5-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"d2aace88-df6f-4158-8062-66bfb82a54e5\") " pod="openstack/ceilometer-0" Oct 09 15:12:29 crc kubenswrapper[4762]: I1009 15:12:29.320095 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d2aace88-df6f-4158-8062-66bfb82a54e5-scripts\") pod \"ceilometer-0\" (UID: \"d2aace88-df6f-4158-8062-66bfb82a54e5\") " pod="openstack/ceilometer-0" Oct 09 15:12:29 crc kubenswrapper[4762]: I1009 15:12:29.333393 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fxqp4\" (UniqueName: \"kubernetes.io/projected/d2aace88-df6f-4158-8062-66bfb82a54e5-kube-api-access-fxqp4\") pod \"ceilometer-0\" (UID: \"d2aace88-df6f-4158-8062-66bfb82a54e5\") " pod="openstack/ceilometer-0" Oct 09 15:12:29 crc kubenswrapper[4762]: I1009 15:12:29.371686 4762 util.go:30] "No sandbox for pod can be found. 
Oct 09 15:12:29 crc kubenswrapper[4762]: I1009 15:12:29.680168 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"ca316e06-1d0d-4b25-88ee-ca4c74f0f48c","Type":"ContainerStarted","Data":"4c86d5a74e409debdec5f5c7d3d7b447ab43296e534885ebc286a60eb98e2416"}
Oct 09 15:12:29 crc kubenswrapper[4762]: I1009 15:12:29.723427 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/aodh-0" podStartSLOduration=2.357903282 podStartE2EDuration="7.723408098s" podCreationTimestamp="2025-10-09 15:12:22 +0000 UTC" firstStartedPulling="2025-10-09 15:12:23.917910877 +0000 UTC m=+6419.691701916" lastFinishedPulling="2025-10-09 15:12:29.283415693 +0000 UTC m=+6425.057206732" observedRunningTime="2025-10-09 15:12:29.700145863 +0000 UTC m=+6425.473936912" watchObservedRunningTime="2025-10-09 15:12:29.723408098 +0000 UTC m=+6425.497199137"
Oct 09 15:12:29 crc kubenswrapper[4762]: I1009 15:12:29.905308 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Oct 09 15:12:29 crc kubenswrapper[4762]: W1009 15:12:29.910056 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd2aace88_df6f_4158_8062_66bfb82a54e5.slice/crio-59e4a2f3a92378e93ba951688950441de49b536b2883cc617147d36dd55ffdd0 WatchSource:0}: Error finding container 59e4a2f3a92378e93ba951688950441de49b536b2883cc617147d36dd55ffdd0: Status 404 returned error can't find the container with id 59e4a2f3a92378e93ba951688950441de49b536b2883cc617147d36dd55ffdd0
Oct 09 15:12:30 crc kubenswrapper[4762]: I1009 15:12:30.691572 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d2aace88-df6f-4158-8062-66bfb82a54e5","Type":"ContainerStarted","Data":"c7a1e98749e8db06144d443eb938dc6016cd01492ad3d101434c0b1d63163cdf"}
Oct 09 15:12:30 crc kubenswrapper[4762]: I1009 15:12:30.691914 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d2aace88-df6f-4158-8062-66bfb82a54e5","Type":"ContainerStarted","Data":"59e4a2f3a92378e93ba951688950441de49b536b2883cc617147d36dd55ffdd0"}
Oct 09 15:12:30 crc kubenswrapper[4762]: I1009 15:12:30.979428 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="91dcf017-2016-47ff-8b2f-2af30588ced3" path="/var/lib/kubelet/pods/91dcf017-2016-47ff-8b2f-2af30588ced3/volumes"
Oct 09 15:12:31 crc kubenswrapper[4762]: I1009 15:12:31.700822 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d2aace88-df6f-4158-8062-66bfb82a54e5","Type":"ContainerStarted","Data":"b2f46e696cca0167f76651362afb3bab83b00ef9177a65e3fca3e9bb08362ce9"}
Oct 09 15:12:32 crc kubenswrapper[4762]: I1009 15:12:32.712229 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d2aace88-df6f-4158-8062-66bfb82a54e5","Type":"ContainerStarted","Data":"ece7e3e3d61c8e388cce7606cff35602b1dd34013208ef72a103652274959a2d"}
Oct 09 15:12:34 crc kubenswrapper[4762]: I1009 15:12:34.744724 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d2aace88-df6f-4158-8062-66bfb82a54e5","Type":"ContainerStarted","Data":"83bb14ab2f1bca988fc6f192db2c2588506bfc5fc9927d20b9c3a5575e7b1dd2"}
Oct 09 15:12:34 crc kubenswrapper[4762]: I1009 15:12:34.745323 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0"
Oct 09 15:12:34 crc kubenswrapper[4762]: I1009 15:12:34.773341 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.932281048 podStartE2EDuration="6.773323798s" podCreationTimestamp="2025-10-09 15:12:28 +0000 UTC" firstStartedPulling="2025-10-09 15:12:29.912304815 +0000 UTC m=+6425.686095854" lastFinishedPulling="2025-10-09 15:12:33.753347555 +0000 UTC m=+6429.527138604" observedRunningTime="2025-10-09 15:12:34.765107174 +0000 UTC m=+6430.538898203" watchObservedRunningTime="2025-10-09 15:12:34.773323798 +0000 UTC m=+6430.547114837"
Oct 09 15:12:35 crc kubenswrapper[4762]: I1009 15:12:35.681441 4762 scope.go:117] "RemoveContainer" containerID="72b64cc39ceea069396f8934db28b6d909b2efe6402843ec664d576a17929d72"
Oct 09 15:12:35 crc kubenswrapper[4762]: I1009 15:12:35.720062 4762 scope.go:117] "RemoveContainer" containerID="9ad1ef9522e7c1f08eab2e273435f3dc49772c94ce91212d9dc6c8d1c38e120d"
Oct 09 15:12:35 crc kubenswrapper[4762]: I1009 15:12:35.761725 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-db-create-dsvq5"]
Oct 09 15:12:35 crc kubenswrapper[4762]: I1009 15:12:35.767615 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-create-dsvq5"
Oct 09 15:12:35 crc kubenswrapper[4762]: I1009 15:12:35.777871 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-db-create-dsvq5"]
Oct 09 15:12:35 crc kubenswrapper[4762]: I1009 15:12:35.798944 4762 scope.go:117] "RemoveContainer" containerID="ddbf059ec0c5cd57e49045e1c1fec082fba70e626274a6ad41e1216adeecc0e5"
Oct 09 15:12:35 crc kubenswrapper[4762]: I1009 15:12:35.845832 4762 scope.go:117] "RemoveContainer" containerID="a02f4a2b3581afc3705cd3fbff564adb1b46dd4e5b02f91768f8caf8b4b75a5f"
Oct 09 15:12:35 crc kubenswrapper[4762]: I1009 15:12:35.854919 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-npvfc\" (UniqueName: \"kubernetes.io/projected/e915789c-9472-42e8-8f48-60fa0b7b2759-kube-api-access-npvfc\") pod \"manila-db-create-dsvq5\" (UID: \"e915789c-9472-42e8-8f48-60fa0b7b2759\") " pod="openstack/manila-db-create-dsvq5"
Oct 09 15:12:35 crc kubenswrapper[4762]: I1009 15:12:35.896769 4762 scope.go:117] "RemoveContainer" containerID="a6687d4271a97d4dc5c326eac5597fea018356c7be9f90cc53ce4ecf421d42ae"
Oct 09 15:12:35 crc kubenswrapper[4762]: I1009 15:12:35.956733 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-npvfc\" (UniqueName: \"kubernetes.io/projected/e915789c-9472-42e8-8f48-60fa0b7b2759-kube-api-access-npvfc\") pod \"manila-db-create-dsvq5\" (UID: \"e915789c-9472-42e8-8f48-60fa0b7b2759\") " pod="openstack/manila-db-create-dsvq5"
Oct 09 15:12:35 crc kubenswrapper[4762]: I1009 15:12:35.976213 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-npvfc\" (UniqueName: \"kubernetes.io/projected/e915789c-9472-42e8-8f48-60fa0b7b2759-kube-api-access-npvfc\") pod \"manila-db-create-dsvq5\" (UID: \"e915789c-9472-42e8-8f48-60fa0b7b2759\") " pod="openstack/manila-db-create-dsvq5"
Oct 09 15:12:36 crc kubenswrapper[4762]: I1009 15:12:36.170926 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-create-dsvq5"
Oct 09 15:12:37 crc kubenswrapper[4762]: I1009 15:12:36.999958 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-db-create-dsvq5"]
Oct 09 15:12:37 crc kubenswrapper[4762]: W1009 15:12:37.008410 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode915789c_9472_42e8_8f48_60fa0b7b2759.slice/crio-f74a8c727b5034a15c69ecf1fcf976dcf00a310106df7467d8af341715926bfd WatchSource:0}: Error finding container f74a8c727b5034a15c69ecf1fcf976dcf00a310106df7467d8af341715926bfd: Status 404 returned error can't find the container with id f74a8c727b5034a15c69ecf1fcf976dcf00a310106df7467d8af341715926bfd
Oct 09 15:12:37 crc kubenswrapper[4762]: I1009 15:12:37.782039 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-create-dsvq5" event={"ID":"e915789c-9472-42e8-8f48-60fa0b7b2759","Type":"ContainerStarted","Data":"54eedc2be0b0c3c1af0fe45c2312e5fe5ca89719a0adefb4f226892354be3a07"}
Oct 09 15:12:37 crc kubenswrapper[4762]: I1009 15:12:37.782426 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-create-dsvq5" event={"ID":"e915789c-9472-42e8-8f48-60fa0b7b2759","Type":"ContainerStarted","Data":"f74a8c727b5034a15c69ecf1fcf976dcf00a310106df7467d8af341715926bfd"}
Oct 09 15:12:37 crc kubenswrapper[4762]: I1009 15:12:37.803564 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-db-create-dsvq5" podStartSLOduration=2.803541537 podStartE2EDuration="2.803541537s" podCreationTimestamp="2025-10-09 15:12:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 15:12:37.801768641 +0000 UTC m=+6433.575559700" watchObservedRunningTime="2025-10-09 15:12:37.803541537 +0000 UTC m=+6433.577332596"
Oct 09 15:12:38 crc kubenswrapper[4762]: I1009 15:12:38.809154 4762 generic.go:334] "Generic (PLEG): container finished" podID="e915789c-9472-42e8-8f48-60fa0b7b2759" containerID="54eedc2be0b0c3c1af0fe45c2312e5fe5ca89719a0adefb4f226892354be3a07" exitCode=0
Oct 09 15:12:38 crc kubenswrapper[4762]: I1009 15:12:38.809524 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-create-dsvq5" event={"ID":"e915789c-9472-42e8-8f48-60fa0b7b2759","Type":"ContainerDied","Data":"54eedc2be0b0c3c1af0fe45c2312e5fe5ca89719a0adefb4f226892354be3a07"}
Oct 09 15:12:40 crc kubenswrapper[4762]: I1009 15:12:40.246333 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-create-dsvq5"
Oct 09 15:12:40 crc kubenswrapper[4762]: I1009 15:12:40.351268 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-npvfc\" (UniqueName: \"kubernetes.io/projected/e915789c-9472-42e8-8f48-60fa0b7b2759-kube-api-access-npvfc\") pod \"e915789c-9472-42e8-8f48-60fa0b7b2759\" (UID: \"e915789c-9472-42e8-8f48-60fa0b7b2759\") "
Oct 09 15:12:40 crc kubenswrapper[4762]: I1009 15:12:40.356283 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e915789c-9472-42e8-8f48-60fa0b7b2759-kube-api-access-npvfc" (OuterVolumeSpecName: "kube-api-access-npvfc") pod "e915789c-9472-42e8-8f48-60fa0b7b2759" (UID: "e915789c-9472-42e8-8f48-60fa0b7b2759"). InnerVolumeSpecName "kube-api-access-npvfc". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 09 15:12:40 crc kubenswrapper[4762]: I1009 15:12:40.454015 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-npvfc\" (UniqueName: \"kubernetes.io/projected/e915789c-9472-42e8-8f48-60fa0b7b2759-kube-api-access-npvfc\") on node \"crc\" DevicePath \"\""
Oct 09 15:12:40 crc kubenswrapper[4762]: I1009 15:12:40.828907 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-create-dsvq5" event={"ID":"e915789c-9472-42e8-8f48-60fa0b7b2759","Type":"ContainerDied","Data":"f74a8c727b5034a15c69ecf1fcf976dcf00a310106df7467d8af341715926bfd"}
Oct 09 15:12:40 crc kubenswrapper[4762]: I1009 15:12:40.829211 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f74a8c727b5034a15c69ecf1fcf976dcf00a310106df7467d8af341715926bfd"
Oct 09 15:12:40 crc kubenswrapper[4762]: I1009 15:12:40.828940 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-create-dsvq5"
Oct 09 15:12:55 crc kubenswrapper[4762]: I1009 15:12:55.925485 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-cc93-account-create-7895h"]
Oct 09 15:12:55 crc kubenswrapper[4762]: E1009 15:12:55.927178 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e915789c-9472-42e8-8f48-60fa0b7b2759" containerName="mariadb-database-create"
Oct 09 15:12:55 crc kubenswrapper[4762]: I1009 15:12:55.927195 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="e915789c-9472-42e8-8f48-60fa0b7b2759" containerName="mariadb-database-create"
Oct 09 15:12:55 crc kubenswrapper[4762]: I1009 15:12:55.927472 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="e915789c-9472-42e8-8f48-60fa0b7b2759" containerName="mariadb-database-create"
Oct 09 15:12:55 crc kubenswrapper[4762]: I1009 15:12:55.928506 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-cc93-account-create-7895h"
Oct 09 15:12:55 crc kubenswrapper[4762]: I1009 15:12:55.933927 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-db-secret"
Oct 09 15:12:55 crc kubenswrapper[4762]: I1009 15:12:55.938106 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-cc93-account-create-7895h"]
Oct 09 15:12:55 crc kubenswrapper[4762]: I1009 15:12:55.994793 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4wx7z\" (UniqueName: \"kubernetes.io/projected/ec993b02-f2ba-4964-bdfa-45827565c0f2-kube-api-access-4wx7z\") pod \"manila-cc93-account-create-7895h\" (UID: \"ec993b02-f2ba-4964-bdfa-45827565c0f2\") " pod="openstack/manila-cc93-account-create-7895h"
Oct 09 15:12:56 crc kubenswrapper[4762]: I1009 15:12:56.097187 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4wx7z\" (UniqueName: \"kubernetes.io/projected/ec993b02-f2ba-4964-bdfa-45827565c0f2-kube-api-access-4wx7z\") pod \"manila-cc93-account-create-7895h\" (UID: \"ec993b02-f2ba-4964-bdfa-45827565c0f2\") " pod="openstack/manila-cc93-account-create-7895h"
Oct 09 15:12:56 crc kubenswrapper[4762]: I1009 15:12:56.117070 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4wx7z\" (UniqueName: \"kubernetes.io/projected/ec993b02-f2ba-4964-bdfa-45827565c0f2-kube-api-access-4wx7z\") pod \"manila-cc93-account-create-7895h\" (UID: \"ec993b02-f2ba-4964-bdfa-45827565c0f2\") " pod="openstack/manila-cc93-account-create-7895h"
Oct 09 15:12:56 crc kubenswrapper[4762]: I1009 15:12:56.260352 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-cc93-account-create-7895h"
Oct 09 15:12:56 crc kubenswrapper[4762]: I1009 15:12:56.704009 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-cc93-account-create-7895h"]
Oct 09 15:12:56 crc kubenswrapper[4762]: I1009 15:12:56.977949 4762 generic.go:334] "Generic (PLEG): container finished" podID="ec993b02-f2ba-4964-bdfa-45827565c0f2" containerID="145e1a42958431e053cc208dd55549c363a1133d2f6f0337c7d7bacf568b3afa" exitCode=0
Oct 09 15:12:56 crc kubenswrapper[4762]: I1009 15:12:56.979495 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-cc93-account-create-7895h" event={"ID":"ec993b02-f2ba-4964-bdfa-45827565c0f2","Type":"ContainerDied","Data":"145e1a42958431e053cc208dd55549c363a1133d2f6f0337c7d7bacf568b3afa"}
Oct 09 15:12:56 crc kubenswrapper[4762]: I1009 15:12:56.979536 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-cc93-account-create-7895h" event={"ID":"ec993b02-f2ba-4964-bdfa-45827565c0f2","Type":"ContainerStarted","Data":"539e386444f6218973e8b2c8cef54cd21377f799ac7741eec5ac46773695c715"}
Oct 09 15:12:58 crc kubenswrapper[4762]: I1009 15:12:58.398473 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-cc93-account-create-7895h"
Oct 09 15:12:58 crc kubenswrapper[4762]: I1009 15:12:58.444820 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4wx7z\" (UniqueName: \"kubernetes.io/projected/ec993b02-f2ba-4964-bdfa-45827565c0f2-kube-api-access-4wx7z\") pod \"ec993b02-f2ba-4964-bdfa-45827565c0f2\" (UID: \"ec993b02-f2ba-4964-bdfa-45827565c0f2\") "
Oct 09 15:12:58 crc kubenswrapper[4762]: I1009 15:12:58.449932 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ec993b02-f2ba-4964-bdfa-45827565c0f2-kube-api-access-4wx7z" (OuterVolumeSpecName: "kube-api-access-4wx7z") pod "ec993b02-f2ba-4964-bdfa-45827565c0f2" (UID: "ec993b02-f2ba-4964-bdfa-45827565c0f2"). InnerVolumeSpecName "kube-api-access-4wx7z". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 09 15:12:58 crc kubenswrapper[4762]: I1009 15:12:58.548299 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4wx7z\" (UniqueName: \"kubernetes.io/projected/ec993b02-f2ba-4964-bdfa-45827565c0f2-kube-api-access-4wx7z\") on node \"crc\" DevicePath \"\""
Oct 09 15:12:58 crc kubenswrapper[4762]: I1009 15:12:58.998938 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-cc93-account-create-7895h" event={"ID":"ec993b02-f2ba-4964-bdfa-45827565c0f2","Type":"ContainerDied","Data":"539e386444f6218973e8b2c8cef54cd21377f799ac7741eec5ac46773695c715"}
Oct 09 15:12:58 crc kubenswrapper[4762]: I1009 15:12:58.998983 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-cc93-account-create-7895h"
Oct 09 15:12:58 crc kubenswrapper[4762]: I1009 15:12:58.998988 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="539e386444f6218973e8b2c8cef54cd21377f799ac7741eec5ac46773695c715"
Oct 09 15:12:59 crc kubenswrapper[4762]: I1009 15:12:59.378377 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0"
Oct 09 15:13:01 crc kubenswrapper[4762]: I1009 15:13:01.276933 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-db-sync-nzhdh"]
Oct 09 15:13:01 crc kubenswrapper[4762]: E1009 15:13:01.279176 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ec993b02-f2ba-4964-bdfa-45827565c0f2" containerName="mariadb-account-create"
Oct 09 15:13:01 crc kubenswrapper[4762]: I1009 15:13:01.279213 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="ec993b02-f2ba-4964-bdfa-45827565c0f2" containerName="mariadb-account-create"
Oct 09 15:13:01 crc kubenswrapper[4762]: I1009 15:13:01.279540 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="ec993b02-f2ba-4964-bdfa-45827565c0f2" containerName="mariadb-account-create"
Oct 09 15:13:01 crc kubenswrapper[4762]: I1009 15:13:01.280554 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-sync-nzhdh"
Oct 09 15:13:01 crc kubenswrapper[4762]: I1009 15:13:01.282975 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-config-data"
Oct 09 15:13:01 crc kubenswrapper[4762]: I1009 15:13:01.283153 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-manila-dockercfg-lm67f"
Oct 09 15:13:01 crc kubenswrapper[4762]: I1009 15:13:01.300447 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-db-sync-nzhdh"]
Oct 09 15:13:01 crc kubenswrapper[4762]: I1009 15:13:01.321672 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bc3e58fe-7c7d-4ad2-b8f8-d11078d3a03b-combined-ca-bundle\") pod \"manila-db-sync-nzhdh\" (UID: \"bc3e58fe-7c7d-4ad2-b8f8-d11078d3a03b\") " pod="openstack/manila-db-sync-nzhdh"
Oct 09 15:13:01 crc kubenswrapper[4762]: I1009 15:13:01.321808 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7kx8m\" (UniqueName: \"kubernetes.io/projected/bc3e58fe-7c7d-4ad2-b8f8-d11078d3a03b-kube-api-access-7kx8m\") pod \"manila-db-sync-nzhdh\" (UID: \"bc3e58fe-7c7d-4ad2-b8f8-d11078d3a03b\") " pod="openstack/manila-db-sync-nzhdh"
Oct 09 15:13:01 crc kubenswrapper[4762]: I1009 15:13:01.322026 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bc3e58fe-7c7d-4ad2-b8f8-d11078d3a03b-config-data\") pod \"manila-db-sync-nzhdh\" (UID: \"bc3e58fe-7c7d-4ad2-b8f8-d11078d3a03b\") " pod="openstack/manila-db-sync-nzhdh"
Oct 09 15:13:01 crc kubenswrapper[4762]: I1009 15:13:01.322115 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/bc3e58fe-7c7d-4ad2-b8f8-d11078d3a03b-job-config-data\") pod \"manila-db-sync-nzhdh\" (UID: \"bc3e58fe-7c7d-4ad2-b8f8-d11078d3a03b\") " pod="openstack/manila-db-sync-nzhdh"
Oct 09 15:13:01 crc kubenswrapper[4762]: I1009 15:13:01.423944 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bc3e58fe-7c7d-4ad2-b8f8-d11078d3a03b-config-data\") pod \"manila-db-sync-nzhdh\" (UID: \"bc3e58fe-7c7d-4ad2-b8f8-d11078d3a03b\") " pod="openstack/manila-db-sync-nzhdh"
Oct 09 15:13:01 crc kubenswrapper[4762]: I1009 15:13:01.424056 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/bc3e58fe-7c7d-4ad2-b8f8-d11078d3a03b-job-config-data\") pod \"manila-db-sync-nzhdh\" (UID: \"bc3e58fe-7c7d-4ad2-b8f8-d11078d3a03b\") " pod="openstack/manila-db-sync-nzhdh"
Oct 09 15:13:01 crc kubenswrapper[4762]: I1009 15:13:01.424160 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bc3e58fe-7c7d-4ad2-b8f8-d11078d3a03b-combined-ca-bundle\") pod \"manila-db-sync-nzhdh\" (UID: \"bc3e58fe-7c7d-4ad2-b8f8-d11078d3a03b\") " pod="openstack/manila-db-sync-nzhdh"
Oct 09 15:13:01 crc kubenswrapper[4762]: I1009 15:13:01.424251 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7kx8m\" (UniqueName: \"kubernetes.io/projected/bc3e58fe-7c7d-4ad2-b8f8-d11078d3a03b-kube-api-access-7kx8m\") pod \"manila-db-sync-nzhdh\" (UID: \"bc3e58fe-7c7d-4ad2-b8f8-d11078d3a03b\") " pod="openstack/manila-db-sync-nzhdh"
Oct 09 15:13:01 crc kubenswrapper[4762]: I1009 15:13:01.431209 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bc3e58fe-7c7d-4ad2-b8f8-d11078d3a03b-config-data\") pod \"manila-db-sync-nzhdh\" (UID: \"bc3e58fe-7c7d-4ad2-b8f8-d11078d3a03b\") " pod="openstack/manila-db-sync-nzhdh"
Oct 09 15:13:01 crc kubenswrapper[4762]: I1009 15:13:01.432207 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bc3e58fe-7c7d-4ad2-b8f8-d11078d3a03b-combined-ca-bundle\") pod \"manila-db-sync-nzhdh\" (UID: \"bc3e58fe-7c7d-4ad2-b8f8-d11078d3a03b\") " pod="openstack/manila-db-sync-nzhdh"
Oct 09 15:13:01 crc kubenswrapper[4762]: I1009 15:13:01.432825 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/bc3e58fe-7c7d-4ad2-b8f8-d11078d3a03b-job-config-data\") pod \"manila-db-sync-nzhdh\" (UID: \"bc3e58fe-7c7d-4ad2-b8f8-d11078d3a03b\") " pod="openstack/manila-db-sync-nzhdh"
Oct 09 15:13:01 crc kubenswrapper[4762]: I1009 15:13:01.463624 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7kx8m\" (UniqueName: \"kubernetes.io/projected/bc3e58fe-7c7d-4ad2-b8f8-d11078d3a03b-kube-api-access-7kx8m\") pod \"manila-db-sync-nzhdh\" (UID: \"bc3e58fe-7c7d-4ad2-b8f8-d11078d3a03b\") " pod="openstack/manila-db-sync-nzhdh"
Oct 09 15:13:01 crc kubenswrapper[4762]: I1009 15:13:01.598330 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-sync-nzhdh"
Oct 09 15:13:02 crc kubenswrapper[4762]: I1009 15:13:02.499611 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-db-sync-nzhdh"]
Oct 09 15:13:03 crc kubenswrapper[4762]: I1009 15:13:03.087457 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-sync-nzhdh" event={"ID":"bc3e58fe-7c7d-4ad2-b8f8-d11078d3a03b","Type":"ContainerStarted","Data":"b737e3937474b3ecb15d37ae79db6f9ba2c6e2c3b24e0cf2489d17c10032fe61"}
Oct 09 15:13:09 crc kubenswrapper[4762]: I1009 15:13:09.150503 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-sync-nzhdh" event={"ID":"bc3e58fe-7c7d-4ad2-b8f8-d11078d3a03b","Type":"ContainerStarted","Data":"0b4ba54b0672cfd8230f3d68c71508da5f5365fdde716ec0e9369b418092748c"}
Oct 09 15:13:09 crc kubenswrapper[4762]: I1009 15:13:09.167539 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-db-sync-nzhdh" podStartSLOduration=2.742268233 podStartE2EDuration="8.167521455s" podCreationTimestamp="2025-10-09 15:13:01 +0000 UTC" firstStartedPulling="2025-10-09 15:13:02.517474666 +0000 UTC m=+6458.291265705" lastFinishedPulling="2025-10-09 15:13:07.942727888 +0000 UTC m=+6463.716518927" observedRunningTime="2025-10-09 15:13:09.166106398 +0000 UTC m=+6464.939897437" watchObservedRunningTime="2025-10-09 15:13:09.167521455 +0000 UTC m=+6464.941312494"
Oct 09 15:13:11 crc kubenswrapper[4762]: I1009 15:13:11.170444 4762 generic.go:334] "Generic (PLEG): container finished" podID="bc3e58fe-7c7d-4ad2-b8f8-d11078d3a03b" containerID="0b4ba54b0672cfd8230f3d68c71508da5f5365fdde716ec0e9369b418092748c" exitCode=0
Oct 09 15:13:11 crc kubenswrapper[4762]: I1009 15:13:11.170550 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-sync-nzhdh" event={"ID":"bc3e58fe-7c7d-4ad2-b8f8-d11078d3a03b","Type":"ContainerDied","Data":"0b4ba54b0672cfd8230f3d68c71508da5f5365fdde716ec0e9369b418092748c"}
event={"ID":"bc3e58fe-7c7d-4ad2-b8f8-d11078d3a03b","Type":"ContainerDied","Data":"0b4ba54b0672cfd8230f3d68c71508da5f5365fdde716ec0e9369b418092748c"} Oct 09 15:13:11 crc kubenswrapper[4762]: I1009 15:13:11.969041 4762 patch_prober.go:28] interesting pod/machine-config-daemon-5v6hv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 15:13:11 crc kubenswrapper[4762]: I1009 15:13:11.969095 4762 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 15:13:12 crc kubenswrapper[4762]: I1009 15:13:12.634116 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-sync-nzhdh" Oct 09 15:13:12 crc kubenswrapper[4762]: I1009 15:13:12.692913 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/bc3e58fe-7c7d-4ad2-b8f8-d11078d3a03b-job-config-data\") pod \"bc3e58fe-7c7d-4ad2-b8f8-d11078d3a03b\" (UID: \"bc3e58fe-7c7d-4ad2-b8f8-d11078d3a03b\") " Oct 09 15:13:12 crc kubenswrapper[4762]: I1009 15:13:12.693124 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bc3e58fe-7c7d-4ad2-b8f8-d11078d3a03b-combined-ca-bundle\") pod \"bc3e58fe-7c7d-4ad2-b8f8-d11078d3a03b\" (UID: \"bc3e58fe-7c7d-4ad2-b8f8-d11078d3a03b\") " Oct 09 15:13:12 crc kubenswrapper[4762]: I1009 15:13:12.693151 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bc3e58fe-7c7d-4ad2-b8f8-d11078d3a03b-config-data\") pod \"bc3e58fe-7c7d-4ad2-b8f8-d11078d3a03b\" (UID: \"bc3e58fe-7c7d-4ad2-b8f8-d11078d3a03b\") " Oct 09 15:13:12 crc kubenswrapper[4762]: I1009 15:13:12.693182 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7kx8m\" (UniqueName: \"kubernetes.io/projected/bc3e58fe-7c7d-4ad2-b8f8-d11078d3a03b-kube-api-access-7kx8m\") pod \"bc3e58fe-7c7d-4ad2-b8f8-d11078d3a03b\" (UID: \"bc3e58fe-7c7d-4ad2-b8f8-d11078d3a03b\") " Oct 09 15:13:12 crc kubenswrapper[4762]: I1009 15:13:12.704075 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc3e58fe-7c7d-4ad2-b8f8-d11078d3a03b-config-data" (OuterVolumeSpecName: "config-data") pod "bc3e58fe-7c7d-4ad2-b8f8-d11078d3a03b" (UID: "bc3e58fe-7c7d-4ad2-b8f8-d11078d3a03b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:13:12 crc kubenswrapper[4762]: I1009 15:13:12.709030 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc3e58fe-7c7d-4ad2-b8f8-d11078d3a03b-kube-api-access-7kx8m" (OuterVolumeSpecName: "kube-api-access-7kx8m") pod "bc3e58fe-7c7d-4ad2-b8f8-d11078d3a03b" (UID: "bc3e58fe-7c7d-4ad2-b8f8-d11078d3a03b"). InnerVolumeSpecName "kube-api-access-7kx8m". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 15:13:12 crc kubenswrapper[4762]: I1009 15:13:12.712796 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc3e58fe-7c7d-4ad2-b8f8-d11078d3a03b-job-config-data" (OuterVolumeSpecName: "job-config-data") pod "bc3e58fe-7c7d-4ad2-b8f8-d11078d3a03b" (UID: "bc3e58fe-7c7d-4ad2-b8f8-d11078d3a03b"). InnerVolumeSpecName "job-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:13:12 crc kubenswrapper[4762]: I1009 15:13:12.723350 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc3e58fe-7c7d-4ad2-b8f8-d11078d3a03b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "bc3e58fe-7c7d-4ad2-b8f8-d11078d3a03b" (UID: "bc3e58fe-7c7d-4ad2-b8f8-d11078d3a03b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:13:12 crc kubenswrapper[4762]: I1009 15:13:12.795152 4762 reconciler_common.go:293] "Volume detached for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/bc3e58fe-7c7d-4ad2-b8f8-d11078d3a03b-job-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 15:13:12 crc kubenswrapper[4762]: I1009 15:13:12.795182 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bc3e58fe-7c7d-4ad2-b8f8-d11078d3a03b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 15:13:12 crc kubenswrapper[4762]: I1009 15:13:12.795192 4762 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bc3e58fe-7c7d-4ad2-b8f8-d11078d3a03b-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 15:13:12 crc kubenswrapper[4762]: I1009 15:13:12.795201 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7kx8m\" (UniqueName: \"kubernetes.io/projected/bc3e58fe-7c7d-4ad2-b8f8-d11078d3a03b-kube-api-access-7kx8m\") on node \"crc\" DevicePath \"\"" Oct 09 15:13:13 crc kubenswrapper[4762]: I1009 15:13:13.198608 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-sync-nzhdh" event={"ID":"bc3e58fe-7c7d-4ad2-b8f8-d11078d3a03b","Type":"ContainerDied","Data":"b737e3937474b3ecb15d37ae79db6f9ba2c6e2c3b24e0cf2489d17c10032fe61"} Oct 09 15:13:13 crc kubenswrapper[4762]: I1009 15:13:13.198685 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b737e3937474b3ecb15d37ae79db6f9ba2c6e2c3b24e0cf2489d17c10032fe61" Oct 09 15:13:13 crc kubenswrapper[4762]: I1009 15:13:13.198753 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-db-sync-nzhdh" Oct 09 15:13:13 crc kubenswrapper[4762]: I1009 15:13:13.478605 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-scheduler-0"] Oct 09 15:13:13 crc kubenswrapper[4762]: E1009 15:13:13.479174 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bc3e58fe-7c7d-4ad2-b8f8-d11078d3a03b" containerName="manila-db-sync" Oct 09 15:13:13 crc kubenswrapper[4762]: I1009 15:13:13.479192 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="bc3e58fe-7c7d-4ad2-b8f8-d11078d3a03b" containerName="manila-db-sync" Oct 09 15:13:13 crc kubenswrapper[4762]: I1009 15:13:13.479440 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="bc3e58fe-7c7d-4ad2-b8f8-d11078d3a03b" containerName="manila-db-sync" Oct 09 15:13:13 crc kubenswrapper[4762]: I1009 15:13:13.480980 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-scheduler-0" Oct 09 15:13:13 crc kubenswrapper[4762]: I1009 15:13:13.484268 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-manila-dockercfg-lm67f" Oct 09 15:13:13 crc kubenswrapper[4762]: I1009 15:13:13.485727 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-scripts" Oct 09 15:13:13 crc kubenswrapper[4762]: I1009 15:13:13.486130 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-config-data" Oct 09 15:13:13 crc kubenswrapper[4762]: I1009 15:13:13.486365 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-scheduler-config-data" Oct 09 15:13:13 crc kubenswrapper[4762]: I1009 15:13:13.510001 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aaa6da22-9b2f-478f-9a49-c4fb27b61058-scripts\") pod \"manila-scheduler-0\" (UID: \"aaa6da22-9b2f-478f-9a49-c4fb27b61058\") " pod="openstack/manila-scheduler-0" Oct 09 15:13:13 crc kubenswrapper[4762]: I1009 15:13:13.510388 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/aaa6da22-9b2f-478f-9a49-c4fb27b61058-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"aaa6da22-9b2f-478f-9a49-c4fb27b61058\") " pod="openstack/manila-scheduler-0" Oct 09 15:13:13 crc kubenswrapper[4762]: I1009 15:13:13.510561 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aaa6da22-9b2f-478f-9a49-c4fb27b61058-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"aaa6da22-9b2f-478f-9a49-c4fb27b61058\") " pod="openstack/manila-scheduler-0" Oct 09 15:13:13 crc kubenswrapper[4762]: I1009 15:13:13.510742 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aaa6da22-9b2f-478f-9a49-c4fb27b61058-config-data\") pod \"manila-scheduler-0\" (UID: \"aaa6da22-9b2f-478f-9a49-c4fb27b61058\") " pod="openstack/manila-scheduler-0" Oct 09 15:13:13 crc kubenswrapper[4762]: I1009 15:13:13.511646 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/aaa6da22-9b2f-478f-9a49-c4fb27b61058-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"aaa6da22-9b2f-478f-9a49-c4fb27b61058\") " 
pod="openstack/manila-scheduler-0" Oct 09 15:13:13 crc kubenswrapper[4762]: I1009 15:13:13.511695 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q4wjj\" (UniqueName: \"kubernetes.io/projected/aaa6da22-9b2f-478f-9a49-c4fb27b61058-kube-api-access-q4wjj\") pod \"manila-scheduler-0\" (UID: \"aaa6da22-9b2f-478f-9a49-c4fb27b61058\") " pod="openstack/manila-scheduler-0" Oct 09 15:13:13 crc kubenswrapper[4762]: I1009 15:13:13.518613 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-scheduler-0"] Oct 09 15:13:13 crc kubenswrapper[4762]: I1009 15:13:13.592394 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-share-share1-0"] Oct 09 15:13:13 crc kubenswrapper[4762]: I1009 15:13:13.596146 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-share-share1-0" Oct 09 15:13:13 crc kubenswrapper[4762]: I1009 15:13:13.600981 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-share-share1-config-data" Oct 09 15:13:13 crc kubenswrapper[4762]: I1009 15:13:13.613416 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aaa6da22-9b2f-478f-9a49-c4fb27b61058-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"aaa6da22-9b2f-478f-9a49-c4fb27b61058\") " pod="openstack/manila-scheduler-0" Oct 09 15:13:13 crc kubenswrapper[4762]: I1009 15:13:13.613505 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aaa6da22-9b2f-478f-9a49-c4fb27b61058-config-data\") pod \"manila-scheduler-0\" (UID: \"aaa6da22-9b2f-478f-9a49-c4fb27b61058\") " pod="openstack/manila-scheduler-0" Oct 09 15:13:13 crc kubenswrapper[4762]: I1009 15:13:13.613541 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/aaa6da22-9b2f-478f-9a49-c4fb27b61058-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"aaa6da22-9b2f-478f-9a49-c4fb27b61058\") " pod="openstack/manila-scheduler-0" Oct 09 15:13:13 crc kubenswrapper[4762]: I1009 15:13:13.613562 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q4wjj\" (UniqueName: \"kubernetes.io/projected/aaa6da22-9b2f-478f-9a49-c4fb27b61058-kube-api-access-q4wjj\") pod \"manila-scheduler-0\" (UID: \"aaa6da22-9b2f-478f-9a49-c4fb27b61058\") " pod="openstack/manila-scheduler-0" Oct 09 15:13:13 crc kubenswrapper[4762]: I1009 15:13:13.613817 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aaa6da22-9b2f-478f-9a49-c4fb27b61058-scripts\") pod \"manila-scheduler-0\" (UID: \"aaa6da22-9b2f-478f-9a49-c4fb27b61058\") " pod="openstack/manila-scheduler-0" Oct 09 15:13:13 crc kubenswrapper[4762]: I1009 15:13:13.613843 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/aaa6da22-9b2f-478f-9a49-c4fb27b61058-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"aaa6da22-9b2f-478f-9a49-c4fb27b61058\") " pod="openstack/manila-scheduler-0" Oct 09 15:13:13 crc kubenswrapper[4762]: I1009 15:13:13.615117 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: 
\"kubernetes.io/host-path/aaa6da22-9b2f-478f-9a49-c4fb27b61058-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"aaa6da22-9b2f-478f-9a49-c4fb27b61058\") " pod="openstack/manila-scheduler-0" Oct 09 15:13:13 crc kubenswrapper[4762]: I1009 15:13:13.629056 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-share-share1-0"] Oct 09 15:13:13 crc kubenswrapper[4762]: I1009 15:13:13.631069 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aaa6da22-9b2f-478f-9a49-c4fb27b61058-config-data\") pod \"manila-scheduler-0\" (UID: \"aaa6da22-9b2f-478f-9a49-c4fb27b61058\") " pod="openstack/manila-scheduler-0" Oct 09 15:13:13 crc kubenswrapper[4762]: I1009 15:13:13.645363 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/aaa6da22-9b2f-478f-9a49-c4fb27b61058-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"aaa6da22-9b2f-478f-9a49-c4fb27b61058\") " pod="openstack/manila-scheduler-0" Oct 09 15:13:13 crc kubenswrapper[4762]: I1009 15:13:13.645922 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aaa6da22-9b2f-478f-9a49-c4fb27b61058-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"aaa6da22-9b2f-478f-9a49-c4fb27b61058\") " pod="openstack/manila-scheduler-0" Oct 09 15:13:13 crc kubenswrapper[4762]: I1009 15:13:13.657249 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aaa6da22-9b2f-478f-9a49-c4fb27b61058-scripts\") pod \"manila-scheduler-0\" (UID: \"aaa6da22-9b2f-478f-9a49-c4fb27b61058\") " pod="openstack/manila-scheduler-0" Oct 09 15:13:13 crc kubenswrapper[4762]: I1009 15:13:13.668347 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q4wjj\" (UniqueName: \"kubernetes.io/projected/aaa6da22-9b2f-478f-9a49-c4fb27b61058-kube-api-access-q4wjj\") pod \"manila-scheduler-0\" (UID: \"aaa6da22-9b2f-478f-9a49-c4fb27b61058\") " pod="openstack/manila-scheduler-0" Oct 09 15:13:13 crc kubenswrapper[4762]: I1009 15:13:13.678331 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-b797586d9-2g6s4"] Oct 09 15:13:13 crc kubenswrapper[4762]: I1009 15:13:13.680201 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-b797586d9-2g6s4" Oct 09 15:13:13 crc kubenswrapper[4762]: I1009 15:13:13.690415 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-b797586d9-2g6s4"] Oct 09 15:13:13 crc kubenswrapper[4762]: I1009 15:13:13.715576 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/681131ae-c6fd-457b-b4a2-5605d5e08f69-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"681131ae-c6fd-457b-b4a2-5605d5e08f69\") " pod="openstack/manila-share-share1-0" Oct 09 15:13:13 crc kubenswrapper[4762]: I1009 15:13:13.715659 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/681131ae-c6fd-457b-b4a2-5605d5e08f69-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"681131ae-c6fd-457b-b4a2-5605d5e08f69\") " pod="openstack/manila-share-share1-0" Oct 09 15:13:13 crc kubenswrapper[4762]: I1009 15:13:13.715690 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bdzpb\" (UniqueName: \"kubernetes.io/projected/783a4681-51fc-451f-afce-a38e4fd0910c-kube-api-access-bdzpb\") pod \"dnsmasq-dns-b797586d9-2g6s4\" (UID: \"783a4681-51fc-451f-afce-a38e4fd0910c\") " pod="openstack/dnsmasq-dns-b797586d9-2g6s4" Oct 09 15:13:13 crc kubenswrapper[4762]: I1009 15:13:13.715736 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/681131ae-c6fd-457b-b4a2-5605d5e08f69-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"681131ae-c6fd-457b-b4a2-5605d5e08f69\") " pod="openstack/manila-share-share1-0" Oct 09 15:13:13 crc kubenswrapper[4762]: I1009 15:13:13.715791 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/783a4681-51fc-451f-afce-a38e4fd0910c-dns-svc\") pod \"dnsmasq-dns-b797586d9-2g6s4\" (UID: \"783a4681-51fc-451f-afce-a38e4fd0910c\") " pod="openstack/dnsmasq-dns-b797586d9-2g6s4" Oct 09 15:13:13 crc kubenswrapper[4762]: I1009 15:13:13.715813 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/681131ae-c6fd-457b-b4a2-5605d5e08f69-scripts\") pod \"manila-share-share1-0\" (UID: \"681131ae-c6fd-457b-b4a2-5605d5e08f69\") " pod="openstack/manila-share-share1-0" Oct 09 15:13:13 crc kubenswrapper[4762]: I1009 15:13:13.715864 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jlgrc\" (UniqueName: \"kubernetes.io/projected/681131ae-c6fd-457b-b4a2-5605d5e08f69-kube-api-access-jlgrc\") pod \"manila-share-share1-0\" (UID: \"681131ae-c6fd-457b-b4a2-5605d5e08f69\") " pod="openstack/manila-share-share1-0" Oct 09 15:13:13 crc kubenswrapper[4762]: I1009 15:13:13.715900 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/681131ae-c6fd-457b-b4a2-5605d5e08f69-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"681131ae-c6fd-457b-b4a2-5605d5e08f69\") " pod="openstack/manila-share-share1-0" Oct 09 15:13:13 crc kubenswrapper[4762]: I1009 15:13:13.716080 4762 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/783a4681-51fc-451f-afce-a38e4fd0910c-ovsdbserver-sb\") pod \"dnsmasq-dns-b797586d9-2g6s4\" (UID: \"783a4681-51fc-451f-afce-a38e4fd0910c\") " pod="openstack/dnsmasq-dns-b797586d9-2g6s4" Oct 09 15:13:13 crc kubenswrapper[4762]: I1009 15:13:13.716106 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/681131ae-c6fd-457b-b4a2-5605d5e08f69-ceph\") pod \"manila-share-share1-0\" (UID: \"681131ae-c6fd-457b-b4a2-5605d5e08f69\") " pod="openstack/manila-share-share1-0" Oct 09 15:13:13 crc kubenswrapper[4762]: I1009 15:13:13.716162 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/783a4681-51fc-451f-afce-a38e4fd0910c-ovsdbserver-nb\") pod \"dnsmasq-dns-b797586d9-2g6s4\" (UID: \"783a4681-51fc-451f-afce-a38e4fd0910c\") " pod="openstack/dnsmasq-dns-b797586d9-2g6s4" Oct 09 15:13:13 crc kubenswrapper[4762]: I1009 15:13:13.716197 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/783a4681-51fc-451f-afce-a38e4fd0910c-config\") pod \"dnsmasq-dns-b797586d9-2g6s4\" (UID: \"783a4681-51fc-451f-afce-a38e4fd0910c\") " pod="openstack/dnsmasq-dns-b797586d9-2g6s4" Oct 09 15:13:13 crc kubenswrapper[4762]: I1009 15:13:13.716223 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/681131ae-c6fd-457b-b4a2-5605d5e08f69-config-data\") pod \"manila-share-share1-0\" (UID: \"681131ae-c6fd-457b-b4a2-5605d5e08f69\") " pod="openstack/manila-share-share1-0" Oct 09 15:13:13 crc kubenswrapper[4762]: I1009 15:13:13.811346 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-scheduler-0" Oct 09 15:13:13 crc kubenswrapper[4762]: I1009 15:13:13.819156 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/783a4681-51fc-451f-afce-a38e4fd0910c-ovsdbserver-sb\") pod \"dnsmasq-dns-b797586d9-2g6s4\" (UID: \"783a4681-51fc-451f-afce-a38e4fd0910c\") " pod="openstack/dnsmasq-dns-b797586d9-2g6s4" Oct 09 15:13:13 crc kubenswrapper[4762]: I1009 15:13:13.819507 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/681131ae-c6fd-457b-b4a2-5605d5e08f69-ceph\") pod \"manila-share-share1-0\" (UID: \"681131ae-c6fd-457b-b4a2-5605d5e08f69\") " pod="openstack/manila-share-share1-0" Oct 09 15:13:13 crc kubenswrapper[4762]: I1009 15:13:13.819750 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/783a4681-51fc-451f-afce-a38e4fd0910c-ovsdbserver-nb\") pod \"dnsmasq-dns-b797586d9-2g6s4\" (UID: \"783a4681-51fc-451f-afce-a38e4fd0910c\") " pod="openstack/dnsmasq-dns-b797586d9-2g6s4" Oct 09 15:13:13 crc kubenswrapper[4762]: I1009 15:13:13.819899 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/783a4681-51fc-451f-afce-a38e4fd0910c-config\") pod \"dnsmasq-dns-b797586d9-2g6s4\" (UID: \"783a4681-51fc-451f-afce-a38e4fd0910c\") " pod="openstack/dnsmasq-dns-b797586d9-2g6s4" Oct 09 15:13:13 crc kubenswrapper[4762]: I1009 15:13:13.820005 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/681131ae-c6fd-457b-b4a2-5605d5e08f69-config-data\") pod \"manila-share-share1-0\" (UID: \"681131ae-c6fd-457b-b4a2-5605d5e08f69\") " pod="openstack/manila-share-share1-0" Oct 09 15:13:13 crc kubenswrapper[4762]: I1009 15:13:13.820208 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/681131ae-c6fd-457b-b4a2-5605d5e08f69-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"681131ae-c6fd-457b-b4a2-5605d5e08f69\") " pod="openstack/manila-share-share1-0" Oct 09 15:13:13 crc kubenswrapper[4762]: I1009 15:13:13.820334 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/681131ae-c6fd-457b-b4a2-5605d5e08f69-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"681131ae-c6fd-457b-b4a2-5605d5e08f69\") " pod="openstack/manila-share-share1-0" Oct 09 15:13:13 crc kubenswrapper[4762]: I1009 15:13:13.820447 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bdzpb\" (UniqueName: \"kubernetes.io/projected/783a4681-51fc-451f-afce-a38e4fd0910c-kube-api-access-bdzpb\") pod \"dnsmasq-dns-b797586d9-2g6s4\" (UID: \"783a4681-51fc-451f-afce-a38e4fd0910c\") " pod="openstack/dnsmasq-dns-b797586d9-2g6s4" Oct 09 15:13:13 crc kubenswrapper[4762]: I1009 15:13:13.820568 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/681131ae-c6fd-457b-b4a2-5605d5e08f69-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"681131ae-c6fd-457b-b4a2-5605d5e08f69\") " pod="openstack/manila-share-share1-0" Oct 09 15:13:13 crc kubenswrapper[4762]: I1009 15:13:13.820734 4762 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/783a4681-51fc-451f-afce-a38e4fd0910c-dns-svc\") pod \"dnsmasq-dns-b797586d9-2g6s4\" (UID: \"783a4681-51fc-451f-afce-a38e4fd0910c\") " pod="openstack/dnsmasq-dns-b797586d9-2g6s4" Oct 09 15:13:13 crc kubenswrapper[4762]: I1009 15:13:13.820817 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/681131ae-c6fd-457b-b4a2-5605d5e08f69-scripts\") pod \"manila-share-share1-0\" (UID: \"681131ae-c6fd-457b-b4a2-5605d5e08f69\") " pod="openstack/manila-share-share1-0" Oct 09 15:13:13 crc kubenswrapper[4762]: I1009 15:13:13.820979 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jlgrc\" (UniqueName: \"kubernetes.io/projected/681131ae-c6fd-457b-b4a2-5605d5e08f69-kube-api-access-jlgrc\") pod \"manila-share-share1-0\" (UID: \"681131ae-c6fd-457b-b4a2-5605d5e08f69\") " pod="openstack/manila-share-share1-0" Oct 09 15:13:13 crc kubenswrapper[4762]: I1009 15:13:13.821248 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/681131ae-c6fd-457b-b4a2-5605d5e08f69-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"681131ae-c6fd-457b-b4a2-5605d5e08f69\") " pod="openstack/manila-share-share1-0" Oct 09 15:13:13 crc kubenswrapper[4762]: I1009 15:13:13.821841 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/681131ae-c6fd-457b-b4a2-5605d5e08f69-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"681131ae-c6fd-457b-b4a2-5605d5e08f69\") " pod="openstack/manila-share-share1-0" Oct 09 15:13:13 crc kubenswrapper[4762]: I1009 15:13:13.823859 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/783a4681-51fc-451f-afce-a38e4fd0910c-ovsdbserver-sb\") pod \"dnsmasq-dns-b797586d9-2g6s4\" (UID: \"783a4681-51fc-451f-afce-a38e4fd0910c\") " pod="openstack/dnsmasq-dns-b797586d9-2g6s4" Oct 09 15:13:13 crc kubenswrapper[4762]: I1009 15:13:13.823860 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/681131ae-c6fd-457b-b4a2-5605d5e08f69-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"681131ae-c6fd-457b-b4a2-5605d5e08f69\") " pod="openstack/manila-share-share1-0" Oct 09 15:13:13 crc kubenswrapper[4762]: I1009 15:13:13.824738 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/783a4681-51fc-451f-afce-a38e4fd0910c-ovsdbserver-nb\") pod \"dnsmasq-dns-b797586d9-2g6s4\" (UID: \"783a4681-51fc-451f-afce-a38e4fd0910c\") " pod="openstack/dnsmasq-dns-b797586d9-2g6s4" Oct 09 15:13:13 crc kubenswrapper[4762]: I1009 15:13:13.827079 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/783a4681-51fc-451f-afce-a38e4fd0910c-config\") pod \"dnsmasq-dns-b797586d9-2g6s4\" (UID: \"783a4681-51fc-451f-afce-a38e4fd0910c\") " pod="openstack/dnsmasq-dns-b797586d9-2g6s4" Oct 09 15:13:13 crc kubenswrapper[4762]: I1009 15:13:13.827659 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/783a4681-51fc-451f-afce-a38e4fd0910c-dns-svc\") pod \"dnsmasq-dns-b797586d9-2g6s4\" (UID: 
\"783a4681-51fc-451f-afce-a38e4fd0910c\") " pod="openstack/dnsmasq-dns-b797586d9-2g6s4" Oct 09 15:13:13 crc kubenswrapper[4762]: I1009 15:13:13.829080 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/681131ae-c6fd-457b-b4a2-5605d5e08f69-scripts\") pod \"manila-share-share1-0\" (UID: \"681131ae-c6fd-457b-b4a2-5605d5e08f69\") " pod="openstack/manila-share-share1-0" Oct 09 15:13:13 crc kubenswrapper[4762]: I1009 15:13:13.836171 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/681131ae-c6fd-457b-b4a2-5605d5e08f69-ceph\") pod \"manila-share-share1-0\" (UID: \"681131ae-c6fd-457b-b4a2-5605d5e08f69\") " pod="openstack/manila-share-share1-0" Oct 09 15:13:13 crc kubenswrapper[4762]: I1009 15:13:13.837220 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/681131ae-c6fd-457b-b4a2-5605d5e08f69-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"681131ae-c6fd-457b-b4a2-5605d5e08f69\") " pod="openstack/manila-share-share1-0" Oct 09 15:13:13 crc kubenswrapper[4762]: I1009 15:13:13.839879 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/681131ae-c6fd-457b-b4a2-5605d5e08f69-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"681131ae-c6fd-457b-b4a2-5605d5e08f69\") " pod="openstack/manila-share-share1-0" Oct 09 15:13:13 crc kubenswrapper[4762]: I1009 15:13:13.844675 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/681131ae-c6fd-457b-b4a2-5605d5e08f69-config-data\") pod \"manila-share-share1-0\" (UID: \"681131ae-c6fd-457b-b4a2-5605d5e08f69\") " pod="openstack/manila-share-share1-0" Oct 09 15:13:13 crc kubenswrapper[4762]: I1009 15:13:13.860984 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bdzpb\" (UniqueName: \"kubernetes.io/projected/783a4681-51fc-451f-afce-a38e4fd0910c-kube-api-access-bdzpb\") pod \"dnsmasq-dns-b797586d9-2g6s4\" (UID: \"783a4681-51fc-451f-afce-a38e4fd0910c\") " pod="openstack/dnsmasq-dns-b797586d9-2g6s4" Oct 09 15:13:13 crc kubenswrapper[4762]: I1009 15:13:13.882869 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jlgrc\" (UniqueName: \"kubernetes.io/projected/681131ae-c6fd-457b-b4a2-5605d5e08f69-kube-api-access-jlgrc\") pod \"manila-share-share1-0\" (UID: \"681131ae-c6fd-457b-b4a2-5605d5e08f69\") " pod="openstack/manila-share-share1-0" Oct 09 15:13:13 crc kubenswrapper[4762]: I1009 15:13:13.931929 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-share-share1-0" Oct 09 15:13:13 crc kubenswrapper[4762]: I1009 15:13:13.938561 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-api-0"] Oct 09 15:13:13 crc kubenswrapper[4762]: I1009 15:13:13.940572 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-api-0" Oct 09 15:13:13 crc kubenswrapper[4762]: I1009 15:13:13.950020 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-api-config-data" Oct 09 15:13:13 crc kubenswrapper[4762]: I1009 15:13:13.950345 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-api-0"] Oct 09 15:13:14 crc kubenswrapper[4762]: I1009 15:13:14.032346 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4636ea01-4aef-4d23-b4fb-93426838eeac-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"4636ea01-4aef-4d23-b4fb-93426838eeac\") " pod="openstack/manila-api-0" Oct 09 15:13:14 crc kubenswrapper[4762]: I1009 15:13:14.032478 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4636ea01-4aef-4d23-b4fb-93426838eeac-logs\") pod \"manila-api-0\" (UID: \"4636ea01-4aef-4d23-b4fb-93426838eeac\") " pod="openstack/manila-api-0" Oct 09 15:13:14 crc kubenswrapper[4762]: I1009 15:13:14.032543 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4636ea01-4aef-4d23-b4fb-93426838eeac-config-data\") pod \"manila-api-0\" (UID: \"4636ea01-4aef-4d23-b4fb-93426838eeac\") " pod="openstack/manila-api-0" Oct 09 15:13:14 crc kubenswrapper[4762]: I1009 15:13:14.032578 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4636ea01-4aef-4d23-b4fb-93426838eeac-scripts\") pod \"manila-api-0\" (UID: \"4636ea01-4aef-4d23-b4fb-93426838eeac\") " pod="openstack/manila-api-0" Oct 09 15:13:14 crc kubenswrapper[4762]: I1009 15:13:14.032592 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4636ea01-4aef-4d23-b4fb-93426838eeac-etc-machine-id\") pod \"manila-api-0\" (UID: \"4636ea01-4aef-4d23-b4fb-93426838eeac\") " pod="openstack/manila-api-0" Oct 09 15:13:14 crc kubenswrapper[4762]: I1009 15:13:14.032615 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sv55z\" (UniqueName: \"kubernetes.io/projected/4636ea01-4aef-4d23-b4fb-93426838eeac-kube-api-access-sv55z\") pod \"manila-api-0\" (UID: \"4636ea01-4aef-4d23-b4fb-93426838eeac\") " pod="openstack/manila-api-0" Oct 09 15:13:14 crc kubenswrapper[4762]: I1009 15:13:14.032699 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4636ea01-4aef-4d23-b4fb-93426838eeac-config-data-custom\") pod \"manila-api-0\" (UID: \"4636ea01-4aef-4d23-b4fb-93426838eeac\") " pod="openstack/manila-api-0" Oct 09 15:13:14 crc kubenswrapper[4762]: I1009 15:13:14.076413 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-b797586d9-2g6s4" Oct 09 15:13:14 crc kubenswrapper[4762]: I1009 15:13:14.136334 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4636ea01-4aef-4d23-b4fb-93426838eeac-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"4636ea01-4aef-4d23-b4fb-93426838eeac\") " pod="openstack/manila-api-0" Oct 09 15:13:14 crc kubenswrapper[4762]: I1009 15:13:14.136454 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4636ea01-4aef-4d23-b4fb-93426838eeac-logs\") pod \"manila-api-0\" (UID: \"4636ea01-4aef-4d23-b4fb-93426838eeac\") " pod="openstack/manila-api-0" Oct 09 15:13:14 crc kubenswrapper[4762]: I1009 15:13:14.136518 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4636ea01-4aef-4d23-b4fb-93426838eeac-config-data\") pod \"manila-api-0\" (UID: \"4636ea01-4aef-4d23-b4fb-93426838eeac\") " pod="openstack/manila-api-0" Oct 09 15:13:14 crc kubenswrapper[4762]: I1009 15:13:14.136554 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4636ea01-4aef-4d23-b4fb-93426838eeac-scripts\") pod \"manila-api-0\" (UID: \"4636ea01-4aef-4d23-b4fb-93426838eeac\") " pod="openstack/manila-api-0" Oct 09 15:13:14 crc kubenswrapper[4762]: I1009 15:13:14.136579 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4636ea01-4aef-4d23-b4fb-93426838eeac-etc-machine-id\") pod \"manila-api-0\" (UID: \"4636ea01-4aef-4d23-b4fb-93426838eeac\") " pod="openstack/manila-api-0" Oct 09 15:13:14 crc kubenswrapper[4762]: I1009 15:13:14.136608 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sv55z\" (UniqueName: \"kubernetes.io/projected/4636ea01-4aef-4d23-b4fb-93426838eeac-kube-api-access-sv55z\") pod \"manila-api-0\" (UID: \"4636ea01-4aef-4d23-b4fb-93426838eeac\") " pod="openstack/manila-api-0" Oct 09 15:13:14 crc kubenswrapper[4762]: I1009 15:13:14.136685 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4636ea01-4aef-4d23-b4fb-93426838eeac-config-data-custom\") pod \"manila-api-0\" (UID: \"4636ea01-4aef-4d23-b4fb-93426838eeac\") " pod="openstack/manila-api-0" Oct 09 15:13:14 crc kubenswrapper[4762]: I1009 15:13:14.137697 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4636ea01-4aef-4d23-b4fb-93426838eeac-etc-machine-id\") pod \"manila-api-0\" (UID: \"4636ea01-4aef-4d23-b4fb-93426838eeac\") " pod="openstack/manila-api-0" Oct 09 15:13:14 crc kubenswrapper[4762]: I1009 15:13:14.139817 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4636ea01-4aef-4d23-b4fb-93426838eeac-logs\") pod \"manila-api-0\" (UID: \"4636ea01-4aef-4d23-b4fb-93426838eeac\") " pod="openstack/manila-api-0" Oct 09 15:13:14 crc kubenswrapper[4762]: I1009 15:13:14.146767 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4636ea01-4aef-4d23-b4fb-93426838eeac-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"4636ea01-4aef-4d23-b4fb-93426838eeac\") " 
pod="openstack/manila-api-0" Oct 09 15:13:14 crc kubenswrapper[4762]: I1009 15:13:14.150902 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4636ea01-4aef-4d23-b4fb-93426838eeac-scripts\") pod \"manila-api-0\" (UID: \"4636ea01-4aef-4d23-b4fb-93426838eeac\") " pod="openstack/manila-api-0" Oct 09 15:13:14 crc kubenswrapper[4762]: I1009 15:13:14.153258 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4636ea01-4aef-4d23-b4fb-93426838eeac-config-data\") pod \"manila-api-0\" (UID: \"4636ea01-4aef-4d23-b4fb-93426838eeac\") " pod="openstack/manila-api-0" Oct 09 15:13:14 crc kubenswrapper[4762]: I1009 15:13:14.158131 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4636ea01-4aef-4d23-b4fb-93426838eeac-config-data-custom\") pod \"manila-api-0\" (UID: \"4636ea01-4aef-4d23-b4fb-93426838eeac\") " pod="openstack/manila-api-0" Oct 09 15:13:14 crc kubenswrapper[4762]: I1009 15:13:14.163189 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sv55z\" (UniqueName: \"kubernetes.io/projected/4636ea01-4aef-4d23-b4fb-93426838eeac-kube-api-access-sv55z\") pod \"manila-api-0\" (UID: \"4636ea01-4aef-4d23-b4fb-93426838eeac\") " pod="openstack/manila-api-0" Oct 09 15:13:14 crc kubenswrapper[4762]: I1009 15:13:14.362186 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-api-0" Oct 09 15:13:14 crc kubenswrapper[4762]: I1009 15:13:14.465083 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-scheduler-0"] Oct 09 15:13:14 crc kubenswrapper[4762]: W1009 15:13:14.472005 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podaaa6da22_9b2f_478f_9a49_c4fb27b61058.slice/crio-d442ad830de3c42590ea4d886b0b070679954736b6d837daceb65cfc34be8cf7 WatchSource:0}: Error finding container d442ad830de3c42590ea4d886b0b070679954736b6d837daceb65cfc34be8cf7: Status 404 returned error can't find the container with id d442ad830de3c42590ea4d886b0b070679954736b6d837daceb65cfc34be8cf7 Oct 09 15:13:14 crc kubenswrapper[4762]: I1009 15:13:14.476115 4762 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 09 15:13:14 crc kubenswrapper[4762]: I1009 15:13:14.737743 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-share-share1-0"] Oct 09 15:13:14 crc kubenswrapper[4762]: W1009 15:13:14.741044 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod681131ae_c6fd_457b_b4a2_5605d5e08f69.slice/crio-784b41c43a69513d627320ec7edbb4250585f3080673fe3d1f61c0d17a022883 WatchSource:0}: Error finding container 784b41c43a69513d627320ec7edbb4250585f3080673fe3d1f61c0d17a022883: Status 404 returned error can't find the container with id 784b41c43a69513d627320ec7edbb4250585f3080673fe3d1f61c0d17a022883 Oct 09 15:13:14 crc kubenswrapper[4762]: I1009 15:13:14.748263 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-b797586d9-2g6s4"] Oct 09 15:13:15 crc kubenswrapper[4762]: I1009 15:13:15.050984 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-api-0"] Oct 09 15:13:15 crc kubenswrapper[4762]: I1009 15:13:15.244808 4762 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"aaa6da22-9b2f-478f-9a49-c4fb27b61058","Type":"ContainerStarted","Data":"d442ad830de3c42590ea4d886b0b070679954736b6d837daceb65cfc34be8cf7"} Oct 09 15:13:15 crc kubenswrapper[4762]: I1009 15:13:15.247984 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b797586d9-2g6s4" event={"ID":"783a4681-51fc-451f-afce-a38e4fd0910c","Type":"ContainerStarted","Data":"24beca5d68f07f4f189af899d803d1c3d3cdf8da2482f832e6f7c6aa27ba3743"} Oct 09 15:13:15 crc kubenswrapper[4762]: I1009 15:13:15.248073 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b797586d9-2g6s4" event={"ID":"783a4681-51fc-451f-afce-a38e4fd0910c","Type":"ContainerStarted","Data":"cd335cf120ecc49ac5f7e66c203bdbad4b2198cb3338979cb53817830ea49840"} Oct 09 15:13:15 crc kubenswrapper[4762]: I1009 15:13:15.250595 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"681131ae-c6fd-457b-b4a2-5605d5e08f69","Type":"ContainerStarted","Data":"784b41c43a69513d627320ec7edbb4250585f3080673fe3d1f61c0d17a022883"} Oct 09 15:13:15 crc kubenswrapper[4762]: I1009 15:13:15.252105 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"4636ea01-4aef-4d23-b4fb-93426838eeac","Type":"ContainerStarted","Data":"59b0b0f040a9b7ab27e4443fbe16476cb1a8e354c0957d7c72ba0b87e0a23c84"} Oct 09 15:13:16 crc kubenswrapper[4762]: I1009 15:13:16.267203 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"4636ea01-4aef-4d23-b4fb-93426838eeac","Type":"ContainerStarted","Data":"057d1f036e7c7ee86beb054851ea47e3268d03b9476b8d9b2b5fbbd653aac047"} Oct 09 15:13:16 crc kubenswrapper[4762]: I1009 15:13:16.274532 4762 generic.go:334] "Generic (PLEG): container finished" podID="783a4681-51fc-451f-afce-a38e4fd0910c" containerID="24beca5d68f07f4f189af899d803d1c3d3cdf8da2482f832e6f7c6aa27ba3743" exitCode=0 Oct 09 15:13:16 crc kubenswrapper[4762]: I1009 15:13:16.274596 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b797586d9-2g6s4" event={"ID":"783a4681-51fc-451f-afce-a38e4fd0910c","Type":"ContainerDied","Data":"24beca5d68f07f4f189af899d803d1c3d3cdf8da2482f832e6f7c6aa27ba3743"} Oct 09 15:13:17 crc kubenswrapper[4762]: I1009 15:13:17.287290 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"aaa6da22-9b2f-478f-9a49-c4fb27b61058","Type":"ContainerStarted","Data":"33c5a2c2981b5c8021f7a4302322b040437952bb4c429d90d113ce25e95df374"} Oct 09 15:13:17 crc kubenswrapper[4762]: I1009 15:13:17.290827 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b797586d9-2g6s4" event={"ID":"783a4681-51fc-451f-afce-a38e4fd0910c","Type":"ContainerStarted","Data":"1020fbaa4b252630c8faa49513a118883b35fe2716a6099d1e7f79d1168ad552"} Oct 09 15:13:17 crc kubenswrapper[4762]: I1009 15:13:17.290994 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-b797586d9-2g6s4" Oct 09 15:13:17 crc kubenswrapper[4762]: I1009 15:13:17.296091 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"4636ea01-4aef-4d23-b4fb-93426838eeac","Type":"ContainerStarted","Data":"28f192dad5dbafdb0f8682553787925d3e583ce5f430158d36acc55ef91ee0b3"} Oct 09 15:13:17 crc kubenswrapper[4762]: I1009 15:13:17.296347 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack/manila-api-0" Oct 09 15:13:17 crc kubenswrapper[4762]: I1009 15:13:17.316235 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-b797586d9-2g6s4" podStartSLOduration=4.3162167 podStartE2EDuration="4.3162167s" podCreationTimestamp="2025-10-09 15:13:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 15:13:17.307890703 +0000 UTC m=+6473.081681742" watchObservedRunningTime="2025-10-09 15:13:17.3162167 +0000 UTC m=+6473.090007739" Oct 09 15:13:17 crc kubenswrapper[4762]: I1009 15:13:17.335526 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-api-0" podStartSLOduration=4.335498681 podStartE2EDuration="4.335498681s" podCreationTimestamp="2025-10-09 15:13:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 15:13:17.327560705 +0000 UTC m=+6473.101351764" watchObservedRunningTime="2025-10-09 15:13:17.335498681 +0000 UTC m=+6473.109289720" Oct 09 15:13:19 crc kubenswrapper[4762]: I1009 15:13:19.326060 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"aaa6da22-9b2f-478f-9a49-c4fb27b61058","Type":"ContainerStarted","Data":"8259141c77a74a41aae902da87af7adce300ec6f293ee962ae511386f964a7a6"} Oct 09 15:13:20 crc kubenswrapper[4762]: I1009 15:13:20.359783 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-scheduler-0" podStartSLOduration=6.124520037 podStartE2EDuration="7.359760065s" podCreationTimestamp="2025-10-09 15:13:13 +0000 UTC" firstStartedPulling="2025-10-09 15:13:14.475233037 +0000 UTC m=+6470.249024086" lastFinishedPulling="2025-10-09 15:13:15.710473065 +0000 UTC m=+6471.484264114" observedRunningTime="2025-10-09 15:13:20.352702541 +0000 UTC m=+6476.126493610" watchObservedRunningTime="2025-10-09 15:13:20.359760065 +0000 UTC m=+6476.133551104" Oct 09 15:13:23 crc kubenswrapper[4762]: I1009 15:13:23.812224 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/manila-scheduler-0" Oct 09 15:13:24 crc kubenswrapper[4762]: I1009 15:13:24.078877 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-b797586d9-2g6s4" Oct 09 15:13:24 crc kubenswrapper[4762]: I1009 15:13:24.140535 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-694cc885dc-ccw4h"] Oct 09 15:13:24 crc kubenswrapper[4762]: I1009 15:13:24.141025 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-694cc885dc-ccw4h" podUID="35e5a3da-9dde-4bae-87bb-e24ed6f5a38b" containerName="dnsmasq-dns" containerID="cri-o://a9c646c9b983b8d7dfe9cbfa1ca87bace22818cbaf0198cee8be405380da0a9d" gracePeriod=10 Oct 09 15:13:25 crc kubenswrapper[4762]: I1009 15:13:25.785955 4762 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-694cc885dc-ccw4h" podUID="35e5a3da-9dde-4bae-87bb-e24ed6f5a38b" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.1.83:5353: connect: connection refused" Oct 09 15:13:26 crc kubenswrapper[4762]: I1009 15:13:26.415911 4762 generic.go:334] "Generic (PLEG): container finished" podID="35e5a3da-9dde-4bae-87bb-e24ed6f5a38b" containerID="a9c646c9b983b8d7dfe9cbfa1ca87bace22818cbaf0198cee8be405380da0a9d" exitCode=0 Oct 09 15:13:26 crc 
kubenswrapper[4762]: I1009 15:13:26.415955 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-694cc885dc-ccw4h" event={"ID":"35e5a3da-9dde-4bae-87bb-e24ed6f5a38b","Type":"ContainerDied","Data":"a9c646c9b983b8d7dfe9cbfa1ca87bace22818cbaf0198cee8be405380da0a9d"} Oct 09 15:13:30 crc kubenswrapper[4762]: I1009 15:13:30.785743 4762 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-694cc885dc-ccw4h" podUID="35e5a3da-9dde-4bae-87bb-e24ed6f5a38b" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.1.83:5353: connect: connection refused" Oct 09 15:13:33 crc kubenswrapper[4762]: E1009 15:13:33.120736 4762 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-manila-share:current-podified" Oct 09 15:13:33 crc kubenswrapper[4762]: E1009 15:13:33.121533 4762 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manila-share,Image:quay.io/podified-antelope-centos9/openstack-manila-share:current-podified,Command:[/usr/bin/dumb-init],Args:[--single-child -- /bin/bash -c /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n558h86h5fch56dh588h5d7h64ch55ch58bh64bh5dbh94h67dh55dh5d9h698h68h69h68h656h57hc4h5bch5c5h66h5bh5b6h685h58h5bdh676h5f4q,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:MALLOC_ARENA_MAX,Value:1,ValueFrom:nil,},EnvVar{Name:MALLOC_MMAP_THRESHOLD_,Value:131072,ValueFrom:nil,},EnvVar{Name:MALLOC_TRIM_THRESHOLD_,Value:262144,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-machine-id,ReadOnly:true,MountPath:/etc/machine-id,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/default,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ceph,ReadOnly:true,MountPath:/etc/ceph,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-custom,ReadOnly:true,MountPath:/etc/manila/manila.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:var-lib-manila,ReadOnly:false,MountPath:/var/lib/manila,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:manila-share-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-jlgrc,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/,Port:{0 8080 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:10,TimeoutSeconds:20,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:*true,SELinuxOptions:nil,RunAsUser:*42429,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/,Port:{0 8080 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:10,TimeoutSeconds:10,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:12,TerminationGracePeriodSeconds:nil,},ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod manila-share-share1-0_openstack(681131ae-c6fd-457b-b4a2-5605d5e08f69): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 09 15:13:33 crc kubenswrapper[4762]: E1009 15:13:33.130838 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manila-share\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"probe\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-manila-share:current-podified\\\"\"]" pod="openstack/manila-share-share1-0" podUID="681131ae-c6fd-457b-b4a2-5605d5e08f69" Oct 09 15:13:33 crc kubenswrapper[4762]: I1009 15:13:33.378558 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-694cc885dc-ccw4h" Oct 09 15:13:33 crc kubenswrapper[4762]: I1009 15:13:33.489771 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-694cc885dc-ccw4h" Oct 09 15:13:33 crc kubenswrapper[4762]: I1009 15:13:33.490099 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-694cc885dc-ccw4h" event={"ID":"35e5a3da-9dde-4bae-87bb-e24ed6f5a38b","Type":"ContainerDied","Data":"9f34d706f5c2e04db7adf4281d9bcfdc042998919c2a600f7f101b33edd42199"} Oct 09 15:13:33 crc kubenswrapper[4762]: I1009 15:13:33.490169 4762 scope.go:117] "RemoveContainer" containerID="a9c646c9b983b8d7dfe9cbfa1ca87bace22818cbaf0198cee8be405380da0a9d" Oct 09 15:13:33 crc kubenswrapper[4762]: E1009 15:13:33.494142 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manila-share\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-manila-share:current-podified\\\"\", failed to \"StartContainer\" for \"probe\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-manila-share:current-podified\\\"\"]" pod="openstack/manila-share-share1-0" podUID="681131ae-c6fd-457b-b4a2-5605d5e08f69" Oct 09 15:13:33 crc kubenswrapper[4762]: I1009 15:13:33.507843 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/35e5a3da-9dde-4bae-87bb-e24ed6f5a38b-ovsdbserver-sb\") pod \"35e5a3da-9dde-4bae-87bb-e24ed6f5a38b\" (UID: \"35e5a3da-9dde-4bae-87bb-e24ed6f5a38b\") " Oct 09 15:13:33 crc kubenswrapper[4762]: I1009 15:13:33.507996 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/35e5a3da-9dde-4bae-87bb-e24ed6f5a38b-dns-svc\") pod \"35e5a3da-9dde-4bae-87bb-e24ed6f5a38b\" (UID: \"35e5a3da-9dde-4bae-87bb-e24ed6f5a38b\") " Oct 09 15:13:33 crc kubenswrapper[4762]: I1009 15:13:33.508054 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/35e5a3da-9dde-4bae-87bb-e24ed6f5a38b-config\") pod \"35e5a3da-9dde-4bae-87bb-e24ed6f5a38b\" (UID: \"35e5a3da-9dde-4bae-87bb-e24ed6f5a38b\") " Oct 09 15:13:33 crc kubenswrapper[4762]: I1009 15:13:33.508175 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/35e5a3da-9dde-4bae-87bb-e24ed6f5a38b-ovsdbserver-nb\") pod \"35e5a3da-9dde-4bae-87bb-e24ed6f5a38b\" (UID: \"35e5a3da-9dde-4bae-87bb-e24ed6f5a38b\") " Oct 09 15:13:33 crc kubenswrapper[4762]: I1009 15:13:33.508244 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tfr42\" (UniqueName: \"kubernetes.io/projected/35e5a3da-9dde-4bae-87bb-e24ed6f5a38b-kube-api-access-tfr42\") pod \"35e5a3da-9dde-4bae-87bb-e24ed6f5a38b\" (UID: \"35e5a3da-9dde-4bae-87bb-e24ed6f5a38b\") " Oct 09 15:13:33 crc kubenswrapper[4762]: I1009 15:13:33.523216 4762 scope.go:117] "RemoveContainer" containerID="fa6b4da390bfd42ec3bc81b8c0421f1afa415a262d97c43ed88f5f7e4b672e21" Oct 09 15:13:33 crc kubenswrapper[4762]: I1009 15:13:33.530182 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/35e5a3da-9dde-4bae-87bb-e24ed6f5a38b-kube-api-access-tfr42" (OuterVolumeSpecName: "kube-api-access-tfr42") pod "35e5a3da-9dde-4bae-87bb-e24ed6f5a38b" (UID: "35e5a3da-9dde-4bae-87bb-e24ed6f5a38b"). InnerVolumeSpecName "kube-api-access-tfr42". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 15:13:33 crc kubenswrapper[4762]: I1009 15:13:33.572514 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/35e5a3da-9dde-4bae-87bb-e24ed6f5a38b-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "35e5a3da-9dde-4bae-87bb-e24ed6f5a38b" (UID: "35e5a3da-9dde-4bae-87bb-e24ed6f5a38b"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 15:13:33 crc kubenswrapper[4762]: I1009 15:13:33.580252 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/35e5a3da-9dde-4bae-87bb-e24ed6f5a38b-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "35e5a3da-9dde-4bae-87bb-e24ed6f5a38b" (UID: "35e5a3da-9dde-4bae-87bb-e24ed6f5a38b"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 15:13:33 crc kubenswrapper[4762]: I1009 15:13:33.590310 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/35e5a3da-9dde-4bae-87bb-e24ed6f5a38b-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "35e5a3da-9dde-4bae-87bb-e24ed6f5a38b" (UID: "35e5a3da-9dde-4bae-87bb-e24ed6f5a38b"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 15:13:33 crc kubenswrapper[4762]: I1009 15:13:33.611746 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tfr42\" (UniqueName: \"kubernetes.io/projected/35e5a3da-9dde-4bae-87bb-e24ed6f5a38b-kube-api-access-tfr42\") on node \"crc\" DevicePath \"\"" Oct 09 15:13:33 crc kubenswrapper[4762]: I1009 15:13:33.611778 4762 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/35e5a3da-9dde-4bae-87bb-e24ed6f5a38b-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 09 15:13:33 crc kubenswrapper[4762]: I1009 15:13:33.611788 4762 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/35e5a3da-9dde-4bae-87bb-e24ed6f5a38b-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 09 15:13:33 crc kubenswrapper[4762]: I1009 15:13:33.611798 4762 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/35e5a3da-9dde-4bae-87bb-e24ed6f5a38b-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 09 15:13:33 crc kubenswrapper[4762]: I1009 15:13:33.613539 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/35e5a3da-9dde-4bae-87bb-e24ed6f5a38b-config" (OuterVolumeSpecName: "config") pod "35e5a3da-9dde-4bae-87bb-e24ed6f5a38b" (UID: "35e5a3da-9dde-4bae-87bb-e24ed6f5a38b"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 15:13:33 crc kubenswrapper[4762]: I1009 15:13:33.713836 4762 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/35e5a3da-9dde-4bae-87bb-e24ed6f5a38b-config\") on node \"crc\" DevicePath \"\"" Oct 09 15:13:33 crc kubenswrapper[4762]: I1009 15:13:33.830091 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-694cc885dc-ccw4h"] Oct 09 15:13:33 crc kubenswrapper[4762]: I1009 15:13:33.839023 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-694cc885dc-ccw4h"] Oct 09 15:13:34 crc kubenswrapper[4762]: I1009 15:13:34.645086 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 09 15:13:34 crc kubenswrapper[4762]: I1009 15:13:34.645743 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="d2aace88-df6f-4158-8062-66bfb82a54e5" containerName="ceilometer-central-agent" containerID="cri-o://c7a1e98749e8db06144d443eb938dc6016cd01492ad3d101434c0b1d63163cdf" gracePeriod=30 Oct 09 15:13:34 crc kubenswrapper[4762]: I1009 15:13:34.646264 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="d2aace88-df6f-4158-8062-66bfb82a54e5" containerName="proxy-httpd" containerID="cri-o://83bb14ab2f1bca988fc6f192db2c2588506bfc5fc9927d20b9c3a5575e7b1dd2" gracePeriod=30 Oct 09 15:13:34 crc kubenswrapper[4762]: I1009 15:13:34.646328 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="d2aace88-df6f-4158-8062-66bfb82a54e5" containerName="sg-core" containerID="cri-o://ece7e3e3d61c8e388cce7606cff35602b1dd34013208ef72a103652274959a2d" gracePeriod=30 Oct 09 15:13:34 crc kubenswrapper[4762]: I1009 15:13:34.646380 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="d2aace88-df6f-4158-8062-66bfb82a54e5" containerName="ceilometer-notification-agent" containerID="cri-o://b2f46e696cca0167f76651362afb3bab83b00ef9177a65e3fca3e9bb08362ce9" gracePeriod=30 Oct 09 15:13:34 crc kubenswrapper[4762]: I1009 15:13:34.983502 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="35e5a3da-9dde-4bae-87bb-e24ed6f5a38b" path="/var/lib/kubelet/pods/35e5a3da-9dde-4bae-87bb-e24ed6f5a38b/volumes" Oct 09 15:13:35 crc kubenswrapper[4762]: I1009 15:13:35.512833 4762 generic.go:334] "Generic (PLEG): container finished" podID="d2aace88-df6f-4158-8062-66bfb82a54e5" containerID="83bb14ab2f1bca988fc6f192db2c2588506bfc5fc9927d20b9c3a5575e7b1dd2" exitCode=0 Oct 09 15:13:35 crc kubenswrapper[4762]: I1009 15:13:35.513174 4762 generic.go:334] "Generic (PLEG): container finished" podID="d2aace88-df6f-4158-8062-66bfb82a54e5" containerID="ece7e3e3d61c8e388cce7606cff35602b1dd34013208ef72a103652274959a2d" exitCode=2 Oct 09 15:13:35 crc kubenswrapper[4762]: I1009 15:13:35.513189 4762 generic.go:334] "Generic (PLEG): container finished" podID="d2aace88-df6f-4158-8062-66bfb82a54e5" containerID="c7a1e98749e8db06144d443eb938dc6016cd01492ad3d101434c0b1d63163cdf" exitCode=0 Oct 09 15:13:35 crc kubenswrapper[4762]: I1009 15:13:35.513214 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d2aace88-df6f-4158-8062-66bfb82a54e5","Type":"ContainerDied","Data":"83bb14ab2f1bca988fc6f192db2c2588506bfc5fc9927d20b9c3a5575e7b1dd2"} Oct 09 15:13:35 crc kubenswrapper[4762]: I1009 
15:13:35.513249 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d2aace88-df6f-4158-8062-66bfb82a54e5","Type":"ContainerDied","Data":"ece7e3e3d61c8e388cce7606cff35602b1dd34013208ef72a103652274959a2d"} Oct 09 15:13:35 crc kubenswrapper[4762]: I1009 15:13:35.513264 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d2aace88-df6f-4158-8062-66bfb82a54e5","Type":"ContainerDied","Data":"c7a1e98749e8db06144d443eb938dc6016cd01492ad3d101434c0b1d63163cdf"} Oct 09 15:13:35 crc kubenswrapper[4762]: I1009 15:13:35.632669 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/manila-scheduler-0" Oct 09 15:13:35 crc kubenswrapper[4762]: I1009 15:13:35.934203 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/manila-api-0" Oct 09 15:13:36 crc kubenswrapper[4762]: I1009 15:13:36.075073 4762 scope.go:117] "RemoveContainer" containerID="543196df987c8252cd5630ac25328740c69421308c411e049d84bf7774e4fd3e" Oct 09 15:13:41 crc kubenswrapper[4762]: I1009 15:13:41.969644 4762 patch_prober.go:28] interesting pod/machine-config-daemon-5v6hv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 15:13:41 crc kubenswrapper[4762]: I1009 15:13:41.971372 4762 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 15:13:44 crc kubenswrapper[4762]: I1009 15:13:44.630922 4762 generic.go:334] "Generic (PLEG): container finished" podID="d2aace88-df6f-4158-8062-66bfb82a54e5" containerID="b2f46e696cca0167f76651362afb3bab83b00ef9177a65e3fca3e9bb08362ce9" exitCode=0 Oct 09 15:13:44 crc kubenswrapper[4762]: I1009 15:13:44.631513 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d2aace88-df6f-4158-8062-66bfb82a54e5","Type":"ContainerDied","Data":"b2f46e696cca0167f76651362afb3bab83b00ef9177a65e3fca3e9bb08362ce9"} Oct 09 15:13:45 crc kubenswrapper[4762]: I1009 15:13:45.161820 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 09 15:13:45 crc kubenswrapper[4762]: I1009 15:13:45.349207 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d2aace88-df6f-4158-8062-66bfb82a54e5-run-httpd\") pod \"d2aace88-df6f-4158-8062-66bfb82a54e5\" (UID: \"d2aace88-df6f-4158-8062-66bfb82a54e5\") " Oct 09 15:13:45 crc kubenswrapper[4762]: I1009 15:13:45.349579 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d2aace88-df6f-4158-8062-66bfb82a54e5-log-httpd\") pod \"d2aace88-df6f-4158-8062-66bfb82a54e5\" (UID: \"d2aace88-df6f-4158-8062-66bfb82a54e5\") " Oct 09 15:13:45 crc kubenswrapper[4762]: I1009 15:13:45.349652 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fxqp4\" (UniqueName: \"kubernetes.io/projected/d2aace88-df6f-4158-8062-66bfb82a54e5-kube-api-access-fxqp4\") pod \"d2aace88-df6f-4158-8062-66bfb82a54e5\" (UID: \"d2aace88-df6f-4158-8062-66bfb82a54e5\") " Oct 09 15:13:45 crc kubenswrapper[4762]: I1009 15:13:45.349675 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2aace88-df6f-4158-8062-66bfb82a54e5-combined-ca-bundle\") pod \"d2aace88-df6f-4158-8062-66bfb82a54e5\" (UID: \"d2aace88-df6f-4158-8062-66bfb82a54e5\") " Oct 09 15:13:45 crc kubenswrapper[4762]: I1009 15:13:45.349709 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d2aace88-df6f-4158-8062-66bfb82a54e5-scripts\") pod \"d2aace88-df6f-4158-8062-66bfb82a54e5\" (UID: \"d2aace88-df6f-4158-8062-66bfb82a54e5\") " Oct 09 15:13:45 crc kubenswrapper[4762]: I1009 15:13:45.349749 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d2aace88-df6f-4158-8062-66bfb82a54e5-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "d2aace88-df6f-4158-8062-66bfb82a54e5" (UID: "d2aace88-df6f-4158-8062-66bfb82a54e5"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 15:13:45 crc kubenswrapper[4762]: I1009 15:13:45.349896 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d2aace88-df6f-4158-8062-66bfb82a54e5-config-data\") pod \"d2aace88-df6f-4158-8062-66bfb82a54e5\" (UID: \"d2aace88-df6f-4158-8062-66bfb82a54e5\") " Oct 09 15:13:45 crc kubenswrapper[4762]: I1009 15:13:45.349935 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d2aace88-df6f-4158-8062-66bfb82a54e5-sg-core-conf-yaml\") pod \"d2aace88-df6f-4158-8062-66bfb82a54e5\" (UID: \"d2aace88-df6f-4158-8062-66bfb82a54e5\") " Oct 09 15:13:45 crc kubenswrapper[4762]: I1009 15:13:45.350573 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d2aace88-df6f-4158-8062-66bfb82a54e5-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "d2aace88-df6f-4158-8062-66bfb82a54e5" (UID: "d2aace88-df6f-4158-8062-66bfb82a54e5"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 15:13:45 crc kubenswrapper[4762]: I1009 15:13:45.351090 4762 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d2aace88-df6f-4158-8062-66bfb82a54e5-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 09 15:13:45 crc kubenswrapper[4762]: I1009 15:13:45.351109 4762 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d2aace88-df6f-4158-8062-66bfb82a54e5-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 09 15:13:45 crc kubenswrapper[4762]: I1009 15:13:45.369581 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d2aace88-df6f-4158-8062-66bfb82a54e5-kube-api-access-fxqp4" (OuterVolumeSpecName: "kube-api-access-fxqp4") pod "d2aace88-df6f-4158-8062-66bfb82a54e5" (UID: "d2aace88-df6f-4158-8062-66bfb82a54e5"). InnerVolumeSpecName "kube-api-access-fxqp4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 15:13:45 crc kubenswrapper[4762]: I1009 15:13:45.380891 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d2aace88-df6f-4158-8062-66bfb82a54e5-scripts" (OuterVolumeSpecName: "scripts") pod "d2aace88-df6f-4158-8062-66bfb82a54e5" (UID: "d2aace88-df6f-4158-8062-66bfb82a54e5"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:13:45 crc kubenswrapper[4762]: I1009 15:13:45.407081 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d2aace88-df6f-4158-8062-66bfb82a54e5-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "d2aace88-df6f-4158-8062-66bfb82a54e5" (UID: "d2aace88-df6f-4158-8062-66bfb82a54e5"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:13:45 crc kubenswrapper[4762]: I1009 15:13:45.430479 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d2aace88-df6f-4158-8062-66bfb82a54e5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d2aace88-df6f-4158-8062-66bfb82a54e5" (UID: "d2aace88-df6f-4158-8062-66bfb82a54e5"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:13:45 crc kubenswrapper[4762]: I1009 15:13:45.453746 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fxqp4\" (UniqueName: \"kubernetes.io/projected/d2aace88-df6f-4158-8062-66bfb82a54e5-kube-api-access-fxqp4\") on node \"crc\" DevicePath \"\"" Oct 09 15:13:45 crc kubenswrapper[4762]: I1009 15:13:45.453784 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2aace88-df6f-4158-8062-66bfb82a54e5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 15:13:45 crc kubenswrapper[4762]: I1009 15:13:45.453797 4762 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d2aace88-df6f-4158-8062-66bfb82a54e5-scripts\") on node \"crc\" DevicePath \"\"" Oct 09 15:13:45 crc kubenswrapper[4762]: I1009 15:13:45.453808 4762 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d2aace88-df6f-4158-8062-66bfb82a54e5-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 09 15:13:45 crc kubenswrapper[4762]: I1009 15:13:45.459102 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d2aace88-df6f-4158-8062-66bfb82a54e5-config-data" (OuterVolumeSpecName: "config-data") pod "d2aace88-df6f-4158-8062-66bfb82a54e5" (UID: "d2aace88-df6f-4158-8062-66bfb82a54e5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:13:45 crc kubenswrapper[4762]: I1009 15:13:45.556068 4762 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d2aace88-df6f-4158-8062-66bfb82a54e5-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 15:13:45 crc kubenswrapper[4762]: I1009 15:13:45.653375 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d2aace88-df6f-4158-8062-66bfb82a54e5","Type":"ContainerDied","Data":"59e4a2f3a92378e93ba951688950441de49b536b2883cc617147d36dd55ffdd0"} Oct 09 15:13:45 crc kubenswrapper[4762]: I1009 15:13:45.653441 4762 scope.go:117] "RemoveContainer" containerID="83bb14ab2f1bca988fc6f192db2c2588506bfc5fc9927d20b9c3a5575e7b1dd2" Oct 09 15:13:45 crc kubenswrapper[4762]: I1009 15:13:45.653680 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 09 15:13:45 crc kubenswrapper[4762]: I1009 15:13:45.658705 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"681131ae-c6fd-457b-b4a2-5605d5e08f69","Type":"ContainerStarted","Data":"d9386dacdef83de79d672d8631c916351f5b9421727e43c8add1c594b367a703"} Oct 09 15:13:45 crc kubenswrapper[4762]: I1009 15:13:45.687360 4762 scope.go:117] "RemoveContainer" containerID="ece7e3e3d61c8e388cce7606cff35602b1dd34013208ef72a103652274959a2d" Oct 09 15:13:45 crc kubenswrapper[4762]: I1009 15:13:45.712320 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 09 15:13:45 crc kubenswrapper[4762]: I1009 15:13:45.729940 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 09 15:13:45 crc kubenswrapper[4762]: I1009 15:13:45.730322 4762 scope.go:117] "RemoveContainer" containerID="b2f46e696cca0167f76651362afb3bab83b00ef9177a65e3fca3e9bb08362ce9" Oct 09 15:13:45 crc kubenswrapper[4762]: I1009 15:13:45.747866 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 09 15:13:45 crc kubenswrapper[4762]: E1009 15:13:45.749258 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35e5a3da-9dde-4bae-87bb-e24ed6f5a38b" containerName="dnsmasq-dns" Oct 09 15:13:45 crc kubenswrapper[4762]: I1009 15:13:45.749288 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="35e5a3da-9dde-4bae-87bb-e24ed6f5a38b" containerName="dnsmasq-dns" Oct 09 15:13:45 crc kubenswrapper[4762]: E1009 15:13:45.749307 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d2aace88-df6f-4158-8062-66bfb82a54e5" containerName="ceilometer-central-agent" Oct 09 15:13:45 crc kubenswrapper[4762]: I1009 15:13:45.749315 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="d2aace88-df6f-4158-8062-66bfb82a54e5" containerName="ceilometer-central-agent" Oct 09 15:13:45 crc kubenswrapper[4762]: E1009 15:13:45.749335 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35e5a3da-9dde-4bae-87bb-e24ed6f5a38b" containerName="init" Oct 09 15:13:45 crc kubenswrapper[4762]: I1009 15:13:45.749343 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="35e5a3da-9dde-4bae-87bb-e24ed6f5a38b" containerName="init" Oct 09 15:13:45 crc kubenswrapper[4762]: E1009 15:13:45.749355 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d2aace88-df6f-4158-8062-66bfb82a54e5" containerName="sg-core" Oct 09 15:13:45 crc kubenswrapper[4762]: I1009 15:13:45.749362 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="d2aace88-df6f-4158-8062-66bfb82a54e5" containerName="sg-core" Oct 09 15:13:45 crc kubenswrapper[4762]: E1009 15:13:45.749389 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d2aace88-df6f-4158-8062-66bfb82a54e5" containerName="ceilometer-notification-agent" Oct 09 15:13:45 crc kubenswrapper[4762]: I1009 15:13:45.749397 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="d2aace88-df6f-4158-8062-66bfb82a54e5" containerName="ceilometer-notification-agent" Oct 09 15:13:45 crc kubenswrapper[4762]: E1009 15:13:45.749426 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d2aace88-df6f-4158-8062-66bfb82a54e5" containerName="proxy-httpd" Oct 09 15:13:45 crc kubenswrapper[4762]: I1009 15:13:45.749435 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="d2aace88-df6f-4158-8062-66bfb82a54e5" containerName="proxy-httpd" Oct 09 15:13:45 crc 
kubenswrapper[4762]: I1009 15:13:45.761245 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="d2aace88-df6f-4158-8062-66bfb82a54e5" containerName="ceilometer-notification-agent" Oct 09 15:13:45 crc kubenswrapper[4762]: I1009 15:13:45.761667 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="d2aace88-df6f-4158-8062-66bfb82a54e5" containerName="ceilometer-central-agent" Oct 09 15:13:45 crc kubenswrapper[4762]: I1009 15:13:45.761680 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="d2aace88-df6f-4158-8062-66bfb82a54e5" containerName="proxy-httpd" Oct 09 15:13:45 crc kubenswrapper[4762]: I1009 15:13:45.761712 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="35e5a3da-9dde-4bae-87bb-e24ed6f5a38b" containerName="dnsmasq-dns" Oct 09 15:13:45 crc kubenswrapper[4762]: I1009 15:13:45.761724 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="d2aace88-df6f-4158-8062-66bfb82a54e5" containerName="sg-core" Oct 09 15:13:45 crc kubenswrapper[4762]: I1009 15:13:45.764376 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 09 15:13:45 crc kubenswrapper[4762]: I1009 15:13:45.764508 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 09 15:13:45 crc kubenswrapper[4762]: I1009 15:13:45.775154 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 09 15:13:45 crc kubenswrapper[4762]: I1009 15:13:45.775941 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 09 15:13:45 crc kubenswrapper[4762]: I1009 15:13:45.813595 4762 scope.go:117] "RemoveContainer" containerID="c7a1e98749e8db06144d443eb938dc6016cd01492ad3d101434c0b1d63163cdf" Oct 09 15:13:45 crc kubenswrapper[4762]: I1009 15:13:45.870667 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cb316210-6500-47c6-956c-58c488a90b28-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"cb316210-6500-47c6-956c-58c488a90b28\") " pod="openstack/ceilometer-0" Oct 09 15:13:45 crc kubenswrapper[4762]: I1009 15:13:45.871237 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8dtc9\" (UniqueName: \"kubernetes.io/projected/cb316210-6500-47c6-956c-58c488a90b28-kube-api-access-8dtc9\") pod \"ceilometer-0\" (UID: \"cb316210-6500-47c6-956c-58c488a90b28\") " pod="openstack/ceilometer-0" Oct 09 15:13:45 crc kubenswrapper[4762]: I1009 15:13:45.871304 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cb316210-6500-47c6-956c-58c488a90b28-run-httpd\") pod \"ceilometer-0\" (UID: \"cb316210-6500-47c6-956c-58c488a90b28\") " pod="openstack/ceilometer-0" Oct 09 15:13:45 crc kubenswrapper[4762]: I1009 15:13:45.871343 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cb316210-6500-47c6-956c-58c488a90b28-scripts\") pod \"ceilometer-0\" (UID: \"cb316210-6500-47c6-956c-58c488a90b28\") " pod="openstack/ceilometer-0" Oct 09 15:13:45 crc kubenswrapper[4762]: I1009 15:13:45.871372 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: 
\"kubernetes.io/secret/cb316210-6500-47c6-956c-58c488a90b28-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"cb316210-6500-47c6-956c-58c488a90b28\") " pod="openstack/ceilometer-0" Oct 09 15:13:45 crc kubenswrapper[4762]: I1009 15:13:45.871561 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cb316210-6500-47c6-956c-58c488a90b28-log-httpd\") pod \"ceilometer-0\" (UID: \"cb316210-6500-47c6-956c-58c488a90b28\") " pod="openstack/ceilometer-0" Oct 09 15:13:45 crc kubenswrapper[4762]: I1009 15:13:45.871616 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cb316210-6500-47c6-956c-58c488a90b28-config-data\") pod \"ceilometer-0\" (UID: \"cb316210-6500-47c6-956c-58c488a90b28\") " pod="openstack/ceilometer-0" Oct 09 15:13:45 crc kubenswrapper[4762]: I1009 15:13:45.973569 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cb316210-6500-47c6-956c-58c488a90b28-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"cb316210-6500-47c6-956c-58c488a90b28\") " pod="openstack/ceilometer-0" Oct 09 15:13:45 crc kubenswrapper[4762]: I1009 15:13:45.975274 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8dtc9\" (UniqueName: \"kubernetes.io/projected/cb316210-6500-47c6-956c-58c488a90b28-kube-api-access-8dtc9\") pod \"ceilometer-0\" (UID: \"cb316210-6500-47c6-956c-58c488a90b28\") " pod="openstack/ceilometer-0" Oct 09 15:13:45 crc kubenswrapper[4762]: I1009 15:13:45.975753 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cb316210-6500-47c6-956c-58c488a90b28-run-httpd\") pod \"ceilometer-0\" (UID: \"cb316210-6500-47c6-956c-58c488a90b28\") " pod="openstack/ceilometer-0" Oct 09 15:13:45 crc kubenswrapper[4762]: I1009 15:13:45.976155 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cb316210-6500-47c6-956c-58c488a90b28-run-httpd\") pod \"ceilometer-0\" (UID: \"cb316210-6500-47c6-956c-58c488a90b28\") " pod="openstack/ceilometer-0" Oct 09 15:13:45 crc kubenswrapper[4762]: I1009 15:13:45.976232 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cb316210-6500-47c6-956c-58c488a90b28-scripts\") pod \"ceilometer-0\" (UID: \"cb316210-6500-47c6-956c-58c488a90b28\") " pod="openstack/ceilometer-0" Oct 09 15:13:45 crc kubenswrapper[4762]: I1009 15:13:45.976625 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/cb316210-6500-47c6-956c-58c488a90b28-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"cb316210-6500-47c6-956c-58c488a90b28\") " pod="openstack/ceilometer-0" Oct 09 15:13:45 crc kubenswrapper[4762]: I1009 15:13:45.976814 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cb316210-6500-47c6-956c-58c488a90b28-log-httpd\") pod \"ceilometer-0\" (UID: \"cb316210-6500-47c6-956c-58c488a90b28\") " pod="openstack/ceilometer-0" Oct 09 15:13:45 crc kubenswrapper[4762]: I1009 15:13:45.976847 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/cb316210-6500-47c6-956c-58c488a90b28-config-data\") pod \"ceilometer-0\" (UID: \"cb316210-6500-47c6-956c-58c488a90b28\") " pod="openstack/ceilometer-0" Oct 09 15:13:45 crc kubenswrapper[4762]: I1009 15:13:45.977848 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cb316210-6500-47c6-956c-58c488a90b28-log-httpd\") pod \"ceilometer-0\" (UID: \"cb316210-6500-47c6-956c-58c488a90b28\") " pod="openstack/ceilometer-0" Oct 09 15:13:45 crc kubenswrapper[4762]: I1009 15:13:45.978364 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cb316210-6500-47c6-956c-58c488a90b28-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"cb316210-6500-47c6-956c-58c488a90b28\") " pod="openstack/ceilometer-0" Oct 09 15:13:45 crc kubenswrapper[4762]: I1009 15:13:45.980939 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cb316210-6500-47c6-956c-58c488a90b28-scripts\") pod \"ceilometer-0\" (UID: \"cb316210-6500-47c6-956c-58c488a90b28\") " pod="openstack/ceilometer-0" Oct 09 15:13:45 crc kubenswrapper[4762]: I1009 15:13:45.981256 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/cb316210-6500-47c6-956c-58c488a90b28-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"cb316210-6500-47c6-956c-58c488a90b28\") " pod="openstack/ceilometer-0" Oct 09 15:13:45 crc kubenswrapper[4762]: I1009 15:13:45.982186 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cb316210-6500-47c6-956c-58c488a90b28-config-data\") pod \"ceilometer-0\" (UID: \"cb316210-6500-47c6-956c-58c488a90b28\") " pod="openstack/ceilometer-0" Oct 09 15:13:46 crc kubenswrapper[4762]: I1009 15:13:46.002039 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8dtc9\" (UniqueName: \"kubernetes.io/projected/cb316210-6500-47c6-956c-58c488a90b28-kube-api-access-8dtc9\") pod \"ceilometer-0\" (UID: \"cb316210-6500-47c6-956c-58c488a90b28\") " pod="openstack/ceilometer-0" Oct 09 15:13:46 crc kubenswrapper[4762]: I1009 15:13:46.090163 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 09 15:13:46 crc kubenswrapper[4762]: I1009 15:13:46.593060 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 09 15:13:46 crc kubenswrapper[4762]: W1009 15:13:46.639611 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcb316210_6500_47c6_956c_58c488a90b28.slice/crio-ab0a587cc3609a0b7ad4b39e5906f757237169f34b20dc3dbc0cb930e92ad240 WatchSource:0}: Error finding container ab0a587cc3609a0b7ad4b39e5906f757237169f34b20dc3dbc0cb930e92ad240: Status 404 returned error can't find the container with id ab0a587cc3609a0b7ad4b39e5906f757237169f34b20dc3dbc0cb930e92ad240 Oct 09 15:13:46 crc kubenswrapper[4762]: I1009 15:13:46.672693 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cb316210-6500-47c6-956c-58c488a90b28","Type":"ContainerStarted","Data":"ab0a587cc3609a0b7ad4b39e5906f757237169f34b20dc3dbc0cb930e92ad240"} Oct 09 15:13:46 crc kubenswrapper[4762]: I1009 15:13:46.983535 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d2aace88-df6f-4158-8062-66bfb82a54e5" path="/var/lib/kubelet/pods/d2aace88-df6f-4158-8062-66bfb82a54e5/volumes" Oct 09 15:13:47 crc kubenswrapper[4762]: I1009 15:13:47.684150 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"681131ae-c6fd-457b-b4a2-5605d5e08f69","Type":"ContainerStarted","Data":"8d923ca1788b4fe4127e0a564640ee7a6e5f2164eab55d12288e699f15967b6c"} Oct 09 15:13:47 crc kubenswrapper[4762]: I1009 15:13:47.705125 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-share-share1-0" podStartSLOduration=4.445947637 podStartE2EDuration="34.70510903s" podCreationTimestamp="2025-10-09 15:13:13 +0000 UTC" firstStartedPulling="2025-10-09 15:13:14.75307607 +0000 UTC m=+6470.526867119" lastFinishedPulling="2025-10-09 15:13:45.012237473 +0000 UTC m=+6500.786028512" observedRunningTime="2025-10-09 15:13:47.698971479 +0000 UTC m=+6503.472762528" watchObservedRunningTime="2025-10-09 15:13:47.70510903 +0000 UTC m=+6503.478900069" Oct 09 15:13:50 crc kubenswrapper[4762]: I1009 15:13:50.736595 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cb316210-6500-47c6-956c-58c488a90b28","Type":"ContainerStarted","Data":"0d76d7995328e4fc59d3efc4abe3d00350181d94ae470c3e44f1ea2af67640ac"} Oct 09 15:13:52 crc kubenswrapper[4762]: I1009 15:13:52.762029 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cb316210-6500-47c6-956c-58c488a90b28","Type":"ContainerStarted","Data":"65f2be2cdc7060a1e1364d6a904c688d7011608369d06a2c5cb33af09303cda3"} Oct 09 15:13:53 crc kubenswrapper[4762]: I1009 15:13:53.933190 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/manila-share-share1-0" Oct 09 15:13:54 crc kubenswrapper[4762]: I1009 15:13:54.782366 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cb316210-6500-47c6-956c-58c488a90b28","Type":"ContainerStarted","Data":"1227ab4ef09b574bda1f565c1c1818d9ce87c0a3846b9e26c3a4f6a30a868047"} Oct 09 15:13:57 crc kubenswrapper[4762]: I1009 15:13:57.821217 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"cb316210-6500-47c6-956c-58c488a90b28","Type":"ContainerStarted","Data":"7fd8b69e74d1e98dc13d2e21a749ac0f35f244eeba2f5cc3911d42c262393a69"} Oct 09 15:13:57 crc kubenswrapper[4762]: I1009 15:13:57.821960 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 09 15:13:57 crc kubenswrapper[4762]: I1009 15:13:57.845749 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.608809851 podStartE2EDuration="12.845727682s" podCreationTimestamp="2025-10-09 15:13:45 +0000 UTC" firstStartedPulling="2025-10-09 15:13:46.641576271 +0000 UTC m=+6502.415367310" lastFinishedPulling="2025-10-09 15:13:56.878494112 +0000 UTC m=+6512.652285141" observedRunningTime="2025-10-09 15:13:57.843219428 +0000 UTC m=+6513.617010467" watchObservedRunningTime="2025-10-09 15:13:57.845727682 +0000 UTC m=+6513.619518721" Oct 09 15:14:05 crc kubenswrapper[4762]: I1009 15:14:05.453858 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/manila-share-share1-0" Oct 09 15:14:11 crc kubenswrapper[4762]: I1009 15:14:11.969563 4762 patch_prober.go:28] interesting pod/machine-config-daemon-5v6hv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 15:14:11 crc kubenswrapper[4762]: I1009 15:14:11.970179 4762 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 15:14:11 crc kubenswrapper[4762]: I1009 15:14:11.970232 4762 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" Oct 09 15:14:11 crc kubenswrapper[4762]: I1009 15:14:11.970893 4762 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"6dfc6618e94878981ab72abe0029ff9f6296087febe811d4f6b3bbf2f5ecbf05"} pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 09 15:14:11 crc kubenswrapper[4762]: I1009 15:14:11.970948 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" containerID="cri-o://6dfc6618e94878981ab72abe0029ff9f6296087febe811d4f6b3bbf2f5ecbf05" gracePeriod=600 Oct 09 15:14:12 crc kubenswrapper[4762]: I1009 15:14:12.963828 4762 generic.go:334] "Generic (PLEG): container finished" podID="366049a3-acf6-488c-9f93-4557528d6d14" containerID="6dfc6618e94878981ab72abe0029ff9f6296087febe811d4f6b3bbf2f5ecbf05" exitCode=0 Oct 09 15:14:12 crc kubenswrapper[4762]: I1009 15:14:12.963924 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" event={"ID":"366049a3-acf6-488c-9f93-4557528d6d14","Type":"ContainerDied","Data":"6dfc6618e94878981ab72abe0029ff9f6296087febe811d4f6b3bbf2f5ecbf05"} Oct 09 15:14:12 crc kubenswrapper[4762]: I1009 15:14:12.964512 4762 scope.go:117] 
"RemoveContainer" containerID="35c7aebddc7d19d2cc79200fbf40a9f94ad48013c10b612bd476e919aed06c38" Oct 09 15:14:12 crc kubenswrapper[4762]: I1009 15:14:12.980481 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" event={"ID":"366049a3-acf6-488c-9f93-4557528d6d14","Type":"ContainerStarted","Data":"8e2d4f723542598f292724fd00d9cd4248c9344d342367b4808c9027b72408a1"} Oct 09 15:14:16 crc kubenswrapper[4762]: I1009 15:14:16.095739 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Oct 09 15:14:36 crc kubenswrapper[4762]: I1009 15:14:36.266842 4762 scope.go:117] "RemoveContainer" containerID="bac4754b7520d66c9513310eb838cd888699f008020fbbe299e9aeeb2def4984" Oct 09 15:14:36 crc kubenswrapper[4762]: I1009 15:14:36.322458 4762 scope.go:117] "RemoveContainer" containerID="4de5ed046823a6fd5603590309ae6b0c7ce1c94661cc116d1c21e2e0b17ebbdc" Oct 09 15:14:48 crc kubenswrapper[4762]: I1009 15:14:48.818461 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-686dc8c44c-wdgdz"] Oct 09 15:14:48 crc kubenswrapper[4762]: I1009 15:14:48.822223 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-686dc8c44c-wdgdz" Oct 09 15:14:48 crc kubenswrapper[4762]: I1009 15:14:48.824712 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1" Oct 09 15:14:48 crc kubenswrapper[4762]: I1009 15:14:48.841398 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-686dc8c44c-wdgdz"] Oct 09 15:14:48 crc kubenswrapper[4762]: I1009 15:14:48.992033 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/e4988923-31d0-4552-86fc-20d97114cbcb-openstack-cell1\") pod \"dnsmasq-dns-686dc8c44c-wdgdz\" (UID: \"e4988923-31d0-4552-86fc-20d97114cbcb\") " pod="openstack/dnsmasq-dns-686dc8c44c-wdgdz" Oct 09 15:14:48 crc kubenswrapper[4762]: I1009 15:14:48.992086 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e4988923-31d0-4552-86fc-20d97114cbcb-ovsdbserver-nb\") pod \"dnsmasq-dns-686dc8c44c-wdgdz\" (UID: \"e4988923-31d0-4552-86fc-20d97114cbcb\") " pod="openstack/dnsmasq-dns-686dc8c44c-wdgdz" Oct 09 15:14:48 crc kubenswrapper[4762]: I1009 15:14:48.992122 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e4988923-31d0-4552-86fc-20d97114cbcb-config\") pod \"dnsmasq-dns-686dc8c44c-wdgdz\" (UID: \"e4988923-31d0-4552-86fc-20d97114cbcb\") " pod="openstack/dnsmasq-dns-686dc8c44c-wdgdz" Oct 09 15:14:48 crc kubenswrapper[4762]: I1009 15:14:48.992142 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e4988923-31d0-4552-86fc-20d97114cbcb-ovsdbserver-sb\") pod \"dnsmasq-dns-686dc8c44c-wdgdz\" (UID: \"e4988923-31d0-4552-86fc-20d97114cbcb\") " pod="openstack/dnsmasq-dns-686dc8c44c-wdgdz" Oct 09 15:14:48 crc kubenswrapper[4762]: I1009 15:14:48.992233 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rxv24\" (UniqueName: \"kubernetes.io/projected/e4988923-31d0-4552-86fc-20d97114cbcb-kube-api-access-rxv24\") pod 
\"dnsmasq-dns-686dc8c44c-wdgdz\" (UID: \"e4988923-31d0-4552-86fc-20d97114cbcb\") " pod="openstack/dnsmasq-dns-686dc8c44c-wdgdz" Oct 09 15:14:48 crc kubenswrapper[4762]: I1009 15:14:48.992598 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e4988923-31d0-4552-86fc-20d97114cbcb-dns-svc\") pod \"dnsmasq-dns-686dc8c44c-wdgdz\" (UID: \"e4988923-31d0-4552-86fc-20d97114cbcb\") " pod="openstack/dnsmasq-dns-686dc8c44c-wdgdz" Oct 09 15:14:49 crc kubenswrapper[4762]: I1009 15:14:49.094568 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e4988923-31d0-4552-86fc-20d97114cbcb-dns-svc\") pod \"dnsmasq-dns-686dc8c44c-wdgdz\" (UID: \"e4988923-31d0-4552-86fc-20d97114cbcb\") " pod="openstack/dnsmasq-dns-686dc8c44c-wdgdz" Oct 09 15:14:49 crc kubenswrapper[4762]: I1009 15:14:49.094719 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/e4988923-31d0-4552-86fc-20d97114cbcb-openstack-cell1\") pod \"dnsmasq-dns-686dc8c44c-wdgdz\" (UID: \"e4988923-31d0-4552-86fc-20d97114cbcb\") " pod="openstack/dnsmasq-dns-686dc8c44c-wdgdz" Oct 09 15:14:49 crc kubenswrapper[4762]: I1009 15:14:49.094810 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e4988923-31d0-4552-86fc-20d97114cbcb-ovsdbserver-nb\") pod \"dnsmasq-dns-686dc8c44c-wdgdz\" (UID: \"e4988923-31d0-4552-86fc-20d97114cbcb\") " pod="openstack/dnsmasq-dns-686dc8c44c-wdgdz" Oct 09 15:14:49 crc kubenswrapper[4762]: I1009 15:14:49.095725 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e4988923-31d0-4552-86fc-20d97114cbcb-ovsdbserver-nb\") pod \"dnsmasq-dns-686dc8c44c-wdgdz\" (UID: \"e4988923-31d0-4552-86fc-20d97114cbcb\") " pod="openstack/dnsmasq-dns-686dc8c44c-wdgdz" Oct 09 15:14:49 crc kubenswrapper[4762]: I1009 15:14:49.095785 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/e4988923-31d0-4552-86fc-20d97114cbcb-openstack-cell1\") pod \"dnsmasq-dns-686dc8c44c-wdgdz\" (UID: \"e4988923-31d0-4552-86fc-20d97114cbcb\") " pod="openstack/dnsmasq-dns-686dc8c44c-wdgdz" Oct 09 15:14:49 crc kubenswrapper[4762]: I1009 15:14:49.095871 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e4988923-31d0-4552-86fc-20d97114cbcb-config\") pod \"dnsmasq-dns-686dc8c44c-wdgdz\" (UID: \"e4988923-31d0-4552-86fc-20d97114cbcb\") " pod="openstack/dnsmasq-dns-686dc8c44c-wdgdz" Oct 09 15:14:49 crc kubenswrapper[4762]: I1009 15:14:49.095888 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e4988923-31d0-4552-86fc-20d97114cbcb-dns-svc\") pod \"dnsmasq-dns-686dc8c44c-wdgdz\" (UID: \"e4988923-31d0-4552-86fc-20d97114cbcb\") " pod="openstack/dnsmasq-dns-686dc8c44c-wdgdz" Oct 09 15:14:49 crc kubenswrapper[4762]: I1009 15:14:49.095912 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e4988923-31d0-4552-86fc-20d97114cbcb-ovsdbserver-sb\") pod \"dnsmasq-dns-686dc8c44c-wdgdz\" (UID: \"e4988923-31d0-4552-86fc-20d97114cbcb\") " 
pod="openstack/dnsmasq-dns-686dc8c44c-wdgdz" Oct 09 15:14:49 crc kubenswrapper[4762]: I1009 15:14:49.096461 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e4988923-31d0-4552-86fc-20d97114cbcb-config\") pod \"dnsmasq-dns-686dc8c44c-wdgdz\" (UID: \"e4988923-31d0-4552-86fc-20d97114cbcb\") " pod="openstack/dnsmasq-dns-686dc8c44c-wdgdz" Oct 09 15:14:49 crc kubenswrapper[4762]: I1009 15:14:49.096537 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rxv24\" (UniqueName: \"kubernetes.io/projected/e4988923-31d0-4552-86fc-20d97114cbcb-kube-api-access-rxv24\") pod \"dnsmasq-dns-686dc8c44c-wdgdz\" (UID: \"e4988923-31d0-4552-86fc-20d97114cbcb\") " pod="openstack/dnsmasq-dns-686dc8c44c-wdgdz" Oct 09 15:14:49 crc kubenswrapper[4762]: I1009 15:14:49.096787 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e4988923-31d0-4552-86fc-20d97114cbcb-ovsdbserver-sb\") pod \"dnsmasq-dns-686dc8c44c-wdgdz\" (UID: \"e4988923-31d0-4552-86fc-20d97114cbcb\") " pod="openstack/dnsmasq-dns-686dc8c44c-wdgdz" Oct 09 15:14:49 crc kubenswrapper[4762]: I1009 15:14:49.118152 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rxv24\" (UniqueName: \"kubernetes.io/projected/e4988923-31d0-4552-86fc-20d97114cbcb-kube-api-access-rxv24\") pod \"dnsmasq-dns-686dc8c44c-wdgdz\" (UID: \"e4988923-31d0-4552-86fc-20d97114cbcb\") " pod="openstack/dnsmasq-dns-686dc8c44c-wdgdz" Oct 09 15:14:49 crc kubenswrapper[4762]: I1009 15:14:49.154390 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-686dc8c44c-wdgdz" Oct 09 15:14:49 crc kubenswrapper[4762]: I1009 15:14:49.651447 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-686dc8c44c-wdgdz"] Oct 09 15:14:50 crc kubenswrapper[4762]: I1009 15:14:50.339804 4762 generic.go:334] "Generic (PLEG): container finished" podID="e4988923-31d0-4552-86fc-20d97114cbcb" containerID="3103353dee6f79ef9b5d7a8b66d46b715107da0733877090d27315afae0cc473" exitCode=0 Oct 09 15:14:50 crc kubenswrapper[4762]: I1009 15:14:50.339854 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-686dc8c44c-wdgdz" event={"ID":"e4988923-31d0-4552-86fc-20d97114cbcb","Type":"ContainerDied","Data":"3103353dee6f79ef9b5d7a8b66d46b715107da0733877090d27315afae0cc473"} Oct 09 15:14:50 crc kubenswrapper[4762]: I1009 15:14:50.341305 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-686dc8c44c-wdgdz" event={"ID":"e4988923-31d0-4552-86fc-20d97114cbcb","Type":"ContainerStarted","Data":"adca079e158e56373345a6da25e51b5901d89d5c3b3dbd35bf6de2c3e78d1180"} Oct 09 15:14:51 crc kubenswrapper[4762]: I1009 15:14:51.391996 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-686dc8c44c-wdgdz" event={"ID":"e4988923-31d0-4552-86fc-20d97114cbcb","Type":"ContainerStarted","Data":"cec0f37880d4abea3eab3139b79595e64081b05416e194d46056c5c34176c6f0"} Oct 09 15:14:51 crc kubenswrapper[4762]: I1009 15:14:51.393010 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-686dc8c44c-wdgdz" Oct 09 15:14:51 crc kubenswrapper[4762]: I1009 15:14:51.416288 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-686dc8c44c-wdgdz" podStartSLOduration=3.416243639 
podStartE2EDuration="3.416243639s" podCreationTimestamp="2025-10-09 15:14:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 15:14:51.411410494 +0000 UTC m=+6567.185201533" watchObservedRunningTime="2025-10-09 15:14:51.416243639 +0000 UTC m=+6567.190034678" Oct 09 15:14:59 crc kubenswrapper[4762]: I1009 15:14:59.155804 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-686dc8c44c-wdgdz" Oct 09 15:14:59 crc kubenswrapper[4762]: I1009 15:14:59.226042 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-b797586d9-2g6s4"] Oct 09 15:14:59 crc kubenswrapper[4762]: I1009 15:14:59.226727 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-b797586d9-2g6s4" podUID="783a4681-51fc-451f-afce-a38e4fd0910c" containerName="dnsmasq-dns" containerID="cri-o://1020fbaa4b252630c8faa49513a118883b35fe2716a6099d1e7f79d1168ad552" gracePeriod=10 Oct 09 15:14:59 crc kubenswrapper[4762]: I1009 15:14:59.388566 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-74fc4ff4bc-5x75j"] Oct 09 15:14:59 crc kubenswrapper[4762]: I1009 15:14:59.398088 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-74fc4ff4bc-5x75j" Oct 09 15:14:59 crc kubenswrapper[4762]: I1009 15:14:59.416793 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-74fc4ff4bc-5x75j"] Oct 09 15:14:59 crc kubenswrapper[4762]: I1009 15:14:59.501360 4762 generic.go:334] "Generic (PLEG): container finished" podID="783a4681-51fc-451f-afce-a38e4fd0910c" containerID="1020fbaa4b252630c8faa49513a118883b35fe2716a6099d1e7f79d1168ad552" exitCode=0 Oct 09 15:14:59 crc kubenswrapper[4762]: I1009 15:14:59.501410 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b797586d9-2g6s4" event={"ID":"783a4681-51fc-451f-afce-a38e4fd0910c","Type":"ContainerDied","Data":"1020fbaa4b252630c8faa49513a118883b35fe2716a6099d1e7f79d1168ad552"} Oct 09 15:14:59 crc kubenswrapper[4762]: I1009 15:14:59.513993 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5c397658-db91-4151-b5de-2aac1e540cf1-config\") pod \"dnsmasq-dns-74fc4ff4bc-5x75j\" (UID: \"5c397658-db91-4151-b5de-2aac1e540cf1\") " pod="openstack/dnsmasq-dns-74fc4ff4bc-5x75j" Oct 09 15:14:59 crc kubenswrapper[4762]: I1009 15:14:59.514068 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5c397658-db91-4151-b5de-2aac1e540cf1-dns-svc\") pod \"dnsmasq-dns-74fc4ff4bc-5x75j\" (UID: \"5c397658-db91-4151-b5de-2aac1e540cf1\") " pod="openstack/dnsmasq-dns-74fc4ff4bc-5x75j" Oct 09 15:14:59 crc kubenswrapper[4762]: I1009 15:14:59.514362 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/5c397658-db91-4151-b5de-2aac1e540cf1-openstack-cell1\") pod \"dnsmasq-dns-74fc4ff4bc-5x75j\" (UID: \"5c397658-db91-4151-b5de-2aac1e540cf1\") " pod="openstack/dnsmasq-dns-74fc4ff4bc-5x75j" Oct 09 15:14:59 crc kubenswrapper[4762]: I1009 15:14:59.514436 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: 
\"kubernetes.io/configmap/5c397658-db91-4151-b5de-2aac1e540cf1-ovsdbserver-nb\") pod \"dnsmasq-dns-74fc4ff4bc-5x75j\" (UID: \"5c397658-db91-4151-b5de-2aac1e540cf1\") " pod="openstack/dnsmasq-dns-74fc4ff4bc-5x75j" Oct 09 15:14:59 crc kubenswrapper[4762]: I1009 15:14:59.514537 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5c397658-db91-4151-b5de-2aac1e540cf1-ovsdbserver-sb\") pod \"dnsmasq-dns-74fc4ff4bc-5x75j\" (UID: \"5c397658-db91-4151-b5de-2aac1e540cf1\") " pod="openstack/dnsmasq-dns-74fc4ff4bc-5x75j" Oct 09 15:14:59 crc kubenswrapper[4762]: I1009 15:14:59.514682 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-npncp\" (UniqueName: \"kubernetes.io/projected/5c397658-db91-4151-b5de-2aac1e540cf1-kube-api-access-npncp\") pod \"dnsmasq-dns-74fc4ff4bc-5x75j\" (UID: \"5c397658-db91-4151-b5de-2aac1e540cf1\") " pod="openstack/dnsmasq-dns-74fc4ff4bc-5x75j" Oct 09 15:14:59 crc kubenswrapper[4762]: I1009 15:14:59.617072 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5c397658-db91-4151-b5de-2aac1e540cf1-config\") pod \"dnsmasq-dns-74fc4ff4bc-5x75j\" (UID: \"5c397658-db91-4151-b5de-2aac1e540cf1\") " pod="openstack/dnsmasq-dns-74fc4ff4bc-5x75j" Oct 09 15:14:59 crc kubenswrapper[4762]: I1009 15:14:59.618037 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5c397658-db91-4151-b5de-2aac1e540cf1-config\") pod \"dnsmasq-dns-74fc4ff4bc-5x75j\" (UID: \"5c397658-db91-4151-b5de-2aac1e540cf1\") " pod="openstack/dnsmasq-dns-74fc4ff4bc-5x75j" Oct 09 15:14:59 crc kubenswrapper[4762]: I1009 15:14:59.618133 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5c397658-db91-4151-b5de-2aac1e540cf1-dns-svc\") pod \"dnsmasq-dns-74fc4ff4bc-5x75j\" (UID: \"5c397658-db91-4151-b5de-2aac1e540cf1\") " pod="openstack/dnsmasq-dns-74fc4ff4bc-5x75j" Oct 09 15:14:59 crc kubenswrapper[4762]: I1009 15:14:59.618231 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/5c397658-db91-4151-b5de-2aac1e540cf1-openstack-cell1\") pod \"dnsmasq-dns-74fc4ff4bc-5x75j\" (UID: \"5c397658-db91-4151-b5de-2aac1e540cf1\") " pod="openstack/dnsmasq-dns-74fc4ff4bc-5x75j" Oct 09 15:14:59 crc kubenswrapper[4762]: I1009 15:14:59.618261 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5c397658-db91-4151-b5de-2aac1e540cf1-ovsdbserver-nb\") pod \"dnsmasq-dns-74fc4ff4bc-5x75j\" (UID: \"5c397658-db91-4151-b5de-2aac1e540cf1\") " pod="openstack/dnsmasq-dns-74fc4ff4bc-5x75j" Oct 09 15:14:59 crc kubenswrapper[4762]: I1009 15:14:59.618296 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5c397658-db91-4151-b5de-2aac1e540cf1-ovsdbserver-sb\") pod \"dnsmasq-dns-74fc4ff4bc-5x75j\" (UID: \"5c397658-db91-4151-b5de-2aac1e540cf1\") " pod="openstack/dnsmasq-dns-74fc4ff4bc-5x75j" Oct 09 15:14:59 crc kubenswrapper[4762]: I1009 15:14:59.618339 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-npncp\" (UniqueName: 
\"kubernetes.io/projected/5c397658-db91-4151-b5de-2aac1e540cf1-kube-api-access-npncp\") pod \"dnsmasq-dns-74fc4ff4bc-5x75j\" (UID: \"5c397658-db91-4151-b5de-2aac1e540cf1\") " pod="openstack/dnsmasq-dns-74fc4ff4bc-5x75j" Oct 09 15:14:59 crc kubenswrapper[4762]: I1009 15:14:59.619484 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5c397658-db91-4151-b5de-2aac1e540cf1-ovsdbserver-sb\") pod \"dnsmasq-dns-74fc4ff4bc-5x75j\" (UID: \"5c397658-db91-4151-b5de-2aac1e540cf1\") " pod="openstack/dnsmasq-dns-74fc4ff4bc-5x75j" Oct 09 15:14:59 crc kubenswrapper[4762]: I1009 15:14:59.619764 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5c397658-db91-4151-b5de-2aac1e540cf1-ovsdbserver-nb\") pod \"dnsmasq-dns-74fc4ff4bc-5x75j\" (UID: \"5c397658-db91-4151-b5de-2aac1e540cf1\") " pod="openstack/dnsmasq-dns-74fc4ff4bc-5x75j" Oct 09 15:14:59 crc kubenswrapper[4762]: I1009 15:14:59.620045 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/5c397658-db91-4151-b5de-2aac1e540cf1-openstack-cell1\") pod \"dnsmasq-dns-74fc4ff4bc-5x75j\" (UID: \"5c397658-db91-4151-b5de-2aac1e540cf1\") " pod="openstack/dnsmasq-dns-74fc4ff4bc-5x75j" Oct 09 15:14:59 crc kubenswrapper[4762]: I1009 15:14:59.622311 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5c397658-db91-4151-b5de-2aac1e540cf1-dns-svc\") pod \"dnsmasq-dns-74fc4ff4bc-5x75j\" (UID: \"5c397658-db91-4151-b5de-2aac1e540cf1\") " pod="openstack/dnsmasq-dns-74fc4ff4bc-5x75j" Oct 09 15:14:59 crc kubenswrapper[4762]: I1009 15:14:59.658819 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-npncp\" (UniqueName: \"kubernetes.io/projected/5c397658-db91-4151-b5de-2aac1e540cf1-kube-api-access-npncp\") pod \"dnsmasq-dns-74fc4ff4bc-5x75j\" (UID: \"5c397658-db91-4151-b5de-2aac1e540cf1\") " pod="openstack/dnsmasq-dns-74fc4ff4bc-5x75j" Oct 09 15:14:59 crc kubenswrapper[4762]: I1009 15:14:59.743007 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-74fc4ff4bc-5x75j" Oct 09 15:14:59 crc kubenswrapper[4762]: I1009 15:14:59.883758 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-b797586d9-2g6s4" Oct 09 15:15:00 crc kubenswrapper[4762]: I1009 15:15:00.025155 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/783a4681-51fc-451f-afce-a38e4fd0910c-ovsdbserver-nb\") pod \"783a4681-51fc-451f-afce-a38e4fd0910c\" (UID: \"783a4681-51fc-451f-afce-a38e4fd0910c\") " Oct 09 15:15:00 crc kubenswrapper[4762]: I1009 15:15:00.025362 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/783a4681-51fc-451f-afce-a38e4fd0910c-config\") pod \"783a4681-51fc-451f-afce-a38e4fd0910c\" (UID: \"783a4681-51fc-451f-afce-a38e4fd0910c\") " Oct 09 15:15:00 crc kubenswrapper[4762]: I1009 15:15:00.025475 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bdzpb\" (UniqueName: \"kubernetes.io/projected/783a4681-51fc-451f-afce-a38e4fd0910c-kube-api-access-bdzpb\") pod \"783a4681-51fc-451f-afce-a38e4fd0910c\" (UID: \"783a4681-51fc-451f-afce-a38e4fd0910c\") " Oct 09 15:15:00 crc kubenswrapper[4762]: I1009 15:15:00.025533 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/783a4681-51fc-451f-afce-a38e4fd0910c-ovsdbserver-sb\") pod \"783a4681-51fc-451f-afce-a38e4fd0910c\" (UID: \"783a4681-51fc-451f-afce-a38e4fd0910c\") " Oct 09 15:15:00 crc kubenswrapper[4762]: I1009 15:15:00.025589 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/783a4681-51fc-451f-afce-a38e4fd0910c-dns-svc\") pod \"783a4681-51fc-451f-afce-a38e4fd0910c\" (UID: \"783a4681-51fc-451f-afce-a38e4fd0910c\") " Oct 09 15:15:00 crc kubenswrapper[4762]: I1009 15:15:00.032077 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/783a4681-51fc-451f-afce-a38e4fd0910c-kube-api-access-bdzpb" (OuterVolumeSpecName: "kube-api-access-bdzpb") pod "783a4681-51fc-451f-afce-a38e4fd0910c" (UID: "783a4681-51fc-451f-afce-a38e4fd0910c"). InnerVolumeSpecName "kube-api-access-bdzpb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 15:15:00 crc kubenswrapper[4762]: I1009 15:15:00.107918 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/783a4681-51fc-451f-afce-a38e4fd0910c-config" (OuterVolumeSpecName: "config") pod "783a4681-51fc-451f-afce-a38e4fd0910c" (UID: "783a4681-51fc-451f-afce-a38e4fd0910c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 15:15:00 crc kubenswrapper[4762]: I1009 15:15:00.115914 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/783a4681-51fc-451f-afce-a38e4fd0910c-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "783a4681-51fc-451f-afce-a38e4fd0910c" (UID: "783a4681-51fc-451f-afce-a38e4fd0910c"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 15:15:00 crc kubenswrapper[4762]: I1009 15:15:00.127392 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/783a4681-51fc-451f-afce-a38e4fd0910c-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "783a4681-51fc-451f-afce-a38e4fd0910c" (UID: "783a4681-51fc-451f-afce-a38e4fd0910c"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 15:15:00 crc kubenswrapper[4762]: I1009 15:15:00.166841 4762 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/783a4681-51fc-451f-afce-a38e4fd0910c-config\") on node \"crc\" DevicePath \"\"" Oct 09 15:15:00 crc kubenswrapper[4762]: I1009 15:15:00.166899 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bdzpb\" (UniqueName: \"kubernetes.io/projected/783a4681-51fc-451f-afce-a38e4fd0910c-kube-api-access-bdzpb\") on node \"crc\" DevicePath \"\"" Oct 09 15:15:00 crc kubenswrapper[4762]: I1009 15:15:00.166995 4762 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/783a4681-51fc-451f-afce-a38e4fd0910c-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 09 15:15:00 crc kubenswrapper[4762]: I1009 15:15:00.167011 4762 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/783a4681-51fc-451f-afce-a38e4fd0910c-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 09 15:15:00 crc kubenswrapper[4762]: I1009 15:15:00.192010 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/783a4681-51fc-451f-afce-a38e4fd0910c-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "783a4681-51fc-451f-afce-a38e4fd0910c" (UID: "783a4681-51fc-451f-afce-a38e4fd0910c"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 15:15:00 crc kubenswrapper[4762]: I1009 15:15:00.226710 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29333715-tk74t"] Oct 09 15:15:00 crc kubenswrapper[4762]: E1009 15:15:00.227695 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="783a4681-51fc-451f-afce-a38e4fd0910c" containerName="dnsmasq-dns" Oct 09 15:15:00 crc kubenswrapper[4762]: I1009 15:15:00.227723 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="783a4681-51fc-451f-afce-a38e4fd0910c" containerName="dnsmasq-dns" Oct 09 15:15:00 crc kubenswrapper[4762]: E1009 15:15:00.227802 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="783a4681-51fc-451f-afce-a38e4fd0910c" containerName="init" Oct 09 15:15:00 crc kubenswrapper[4762]: I1009 15:15:00.227813 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="783a4681-51fc-451f-afce-a38e4fd0910c" containerName="init" Oct 09 15:15:00 crc kubenswrapper[4762]: I1009 15:15:00.228177 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="783a4681-51fc-451f-afce-a38e4fd0910c" containerName="dnsmasq-dns" Oct 09 15:15:00 crc kubenswrapper[4762]: I1009 15:15:00.229570 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29333715-tk74t" Oct 09 15:15:00 crc kubenswrapper[4762]: I1009 15:15:00.233085 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 09 15:15:00 crc kubenswrapper[4762]: I1009 15:15:00.236817 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 09 15:15:00 crc kubenswrapper[4762]: I1009 15:15:00.242622 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29333715-tk74t"] Oct 09 15:15:00 crc kubenswrapper[4762]: W1009 15:15:00.264817 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5c397658_db91_4151_b5de_2aac1e540cf1.slice/crio-b7d6108e2178e730292869523519e327f5c904d5add087a48167140f4257d067 WatchSource:0}: Error finding container b7d6108e2178e730292869523519e327f5c904d5add087a48167140f4257d067: Status 404 returned error can't find the container with id b7d6108e2178e730292869523519e327f5c904d5add087a48167140f4257d067 Oct 09 15:15:00 crc kubenswrapper[4762]: I1009 15:15:00.268813 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-74fc4ff4bc-5x75j"] Oct 09 15:15:00 crc kubenswrapper[4762]: I1009 15:15:00.277129 4762 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/783a4681-51fc-451f-afce-a38e4fd0910c-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 09 15:15:00 crc kubenswrapper[4762]: I1009 15:15:00.379376 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1f8ed55b-7571-4665-ba42-64d1e3b6d7a5-config-volume\") pod \"collect-profiles-29333715-tk74t\" (UID: \"1f8ed55b-7571-4665-ba42-64d1e3b6d7a5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333715-tk74t" Oct 09 15:15:00 crc kubenswrapper[4762]: I1009 15:15:00.379583 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1f8ed55b-7571-4665-ba42-64d1e3b6d7a5-secret-volume\") pod \"collect-profiles-29333715-tk74t\" (UID: \"1f8ed55b-7571-4665-ba42-64d1e3b6d7a5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333715-tk74t" Oct 09 15:15:00 crc kubenswrapper[4762]: I1009 15:15:00.379667 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vvgqn\" (UniqueName: \"kubernetes.io/projected/1f8ed55b-7571-4665-ba42-64d1e3b6d7a5-kube-api-access-vvgqn\") pod \"collect-profiles-29333715-tk74t\" (UID: \"1f8ed55b-7571-4665-ba42-64d1e3b6d7a5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333715-tk74t" Oct 09 15:15:00 crc kubenswrapper[4762]: I1009 15:15:00.481866 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1f8ed55b-7571-4665-ba42-64d1e3b6d7a5-config-volume\") pod \"collect-profiles-29333715-tk74t\" (UID: \"1f8ed55b-7571-4665-ba42-64d1e3b6d7a5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333715-tk74t" Oct 09 15:15:00 crc kubenswrapper[4762]: I1009 15:15:00.482209 4762 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1f8ed55b-7571-4665-ba42-64d1e3b6d7a5-secret-volume\") pod \"collect-profiles-29333715-tk74t\" (UID: \"1f8ed55b-7571-4665-ba42-64d1e3b6d7a5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333715-tk74t" Oct 09 15:15:00 crc kubenswrapper[4762]: I1009 15:15:00.482244 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vvgqn\" (UniqueName: \"kubernetes.io/projected/1f8ed55b-7571-4665-ba42-64d1e3b6d7a5-kube-api-access-vvgqn\") pod \"collect-profiles-29333715-tk74t\" (UID: \"1f8ed55b-7571-4665-ba42-64d1e3b6d7a5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333715-tk74t" Oct 09 15:15:00 crc kubenswrapper[4762]: I1009 15:15:00.483270 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1f8ed55b-7571-4665-ba42-64d1e3b6d7a5-config-volume\") pod \"collect-profiles-29333715-tk74t\" (UID: \"1f8ed55b-7571-4665-ba42-64d1e3b6d7a5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333715-tk74t" Oct 09 15:15:00 crc kubenswrapper[4762]: I1009 15:15:00.486971 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1f8ed55b-7571-4665-ba42-64d1e3b6d7a5-secret-volume\") pod \"collect-profiles-29333715-tk74t\" (UID: \"1f8ed55b-7571-4665-ba42-64d1e3b6d7a5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333715-tk74t" Oct 09 15:15:00 crc kubenswrapper[4762]: I1009 15:15:00.498477 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vvgqn\" (UniqueName: \"kubernetes.io/projected/1f8ed55b-7571-4665-ba42-64d1e3b6d7a5-kube-api-access-vvgqn\") pod \"collect-profiles-29333715-tk74t\" (UID: \"1f8ed55b-7571-4665-ba42-64d1e3b6d7a5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333715-tk74t" Oct 09 15:15:00 crc kubenswrapper[4762]: I1009 15:15:00.519101 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74fc4ff4bc-5x75j" event={"ID":"5c397658-db91-4151-b5de-2aac1e540cf1","Type":"ContainerStarted","Data":"b7d6108e2178e730292869523519e327f5c904d5add087a48167140f4257d067"} Oct 09 15:15:00 crc kubenswrapper[4762]: I1009 15:15:00.528287 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b797586d9-2g6s4" event={"ID":"783a4681-51fc-451f-afce-a38e4fd0910c","Type":"ContainerDied","Data":"cd335cf120ecc49ac5f7e66c203bdbad4b2198cb3338979cb53817830ea49840"} Oct 09 15:15:00 crc kubenswrapper[4762]: I1009 15:15:00.528337 4762 scope.go:117] "RemoveContainer" containerID="1020fbaa4b252630c8faa49513a118883b35fe2716a6099d1e7f79d1168ad552" Oct 09 15:15:00 crc kubenswrapper[4762]: I1009 15:15:00.528497 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b797586d9-2g6s4" Oct 09 15:15:00 crc kubenswrapper[4762]: I1009 15:15:00.559384 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29333715-tk74t" Oct 09 15:15:00 crc kubenswrapper[4762]: I1009 15:15:00.603790 4762 scope.go:117] "RemoveContainer" containerID="24beca5d68f07f4f189af899d803d1c3d3cdf8da2482f832e6f7c6aa27ba3743" Oct 09 15:15:00 crc kubenswrapper[4762]: I1009 15:15:00.724596 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-b797586d9-2g6s4"] Oct 09 15:15:00 crc kubenswrapper[4762]: I1009 15:15:00.734431 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-b797586d9-2g6s4"] Oct 09 15:15:00 crc kubenswrapper[4762]: I1009 15:15:00.978052 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="783a4681-51fc-451f-afce-a38e4fd0910c" path="/var/lib/kubelet/pods/783a4681-51fc-451f-afce-a38e4fd0910c/volumes" Oct 09 15:15:01 crc kubenswrapper[4762]: I1009 15:15:01.061113 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29333715-tk74t"] Oct 09 15:15:01 crc kubenswrapper[4762]: W1009 15:15:01.064097 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1f8ed55b_7571_4665_ba42_64d1e3b6d7a5.slice/crio-ea96ce6a3d267e78598c9c478161ba08c8f7981687d0df3fe87472583eadab07 WatchSource:0}: Error finding container ea96ce6a3d267e78598c9c478161ba08c8f7981687d0df3fe87472583eadab07: Status 404 returned error can't find the container with id ea96ce6a3d267e78598c9c478161ba08c8f7981687d0df3fe87472583eadab07 Oct 09 15:15:01 crc kubenswrapper[4762]: I1009 15:15:01.554604 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29333715-tk74t" event={"ID":"1f8ed55b-7571-4665-ba42-64d1e3b6d7a5","Type":"ContainerStarted","Data":"a62d3cd2090d9919489b9ded7ed05f65c087f6d697159e7171d5fb03c20577c6"} Oct 09 15:15:01 crc kubenswrapper[4762]: I1009 15:15:01.554970 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29333715-tk74t" event={"ID":"1f8ed55b-7571-4665-ba42-64d1e3b6d7a5","Type":"ContainerStarted","Data":"ea96ce6a3d267e78598c9c478161ba08c8f7981687d0df3fe87472583eadab07"} Oct 09 15:15:01 crc kubenswrapper[4762]: I1009 15:15:01.561423 4762 generic.go:334] "Generic (PLEG): container finished" podID="5c397658-db91-4151-b5de-2aac1e540cf1" containerID="e9bdb944a9e3e4e019962b34669b85393bcd6ae13840c9031a99c2a6aa91f6a7" exitCode=0 Oct 09 15:15:01 crc kubenswrapper[4762]: I1009 15:15:01.561471 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74fc4ff4bc-5x75j" event={"ID":"5c397658-db91-4151-b5de-2aac1e540cf1","Type":"ContainerDied","Data":"e9bdb944a9e3e4e019962b34669b85393bcd6ae13840c9031a99c2a6aa91f6a7"} Oct 09 15:15:01 crc kubenswrapper[4762]: I1009 15:15:01.575273 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29333715-tk74t" podStartSLOduration=1.57524957 podStartE2EDuration="1.57524957s" podCreationTimestamp="2025-10-09 15:15:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 15:15:01.570536807 +0000 UTC m=+6577.344327846" watchObservedRunningTime="2025-10-09 15:15:01.57524957 +0000 UTC m=+6577.349040609" Oct 09 15:15:02 crc kubenswrapper[4762]: I1009 15:15:02.579614 4762 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openstack/dnsmasq-dns-74fc4ff4bc-5x75j" event={"ID":"5c397658-db91-4151-b5de-2aac1e540cf1","Type":"ContainerStarted","Data":"84d2171f3924b1c6f6ef3cd67d08f97b3eb2b1ad724d4897ec43867193f88e95"} Oct 09 15:15:02 crc kubenswrapper[4762]: I1009 15:15:02.581539 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-74fc4ff4bc-5x75j" Oct 09 15:15:02 crc kubenswrapper[4762]: I1009 15:15:02.583917 4762 generic.go:334] "Generic (PLEG): container finished" podID="1f8ed55b-7571-4665-ba42-64d1e3b6d7a5" containerID="a62d3cd2090d9919489b9ded7ed05f65c087f6d697159e7171d5fb03c20577c6" exitCode=0 Oct 09 15:15:02 crc kubenswrapper[4762]: I1009 15:15:02.583946 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29333715-tk74t" event={"ID":"1f8ed55b-7571-4665-ba42-64d1e3b6d7a5","Type":"ContainerDied","Data":"a62d3cd2090d9919489b9ded7ed05f65c087f6d697159e7171d5fb03c20577c6"} Oct 09 15:15:02 crc kubenswrapper[4762]: I1009 15:15:02.609665 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-74fc4ff4bc-5x75j" podStartSLOduration=3.60962886 podStartE2EDuration="3.60962886s" podCreationTimestamp="2025-10-09 15:14:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 15:15:02.602540935 +0000 UTC m=+6578.376331974" watchObservedRunningTime="2025-10-09 15:15:02.60962886 +0000 UTC m=+6578.383419899" Oct 09 15:15:03 crc kubenswrapper[4762]: I1009 15:15:03.038208 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/octavia-db-create-9hzrj"] Oct 09 15:15:03 crc kubenswrapper[4762]: I1009 15:15:03.050028 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/octavia-db-create-9hzrj"] Oct 09 15:15:04 crc kubenswrapper[4762]: I1009 15:15:04.013387 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29333715-tk74t" Oct 09 15:15:04 crc kubenswrapper[4762]: I1009 15:15:04.162442 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1f8ed55b-7571-4665-ba42-64d1e3b6d7a5-secret-volume\") pod \"1f8ed55b-7571-4665-ba42-64d1e3b6d7a5\" (UID: \"1f8ed55b-7571-4665-ba42-64d1e3b6d7a5\") " Oct 09 15:15:04 crc kubenswrapper[4762]: I1009 15:15:04.162506 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1f8ed55b-7571-4665-ba42-64d1e3b6d7a5-config-volume\") pod \"1f8ed55b-7571-4665-ba42-64d1e3b6d7a5\" (UID: \"1f8ed55b-7571-4665-ba42-64d1e3b6d7a5\") " Oct 09 15:15:04 crc kubenswrapper[4762]: I1009 15:15:04.162690 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vvgqn\" (UniqueName: \"kubernetes.io/projected/1f8ed55b-7571-4665-ba42-64d1e3b6d7a5-kube-api-access-vvgqn\") pod \"1f8ed55b-7571-4665-ba42-64d1e3b6d7a5\" (UID: \"1f8ed55b-7571-4665-ba42-64d1e3b6d7a5\") " Oct 09 15:15:04 crc kubenswrapper[4762]: I1009 15:15:04.163617 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1f8ed55b-7571-4665-ba42-64d1e3b6d7a5-config-volume" (OuterVolumeSpecName: "config-volume") pod "1f8ed55b-7571-4665-ba42-64d1e3b6d7a5" (UID: "1f8ed55b-7571-4665-ba42-64d1e3b6d7a5"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 15:15:04 crc kubenswrapper[4762]: I1009 15:15:04.169016 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1f8ed55b-7571-4665-ba42-64d1e3b6d7a5-kube-api-access-vvgqn" (OuterVolumeSpecName: "kube-api-access-vvgqn") pod "1f8ed55b-7571-4665-ba42-64d1e3b6d7a5" (UID: "1f8ed55b-7571-4665-ba42-64d1e3b6d7a5"). InnerVolumeSpecName "kube-api-access-vvgqn". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 15:15:04 crc kubenswrapper[4762]: I1009 15:15:04.171018 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1f8ed55b-7571-4665-ba42-64d1e3b6d7a5-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "1f8ed55b-7571-4665-ba42-64d1e3b6d7a5" (UID: "1f8ed55b-7571-4665-ba42-64d1e3b6d7a5"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:15:04 crc kubenswrapper[4762]: I1009 15:15:04.264566 4762 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1f8ed55b-7571-4665-ba42-64d1e3b6d7a5-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 09 15:15:04 crc kubenswrapper[4762]: I1009 15:15:04.264601 4762 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1f8ed55b-7571-4665-ba42-64d1e3b6d7a5-config-volume\") on node \"crc\" DevicePath \"\"" Oct 09 15:15:04 crc kubenswrapper[4762]: I1009 15:15:04.264611 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vvgqn\" (UniqueName: \"kubernetes.io/projected/1f8ed55b-7571-4665-ba42-64d1e3b6d7a5-kube-api-access-vvgqn\") on node \"crc\" DevicePath \"\"" Oct 09 15:15:04 crc kubenswrapper[4762]: I1009 15:15:04.613303 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29333715-tk74t" Oct 09 15:15:04 crc kubenswrapper[4762]: I1009 15:15:04.613379 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29333715-tk74t" event={"ID":"1f8ed55b-7571-4665-ba42-64d1e3b6d7a5","Type":"ContainerDied","Data":"ea96ce6a3d267e78598c9c478161ba08c8f7981687d0df3fe87472583eadab07"} Oct 09 15:15:04 crc kubenswrapper[4762]: I1009 15:15:04.613462 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ea96ce6a3d267e78598c9c478161ba08c8f7981687d0df3fe87472583eadab07" Oct 09 15:15:04 crc kubenswrapper[4762]: I1009 15:15:04.634254 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29333670-6nhf7"] Oct 09 15:15:04 crc kubenswrapper[4762]: I1009 15:15:04.648270 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29333670-6nhf7"] Oct 09 15:15:04 crc kubenswrapper[4762]: I1009 15:15:04.981240 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="52bffb38-0b4e-4e17-a6b2-4be46c24d322" path="/var/lib/kubelet/pods/52bffb38-0b4e-4e17-a6b2-4be46c24d322/volumes" Oct 09 15:15:04 crc kubenswrapper[4762]: I1009 15:15:04.982745 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ada7d5e9-569f-48b8-94c9-f7bad43412d2" path="/var/lib/kubelet/pods/ada7d5e9-569f-48b8-94c9-f7bad43412d2/volumes" Oct 09 15:15:09 crc kubenswrapper[4762]: I1009 15:15:09.745486 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-74fc4ff4bc-5x75j" Oct 09 15:15:09 crc kubenswrapper[4762]: I1009 15:15:09.817004 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-686dc8c44c-wdgdz"] Oct 09 15:15:09 crc kubenswrapper[4762]: I1009 15:15:09.817266 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-686dc8c44c-wdgdz" podUID="e4988923-31d0-4552-86fc-20d97114cbcb" containerName="dnsmasq-dns" containerID="cri-o://cec0f37880d4abea3eab3139b79595e64081b05416e194d46056c5c34176c6f0" gracePeriod=10 Oct 09 15:15:10 crc kubenswrapper[4762]: I1009 15:15:10.364172 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-686dc8c44c-wdgdz" Oct 09 15:15:10 crc kubenswrapper[4762]: I1009 15:15:10.495300 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e4988923-31d0-4552-86fc-20d97114cbcb-dns-svc\") pod \"e4988923-31d0-4552-86fc-20d97114cbcb\" (UID: \"e4988923-31d0-4552-86fc-20d97114cbcb\") " Oct 09 15:15:10 crc kubenswrapper[4762]: I1009 15:15:10.495371 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e4988923-31d0-4552-86fc-20d97114cbcb-config\") pod \"e4988923-31d0-4552-86fc-20d97114cbcb\" (UID: \"e4988923-31d0-4552-86fc-20d97114cbcb\") " Oct 09 15:15:10 crc kubenswrapper[4762]: I1009 15:15:10.495474 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e4988923-31d0-4552-86fc-20d97114cbcb-ovsdbserver-sb\") pod \"e4988923-31d0-4552-86fc-20d97114cbcb\" (UID: \"e4988923-31d0-4552-86fc-20d97114cbcb\") " Oct 09 15:15:10 crc kubenswrapper[4762]: I1009 15:15:10.495499 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rxv24\" (UniqueName: \"kubernetes.io/projected/e4988923-31d0-4552-86fc-20d97114cbcb-kube-api-access-rxv24\") pod \"e4988923-31d0-4552-86fc-20d97114cbcb\" (UID: \"e4988923-31d0-4552-86fc-20d97114cbcb\") " Oct 09 15:15:10 crc kubenswrapper[4762]: I1009 15:15:10.495596 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e4988923-31d0-4552-86fc-20d97114cbcb-ovsdbserver-nb\") pod \"e4988923-31d0-4552-86fc-20d97114cbcb\" (UID: \"e4988923-31d0-4552-86fc-20d97114cbcb\") " Oct 09 15:15:10 crc kubenswrapper[4762]: I1009 15:15:10.495754 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/e4988923-31d0-4552-86fc-20d97114cbcb-openstack-cell1\") pod \"e4988923-31d0-4552-86fc-20d97114cbcb\" (UID: \"e4988923-31d0-4552-86fc-20d97114cbcb\") " Oct 09 15:15:10 crc kubenswrapper[4762]: I1009 15:15:10.517932 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e4988923-31d0-4552-86fc-20d97114cbcb-kube-api-access-rxv24" (OuterVolumeSpecName: "kube-api-access-rxv24") pod "e4988923-31d0-4552-86fc-20d97114cbcb" (UID: "e4988923-31d0-4552-86fc-20d97114cbcb"). InnerVolumeSpecName "kube-api-access-rxv24". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 15:15:10 crc kubenswrapper[4762]: I1009 15:15:10.561445 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e4988923-31d0-4552-86fc-20d97114cbcb-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "e4988923-31d0-4552-86fc-20d97114cbcb" (UID: "e4988923-31d0-4552-86fc-20d97114cbcb"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 15:15:10 crc kubenswrapper[4762]: I1009 15:15:10.562131 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e4988923-31d0-4552-86fc-20d97114cbcb-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "e4988923-31d0-4552-86fc-20d97114cbcb" (UID: "e4988923-31d0-4552-86fc-20d97114cbcb"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 15:15:10 crc kubenswrapper[4762]: I1009 15:15:10.562783 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e4988923-31d0-4552-86fc-20d97114cbcb-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "e4988923-31d0-4552-86fc-20d97114cbcb" (UID: "e4988923-31d0-4552-86fc-20d97114cbcb"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 15:15:10 crc kubenswrapper[4762]: I1009 15:15:10.577227 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e4988923-31d0-4552-86fc-20d97114cbcb-config" (OuterVolumeSpecName: "config") pod "e4988923-31d0-4552-86fc-20d97114cbcb" (UID: "e4988923-31d0-4552-86fc-20d97114cbcb"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 15:15:10 crc kubenswrapper[4762]: I1009 15:15:10.584960 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e4988923-31d0-4552-86fc-20d97114cbcb-openstack-cell1" (OuterVolumeSpecName: "openstack-cell1") pod "e4988923-31d0-4552-86fc-20d97114cbcb" (UID: "e4988923-31d0-4552-86fc-20d97114cbcb"). InnerVolumeSpecName "openstack-cell1". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 15:15:10 crc kubenswrapper[4762]: I1009 15:15:10.598285 4762 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e4988923-31d0-4552-86fc-20d97114cbcb-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 09 15:15:10 crc kubenswrapper[4762]: I1009 15:15:10.598328 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rxv24\" (UniqueName: \"kubernetes.io/projected/e4988923-31d0-4552-86fc-20d97114cbcb-kube-api-access-rxv24\") on node \"crc\" DevicePath \"\"" Oct 09 15:15:10 crc kubenswrapper[4762]: I1009 15:15:10.598340 4762 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e4988923-31d0-4552-86fc-20d97114cbcb-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 09 15:15:10 crc kubenswrapper[4762]: I1009 15:15:10.598349 4762 reconciler_common.go:293] "Volume detached for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/e4988923-31d0-4552-86fc-20d97114cbcb-openstack-cell1\") on node \"crc\" DevicePath \"\"" Oct 09 15:15:10 crc kubenswrapper[4762]: I1009 15:15:10.598360 4762 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e4988923-31d0-4552-86fc-20d97114cbcb-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 09 15:15:10 crc kubenswrapper[4762]: I1009 15:15:10.598372 4762 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e4988923-31d0-4552-86fc-20d97114cbcb-config\") on node \"crc\" DevicePath \"\"" Oct 09 15:15:10 crc kubenswrapper[4762]: I1009 15:15:10.670217 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-686dc8c44c-wdgdz" Oct 09 15:15:10 crc kubenswrapper[4762]: I1009 15:15:10.670222 4762 generic.go:334] "Generic (PLEG): container finished" podID="e4988923-31d0-4552-86fc-20d97114cbcb" containerID="cec0f37880d4abea3eab3139b79595e64081b05416e194d46056c5c34176c6f0" exitCode=0 Oct 09 15:15:10 crc kubenswrapper[4762]: I1009 15:15:10.670257 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-686dc8c44c-wdgdz" event={"ID":"e4988923-31d0-4552-86fc-20d97114cbcb","Type":"ContainerDied","Data":"cec0f37880d4abea3eab3139b79595e64081b05416e194d46056c5c34176c6f0"} Oct 09 15:15:10 crc kubenswrapper[4762]: I1009 15:15:10.670305 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-686dc8c44c-wdgdz" event={"ID":"e4988923-31d0-4552-86fc-20d97114cbcb","Type":"ContainerDied","Data":"adca079e158e56373345a6da25e51b5901d89d5c3b3dbd35bf6de2c3e78d1180"} Oct 09 15:15:10 crc kubenswrapper[4762]: I1009 15:15:10.670327 4762 scope.go:117] "RemoveContainer" containerID="cec0f37880d4abea3eab3139b79595e64081b05416e194d46056c5c34176c6f0" Oct 09 15:15:10 crc kubenswrapper[4762]: I1009 15:15:10.694032 4762 scope.go:117] "RemoveContainer" containerID="3103353dee6f79ef9b5d7a8b66d46b715107da0733877090d27315afae0cc473" Oct 09 15:15:10 crc kubenswrapper[4762]: I1009 15:15:10.703708 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-686dc8c44c-wdgdz"] Oct 09 15:15:10 crc kubenswrapper[4762]: I1009 15:15:10.711462 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-686dc8c44c-wdgdz"] Oct 09 15:15:10 crc kubenswrapper[4762]: I1009 15:15:10.735494 4762 scope.go:117] "RemoveContainer" containerID="cec0f37880d4abea3eab3139b79595e64081b05416e194d46056c5c34176c6f0" Oct 09 15:15:10 crc kubenswrapper[4762]: E1009 15:15:10.735906 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cec0f37880d4abea3eab3139b79595e64081b05416e194d46056c5c34176c6f0\": container with ID starting with cec0f37880d4abea3eab3139b79595e64081b05416e194d46056c5c34176c6f0 not found: ID does not exist" containerID="cec0f37880d4abea3eab3139b79595e64081b05416e194d46056c5c34176c6f0" Oct 09 15:15:10 crc kubenswrapper[4762]: I1009 15:15:10.735947 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cec0f37880d4abea3eab3139b79595e64081b05416e194d46056c5c34176c6f0"} err="failed to get container status \"cec0f37880d4abea3eab3139b79595e64081b05416e194d46056c5c34176c6f0\": rpc error: code = NotFound desc = could not find container \"cec0f37880d4abea3eab3139b79595e64081b05416e194d46056c5c34176c6f0\": container with ID starting with cec0f37880d4abea3eab3139b79595e64081b05416e194d46056c5c34176c6f0 not found: ID does not exist" Oct 09 15:15:10 crc kubenswrapper[4762]: I1009 15:15:10.735973 4762 scope.go:117] "RemoveContainer" containerID="3103353dee6f79ef9b5d7a8b66d46b715107da0733877090d27315afae0cc473" Oct 09 15:15:10 crc kubenswrapper[4762]: E1009 15:15:10.736354 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3103353dee6f79ef9b5d7a8b66d46b715107da0733877090d27315afae0cc473\": container with ID starting with 3103353dee6f79ef9b5d7a8b66d46b715107da0733877090d27315afae0cc473 not found: ID does not exist" containerID="3103353dee6f79ef9b5d7a8b66d46b715107da0733877090d27315afae0cc473" Oct 09 15:15:10 crc 
kubenswrapper[4762]: I1009 15:15:10.736384 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3103353dee6f79ef9b5d7a8b66d46b715107da0733877090d27315afae0cc473"} err="failed to get container status \"3103353dee6f79ef9b5d7a8b66d46b715107da0733877090d27315afae0cc473\": rpc error: code = NotFound desc = could not find container \"3103353dee6f79ef9b5d7a8b66d46b715107da0733877090d27315afae0cc473\": container with ID starting with 3103353dee6f79ef9b5d7a8b66d46b715107da0733877090d27315afae0cc473 not found: ID does not exist" Oct 09 15:15:10 crc kubenswrapper[4762]: I1009 15:15:10.978402 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e4988923-31d0-4552-86fc-20d97114cbcb" path="/var/lib/kubelet/pods/e4988923-31d0-4552-86fc-20d97114cbcb/volumes" Oct 09 15:15:16 crc kubenswrapper[4762]: I1009 15:15:16.046267 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/octavia-cc4e-account-create-twt7d"] Oct 09 15:15:16 crc kubenswrapper[4762]: I1009 15:15:16.058913 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/octavia-cc4e-account-create-twt7d"] Oct 09 15:15:16 crc kubenswrapper[4762]: I1009 15:15:16.986485 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a6762a3f-8854-4975-a726-e82b92256895" path="/var/lib/kubelet/pods/a6762a3f-8854-4975-a726-e82b92256895/volumes" Oct 09 15:15:20 crc kubenswrapper[4762]: I1009 15:15:20.552556 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/pre-adoption-validation-openstack-pre-adoption-openstack-c2xxdf"] Oct 09 15:15:20 crc kubenswrapper[4762]: E1009 15:15:20.553376 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e4988923-31d0-4552-86fc-20d97114cbcb" containerName="dnsmasq-dns" Oct 09 15:15:20 crc kubenswrapper[4762]: I1009 15:15:20.553391 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="e4988923-31d0-4552-86fc-20d97114cbcb" containerName="dnsmasq-dns" Oct 09 15:15:20 crc kubenswrapper[4762]: E1009 15:15:20.553434 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e4988923-31d0-4552-86fc-20d97114cbcb" containerName="init" Oct 09 15:15:20 crc kubenswrapper[4762]: I1009 15:15:20.553441 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="e4988923-31d0-4552-86fc-20d97114cbcb" containerName="init" Oct 09 15:15:20 crc kubenswrapper[4762]: E1009 15:15:20.553457 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1f8ed55b-7571-4665-ba42-64d1e3b6d7a5" containerName="collect-profiles" Oct 09 15:15:20 crc kubenswrapper[4762]: I1009 15:15:20.553464 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="1f8ed55b-7571-4665-ba42-64d1e3b6d7a5" containerName="collect-profiles" Oct 09 15:15:20 crc kubenswrapper[4762]: I1009 15:15:20.553747 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="e4988923-31d0-4552-86fc-20d97114cbcb" containerName="dnsmasq-dns" Oct 09 15:15:20 crc kubenswrapper[4762]: I1009 15:15:20.553771 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="1f8ed55b-7571-4665-ba42-64d1e3b6d7a5" containerName="collect-profiles" Oct 09 15:15:20 crc kubenswrapper[4762]: I1009 15:15:20.554628 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-c2xxdf" Oct 09 15:15:20 crc kubenswrapper[4762]: I1009 15:15:20.556799 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 09 15:15:20 crc kubenswrapper[4762]: I1009 15:15:20.558166 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-whcgt" Oct 09 15:15:20 crc kubenswrapper[4762]: I1009 15:15:20.558567 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Oct 09 15:15:20 crc kubenswrapper[4762]: I1009 15:15:20.561005 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Oct 09 15:15:20 crc kubenswrapper[4762]: I1009 15:15:20.563764 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/pre-adoption-validation-openstack-pre-adoption-openstack-c2xxdf"] Oct 09 15:15:20 crc kubenswrapper[4762]: I1009 15:15:20.706969 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/8043119c-8c14-4c3f-b587-4daa7d8b1dde-ceph\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-c2xxdf\" (UID: \"8043119c-8c14-4c3f-b587-4daa7d8b1dde\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-c2xxdf" Oct 09 15:15:20 crc kubenswrapper[4762]: I1009 15:15:20.707056 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8043119c-8c14-4c3f-b587-4daa7d8b1dde-inventory\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-c2xxdf\" (UID: \"8043119c-8c14-4c3f-b587-4daa7d8b1dde\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-c2xxdf" Oct 09 15:15:20 crc kubenswrapper[4762]: I1009 15:15:20.707079 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8043119c-8c14-4c3f-b587-4daa7d8b1dde-ssh-key\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-c2xxdf\" (UID: \"8043119c-8c14-4c3f-b587-4daa7d8b1dde\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-c2xxdf" Oct 09 15:15:20 crc kubenswrapper[4762]: I1009 15:15:20.707280 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hstp9\" (UniqueName: \"kubernetes.io/projected/8043119c-8c14-4c3f-b587-4daa7d8b1dde-kube-api-access-hstp9\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-c2xxdf\" (UID: \"8043119c-8c14-4c3f-b587-4daa7d8b1dde\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-c2xxdf" Oct 09 15:15:20 crc kubenswrapper[4762]: I1009 15:15:20.707874 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pre-adoption-validation-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8043119c-8c14-4c3f-b587-4daa7d8b1dde-pre-adoption-validation-combined-ca-bundle\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-c2xxdf\" (UID: \"8043119c-8c14-4c3f-b587-4daa7d8b1dde\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-c2xxdf" Oct 09 15:15:20 crc kubenswrapper[4762]: I1009 15:15:20.809818 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"ceph\" (UniqueName: \"kubernetes.io/secret/8043119c-8c14-4c3f-b587-4daa7d8b1dde-ceph\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-c2xxdf\" (UID: \"8043119c-8c14-4c3f-b587-4daa7d8b1dde\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-c2xxdf" Oct 09 15:15:20 crc kubenswrapper[4762]: I1009 15:15:20.809924 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8043119c-8c14-4c3f-b587-4daa7d8b1dde-inventory\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-c2xxdf\" (UID: \"8043119c-8c14-4c3f-b587-4daa7d8b1dde\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-c2xxdf" Oct 09 15:15:20 crc kubenswrapper[4762]: I1009 15:15:20.809952 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8043119c-8c14-4c3f-b587-4daa7d8b1dde-ssh-key\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-c2xxdf\" (UID: \"8043119c-8c14-4c3f-b587-4daa7d8b1dde\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-c2xxdf" Oct 09 15:15:20 crc kubenswrapper[4762]: I1009 15:15:20.809995 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hstp9\" (UniqueName: \"kubernetes.io/projected/8043119c-8c14-4c3f-b587-4daa7d8b1dde-kube-api-access-hstp9\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-c2xxdf\" (UID: \"8043119c-8c14-4c3f-b587-4daa7d8b1dde\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-c2xxdf" Oct 09 15:15:20 crc kubenswrapper[4762]: I1009 15:15:20.810102 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pre-adoption-validation-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8043119c-8c14-4c3f-b587-4daa7d8b1dde-pre-adoption-validation-combined-ca-bundle\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-c2xxdf\" (UID: \"8043119c-8c14-4c3f-b587-4daa7d8b1dde\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-c2xxdf" Oct 09 15:15:20 crc kubenswrapper[4762]: I1009 15:15:20.816228 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8043119c-8c14-4c3f-b587-4daa7d8b1dde-ssh-key\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-c2xxdf\" (UID: \"8043119c-8c14-4c3f-b587-4daa7d8b1dde\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-c2xxdf" Oct 09 15:15:20 crc kubenswrapper[4762]: I1009 15:15:20.816563 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/8043119c-8c14-4c3f-b587-4daa7d8b1dde-ceph\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-c2xxdf\" (UID: \"8043119c-8c14-4c3f-b587-4daa7d8b1dde\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-c2xxdf" Oct 09 15:15:20 crc kubenswrapper[4762]: I1009 15:15:20.816670 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pre-adoption-validation-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8043119c-8c14-4c3f-b587-4daa7d8b1dde-pre-adoption-validation-combined-ca-bundle\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-c2xxdf\" (UID: \"8043119c-8c14-4c3f-b587-4daa7d8b1dde\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-c2xxdf" Oct 09 15:15:20 crc 
kubenswrapper[4762]: I1009 15:15:20.823392 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8043119c-8c14-4c3f-b587-4daa7d8b1dde-inventory\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-c2xxdf\" (UID: \"8043119c-8c14-4c3f-b587-4daa7d8b1dde\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-c2xxdf" Oct 09 15:15:20 crc kubenswrapper[4762]: I1009 15:15:20.830126 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hstp9\" (UniqueName: \"kubernetes.io/projected/8043119c-8c14-4c3f-b587-4daa7d8b1dde-kube-api-access-hstp9\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-c2xxdf\" (UID: \"8043119c-8c14-4c3f-b587-4daa7d8b1dde\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-c2xxdf" Oct 09 15:15:20 crc kubenswrapper[4762]: I1009 15:15:20.884124 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-c2xxdf" Oct 09 15:15:21 crc kubenswrapper[4762]: I1009 15:15:21.535116 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/pre-adoption-validation-openstack-pre-adoption-openstack-c2xxdf"] Oct 09 15:15:21 crc kubenswrapper[4762]: I1009 15:15:21.796614 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-c2xxdf" event={"ID":"8043119c-8c14-4c3f-b587-4daa7d8b1dde","Type":"ContainerStarted","Data":"21fc49a0f34b4faf50ae10b68998b5885a486946984b17cbbe31f330e7dda6a4"} Oct 09 15:15:22 crc kubenswrapper[4762]: I1009 15:15:22.028838 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/octavia-persistence-db-create-nc8ls"] Oct 09 15:15:22 crc kubenswrapper[4762]: I1009 15:15:22.041385 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/octavia-persistence-db-create-nc8ls"] Oct 09 15:15:22 crc kubenswrapper[4762]: I1009 15:15:22.978021 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a6e5304c-1aea-4e8c-9211-0f001f520cbe" path="/var/lib/kubelet/pods/a6e5304c-1aea-4e8c-9211-0f001f520cbe/volumes" Oct 09 15:15:32 crc kubenswrapper[4762]: I1009 15:15:32.292612 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 09 15:15:33 crc kubenswrapper[4762]: I1009 15:15:33.036940 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/octavia-c94a-account-create-86xpl"] Oct 09 15:15:33 crc kubenswrapper[4762]: I1009 15:15:33.048438 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/octavia-c94a-account-create-86xpl"] Oct 09 15:15:33 crc kubenswrapper[4762]: I1009 15:15:33.932796 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-c2xxdf" event={"ID":"8043119c-8c14-4c3f-b587-4daa7d8b1dde","Type":"ContainerStarted","Data":"9e78c62608e62f99d9109c9d054dfa132e506c37c26880e154c818f6a2274471"} Oct 09 15:15:33 crc kubenswrapper[4762]: I1009 15:15:33.960654 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-c2xxdf" podStartSLOduration=3.225419103 podStartE2EDuration="13.960602596s" podCreationTimestamp="2025-10-09 15:15:20 +0000 UTC" firstStartedPulling="2025-10-09 15:15:21.554615875 +0000 UTC m=+6597.328406924" lastFinishedPulling="2025-10-09 
15:15:32.289799378 +0000 UTC m=+6608.063590417" observedRunningTime="2025-10-09 15:15:33.948848319 +0000 UTC m=+6609.722639368" watchObservedRunningTime="2025-10-09 15:15:33.960602596 +0000 UTC m=+6609.734393655" Oct 09 15:15:34 crc kubenswrapper[4762]: I1009 15:15:34.978467 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ac9a0b13-d6a5-4ec1-8277-c76d624988fb" path="/var/lib/kubelet/pods/ac9a0b13-d6a5-4ec1-8277-c76d624988fb/volumes" Oct 09 15:15:36 crc kubenswrapper[4762]: I1009 15:15:36.700426 4762 scope.go:117] "RemoveContainer" containerID="be530af523124f4655c6e0b7eb7bb440dbfcabe2dc0ef39f909c7224d80df346" Oct 09 15:15:36 crc kubenswrapper[4762]: I1009 15:15:36.771707 4762 scope.go:117] "RemoveContainer" containerID="a9a90843032f9a4e0ff176622afafcaaed68b48ca2954977378b0c8340690cbc" Oct 09 15:15:36 crc kubenswrapper[4762]: I1009 15:15:36.817996 4762 scope.go:117] "RemoveContainer" containerID="c37576a22e1fd9a21099d056236bb751b369c2df294fb807b7ba3ffd2299cfd2" Oct 09 15:15:36 crc kubenswrapper[4762]: I1009 15:15:36.871175 4762 scope.go:117] "RemoveContainer" containerID="ea479bf12a2cda7843b77ba1f296d0d80aa847a30a5992e10468e82257f7b79d" Oct 09 15:15:36 crc kubenswrapper[4762]: I1009 15:15:36.922451 4762 scope.go:117] "RemoveContainer" containerID="270f067d8caa967c131fa9e1dcdc8d537699fc5e7f182152cbc155705521729a" Oct 09 15:15:48 crc kubenswrapper[4762]: I1009 15:15:48.072503 4762 generic.go:334] "Generic (PLEG): container finished" podID="8043119c-8c14-4c3f-b587-4daa7d8b1dde" containerID="9e78c62608e62f99d9109c9d054dfa132e506c37c26880e154c818f6a2274471" exitCode=0 Oct 09 15:15:48 crc kubenswrapper[4762]: I1009 15:15:48.072599 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-c2xxdf" event={"ID":"8043119c-8c14-4c3f-b587-4daa7d8b1dde","Type":"ContainerDied","Data":"9e78c62608e62f99d9109c9d054dfa132e506c37c26880e154c818f6a2274471"} Oct 09 15:15:49 crc kubenswrapper[4762]: I1009 15:15:49.780409 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-c2xxdf" Oct 09 15:15:49 crc kubenswrapper[4762]: I1009 15:15:49.866727 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pre-adoption-validation-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8043119c-8c14-4c3f-b587-4daa7d8b1dde-pre-adoption-validation-combined-ca-bundle\") pod \"8043119c-8c14-4c3f-b587-4daa7d8b1dde\" (UID: \"8043119c-8c14-4c3f-b587-4daa7d8b1dde\") " Oct 09 15:15:49 crc kubenswrapper[4762]: I1009 15:15:49.867082 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8043119c-8c14-4c3f-b587-4daa7d8b1dde-ssh-key\") pod \"8043119c-8c14-4c3f-b587-4daa7d8b1dde\" (UID: \"8043119c-8c14-4c3f-b587-4daa7d8b1dde\") " Oct 09 15:15:49 crc kubenswrapper[4762]: I1009 15:15:49.867155 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hstp9\" (UniqueName: \"kubernetes.io/projected/8043119c-8c14-4c3f-b587-4daa7d8b1dde-kube-api-access-hstp9\") pod \"8043119c-8c14-4c3f-b587-4daa7d8b1dde\" (UID: \"8043119c-8c14-4c3f-b587-4daa7d8b1dde\") " Oct 09 15:15:49 crc kubenswrapper[4762]: I1009 15:15:49.867188 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/8043119c-8c14-4c3f-b587-4daa7d8b1dde-ceph\") pod \"8043119c-8c14-4c3f-b587-4daa7d8b1dde\" (UID: \"8043119c-8c14-4c3f-b587-4daa7d8b1dde\") " Oct 09 15:15:49 crc kubenswrapper[4762]: I1009 15:15:49.867375 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8043119c-8c14-4c3f-b587-4daa7d8b1dde-inventory\") pod \"8043119c-8c14-4c3f-b587-4daa7d8b1dde\" (UID: \"8043119c-8c14-4c3f-b587-4daa7d8b1dde\") " Oct 09 15:15:49 crc kubenswrapper[4762]: I1009 15:15:49.873172 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8043119c-8c14-4c3f-b587-4daa7d8b1dde-ceph" (OuterVolumeSpecName: "ceph") pod "8043119c-8c14-4c3f-b587-4daa7d8b1dde" (UID: "8043119c-8c14-4c3f-b587-4daa7d8b1dde"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:15:49 crc kubenswrapper[4762]: I1009 15:15:49.873856 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8043119c-8c14-4c3f-b587-4daa7d8b1dde-kube-api-access-hstp9" (OuterVolumeSpecName: "kube-api-access-hstp9") pod "8043119c-8c14-4c3f-b587-4daa7d8b1dde" (UID: "8043119c-8c14-4c3f-b587-4daa7d8b1dde"). InnerVolumeSpecName "kube-api-access-hstp9". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 15:15:49 crc kubenswrapper[4762]: I1009 15:15:49.874050 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8043119c-8c14-4c3f-b587-4daa7d8b1dde-pre-adoption-validation-combined-ca-bundle" (OuterVolumeSpecName: "pre-adoption-validation-combined-ca-bundle") pod "8043119c-8c14-4c3f-b587-4daa7d8b1dde" (UID: "8043119c-8c14-4c3f-b587-4daa7d8b1dde"). InnerVolumeSpecName "pre-adoption-validation-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:15:49 crc kubenswrapper[4762]: I1009 15:15:49.898216 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8043119c-8c14-4c3f-b587-4daa7d8b1dde-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "8043119c-8c14-4c3f-b587-4daa7d8b1dde" (UID: "8043119c-8c14-4c3f-b587-4daa7d8b1dde"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:15:49 crc kubenswrapper[4762]: I1009 15:15:49.910049 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8043119c-8c14-4c3f-b587-4daa7d8b1dde-inventory" (OuterVolumeSpecName: "inventory") pod "8043119c-8c14-4c3f-b587-4daa7d8b1dde" (UID: "8043119c-8c14-4c3f-b587-4daa7d8b1dde"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:15:49 crc kubenswrapper[4762]: I1009 15:15:49.969813 4762 reconciler_common.go:293] "Volume detached for volume \"pre-adoption-validation-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8043119c-8c14-4c3f-b587-4daa7d8b1dde-pre-adoption-validation-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 15:15:49 crc kubenswrapper[4762]: I1009 15:15:49.969851 4762 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8043119c-8c14-4c3f-b587-4daa7d8b1dde-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 09 15:15:49 crc kubenswrapper[4762]: I1009 15:15:49.969862 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hstp9\" (UniqueName: \"kubernetes.io/projected/8043119c-8c14-4c3f-b587-4daa7d8b1dde-kube-api-access-hstp9\") on node \"crc\" DevicePath \"\"" Oct 09 15:15:49 crc kubenswrapper[4762]: I1009 15:15:49.969871 4762 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/8043119c-8c14-4c3f-b587-4daa7d8b1dde-ceph\") on node \"crc\" DevicePath \"\"" Oct 09 15:15:49 crc kubenswrapper[4762]: I1009 15:15:49.969881 4762 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8043119c-8c14-4c3f-b587-4daa7d8b1dde-inventory\") on node \"crc\" DevicePath \"\"" Oct 09 15:15:50 crc kubenswrapper[4762]: I1009 15:15:50.091248 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-c2xxdf" event={"ID":"8043119c-8c14-4c3f-b587-4daa7d8b1dde","Type":"ContainerDied","Data":"21fc49a0f34b4faf50ae10b68998b5885a486946984b17cbbe31f330e7dda6a4"} Oct 09 15:15:50 crc kubenswrapper[4762]: I1009 15:15:50.091293 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="21fc49a0f34b4faf50ae10b68998b5885a486946984b17cbbe31f330e7dda6a4" Oct 09 15:15:50 crc kubenswrapper[4762]: I1009 15:15:50.091344 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-c2xxdf" Oct 09 15:15:53 crc kubenswrapper[4762]: I1009 15:15:53.147992 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-w7hq5"] Oct 09 15:15:53 crc kubenswrapper[4762]: E1009 15:15:53.148736 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8043119c-8c14-4c3f-b587-4daa7d8b1dde" containerName="pre-adoption-validation-openstack-pre-adoption-openstack-cell1" Oct 09 15:15:53 crc kubenswrapper[4762]: I1009 15:15:53.148756 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="8043119c-8c14-4c3f-b587-4daa7d8b1dde" containerName="pre-adoption-validation-openstack-pre-adoption-openstack-cell1" Oct 09 15:15:53 crc kubenswrapper[4762]: I1009 15:15:53.149061 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="8043119c-8c14-4c3f-b587-4daa7d8b1dde" containerName="pre-adoption-validation-openstack-pre-adoption-openstack-cell1" Oct 09 15:15:53 crc kubenswrapper[4762]: I1009 15:15:53.150087 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-w7hq5" Oct 09 15:15:53 crc kubenswrapper[4762]: I1009 15:15:53.152362 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Oct 09 15:15:53 crc kubenswrapper[4762]: I1009 15:15:53.152710 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Oct 09 15:15:53 crc kubenswrapper[4762]: I1009 15:15:53.152755 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 09 15:15:53 crc kubenswrapper[4762]: I1009 15:15:53.153595 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-whcgt" Oct 09 15:15:53 crc kubenswrapper[4762]: I1009 15:15:53.160967 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-w7hq5"] Oct 09 15:15:53 crc kubenswrapper[4762]: I1009 15:15:53.234056 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/37673896-c4d5-42e0-934f-1a36d759b2e2-ssh-key\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-w7hq5\" (UID: \"37673896-c4d5-42e0-934f-1a36d759b2e2\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-w7hq5" Oct 09 15:15:53 crc kubenswrapper[4762]: I1009 15:15:53.234136 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/37673896-c4d5-42e0-934f-1a36d759b2e2-inventory\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-w7hq5\" (UID: \"37673896-c4d5-42e0-934f-1a36d759b2e2\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-w7hq5" Oct 09 15:15:53 crc kubenswrapper[4762]: I1009 15:15:53.234207 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tripleo-cleanup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/37673896-c4d5-42e0-934f-1a36d759b2e2-tripleo-cleanup-combined-ca-bundle\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-w7hq5\" (UID: \"37673896-c4d5-42e0-934f-1a36d759b2e2\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-w7hq5" Oct 09 15:15:53 crc kubenswrapper[4762]: I1009 
15:15:53.234445 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/37673896-c4d5-42e0-934f-1a36d759b2e2-ceph\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-w7hq5\" (UID: \"37673896-c4d5-42e0-934f-1a36d759b2e2\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-w7hq5" Oct 09 15:15:53 crc kubenswrapper[4762]: I1009 15:15:53.234505 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hjx6m\" (UniqueName: \"kubernetes.io/projected/37673896-c4d5-42e0-934f-1a36d759b2e2-kube-api-access-hjx6m\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-w7hq5\" (UID: \"37673896-c4d5-42e0-934f-1a36d759b2e2\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-w7hq5" Oct 09 15:15:53 crc kubenswrapper[4762]: I1009 15:15:53.336818 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/37673896-c4d5-42e0-934f-1a36d759b2e2-ssh-key\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-w7hq5\" (UID: \"37673896-c4d5-42e0-934f-1a36d759b2e2\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-w7hq5" Oct 09 15:15:53 crc kubenswrapper[4762]: I1009 15:15:53.336866 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/37673896-c4d5-42e0-934f-1a36d759b2e2-inventory\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-w7hq5\" (UID: \"37673896-c4d5-42e0-934f-1a36d759b2e2\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-w7hq5" Oct 09 15:15:53 crc kubenswrapper[4762]: I1009 15:15:53.336899 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tripleo-cleanup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/37673896-c4d5-42e0-934f-1a36d759b2e2-tripleo-cleanup-combined-ca-bundle\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-w7hq5\" (UID: \"37673896-c4d5-42e0-934f-1a36d759b2e2\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-w7hq5" Oct 09 15:15:53 crc kubenswrapper[4762]: I1009 15:15:53.337009 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/37673896-c4d5-42e0-934f-1a36d759b2e2-ceph\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-w7hq5\" (UID: \"37673896-c4d5-42e0-934f-1a36d759b2e2\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-w7hq5" Oct 09 15:15:53 crc kubenswrapper[4762]: I1009 15:15:53.337039 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hjx6m\" (UniqueName: \"kubernetes.io/projected/37673896-c4d5-42e0-934f-1a36d759b2e2-kube-api-access-hjx6m\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-w7hq5\" (UID: \"37673896-c4d5-42e0-934f-1a36d759b2e2\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-w7hq5" Oct 09 15:15:53 crc kubenswrapper[4762]: I1009 15:15:53.342493 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/37673896-c4d5-42e0-934f-1a36d759b2e2-ssh-key\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-w7hq5\" (UID: \"37673896-c4d5-42e0-934f-1a36d759b2e2\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-w7hq5" Oct 09 15:15:53 crc kubenswrapper[4762]: I1009 15:15:53.342533 4762 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/37673896-c4d5-42e0-934f-1a36d759b2e2-ceph\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-w7hq5\" (UID: \"37673896-c4d5-42e0-934f-1a36d759b2e2\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-w7hq5" Oct 09 15:15:53 crc kubenswrapper[4762]: I1009 15:15:53.342828 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tripleo-cleanup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/37673896-c4d5-42e0-934f-1a36d759b2e2-tripleo-cleanup-combined-ca-bundle\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-w7hq5\" (UID: \"37673896-c4d5-42e0-934f-1a36d759b2e2\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-w7hq5" Oct 09 15:15:53 crc kubenswrapper[4762]: I1009 15:15:53.344126 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/37673896-c4d5-42e0-934f-1a36d759b2e2-inventory\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-w7hq5\" (UID: \"37673896-c4d5-42e0-934f-1a36d759b2e2\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-w7hq5" Oct 09 15:15:53 crc kubenswrapper[4762]: I1009 15:15:53.361370 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hjx6m\" (UniqueName: \"kubernetes.io/projected/37673896-c4d5-42e0-934f-1a36d759b2e2-kube-api-access-hjx6m\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-w7hq5\" (UID: \"37673896-c4d5-42e0-934f-1a36d759b2e2\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-w7hq5" Oct 09 15:15:53 crc kubenswrapper[4762]: I1009 15:15:53.473093 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-w7hq5" Oct 09 15:15:54 crc kubenswrapper[4762]: I1009 15:15:54.016929 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-w7hq5"] Oct 09 15:15:54 crc kubenswrapper[4762]: I1009 15:15:54.127132 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-w7hq5" event={"ID":"37673896-c4d5-42e0-934f-1a36d759b2e2","Type":"ContainerStarted","Data":"b3df50dfb6e6bf803329e362f0b2d82ea24538493584d22d7b316a853361f3b6"} Oct 09 15:15:56 crc kubenswrapper[4762]: I1009 15:15:56.148325 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-w7hq5" event={"ID":"37673896-c4d5-42e0-934f-1a36d759b2e2","Type":"ContainerStarted","Data":"b5b2db53aceb954952df31de74be47aa80cdf4a0f7f7254858662dce7dcec679"} Oct 09 15:15:56 crc kubenswrapper[4762]: I1009 15:15:56.176789 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-w7hq5" podStartSLOduration=2.196801872 podStartE2EDuration="3.176769725s" podCreationTimestamp="2025-10-09 15:15:53 +0000 UTC" firstStartedPulling="2025-10-09 15:15:54.029533443 +0000 UTC m=+6629.803324472" lastFinishedPulling="2025-10-09 15:15:55.009501276 +0000 UTC m=+6630.783292325" observedRunningTime="2025-10-09 15:15:56.171725574 +0000 UTC m=+6631.945516643" watchObservedRunningTime="2025-10-09 15:15:56.176769725 +0000 UTC m=+6631.950560764" Oct 09 15:16:23 crc kubenswrapper[4762]: I1009 15:16:23.048030 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/octavia-db-sync-xp9xs"] Oct 09 15:16:23 crc kubenswrapper[4762]: I1009 15:16:23.057697 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/octavia-db-sync-xp9xs"] Oct 09 15:16:24 crc kubenswrapper[4762]: I1009 15:16:24.982100 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="32e5f6a8-e10a-43f3-a35f-69803d46fdee" path="/var/lib/kubelet/pods/32e5f6a8-e10a-43f3-a35f-69803d46fdee/volumes" Oct 09 15:16:37 crc kubenswrapper[4762]: I1009 15:16:37.381549 4762 scope.go:117] "RemoveContainer" containerID="1be7fd6d28bed47e86c368413296198b0d95137726284d48aa391358e1e093e2" Oct 09 15:16:37 crc kubenswrapper[4762]: I1009 15:16:37.435798 4762 scope.go:117] "RemoveContainer" containerID="351e2a2a523ed33daf056da115a78fa2f6486907aa98cf76860348785e3c578b" Oct 09 15:16:41 crc kubenswrapper[4762]: I1009 15:16:41.970016 4762 patch_prober.go:28] interesting pod/machine-config-daemon-5v6hv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 15:16:41 crc kubenswrapper[4762]: I1009 15:16:41.970664 4762 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 15:17:11 crc kubenswrapper[4762]: I1009 15:17:11.968949 4762 patch_prober.go:28] interesting pod/machine-config-daemon-5v6hv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get 
\"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 15:17:11 crc kubenswrapper[4762]: I1009 15:17:11.970739 4762 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 15:17:41 crc kubenswrapper[4762]: I1009 15:17:41.968914 4762 patch_prober.go:28] interesting pod/machine-config-daemon-5v6hv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 15:17:41 crc kubenswrapper[4762]: I1009 15:17:41.969539 4762 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 15:17:41 crc kubenswrapper[4762]: I1009 15:17:41.969586 4762 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" Oct 09 15:17:41 crc kubenswrapper[4762]: I1009 15:17:41.970046 4762 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"8e2d4f723542598f292724fd00d9cd4248c9344d342367b4808c9027b72408a1"} pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 09 15:17:41 crc kubenswrapper[4762]: I1009 15:17:41.970101 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" containerID="cri-o://8e2d4f723542598f292724fd00d9cd4248c9344d342367b4808c9027b72408a1" gracePeriod=600 Oct 09 15:17:42 crc kubenswrapper[4762]: E1009 15:17:42.134510 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:17:42 crc kubenswrapper[4762]: I1009 15:17:42.213995 4762 generic.go:334] "Generic (PLEG): container finished" podID="366049a3-acf6-488c-9f93-4557528d6d14" containerID="8e2d4f723542598f292724fd00d9cd4248c9344d342367b4808c9027b72408a1" exitCode=0 Oct 09 15:17:42 crc kubenswrapper[4762]: I1009 15:17:42.214042 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" event={"ID":"366049a3-acf6-488c-9f93-4557528d6d14","Type":"ContainerDied","Data":"8e2d4f723542598f292724fd00d9cd4248c9344d342367b4808c9027b72408a1"} Oct 09 15:17:42 crc kubenswrapper[4762]: I1009 15:17:42.214073 4762 scope.go:117] "RemoveContainer" containerID="6dfc6618e94878981ab72abe0029ff9f6296087febe811d4f6b3bbf2f5ecbf05" Oct 09 
15:17:42 crc kubenswrapper[4762]: I1009 15:17:42.214855 4762 scope.go:117] "RemoveContainer" containerID="8e2d4f723542598f292724fd00d9cd4248c9344d342367b4808c9027b72408a1" Oct 09 15:17:42 crc kubenswrapper[4762]: E1009 15:17:42.215211 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:17:53 crc kubenswrapper[4762]: I1009 15:17:53.965338 4762 scope.go:117] "RemoveContainer" containerID="8e2d4f723542598f292724fd00d9cd4248c9344d342367b4808c9027b72408a1" Oct 09 15:17:53 crc kubenswrapper[4762]: E1009 15:17:53.967122 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:18:07 crc kubenswrapper[4762]: I1009 15:18:07.966164 4762 scope.go:117] "RemoveContainer" containerID="8e2d4f723542598f292724fd00d9cd4248c9344d342367b4808c9027b72408a1" Oct 09 15:18:07 crc kubenswrapper[4762]: E1009 15:18:07.966900 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:18:21 crc kubenswrapper[4762]: I1009 15:18:21.966171 4762 scope.go:117] "RemoveContainer" containerID="8e2d4f723542598f292724fd00d9cd4248c9344d342367b4808c9027b72408a1" Oct 09 15:18:21 crc kubenswrapper[4762]: E1009 15:18:21.967176 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:18:30 crc kubenswrapper[4762]: I1009 15:18:30.872996 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-6qxzx"] Oct 09 15:18:30 crc kubenswrapper[4762]: I1009 15:18:30.876130 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-6qxzx" Oct 09 15:18:30 crc kubenswrapper[4762]: I1009 15:18:30.888910 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-6qxzx"] Oct 09 15:18:30 crc kubenswrapper[4762]: I1009 15:18:30.945065 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9c5pk\" (UniqueName: \"kubernetes.io/projected/1889aad4-7ec0-40a1-a6ac-08a3df8e33ce-kube-api-access-9c5pk\") pod \"redhat-operators-6qxzx\" (UID: \"1889aad4-7ec0-40a1-a6ac-08a3df8e33ce\") " pod="openshift-marketplace/redhat-operators-6qxzx" Oct 09 15:18:30 crc kubenswrapper[4762]: I1009 15:18:30.945395 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1889aad4-7ec0-40a1-a6ac-08a3df8e33ce-catalog-content\") pod \"redhat-operators-6qxzx\" (UID: \"1889aad4-7ec0-40a1-a6ac-08a3df8e33ce\") " pod="openshift-marketplace/redhat-operators-6qxzx" Oct 09 15:18:30 crc kubenswrapper[4762]: I1009 15:18:30.945545 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1889aad4-7ec0-40a1-a6ac-08a3df8e33ce-utilities\") pod \"redhat-operators-6qxzx\" (UID: \"1889aad4-7ec0-40a1-a6ac-08a3df8e33ce\") " pod="openshift-marketplace/redhat-operators-6qxzx" Oct 09 15:18:31 crc kubenswrapper[4762]: I1009 15:18:31.048803 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1889aad4-7ec0-40a1-a6ac-08a3df8e33ce-utilities\") pod \"redhat-operators-6qxzx\" (UID: \"1889aad4-7ec0-40a1-a6ac-08a3df8e33ce\") " pod="openshift-marketplace/redhat-operators-6qxzx" Oct 09 15:18:31 crc kubenswrapper[4762]: I1009 15:18:31.049046 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9c5pk\" (UniqueName: \"kubernetes.io/projected/1889aad4-7ec0-40a1-a6ac-08a3df8e33ce-kube-api-access-9c5pk\") pod \"redhat-operators-6qxzx\" (UID: \"1889aad4-7ec0-40a1-a6ac-08a3df8e33ce\") " pod="openshift-marketplace/redhat-operators-6qxzx" Oct 09 15:18:31 crc kubenswrapper[4762]: I1009 15:18:31.049139 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1889aad4-7ec0-40a1-a6ac-08a3df8e33ce-catalog-content\") pod \"redhat-operators-6qxzx\" (UID: \"1889aad4-7ec0-40a1-a6ac-08a3df8e33ce\") " pod="openshift-marketplace/redhat-operators-6qxzx" Oct 09 15:18:31 crc kubenswrapper[4762]: I1009 15:18:31.049865 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1889aad4-7ec0-40a1-a6ac-08a3df8e33ce-catalog-content\") pod \"redhat-operators-6qxzx\" (UID: \"1889aad4-7ec0-40a1-a6ac-08a3df8e33ce\") " pod="openshift-marketplace/redhat-operators-6qxzx" Oct 09 15:18:31 crc kubenswrapper[4762]: I1009 15:18:31.049931 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1889aad4-7ec0-40a1-a6ac-08a3df8e33ce-utilities\") pod \"redhat-operators-6qxzx\" (UID: \"1889aad4-7ec0-40a1-a6ac-08a3df8e33ce\") " pod="openshift-marketplace/redhat-operators-6qxzx" Oct 09 15:18:31 crc kubenswrapper[4762]: I1009 15:18:31.073071 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-9c5pk\" (UniqueName: \"kubernetes.io/projected/1889aad4-7ec0-40a1-a6ac-08a3df8e33ce-kube-api-access-9c5pk\") pod \"redhat-operators-6qxzx\" (UID: \"1889aad4-7ec0-40a1-a6ac-08a3df8e33ce\") " pod="openshift-marketplace/redhat-operators-6qxzx" Oct 09 15:18:31 crc kubenswrapper[4762]: I1009 15:18:31.212001 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-6qxzx" Oct 09 15:18:31 crc kubenswrapper[4762]: I1009 15:18:31.844962 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-6qxzx"] Oct 09 15:18:32 crc kubenswrapper[4762]: I1009 15:18:32.693730 4762 generic.go:334] "Generic (PLEG): container finished" podID="1889aad4-7ec0-40a1-a6ac-08a3df8e33ce" containerID="61920d36c8c12356ddf6fb4e4f208a64698d346a2f655612618d16aedbbd3009" exitCode=0 Oct 09 15:18:32 crc kubenswrapper[4762]: I1009 15:18:32.693809 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6qxzx" event={"ID":"1889aad4-7ec0-40a1-a6ac-08a3df8e33ce","Type":"ContainerDied","Data":"61920d36c8c12356ddf6fb4e4f208a64698d346a2f655612618d16aedbbd3009"} Oct 09 15:18:32 crc kubenswrapper[4762]: I1009 15:18:32.694251 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6qxzx" event={"ID":"1889aad4-7ec0-40a1-a6ac-08a3df8e33ce","Type":"ContainerStarted","Data":"5d174e21d7d7ea51006c99e78189b5e2447f23b35cb27f78cfd967d37a7d2fca"} Oct 09 15:18:32 crc kubenswrapper[4762]: I1009 15:18:32.696387 4762 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 09 15:18:33 crc kubenswrapper[4762]: I1009 15:18:33.965112 4762 scope.go:117] "RemoveContainer" containerID="8e2d4f723542598f292724fd00d9cd4248c9344d342367b4808c9027b72408a1" Oct 09 15:18:33 crc kubenswrapper[4762]: E1009 15:18:33.966359 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:18:34 crc kubenswrapper[4762]: I1009 15:18:34.716257 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6qxzx" event={"ID":"1889aad4-7ec0-40a1-a6ac-08a3df8e33ce","Type":"ContainerStarted","Data":"a1fc6f9f4ac2da187ddaefb8506d4be40beb65ba6ed92e16b9f4bee5ca8e39b9"} Oct 09 15:18:45 crc kubenswrapper[4762]: I1009 15:18:45.823432 4762 generic.go:334] "Generic (PLEG): container finished" podID="1889aad4-7ec0-40a1-a6ac-08a3df8e33ce" containerID="a1fc6f9f4ac2da187ddaefb8506d4be40beb65ba6ed92e16b9f4bee5ca8e39b9" exitCode=0 Oct 09 15:18:45 crc kubenswrapper[4762]: I1009 15:18:45.823515 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6qxzx" event={"ID":"1889aad4-7ec0-40a1-a6ac-08a3df8e33ce","Type":"ContainerDied","Data":"a1fc6f9f4ac2da187ddaefb8506d4be40beb65ba6ed92e16b9f4bee5ca8e39b9"} Oct 09 15:18:47 crc kubenswrapper[4762]: I1009 15:18:47.853083 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6qxzx" 
event={"ID":"1889aad4-7ec0-40a1-a6ac-08a3df8e33ce","Type":"ContainerStarted","Data":"6f81ef3b779e5df7714d15983d48aeea0f5bf9aed4d7600e0c79a34a4ccdb72a"} Oct 09 15:18:47 crc kubenswrapper[4762]: I1009 15:18:47.885398 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-6qxzx" podStartSLOduration=3.821634 podStartE2EDuration="17.885376387s" podCreationTimestamp="2025-10-09 15:18:30 +0000 UTC" firstStartedPulling="2025-10-09 15:18:32.696183613 +0000 UTC m=+6788.469974652" lastFinishedPulling="2025-10-09 15:18:46.75992599 +0000 UTC m=+6802.533717039" observedRunningTime="2025-10-09 15:18:47.873111327 +0000 UTC m=+6803.646902366" watchObservedRunningTime="2025-10-09 15:18:47.885376387 +0000 UTC m=+6803.659167436" Oct 09 15:18:48 crc kubenswrapper[4762]: I1009 15:18:48.965443 4762 scope.go:117] "RemoveContainer" containerID="8e2d4f723542598f292724fd00d9cd4248c9344d342367b4808c9027b72408a1" Oct 09 15:18:48 crc kubenswrapper[4762]: E1009 15:18:48.965891 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:18:51 crc kubenswrapper[4762]: I1009 15:18:51.212766 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-6qxzx" Oct 09 15:18:51 crc kubenswrapper[4762]: I1009 15:18:51.213384 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-6qxzx" Oct 09 15:18:52 crc kubenswrapper[4762]: I1009 15:18:52.261673 4762 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-6qxzx" podUID="1889aad4-7ec0-40a1-a6ac-08a3df8e33ce" containerName="registry-server" probeResult="failure" output=< Oct 09 15:18:52 crc kubenswrapper[4762]: timeout: failed to connect service ":50051" within 1s Oct 09 15:18:52 crc kubenswrapper[4762]: > Oct 09 15:19:00 crc kubenswrapper[4762]: I1009 15:19:00.965959 4762 scope.go:117] "RemoveContainer" containerID="8e2d4f723542598f292724fd00d9cd4248c9344d342367b4808c9027b72408a1" Oct 09 15:19:00 crc kubenswrapper[4762]: E1009 15:19:00.966711 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:19:02 crc kubenswrapper[4762]: I1009 15:19:02.269832 4762 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-6qxzx" podUID="1889aad4-7ec0-40a1-a6ac-08a3df8e33ce" containerName="registry-server" probeResult="failure" output=< Oct 09 15:19:02 crc kubenswrapper[4762]: timeout: failed to connect service ":50051" within 1s Oct 09 15:19:02 crc kubenswrapper[4762]: > Oct 09 15:19:05 crc kubenswrapper[4762]: I1009 15:19:05.779422 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-vq5wf"] Oct 09 15:19:05 crc kubenswrapper[4762]: I1009 
15:19:05.784467 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vq5wf" Oct 09 15:19:05 crc kubenswrapper[4762]: I1009 15:19:05.813418 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-vq5wf"] Oct 09 15:19:05 crc kubenswrapper[4762]: I1009 15:19:05.846114 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/980803e1-b998-4f4a-9e11-64d32628fe04-utilities\") pod \"certified-operators-vq5wf\" (UID: \"980803e1-b998-4f4a-9e11-64d32628fe04\") " pod="openshift-marketplace/certified-operators-vq5wf" Oct 09 15:19:05 crc kubenswrapper[4762]: I1009 15:19:05.846308 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/980803e1-b998-4f4a-9e11-64d32628fe04-catalog-content\") pod \"certified-operators-vq5wf\" (UID: \"980803e1-b998-4f4a-9e11-64d32628fe04\") " pod="openshift-marketplace/certified-operators-vq5wf" Oct 09 15:19:05 crc kubenswrapper[4762]: I1009 15:19:05.846494 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b65df\" (UniqueName: \"kubernetes.io/projected/980803e1-b998-4f4a-9e11-64d32628fe04-kube-api-access-b65df\") pod \"certified-operators-vq5wf\" (UID: \"980803e1-b998-4f4a-9e11-64d32628fe04\") " pod="openshift-marketplace/certified-operators-vq5wf" Oct 09 15:19:05 crc kubenswrapper[4762]: I1009 15:19:05.948996 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/980803e1-b998-4f4a-9e11-64d32628fe04-utilities\") pod \"certified-operators-vq5wf\" (UID: \"980803e1-b998-4f4a-9e11-64d32628fe04\") " pod="openshift-marketplace/certified-operators-vq5wf" Oct 09 15:19:05 crc kubenswrapper[4762]: I1009 15:19:05.949125 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/980803e1-b998-4f4a-9e11-64d32628fe04-catalog-content\") pod \"certified-operators-vq5wf\" (UID: \"980803e1-b998-4f4a-9e11-64d32628fe04\") " pod="openshift-marketplace/certified-operators-vq5wf" Oct 09 15:19:05 crc kubenswrapper[4762]: I1009 15:19:05.949161 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b65df\" (UniqueName: \"kubernetes.io/projected/980803e1-b998-4f4a-9e11-64d32628fe04-kube-api-access-b65df\") pod \"certified-operators-vq5wf\" (UID: \"980803e1-b998-4f4a-9e11-64d32628fe04\") " pod="openshift-marketplace/certified-operators-vq5wf" Oct 09 15:19:05 crc kubenswrapper[4762]: I1009 15:19:05.949626 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/980803e1-b998-4f4a-9e11-64d32628fe04-catalog-content\") pod \"certified-operators-vq5wf\" (UID: \"980803e1-b998-4f4a-9e11-64d32628fe04\") " pod="openshift-marketplace/certified-operators-vq5wf" Oct 09 15:19:05 crc kubenswrapper[4762]: I1009 15:19:05.949895 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/980803e1-b998-4f4a-9e11-64d32628fe04-utilities\") pod \"certified-operators-vq5wf\" (UID: \"980803e1-b998-4f4a-9e11-64d32628fe04\") " pod="openshift-marketplace/certified-operators-vq5wf" Oct 09 15:19:05 crc 
kubenswrapper[4762]: I1009 15:19:05.976616 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b65df\" (UniqueName: \"kubernetes.io/projected/980803e1-b998-4f4a-9e11-64d32628fe04-kube-api-access-b65df\") pod \"certified-operators-vq5wf\" (UID: \"980803e1-b998-4f4a-9e11-64d32628fe04\") " pod="openshift-marketplace/certified-operators-vq5wf" Oct 09 15:19:06 crc kubenswrapper[4762]: I1009 15:19:06.118014 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vq5wf" Oct 09 15:19:06 crc kubenswrapper[4762]: I1009 15:19:06.719984 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-vq5wf"] Oct 09 15:19:07 crc kubenswrapper[4762]: I1009 15:19:07.055848 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vq5wf" event={"ID":"980803e1-b998-4f4a-9e11-64d32628fe04","Type":"ContainerStarted","Data":"dd578b8950ab4c01655c09fe878083735b68cb33cb3d6be33c8a4d4cfc01e833"} Oct 09 15:19:08 crc kubenswrapper[4762]: I1009 15:19:08.067265 4762 generic.go:334] "Generic (PLEG): container finished" podID="980803e1-b998-4f4a-9e11-64d32628fe04" containerID="e9e4ba3148d3093bede02cb9914bf2fe7530d3df8d6ba00dd149d0c59dae15b5" exitCode=0 Oct 09 15:19:08 crc kubenswrapper[4762]: I1009 15:19:08.067320 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vq5wf" event={"ID":"980803e1-b998-4f4a-9e11-64d32628fe04","Type":"ContainerDied","Data":"e9e4ba3148d3093bede02cb9914bf2fe7530d3df8d6ba00dd149d0c59dae15b5"} Oct 09 15:19:10 crc kubenswrapper[4762]: I1009 15:19:10.102728 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vq5wf" event={"ID":"980803e1-b998-4f4a-9e11-64d32628fe04","Type":"ContainerStarted","Data":"870e0ea8588219e3ac9f25025abe7d313f0788b8930e568feb24c3647b39a9c1"} Oct 09 15:19:12 crc kubenswrapper[4762]: I1009 15:19:12.262214 4762 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-6qxzx" podUID="1889aad4-7ec0-40a1-a6ac-08a3df8e33ce" containerName="registry-server" probeResult="failure" output=< Oct 09 15:19:12 crc kubenswrapper[4762]: timeout: failed to connect service ":50051" within 1s Oct 09 15:19:12 crc kubenswrapper[4762]: > Oct 09 15:19:13 crc kubenswrapper[4762]: I1009 15:19:13.133732 4762 generic.go:334] "Generic (PLEG): container finished" podID="980803e1-b998-4f4a-9e11-64d32628fe04" containerID="870e0ea8588219e3ac9f25025abe7d313f0788b8930e568feb24c3647b39a9c1" exitCode=0 Oct 09 15:19:13 crc kubenswrapper[4762]: I1009 15:19:13.133824 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vq5wf" event={"ID":"980803e1-b998-4f4a-9e11-64d32628fe04","Type":"ContainerDied","Data":"870e0ea8588219e3ac9f25025abe7d313f0788b8930e568feb24c3647b39a9c1"} Oct 09 15:19:13 crc kubenswrapper[4762]: I1009 15:19:13.966025 4762 scope.go:117] "RemoveContainer" containerID="8e2d4f723542598f292724fd00d9cd4248c9344d342367b4808c9027b72408a1" Oct 09 15:19:13 crc kubenswrapper[4762]: E1009 15:19:13.967030 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:19:14 crc kubenswrapper[4762]: I1009 15:19:14.148135 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vq5wf" event={"ID":"980803e1-b998-4f4a-9e11-64d32628fe04","Type":"ContainerStarted","Data":"bb09e998f6d97ef819cd0b61fe40451186fae17339bcde5144cb9d1cd9cc6484"} Oct 09 15:19:16 crc kubenswrapper[4762]: I1009 15:19:16.118149 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-vq5wf" Oct 09 15:19:16 crc kubenswrapper[4762]: I1009 15:19:16.118499 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-vq5wf" Oct 09 15:19:17 crc kubenswrapper[4762]: I1009 15:19:17.181865 4762 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-vq5wf" podUID="980803e1-b998-4f4a-9e11-64d32628fe04" containerName="registry-server" probeResult="failure" output=< Oct 09 15:19:17 crc kubenswrapper[4762]: timeout: failed to connect service ":50051" within 1s Oct 09 15:19:17 crc kubenswrapper[4762]: > Oct 09 15:19:22 crc kubenswrapper[4762]: I1009 15:19:22.258801 4762 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-6qxzx" podUID="1889aad4-7ec0-40a1-a6ac-08a3df8e33ce" containerName="registry-server" probeResult="failure" output=< Oct 09 15:19:22 crc kubenswrapper[4762]: timeout: failed to connect service ":50051" within 1s Oct 09 15:19:22 crc kubenswrapper[4762]: > Oct 09 15:19:26 crc kubenswrapper[4762]: I1009 15:19:26.029122 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-vq5wf" podStartSLOduration=15.4017968 podStartE2EDuration="21.029100146s" podCreationTimestamp="2025-10-09 15:19:05 +0000 UTC" firstStartedPulling="2025-10-09 15:19:08.070685739 +0000 UTC m=+6823.844476778" lastFinishedPulling="2025-10-09 15:19:13.697989075 +0000 UTC m=+6829.471780124" observedRunningTime="2025-10-09 15:19:14.167907752 +0000 UTC m=+6829.941698791" watchObservedRunningTime="2025-10-09 15:19:26.029100146 +0000 UTC m=+6841.802891185" Oct 09 15:19:26 crc kubenswrapper[4762]: I1009 15:19:26.036015 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-db-create-wg5pg"] Oct 09 15:19:26 crc kubenswrapper[4762]: I1009 15:19:26.045621 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/heat-db-create-wg5pg"] Oct 09 15:19:26 crc kubenswrapper[4762]: I1009 15:19:26.172029 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-vq5wf" Oct 09 15:19:26 crc kubenswrapper[4762]: I1009 15:19:26.232877 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-vq5wf" Oct 09 15:19:26 crc kubenswrapper[4762]: I1009 15:19:26.409616 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-vq5wf"] Oct 09 15:19:26 crc kubenswrapper[4762]: I1009 15:19:26.977996 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b354ad3f-4a44-45e4-b3ff-f3efce2b0640" path="/var/lib/kubelet/pods/b354ad3f-4a44-45e4-b3ff-f3efce2b0640/volumes" Oct 09 15:19:27 crc kubenswrapper[4762]: I1009 15:19:27.288951 4762 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openshift-marketplace/certified-operators-vq5wf" podUID="980803e1-b998-4f4a-9e11-64d32628fe04" containerName="registry-server" containerID="cri-o://bb09e998f6d97ef819cd0b61fe40451186fae17339bcde5144cb9d1cd9cc6484" gracePeriod=2 Oct 09 15:19:27 crc kubenswrapper[4762]: I1009 15:19:27.728157 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vq5wf" Oct 09 15:19:27 crc kubenswrapper[4762]: I1009 15:19:27.808435 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/980803e1-b998-4f4a-9e11-64d32628fe04-catalog-content\") pod \"980803e1-b998-4f4a-9e11-64d32628fe04\" (UID: \"980803e1-b998-4f4a-9e11-64d32628fe04\") " Oct 09 15:19:27 crc kubenswrapper[4762]: I1009 15:19:27.808584 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/980803e1-b998-4f4a-9e11-64d32628fe04-utilities\") pod \"980803e1-b998-4f4a-9e11-64d32628fe04\" (UID: \"980803e1-b998-4f4a-9e11-64d32628fe04\") " Oct 09 15:19:27 crc kubenswrapper[4762]: I1009 15:19:27.809005 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b65df\" (UniqueName: \"kubernetes.io/projected/980803e1-b998-4f4a-9e11-64d32628fe04-kube-api-access-b65df\") pod \"980803e1-b998-4f4a-9e11-64d32628fe04\" (UID: \"980803e1-b998-4f4a-9e11-64d32628fe04\") " Oct 09 15:19:27 crc kubenswrapper[4762]: I1009 15:19:27.810125 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/980803e1-b998-4f4a-9e11-64d32628fe04-utilities" (OuterVolumeSpecName: "utilities") pod "980803e1-b998-4f4a-9e11-64d32628fe04" (UID: "980803e1-b998-4f4a-9e11-64d32628fe04"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 15:19:27 crc kubenswrapper[4762]: I1009 15:19:27.815385 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/980803e1-b998-4f4a-9e11-64d32628fe04-kube-api-access-b65df" (OuterVolumeSpecName: "kube-api-access-b65df") pod "980803e1-b998-4f4a-9e11-64d32628fe04" (UID: "980803e1-b998-4f4a-9e11-64d32628fe04"). InnerVolumeSpecName "kube-api-access-b65df". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 15:19:27 crc kubenswrapper[4762]: I1009 15:19:27.856520 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/980803e1-b998-4f4a-9e11-64d32628fe04-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "980803e1-b998-4f4a-9e11-64d32628fe04" (UID: "980803e1-b998-4f4a-9e11-64d32628fe04"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 15:19:27 crc kubenswrapper[4762]: I1009 15:19:27.911062 4762 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/980803e1-b998-4f4a-9e11-64d32628fe04-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 15:19:27 crc kubenswrapper[4762]: I1009 15:19:27.911322 4762 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/980803e1-b998-4f4a-9e11-64d32628fe04-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 15:19:27 crc kubenswrapper[4762]: I1009 15:19:27.911390 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b65df\" (UniqueName: \"kubernetes.io/projected/980803e1-b998-4f4a-9e11-64d32628fe04-kube-api-access-b65df\") on node \"crc\" DevicePath \"\"" Oct 09 15:19:28 crc kubenswrapper[4762]: I1009 15:19:28.302697 4762 generic.go:334] "Generic (PLEG): container finished" podID="980803e1-b998-4f4a-9e11-64d32628fe04" containerID="bb09e998f6d97ef819cd0b61fe40451186fae17339bcde5144cb9d1cd9cc6484" exitCode=0 Oct 09 15:19:28 crc kubenswrapper[4762]: I1009 15:19:28.302764 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vq5wf" Oct 09 15:19:28 crc kubenswrapper[4762]: I1009 15:19:28.302785 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vq5wf" event={"ID":"980803e1-b998-4f4a-9e11-64d32628fe04","Type":"ContainerDied","Data":"bb09e998f6d97ef819cd0b61fe40451186fae17339bcde5144cb9d1cd9cc6484"} Oct 09 15:19:28 crc kubenswrapper[4762]: I1009 15:19:28.303137 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vq5wf" event={"ID":"980803e1-b998-4f4a-9e11-64d32628fe04","Type":"ContainerDied","Data":"dd578b8950ab4c01655c09fe878083735b68cb33cb3d6be33c8a4d4cfc01e833"} Oct 09 15:19:28 crc kubenswrapper[4762]: I1009 15:19:28.303166 4762 scope.go:117] "RemoveContainer" containerID="bb09e998f6d97ef819cd0b61fe40451186fae17339bcde5144cb9d1cd9cc6484" Oct 09 15:19:28 crc kubenswrapper[4762]: I1009 15:19:28.336709 4762 scope.go:117] "RemoveContainer" containerID="870e0ea8588219e3ac9f25025abe7d313f0788b8930e568feb24c3647b39a9c1" Oct 09 15:19:28 crc kubenswrapper[4762]: I1009 15:19:28.342822 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-vq5wf"] Oct 09 15:19:28 crc kubenswrapper[4762]: I1009 15:19:28.351036 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-vq5wf"] Oct 09 15:19:28 crc kubenswrapper[4762]: I1009 15:19:28.359125 4762 scope.go:117] "RemoveContainer" containerID="e9e4ba3148d3093bede02cb9914bf2fe7530d3df8d6ba00dd149d0c59dae15b5" Oct 09 15:19:28 crc kubenswrapper[4762]: I1009 15:19:28.409821 4762 scope.go:117] "RemoveContainer" containerID="bb09e998f6d97ef819cd0b61fe40451186fae17339bcde5144cb9d1cd9cc6484" Oct 09 15:19:28 crc kubenswrapper[4762]: E1009 15:19:28.411223 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bb09e998f6d97ef819cd0b61fe40451186fae17339bcde5144cb9d1cd9cc6484\": container with ID starting with bb09e998f6d97ef819cd0b61fe40451186fae17339bcde5144cb9d1cd9cc6484 not found: ID does not exist" containerID="bb09e998f6d97ef819cd0b61fe40451186fae17339bcde5144cb9d1cd9cc6484" Oct 09 15:19:28 crc kubenswrapper[4762]: I1009 15:19:28.411273 
4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bb09e998f6d97ef819cd0b61fe40451186fae17339bcde5144cb9d1cd9cc6484"} err="failed to get container status \"bb09e998f6d97ef819cd0b61fe40451186fae17339bcde5144cb9d1cd9cc6484\": rpc error: code = NotFound desc = could not find container \"bb09e998f6d97ef819cd0b61fe40451186fae17339bcde5144cb9d1cd9cc6484\": container with ID starting with bb09e998f6d97ef819cd0b61fe40451186fae17339bcde5144cb9d1cd9cc6484 not found: ID does not exist" Oct 09 15:19:28 crc kubenswrapper[4762]: I1009 15:19:28.411309 4762 scope.go:117] "RemoveContainer" containerID="870e0ea8588219e3ac9f25025abe7d313f0788b8930e568feb24c3647b39a9c1" Oct 09 15:19:28 crc kubenswrapper[4762]: E1009 15:19:28.412810 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"870e0ea8588219e3ac9f25025abe7d313f0788b8930e568feb24c3647b39a9c1\": container with ID starting with 870e0ea8588219e3ac9f25025abe7d313f0788b8930e568feb24c3647b39a9c1 not found: ID does not exist" containerID="870e0ea8588219e3ac9f25025abe7d313f0788b8930e568feb24c3647b39a9c1" Oct 09 15:19:28 crc kubenswrapper[4762]: I1009 15:19:28.412841 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"870e0ea8588219e3ac9f25025abe7d313f0788b8930e568feb24c3647b39a9c1"} err="failed to get container status \"870e0ea8588219e3ac9f25025abe7d313f0788b8930e568feb24c3647b39a9c1\": rpc error: code = NotFound desc = could not find container \"870e0ea8588219e3ac9f25025abe7d313f0788b8930e568feb24c3647b39a9c1\": container with ID starting with 870e0ea8588219e3ac9f25025abe7d313f0788b8930e568feb24c3647b39a9c1 not found: ID does not exist" Oct 09 15:19:28 crc kubenswrapper[4762]: I1009 15:19:28.412858 4762 scope.go:117] "RemoveContainer" containerID="e9e4ba3148d3093bede02cb9914bf2fe7530d3df8d6ba00dd149d0c59dae15b5" Oct 09 15:19:28 crc kubenswrapper[4762]: E1009 15:19:28.413272 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e9e4ba3148d3093bede02cb9914bf2fe7530d3df8d6ba00dd149d0c59dae15b5\": container with ID starting with e9e4ba3148d3093bede02cb9914bf2fe7530d3df8d6ba00dd149d0c59dae15b5 not found: ID does not exist" containerID="e9e4ba3148d3093bede02cb9914bf2fe7530d3df8d6ba00dd149d0c59dae15b5" Oct 09 15:19:28 crc kubenswrapper[4762]: I1009 15:19:28.413306 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e9e4ba3148d3093bede02cb9914bf2fe7530d3df8d6ba00dd149d0c59dae15b5"} err="failed to get container status \"e9e4ba3148d3093bede02cb9914bf2fe7530d3df8d6ba00dd149d0c59dae15b5\": rpc error: code = NotFound desc = could not find container \"e9e4ba3148d3093bede02cb9914bf2fe7530d3df8d6ba00dd149d0c59dae15b5\": container with ID starting with e9e4ba3148d3093bede02cb9914bf2fe7530d3df8d6ba00dd149d0c59dae15b5 not found: ID does not exist" Oct 09 15:19:28 crc kubenswrapper[4762]: I1009 15:19:28.965473 4762 scope.go:117] "RemoveContainer" containerID="8e2d4f723542598f292724fd00d9cd4248c9344d342367b4808c9027b72408a1" Oct 09 15:19:28 crc kubenswrapper[4762]: E1009 15:19:28.965830 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:19:28 crc kubenswrapper[4762]: I1009 15:19:28.977009 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="980803e1-b998-4f4a-9e11-64d32628fe04" path="/var/lib/kubelet/pods/980803e1-b998-4f4a-9e11-64d32628fe04/volumes" Oct 09 15:19:31 crc kubenswrapper[4762]: I1009 15:19:31.272771 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-6qxzx" Oct 09 15:19:31 crc kubenswrapper[4762]: I1009 15:19:31.325917 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-6qxzx" Oct 09 15:19:31 crc kubenswrapper[4762]: I1009 15:19:31.807286 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-6qxzx"] Oct 09 15:19:32 crc kubenswrapper[4762]: I1009 15:19:32.340377 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-6qxzx" podUID="1889aad4-7ec0-40a1-a6ac-08a3df8e33ce" containerName="registry-server" containerID="cri-o://6f81ef3b779e5df7714d15983d48aeea0f5bf9aed4d7600e0c79a34a4ccdb72a" gracePeriod=2 Oct 09 15:19:32 crc kubenswrapper[4762]: I1009 15:19:32.849766 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-6qxzx" Oct 09 15:19:32 crc kubenswrapper[4762]: I1009 15:19:32.922116 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9c5pk\" (UniqueName: \"kubernetes.io/projected/1889aad4-7ec0-40a1-a6ac-08a3df8e33ce-kube-api-access-9c5pk\") pod \"1889aad4-7ec0-40a1-a6ac-08a3df8e33ce\" (UID: \"1889aad4-7ec0-40a1-a6ac-08a3df8e33ce\") " Oct 09 15:19:32 crc kubenswrapper[4762]: I1009 15:19:32.922248 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1889aad4-7ec0-40a1-a6ac-08a3df8e33ce-catalog-content\") pod \"1889aad4-7ec0-40a1-a6ac-08a3df8e33ce\" (UID: \"1889aad4-7ec0-40a1-a6ac-08a3df8e33ce\") " Oct 09 15:19:32 crc kubenswrapper[4762]: I1009 15:19:32.922448 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1889aad4-7ec0-40a1-a6ac-08a3df8e33ce-utilities\") pod \"1889aad4-7ec0-40a1-a6ac-08a3df8e33ce\" (UID: \"1889aad4-7ec0-40a1-a6ac-08a3df8e33ce\") " Oct 09 15:19:32 crc kubenswrapper[4762]: I1009 15:19:32.924037 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1889aad4-7ec0-40a1-a6ac-08a3df8e33ce-utilities" (OuterVolumeSpecName: "utilities") pod "1889aad4-7ec0-40a1-a6ac-08a3df8e33ce" (UID: "1889aad4-7ec0-40a1-a6ac-08a3df8e33ce"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 15:19:32 crc kubenswrapper[4762]: I1009 15:19:32.939831 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1889aad4-7ec0-40a1-a6ac-08a3df8e33ce-kube-api-access-9c5pk" (OuterVolumeSpecName: "kube-api-access-9c5pk") pod "1889aad4-7ec0-40a1-a6ac-08a3df8e33ce" (UID: "1889aad4-7ec0-40a1-a6ac-08a3df8e33ce"). InnerVolumeSpecName "kube-api-access-9c5pk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 15:19:33 crc kubenswrapper[4762]: I1009 15:19:33.006446 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1889aad4-7ec0-40a1-a6ac-08a3df8e33ce-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1889aad4-7ec0-40a1-a6ac-08a3df8e33ce" (UID: "1889aad4-7ec0-40a1-a6ac-08a3df8e33ce"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 15:19:33 crc kubenswrapper[4762]: I1009 15:19:33.025503 4762 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1889aad4-7ec0-40a1-a6ac-08a3df8e33ce-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 15:19:33 crc kubenswrapper[4762]: I1009 15:19:33.025659 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9c5pk\" (UniqueName: \"kubernetes.io/projected/1889aad4-7ec0-40a1-a6ac-08a3df8e33ce-kube-api-access-9c5pk\") on node \"crc\" DevicePath \"\"" Oct 09 15:19:33 crc kubenswrapper[4762]: I1009 15:19:33.025676 4762 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1889aad4-7ec0-40a1-a6ac-08a3df8e33ce-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 15:19:33 crc kubenswrapper[4762]: I1009 15:19:33.349884 4762 generic.go:334] "Generic (PLEG): container finished" podID="1889aad4-7ec0-40a1-a6ac-08a3df8e33ce" containerID="6f81ef3b779e5df7714d15983d48aeea0f5bf9aed4d7600e0c79a34a4ccdb72a" exitCode=0 Oct 09 15:19:33 crc kubenswrapper[4762]: I1009 15:19:33.349921 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6qxzx" event={"ID":"1889aad4-7ec0-40a1-a6ac-08a3df8e33ce","Type":"ContainerDied","Data":"6f81ef3b779e5df7714d15983d48aeea0f5bf9aed4d7600e0c79a34a4ccdb72a"} Oct 09 15:19:33 crc kubenswrapper[4762]: I1009 15:19:33.349952 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6qxzx" event={"ID":"1889aad4-7ec0-40a1-a6ac-08a3df8e33ce","Type":"ContainerDied","Data":"5d174e21d7d7ea51006c99e78189b5e2447f23b35cb27f78cfd967d37a7d2fca"} Oct 09 15:19:33 crc kubenswrapper[4762]: I1009 15:19:33.349951 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-6qxzx" Oct 09 15:19:33 crc kubenswrapper[4762]: I1009 15:19:33.349968 4762 scope.go:117] "RemoveContainer" containerID="6f81ef3b779e5df7714d15983d48aeea0f5bf9aed4d7600e0c79a34a4ccdb72a" Oct 09 15:19:33 crc kubenswrapper[4762]: I1009 15:19:33.370690 4762 scope.go:117] "RemoveContainer" containerID="a1fc6f9f4ac2da187ddaefb8506d4be40beb65ba6ed92e16b9f4bee5ca8e39b9" Oct 09 15:19:33 crc kubenswrapper[4762]: I1009 15:19:33.384946 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-6qxzx"] Oct 09 15:19:33 crc kubenswrapper[4762]: I1009 15:19:33.395419 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-6qxzx"] Oct 09 15:19:33 crc kubenswrapper[4762]: I1009 15:19:33.408128 4762 scope.go:117] "RemoveContainer" containerID="61920d36c8c12356ddf6fb4e4f208a64698d346a2f655612618d16aedbbd3009" Oct 09 15:19:33 crc kubenswrapper[4762]: I1009 15:19:33.440359 4762 scope.go:117] "RemoveContainer" containerID="6f81ef3b779e5df7714d15983d48aeea0f5bf9aed4d7600e0c79a34a4ccdb72a" Oct 09 15:19:33 crc kubenswrapper[4762]: E1009 15:19:33.440815 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6f81ef3b779e5df7714d15983d48aeea0f5bf9aed4d7600e0c79a34a4ccdb72a\": container with ID starting with 6f81ef3b779e5df7714d15983d48aeea0f5bf9aed4d7600e0c79a34a4ccdb72a not found: ID does not exist" containerID="6f81ef3b779e5df7714d15983d48aeea0f5bf9aed4d7600e0c79a34a4ccdb72a" Oct 09 15:19:33 crc kubenswrapper[4762]: I1009 15:19:33.440881 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6f81ef3b779e5df7714d15983d48aeea0f5bf9aed4d7600e0c79a34a4ccdb72a"} err="failed to get container status \"6f81ef3b779e5df7714d15983d48aeea0f5bf9aed4d7600e0c79a34a4ccdb72a\": rpc error: code = NotFound desc = could not find container \"6f81ef3b779e5df7714d15983d48aeea0f5bf9aed4d7600e0c79a34a4ccdb72a\": container with ID starting with 6f81ef3b779e5df7714d15983d48aeea0f5bf9aed4d7600e0c79a34a4ccdb72a not found: ID does not exist" Oct 09 15:19:33 crc kubenswrapper[4762]: I1009 15:19:33.440957 4762 scope.go:117] "RemoveContainer" containerID="a1fc6f9f4ac2da187ddaefb8506d4be40beb65ba6ed92e16b9f4bee5ca8e39b9" Oct 09 15:19:33 crc kubenswrapper[4762]: E1009 15:19:33.441332 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a1fc6f9f4ac2da187ddaefb8506d4be40beb65ba6ed92e16b9f4bee5ca8e39b9\": container with ID starting with a1fc6f9f4ac2da187ddaefb8506d4be40beb65ba6ed92e16b9f4bee5ca8e39b9 not found: ID does not exist" containerID="a1fc6f9f4ac2da187ddaefb8506d4be40beb65ba6ed92e16b9f4bee5ca8e39b9" Oct 09 15:19:33 crc kubenswrapper[4762]: I1009 15:19:33.441371 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a1fc6f9f4ac2da187ddaefb8506d4be40beb65ba6ed92e16b9f4bee5ca8e39b9"} err="failed to get container status \"a1fc6f9f4ac2da187ddaefb8506d4be40beb65ba6ed92e16b9f4bee5ca8e39b9\": rpc error: code = NotFound desc = could not find container \"a1fc6f9f4ac2da187ddaefb8506d4be40beb65ba6ed92e16b9f4bee5ca8e39b9\": container with ID starting with a1fc6f9f4ac2da187ddaefb8506d4be40beb65ba6ed92e16b9f4bee5ca8e39b9 not found: ID does not exist" Oct 09 15:19:33 crc kubenswrapper[4762]: I1009 15:19:33.441393 4762 scope.go:117] "RemoveContainer" 
containerID="61920d36c8c12356ddf6fb4e4f208a64698d346a2f655612618d16aedbbd3009" Oct 09 15:19:33 crc kubenswrapper[4762]: E1009 15:19:33.441587 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"61920d36c8c12356ddf6fb4e4f208a64698d346a2f655612618d16aedbbd3009\": container with ID starting with 61920d36c8c12356ddf6fb4e4f208a64698d346a2f655612618d16aedbbd3009 not found: ID does not exist" containerID="61920d36c8c12356ddf6fb4e4f208a64698d346a2f655612618d16aedbbd3009" Oct 09 15:19:33 crc kubenswrapper[4762]: I1009 15:19:33.441618 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"61920d36c8c12356ddf6fb4e4f208a64698d346a2f655612618d16aedbbd3009"} err="failed to get container status \"61920d36c8c12356ddf6fb4e4f208a64698d346a2f655612618d16aedbbd3009\": rpc error: code = NotFound desc = could not find container \"61920d36c8c12356ddf6fb4e4f208a64698d346a2f655612618d16aedbbd3009\": container with ID starting with 61920d36c8c12356ddf6fb4e4f208a64698d346a2f655612618d16aedbbd3009 not found: ID does not exist" Oct 09 15:19:34 crc kubenswrapper[4762]: I1009 15:19:34.977844 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1889aad4-7ec0-40a1-a6ac-08a3df8e33ce" path="/var/lib/kubelet/pods/1889aad4-7ec0-40a1-a6ac-08a3df8e33ce/volumes" Oct 09 15:19:36 crc kubenswrapper[4762]: I1009 15:19:36.031882 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-a150-account-create-k9n99"] Oct 09 15:19:36 crc kubenswrapper[4762]: I1009 15:19:36.040107 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/heat-a150-account-create-k9n99"] Oct 09 15:19:36 crc kubenswrapper[4762]: I1009 15:19:36.976847 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6e41efec-866f-49b8-b274-9c76764b4833" path="/var/lib/kubelet/pods/6e41efec-866f-49b8-b274-9c76764b4833/volumes" Oct 09 15:19:37 crc kubenswrapper[4762]: I1009 15:19:37.563737 4762 scope.go:117] "RemoveContainer" containerID="7906566976dd453da44b4542e334422f39955f4f84d26f42f9f99afb44cd5a1c" Oct 09 15:19:37 crc kubenswrapper[4762]: I1009 15:19:37.599766 4762 scope.go:117] "RemoveContainer" containerID="d7afa02855ceee3406230000672f1dc9089ed504b4220214955d5e6b409aec5e" Oct 09 15:19:40 crc kubenswrapper[4762]: I1009 15:19:40.965914 4762 scope.go:117] "RemoveContainer" containerID="8e2d4f723542598f292724fd00d9cd4248c9344d342367b4808c9027b72408a1" Oct 09 15:19:40 crc kubenswrapper[4762]: E1009 15:19:40.966687 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:19:49 crc kubenswrapper[4762]: I1009 15:19:49.041954 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-db-sync-b8d28"] Oct 09 15:19:49 crc kubenswrapper[4762]: I1009 15:19:49.051050 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/heat-db-sync-b8d28"] Oct 09 15:19:50 crc kubenswrapper[4762]: I1009 15:19:50.978223 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f5480a9a-8108-4a6a-84ee-41811ec3ae31" 
path="/var/lib/kubelet/pods/f5480a9a-8108-4a6a-84ee-41811ec3ae31/volumes" Oct 09 15:19:55 crc kubenswrapper[4762]: I1009 15:19:55.965111 4762 scope.go:117] "RemoveContainer" containerID="8e2d4f723542598f292724fd00d9cd4248c9344d342367b4808c9027b72408a1" Oct 09 15:19:55 crc kubenswrapper[4762]: E1009 15:19:55.965763 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:20:08 crc kubenswrapper[4762]: I1009 15:20:08.613953 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-9979h"] Oct 09 15:20:08 crc kubenswrapper[4762]: E1009 15:20:08.616172 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1889aad4-7ec0-40a1-a6ac-08a3df8e33ce" containerName="registry-server" Oct 09 15:20:08 crc kubenswrapper[4762]: I1009 15:20:08.616274 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="1889aad4-7ec0-40a1-a6ac-08a3df8e33ce" containerName="registry-server" Oct 09 15:20:08 crc kubenswrapper[4762]: E1009 15:20:08.616356 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="980803e1-b998-4f4a-9e11-64d32628fe04" containerName="extract-utilities" Oct 09 15:20:08 crc kubenswrapper[4762]: I1009 15:20:08.616426 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="980803e1-b998-4f4a-9e11-64d32628fe04" containerName="extract-utilities" Oct 09 15:20:08 crc kubenswrapper[4762]: E1009 15:20:08.616533 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="980803e1-b998-4f4a-9e11-64d32628fe04" containerName="extract-content" Oct 09 15:20:08 crc kubenswrapper[4762]: I1009 15:20:08.616604 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="980803e1-b998-4f4a-9e11-64d32628fe04" containerName="extract-content" Oct 09 15:20:08 crc kubenswrapper[4762]: E1009 15:20:08.616703 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1889aad4-7ec0-40a1-a6ac-08a3df8e33ce" containerName="extract-content" Oct 09 15:20:08 crc kubenswrapper[4762]: I1009 15:20:08.616782 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="1889aad4-7ec0-40a1-a6ac-08a3df8e33ce" containerName="extract-content" Oct 09 15:20:08 crc kubenswrapper[4762]: E1009 15:20:08.616869 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="980803e1-b998-4f4a-9e11-64d32628fe04" containerName="registry-server" Oct 09 15:20:08 crc kubenswrapper[4762]: I1009 15:20:08.616938 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="980803e1-b998-4f4a-9e11-64d32628fe04" containerName="registry-server" Oct 09 15:20:08 crc kubenswrapper[4762]: E1009 15:20:08.617025 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1889aad4-7ec0-40a1-a6ac-08a3df8e33ce" containerName="extract-utilities" Oct 09 15:20:08 crc kubenswrapper[4762]: I1009 15:20:08.617095 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="1889aad4-7ec0-40a1-a6ac-08a3df8e33ce" containerName="extract-utilities" Oct 09 15:20:08 crc kubenswrapper[4762]: I1009 15:20:08.617407 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="980803e1-b998-4f4a-9e11-64d32628fe04" containerName="registry-server" Oct 09 15:20:08 crc kubenswrapper[4762]: 
I1009 15:20:08.617506 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="1889aad4-7ec0-40a1-a6ac-08a3df8e33ce" containerName="registry-server" Oct 09 15:20:08 crc kubenswrapper[4762]: I1009 15:20:08.619502 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-9979h" Oct 09 15:20:08 crc kubenswrapper[4762]: I1009 15:20:08.623243 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-9979h"] Oct 09 15:20:08 crc kubenswrapper[4762]: I1009 15:20:08.692356 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9352e39f-8f75-4103-a3cc-45e2cba809ed-utilities\") pod \"community-operators-9979h\" (UID: \"9352e39f-8f75-4103-a3cc-45e2cba809ed\") " pod="openshift-marketplace/community-operators-9979h" Oct 09 15:20:08 crc kubenswrapper[4762]: I1009 15:20:08.692776 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8hp7w\" (UniqueName: \"kubernetes.io/projected/9352e39f-8f75-4103-a3cc-45e2cba809ed-kube-api-access-8hp7w\") pod \"community-operators-9979h\" (UID: \"9352e39f-8f75-4103-a3cc-45e2cba809ed\") " pod="openshift-marketplace/community-operators-9979h" Oct 09 15:20:08 crc kubenswrapper[4762]: I1009 15:20:08.692919 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9352e39f-8f75-4103-a3cc-45e2cba809ed-catalog-content\") pod \"community-operators-9979h\" (UID: \"9352e39f-8f75-4103-a3cc-45e2cba809ed\") " pod="openshift-marketplace/community-operators-9979h" Oct 09 15:20:08 crc kubenswrapper[4762]: I1009 15:20:08.795231 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8hp7w\" (UniqueName: \"kubernetes.io/projected/9352e39f-8f75-4103-a3cc-45e2cba809ed-kube-api-access-8hp7w\") pod \"community-operators-9979h\" (UID: \"9352e39f-8f75-4103-a3cc-45e2cba809ed\") " pod="openshift-marketplace/community-operators-9979h" Oct 09 15:20:08 crc kubenswrapper[4762]: I1009 15:20:08.796858 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9352e39f-8f75-4103-a3cc-45e2cba809ed-catalog-content\") pod \"community-operators-9979h\" (UID: \"9352e39f-8f75-4103-a3cc-45e2cba809ed\") " pod="openshift-marketplace/community-operators-9979h" Oct 09 15:20:08 crc kubenswrapper[4762]: I1009 15:20:08.797909 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9352e39f-8f75-4103-a3cc-45e2cba809ed-utilities\") pod \"community-operators-9979h\" (UID: \"9352e39f-8f75-4103-a3cc-45e2cba809ed\") " pod="openshift-marketplace/community-operators-9979h" Oct 09 15:20:08 crc kubenswrapper[4762]: I1009 15:20:08.797447 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9352e39f-8f75-4103-a3cc-45e2cba809ed-catalog-content\") pod \"community-operators-9979h\" (UID: \"9352e39f-8f75-4103-a3cc-45e2cba809ed\") " pod="openshift-marketplace/community-operators-9979h" Oct 09 15:20:08 crc kubenswrapper[4762]: I1009 15:20:08.798447 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/9352e39f-8f75-4103-a3cc-45e2cba809ed-utilities\") pod \"community-operators-9979h\" (UID: \"9352e39f-8f75-4103-a3cc-45e2cba809ed\") " pod="openshift-marketplace/community-operators-9979h" Oct 09 15:20:08 crc kubenswrapper[4762]: I1009 15:20:08.823031 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8hp7w\" (UniqueName: \"kubernetes.io/projected/9352e39f-8f75-4103-a3cc-45e2cba809ed-kube-api-access-8hp7w\") pod \"community-operators-9979h\" (UID: \"9352e39f-8f75-4103-a3cc-45e2cba809ed\") " pod="openshift-marketplace/community-operators-9979h" Oct 09 15:20:08 crc kubenswrapper[4762]: I1009 15:20:08.945611 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-9979h" Oct 09 15:20:09 crc kubenswrapper[4762]: I1009 15:20:09.554996 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-9979h"] Oct 09 15:20:09 crc kubenswrapper[4762]: W1009 15:20:09.558672 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9352e39f_8f75_4103_a3cc_45e2cba809ed.slice/crio-cd4e6bf76e24df9924194e742550332472daf366a81d818f6c69257b6830c3b0 WatchSource:0}: Error finding container cd4e6bf76e24df9924194e742550332472daf366a81d818f6c69257b6830c3b0: Status 404 returned error can't find the container with id cd4e6bf76e24df9924194e742550332472daf366a81d818f6c69257b6830c3b0 Oct 09 15:20:09 crc kubenswrapper[4762]: I1009 15:20:09.701180 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9979h" event={"ID":"9352e39f-8f75-4103-a3cc-45e2cba809ed","Type":"ContainerStarted","Data":"cd4e6bf76e24df9924194e742550332472daf366a81d818f6c69257b6830c3b0"} Oct 09 15:20:09 crc kubenswrapper[4762]: I1009 15:20:09.965510 4762 scope.go:117] "RemoveContainer" containerID="8e2d4f723542598f292724fd00d9cd4248c9344d342367b4808c9027b72408a1" Oct 09 15:20:09 crc kubenswrapper[4762]: E1009 15:20:09.965945 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:20:10 crc kubenswrapper[4762]: I1009 15:20:10.717572 4762 generic.go:334] "Generic (PLEG): container finished" podID="9352e39f-8f75-4103-a3cc-45e2cba809ed" containerID="6ac79a4e4c908853b1fc3f24905231aac9b9e7ae65cae982a81007f91aca2747" exitCode=0 Oct 09 15:20:10 crc kubenswrapper[4762]: I1009 15:20:10.717641 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9979h" event={"ID":"9352e39f-8f75-4103-a3cc-45e2cba809ed","Type":"ContainerDied","Data":"6ac79a4e4c908853b1fc3f24905231aac9b9e7ae65cae982a81007f91aca2747"} Oct 09 15:20:11 crc kubenswrapper[4762]: I1009 15:20:11.727693 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9979h" event={"ID":"9352e39f-8f75-4103-a3cc-45e2cba809ed","Type":"ContainerStarted","Data":"d4c174149f880c4481e1be91565d961d2ea1577ff0f93042f8be478a85f087cd"} Oct 09 15:20:13 crc kubenswrapper[4762]: I1009 15:20:13.753031 4762 generic.go:334] "Generic (PLEG): container finished" 
podID="9352e39f-8f75-4103-a3cc-45e2cba809ed" containerID="d4c174149f880c4481e1be91565d961d2ea1577ff0f93042f8be478a85f087cd" exitCode=0 Oct 09 15:20:13 crc kubenswrapper[4762]: I1009 15:20:13.753100 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9979h" event={"ID":"9352e39f-8f75-4103-a3cc-45e2cba809ed","Type":"ContainerDied","Data":"d4c174149f880c4481e1be91565d961d2ea1577ff0f93042f8be478a85f087cd"} Oct 09 15:20:14 crc kubenswrapper[4762]: I1009 15:20:14.779489 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9979h" event={"ID":"9352e39f-8f75-4103-a3cc-45e2cba809ed","Type":"ContainerStarted","Data":"ab2677850d57d82b8fd7bad6d605811b98a5d32a2e9723d094bec1292300f591"} Oct 09 15:20:14 crc kubenswrapper[4762]: I1009 15:20:14.801987 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-9979h" podStartSLOduration=3.334456789 podStartE2EDuration="6.801971623s" podCreationTimestamp="2025-10-09 15:20:08 +0000 UTC" firstStartedPulling="2025-10-09 15:20:10.719955882 +0000 UTC m=+6886.493746921" lastFinishedPulling="2025-10-09 15:20:14.187470716 +0000 UTC m=+6889.961261755" observedRunningTime="2025-10-09 15:20:14.797811574 +0000 UTC m=+6890.571602613" watchObservedRunningTime="2025-10-09 15:20:14.801971623 +0000 UTC m=+6890.575762662" Oct 09 15:20:18 crc kubenswrapper[4762]: I1009 15:20:18.946183 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-9979h" Oct 09 15:20:18 crc kubenswrapper[4762]: I1009 15:20:18.947668 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-9979h" Oct 09 15:20:19 crc kubenswrapper[4762]: I1009 15:20:19.010936 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-9979h" Oct 09 15:20:19 crc kubenswrapper[4762]: I1009 15:20:19.883288 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-9979h" Oct 09 15:20:19 crc kubenswrapper[4762]: I1009 15:20:19.935486 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-9979h"] Oct 09 15:20:21 crc kubenswrapper[4762]: I1009 15:20:21.850337 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-9979h" podUID="9352e39f-8f75-4103-a3cc-45e2cba809ed" containerName="registry-server" containerID="cri-o://ab2677850d57d82b8fd7bad6d605811b98a5d32a2e9723d094bec1292300f591" gracePeriod=2 Oct 09 15:20:22 crc kubenswrapper[4762]: I1009 15:20:22.365803 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-9979h" Oct 09 15:20:22 crc kubenswrapper[4762]: I1009 15:20:22.399242 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9352e39f-8f75-4103-a3cc-45e2cba809ed-utilities\") pod \"9352e39f-8f75-4103-a3cc-45e2cba809ed\" (UID: \"9352e39f-8f75-4103-a3cc-45e2cba809ed\") " Oct 09 15:20:22 crc kubenswrapper[4762]: I1009 15:20:22.399408 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8hp7w\" (UniqueName: \"kubernetes.io/projected/9352e39f-8f75-4103-a3cc-45e2cba809ed-kube-api-access-8hp7w\") pod \"9352e39f-8f75-4103-a3cc-45e2cba809ed\" (UID: \"9352e39f-8f75-4103-a3cc-45e2cba809ed\") " Oct 09 15:20:22 crc kubenswrapper[4762]: I1009 15:20:22.399446 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9352e39f-8f75-4103-a3cc-45e2cba809ed-catalog-content\") pod \"9352e39f-8f75-4103-a3cc-45e2cba809ed\" (UID: \"9352e39f-8f75-4103-a3cc-45e2cba809ed\") " Oct 09 15:20:22 crc kubenswrapper[4762]: I1009 15:20:22.400784 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9352e39f-8f75-4103-a3cc-45e2cba809ed-utilities" (OuterVolumeSpecName: "utilities") pod "9352e39f-8f75-4103-a3cc-45e2cba809ed" (UID: "9352e39f-8f75-4103-a3cc-45e2cba809ed"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 15:20:22 crc kubenswrapper[4762]: I1009 15:20:22.405336 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9352e39f-8f75-4103-a3cc-45e2cba809ed-kube-api-access-8hp7w" (OuterVolumeSpecName: "kube-api-access-8hp7w") pod "9352e39f-8f75-4103-a3cc-45e2cba809ed" (UID: "9352e39f-8f75-4103-a3cc-45e2cba809ed"). InnerVolumeSpecName "kube-api-access-8hp7w". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 15:20:22 crc kubenswrapper[4762]: I1009 15:20:22.458512 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9352e39f-8f75-4103-a3cc-45e2cba809ed-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9352e39f-8f75-4103-a3cc-45e2cba809ed" (UID: "9352e39f-8f75-4103-a3cc-45e2cba809ed"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 15:20:22 crc kubenswrapper[4762]: I1009 15:20:22.501751 4762 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9352e39f-8f75-4103-a3cc-45e2cba809ed-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 15:20:22 crc kubenswrapper[4762]: I1009 15:20:22.501808 4762 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9352e39f-8f75-4103-a3cc-45e2cba809ed-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 15:20:22 crc kubenswrapper[4762]: I1009 15:20:22.501878 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8hp7w\" (UniqueName: \"kubernetes.io/projected/9352e39f-8f75-4103-a3cc-45e2cba809ed-kube-api-access-8hp7w\") on node \"crc\" DevicePath \"\"" Oct 09 15:20:22 crc kubenswrapper[4762]: I1009 15:20:22.862051 4762 generic.go:334] "Generic (PLEG): container finished" podID="9352e39f-8f75-4103-a3cc-45e2cba809ed" containerID="ab2677850d57d82b8fd7bad6d605811b98a5d32a2e9723d094bec1292300f591" exitCode=0 Oct 09 15:20:22 crc kubenswrapper[4762]: I1009 15:20:22.862099 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9979h" event={"ID":"9352e39f-8f75-4103-a3cc-45e2cba809ed","Type":"ContainerDied","Data":"ab2677850d57d82b8fd7bad6d605811b98a5d32a2e9723d094bec1292300f591"} Oct 09 15:20:22 crc kubenswrapper[4762]: I1009 15:20:22.862126 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9979h" event={"ID":"9352e39f-8f75-4103-a3cc-45e2cba809ed","Type":"ContainerDied","Data":"cd4e6bf76e24df9924194e742550332472daf366a81d818f6c69257b6830c3b0"} Oct 09 15:20:22 crc kubenswrapper[4762]: I1009 15:20:22.862128 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-9979h" Oct 09 15:20:22 crc kubenswrapper[4762]: I1009 15:20:22.862144 4762 scope.go:117] "RemoveContainer" containerID="ab2677850d57d82b8fd7bad6d605811b98a5d32a2e9723d094bec1292300f591" Oct 09 15:20:22 crc kubenswrapper[4762]: I1009 15:20:22.888323 4762 scope.go:117] "RemoveContainer" containerID="d4c174149f880c4481e1be91565d961d2ea1577ff0f93042f8be478a85f087cd" Oct 09 15:20:22 crc kubenswrapper[4762]: I1009 15:20:22.908245 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-9979h"] Oct 09 15:20:22 crc kubenswrapper[4762]: I1009 15:20:22.919569 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-9979h"] Oct 09 15:20:22 crc kubenswrapper[4762]: I1009 15:20:22.927290 4762 scope.go:117] "RemoveContainer" containerID="6ac79a4e4c908853b1fc3f24905231aac9b9e7ae65cae982a81007f91aca2747" Oct 09 15:20:22 crc kubenswrapper[4762]: I1009 15:20:22.959544 4762 scope.go:117] "RemoveContainer" containerID="ab2677850d57d82b8fd7bad6d605811b98a5d32a2e9723d094bec1292300f591" Oct 09 15:20:22 crc kubenswrapper[4762]: E1009 15:20:22.960025 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ab2677850d57d82b8fd7bad6d605811b98a5d32a2e9723d094bec1292300f591\": container with ID starting with ab2677850d57d82b8fd7bad6d605811b98a5d32a2e9723d094bec1292300f591 not found: ID does not exist" containerID="ab2677850d57d82b8fd7bad6d605811b98a5d32a2e9723d094bec1292300f591" Oct 09 15:20:22 crc kubenswrapper[4762]: I1009 15:20:22.960082 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ab2677850d57d82b8fd7bad6d605811b98a5d32a2e9723d094bec1292300f591"} err="failed to get container status \"ab2677850d57d82b8fd7bad6d605811b98a5d32a2e9723d094bec1292300f591\": rpc error: code = NotFound desc = could not find container \"ab2677850d57d82b8fd7bad6d605811b98a5d32a2e9723d094bec1292300f591\": container with ID starting with ab2677850d57d82b8fd7bad6d605811b98a5d32a2e9723d094bec1292300f591 not found: ID does not exist" Oct 09 15:20:22 crc kubenswrapper[4762]: I1009 15:20:22.960131 4762 scope.go:117] "RemoveContainer" containerID="d4c174149f880c4481e1be91565d961d2ea1577ff0f93042f8be478a85f087cd" Oct 09 15:20:22 crc kubenswrapper[4762]: E1009 15:20:22.960669 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d4c174149f880c4481e1be91565d961d2ea1577ff0f93042f8be478a85f087cd\": container with ID starting with d4c174149f880c4481e1be91565d961d2ea1577ff0f93042f8be478a85f087cd not found: ID does not exist" containerID="d4c174149f880c4481e1be91565d961d2ea1577ff0f93042f8be478a85f087cd" Oct 09 15:20:22 crc kubenswrapper[4762]: I1009 15:20:22.960736 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d4c174149f880c4481e1be91565d961d2ea1577ff0f93042f8be478a85f087cd"} err="failed to get container status \"d4c174149f880c4481e1be91565d961d2ea1577ff0f93042f8be478a85f087cd\": rpc error: code = NotFound desc = could not find container \"d4c174149f880c4481e1be91565d961d2ea1577ff0f93042f8be478a85f087cd\": container with ID starting with d4c174149f880c4481e1be91565d961d2ea1577ff0f93042f8be478a85f087cd not found: ID does not exist" Oct 09 15:20:22 crc kubenswrapper[4762]: I1009 15:20:22.960766 4762 scope.go:117] "RemoveContainer" 
containerID="6ac79a4e4c908853b1fc3f24905231aac9b9e7ae65cae982a81007f91aca2747" Oct 09 15:20:22 crc kubenswrapper[4762]: E1009 15:20:22.961281 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6ac79a4e4c908853b1fc3f24905231aac9b9e7ae65cae982a81007f91aca2747\": container with ID starting with 6ac79a4e4c908853b1fc3f24905231aac9b9e7ae65cae982a81007f91aca2747 not found: ID does not exist" containerID="6ac79a4e4c908853b1fc3f24905231aac9b9e7ae65cae982a81007f91aca2747" Oct 09 15:20:22 crc kubenswrapper[4762]: I1009 15:20:22.961305 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6ac79a4e4c908853b1fc3f24905231aac9b9e7ae65cae982a81007f91aca2747"} err="failed to get container status \"6ac79a4e4c908853b1fc3f24905231aac9b9e7ae65cae982a81007f91aca2747\": rpc error: code = NotFound desc = could not find container \"6ac79a4e4c908853b1fc3f24905231aac9b9e7ae65cae982a81007f91aca2747\": container with ID starting with 6ac79a4e4c908853b1fc3f24905231aac9b9e7ae65cae982a81007f91aca2747 not found: ID does not exist" Oct 09 15:20:22 crc kubenswrapper[4762]: I1009 15:20:22.965889 4762 scope.go:117] "RemoveContainer" containerID="8e2d4f723542598f292724fd00d9cd4248c9344d342367b4808c9027b72408a1" Oct 09 15:20:22 crc kubenswrapper[4762]: E1009 15:20:22.966268 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:20:22 crc kubenswrapper[4762]: I1009 15:20:22.979023 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9352e39f-8f75-4103-a3cc-45e2cba809ed" path="/var/lib/kubelet/pods/9352e39f-8f75-4103-a3cc-45e2cba809ed/volumes" Oct 09 15:20:33 crc kubenswrapper[4762]: I1009 15:20:33.965409 4762 scope.go:117] "RemoveContainer" containerID="8e2d4f723542598f292724fd00d9cd4248c9344d342367b4808c9027b72408a1" Oct 09 15:20:33 crc kubenswrapper[4762]: E1009 15:20:33.966279 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:20:37 crc kubenswrapper[4762]: I1009 15:20:37.726495 4762 scope.go:117] "RemoveContainer" containerID="da6659d3acaebd20bdd87d5d466e5ffc912ceadf589a349c1dccaf5de26ca303" Oct 09 15:20:48 crc kubenswrapper[4762]: I1009 15:20:48.965914 4762 scope.go:117] "RemoveContainer" containerID="8e2d4f723542598f292724fd00d9cd4248c9344d342367b4808c9027b72408a1" Oct 09 15:20:48 crc kubenswrapper[4762]: E1009 15:20:48.966767 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" 
podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:21:01 crc kubenswrapper[4762]: I1009 15:21:01.965725 4762 scope.go:117] "RemoveContainer" containerID="8e2d4f723542598f292724fd00d9cd4248c9344d342367b4808c9027b72408a1" Oct 09 15:21:01 crc kubenswrapper[4762]: E1009 15:21:01.966517 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:21:15 crc kubenswrapper[4762]: I1009 15:21:15.965491 4762 scope.go:117] "RemoveContainer" containerID="8e2d4f723542598f292724fd00d9cd4248c9344d342367b4808c9027b72408a1" Oct 09 15:21:15 crc kubenswrapper[4762]: E1009 15:21:15.967101 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:21:26 crc kubenswrapper[4762]: I1009 15:21:26.965383 4762 scope.go:117] "RemoveContainer" containerID="8e2d4f723542598f292724fd00d9cd4248c9344d342367b4808c9027b72408a1" Oct 09 15:21:26 crc kubenswrapper[4762]: E1009 15:21:26.966559 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:21:38 crc kubenswrapper[4762]: I1009 15:21:38.966317 4762 scope.go:117] "RemoveContainer" containerID="8e2d4f723542598f292724fd00d9cd4248c9344d342367b4808c9027b72408a1" Oct 09 15:21:38 crc kubenswrapper[4762]: E1009 15:21:38.967451 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:21:53 crc kubenswrapper[4762]: I1009 15:21:53.966393 4762 scope.go:117] "RemoveContainer" containerID="8e2d4f723542598f292724fd00d9cd4248c9344d342367b4808c9027b72408a1" Oct 09 15:21:53 crc kubenswrapper[4762]: E1009 15:21:53.967757 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:21:56 crc kubenswrapper[4762]: I1009 15:21:56.039998 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/aodh-db-create-nfz4s"] Oct 09 15:21:56 crc kubenswrapper[4762]: I1009 15:21:56.048297 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/aodh-db-create-nfz4s"] Oct 09 15:21:56 crc kubenswrapper[4762]: I1009 15:21:56.980748 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0be367a0-7dc6-4755-82c0-79787634bd41" path="/var/lib/kubelet/pods/0be367a0-7dc6-4755-82c0-79787634bd41/volumes" Oct 09 15:22:05 crc kubenswrapper[4762]: I1009 15:22:05.966129 4762 scope.go:117] "RemoveContainer" containerID="8e2d4f723542598f292724fd00d9cd4248c9344d342367b4808c9027b72408a1" Oct 09 15:22:05 crc kubenswrapper[4762]: E1009 15:22:05.966937 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:22:06 crc kubenswrapper[4762]: I1009 15:22:06.037571 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/aodh-2e2d-account-create-487jb"] Oct 09 15:22:06 crc kubenswrapper[4762]: I1009 15:22:06.045324 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/aodh-2e2d-account-create-487jb"] Oct 09 15:22:06 crc kubenswrapper[4762]: I1009 15:22:06.983759 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3e349088-1b04-404e-ad25-3f0bb4117810" path="/var/lib/kubelet/pods/3e349088-1b04-404e-ad25-3f0bb4117810/volumes" Oct 09 15:22:18 crc kubenswrapper[4762]: I1009 15:22:18.042504 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/aodh-db-sync-68lbb"] Oct 09 15:22:18 crc kubenswrapper[4762]: I1009 15:22:18.053171 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/aodh-db-sync-68lbb"] Oct 09 15:22:18 crc kubenswrapper[4762]: I1009 15:22:18.981692 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b82bad80-25e4-41ef-a414-1dd5a1c9c577" path="/var/lib/kubelet/pods/b82bad80-25e4-41ef-a414-1dd5a1c9c577/volumes" Oct 09 15:22:19 crc kubenswrapper[4762]: I1009 15:22:19.966284 4762 scope.go:117] "RemoveContainer" containerID="8e2d4f723542598f292724fd00d9cd4248c9344d342367b4808c9027b72408a1" Oct 09 15:22:19 crc kubenswrapper[4762]: E1009 15:22:19.967346 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:22:31 crc kubenswrapper[4762]: I1009 15:22:31.965483 4762 scope.go:117] "RemoveContainer" containerID="8e2d4f723542598f292724fd00d9cd4248c9344d342367b4808c9027b72408a1" Oct 09 15:22:31 crc kubenswrapper[4762]: E1009 15:22:31.966434 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:22:37 crc kubenswrapper[4762]: I1009 15:22:37.835066 4762 scope.go:117] "RemoveContainer" containerID="2580f2a71cb097c72e814fb8735375837aadd8a5b3bafabad554ca365ee0e926" Oct 09 15:22:37 crc kubenswrapper[4762]: I1009 15:22:37.879540 4762 scope.go:117] "RemoveContainer" containerID="b7b2283e25a389853dabd89197c281a01c494836b17a9650c3793b4c70bcfe2e" Oct 09 15:22:37 crc kubenswrapper[4762]: I1009 15:22:37.945571 4762 scope.go:117] "RemoveContainer" containerID="5b1621bc12e5f6d588e66e567c310e485fea85a69f987292d36d8aab7d8c34ea" Oct 09 15:22:41 crc kubenswrapper[4762]: I1009 15:22:41.027436 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-db-create-dsvq5"] Oct 09 15:22:41 crc kubenswrapper[4762]: I1009 15:22:41.035424 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-db-create-dsvq5"] Oct 09 15:22:42 crc kubenswrapper[4762]: I1009 15:22:42.965898 4762 scope.go:117] "RemoveContainer" containerID="8e2d4f723542598f292724fd00d9cd4248c9344d342367b4808c9027b72408a1" Oct 09 15:22:42 crc kubenswrapper[4762]: I1009 15:22:42.981998 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e915789c-9472-42e8-8f48-60fa0b7b2759" path="/var/lib/kubelet/pods/e915789c-9472-42e8-8f48-60fa0b7b2759/volumes" Oct 09 15:22:43 crc kubenswrapper[4762]: I1009 15:22:43.207021 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" event={"ID":"366049a3-acf6-488c-9f93-4557528d6d14","Type":"ContainerStarted","Data":"b44086fbd997526ffb6ace45e20f557776d1bda26bb1a653f927fc2b22f9e57d"} Oct 09 15:22:59 crc kubenswrapper[4762]: I1009 15:22:59.051951 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-cc93-account-create-7895h"] Oct 09 15:22:59 crc kubenswrapper[4762]: I1009 15:22:59.064813 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-cc93-account-create-7895h"] Oct 09 15:23:00 crc kubenswrapper[4762]: I1009 15:23:00.980906 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ec993b02-f2ba-4964-bdfa-45827565c0f2" path="/var/lib/kubelet/pods/ec993b02-f2ba-4964-bdfa-45827565c0f2/volumes" Oct 09 15:23:13 crc kubenswrapper[4762]: I1009 15:23:13.074699 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-db-sync-nzhdh"] Oct 09 15:23:13 crc kubenswrapper[4762]: I1009 15:23:13.091334 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-db-sync-nzhdh"] Oct 09 15:23:14 crc kubenswrapper[4762]: I1009 15:23:14.986873 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc3e58fe-7c7d-4ad2-b8f8-d11078d3a03b" path="/var/lib/kubelet/pods/bc3e58fe-7c7d-4ad2-b8f8-d11078d3a03b/volumes" Oct 09 15:23:38 crc kubenswrapper[4762]: I1009 15:23:38.091839 4762 scope.go:117] "RemoveContainer" containerID="0b4ba54b0672cfd8230f3d68c71508da5f5365fdde716ec0e9369b418092748c" Oct 09 15:23:38 crc kubenswrapper[4762]: I1009 15:23:38.138693 4762 scope.go:117] "RemoveContainer" containerID="145e1a42958431e053cc208dd55549c363a1133d2f6f0337c7d7bacf568b3afa" Oct 09 15:23:38 crc kubenswrapper[4762]: I1009 15:23:38.177759 4762 scope.go:117] "RemoveContainer" containerID="54eedc2be0b0c3c1af0fe45c2312e5fe5ca89719a0adefb4f226892354be3a07" Oct 09 15:25:11 crc kubenswrapper[4762]: I1009 15:25:11.969811 4762 patch_prober.go:28] interesting pod/machine-config-daemon-5v6hv 
Oct 09 15:25:11 crc kubenswrapper[4762]: I1009 15:25:11.970454 4762 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 09 15:25:41 crc kubenswrapper[4762]: I1009 15:25:41.969176 4762 patch_prober.go:28] interesting pod/machine-config-daemon-5v6hv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 09 15:25:41 crc kubenswrapper[4762]: I1009 15:25:41.969695 4762 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 09 15:26:11 crc kubenswrapper[4762]: I1009 15:26:11.969736 4762 patch_prober.go:28] interesting pod/machine-config-daemon-5v6hv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 09 15:26:11 crc kubenswrapper[4762]: I1009 15:26:11.970692 4762 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 09 15:26:11 crc kubenswrapper[4762]: I1009 15:26:11.970761 4762 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv"
Oct 09 15:26:11 crc kubenswrapper[4762]: I1009 15:26:11.971471 4762 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"b44086fbd997526ffb6ace45e20f557776d1bda26bb1a653f927fc2b22f9e57d"} pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Oct 09 15:26:11 crc kubenswrapper[4762]: I1009 15:26:11.971538 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" containerID="cri-o://b44086fbd997526ffb6ace45e20f557776d1bda26bb1a653f927fc2b22f9e57d" gracePeriod=600
Oct 09 15:26:12 crc kubenswrapper[4762]: I1009 15:26:12.296117 4762 generic.go:334] "Generic (PLEG): container finished" podID="366049a3-acf6-488c-9f93-4557528d6d14" containerID="b44086fbd997526ffb6ace45e20f557776d1bda26bb1a653f927fc2b22f9e57d" exitCode=0
Oct 09 15:26:12 crc kubenswrapper[4762]: I1009 15:26:12.296210 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" event={"ID":"366049a3-acf6-488c-9f93-4557528d6d14","Type":"ContainerDied","Data":"b44086fbd997526ffb6ace45e20f557776d1bda26bb1a653f927fc2b22f9e57d"}
Oct 09 15:26:12 crc kubenswrapper[4762]: I1009 15:26:12.296787 4762 scope.go:117] "RemoveContainer" containerID="8e2d4f723542598f292724fd00d9cd4248c9344d342367b4808c9027b72408a1"
Oct 09 15:26:13 crc kubenswrapper[4762]: I1009 15:26:13.308322 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" event={"ID":"366049a3-acf6-488c-9f93-4557528d6d14","Type":"ContainerStarted","Data":"da3bd6add501ed77419b80f68b04419592f8477497ce7284b22aa3129677ebf6"}
Oct 09 15:26:20 crc kubenswrapper[4762]: I1009 15:26:20.373724 4762 generic.go:334] "Generic (PLEG): container finished" podID="37673896-c4d5-42e0-934f-1a36d759b2e2" containerID="b5b2db53aceb954952df31de74be47aa80cdf4a0f7f7254858662dce7dcec679" exitCode=0
Oct 09 15:26:20 crc kubenswrapper[4762]: I1009 15:26:20.373789 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-w7hq5" event={"ID":"37673896-c4d5-42e0-934f-1a36d759b2e2","Type":"ContainerDied","Data":"b5b2db53aceb954952df31de74be47aa80cdf4a0f7f7254858662dce7dcec679"}
Oct 09 15:26:21 crc kubenswrapper[4762]: I1009 15:26:21.836303 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-w7hq5"
Oct 09 15:26:21 crc kubenswrapper[4762]: I1009 15:26:21.994517 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tripleo-cleanup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/37673896-c4d5-42e0-934f-1a36d759b2e2-tripleo-cleanup-combined-ca-bundle\") pod \"37673896-c4d5-42e0-934f-1a36d759b2e2\" (UID: \"37673896-c4d5-42e0-934f-1a36d759b2e2\") "
Oct 09 15:26:21 crc kubenswrapper[4762]: I1009 15:26:21.994597 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/37673896-c4d5-42e0-934f-1a36d759b2e2-inventory\") pod \"37673896-c4d5-42e0-934f-1a36d759b2e2\" (UID: \"37673896-c4d5-42e0-934f-1a36d759b2e2\") "
Oct 09 15:26:21 crc kubenswrapper[4762]: I1009 15:26:21.995884 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/37673896-c4d5-42e0-934f-1a36d759b2e2-ssh-key\") pod \"37673896-c4d5-42e0-934f-1a36d759b2e2\" (UID: \"37673896-c4d5-42e0-934f-1a36d759b2e2\") "
Oct 09 15:26:21 crc kubenswrapper[4762]: I1009 15:26:21.996128 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/37673896-c4d5-42e0-934f-1a36d759b2e2-ceph\") pod \"37673896-c4d5-42e0-934f-1a36d759b2e2\" (UID: \"37673896-c4d5-42e0-934f-1a36d759b2e2\") "
Oct 09 15:26:21 crc kubenswrapper[4762]: I1009 15:26:21.996178 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hjx6m\" (UniqueName: \"kubernetes.io/projected/37673896-c4d5-42e0-934f-1a36d759b2e2-kube-api-access-hjx6m\") pod \"37673896-c4d5-42e0-934f-1a36d759b2e2\" (UID: \"37673896-c4d5-42e0-934f-1a36d759b2e2\") "
Oct 09 15:26:22 crc kubenswrapper[4762]: I1009 15:26:22.005909 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/37673896-c4d5-42e0-934f-1a36d759b2e2-ceph" (OuterVolumeSpecName: "ceph") pod "37673896-c4d5-42e0-934f-1a36d759b2e2" (UID: "37673896-c4d5-42e0-934f-1a36d759b2e2"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 09 15:26:22 crc kubenswrapper[4762]: I1009 15:26:22.005958 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/37673896-c4d5-42e0-934f-1a36d759b2e2-kube-api-access-hjx6m" (OuterVolumeSpecName: "kube-api-access-hjx6m") pod "37673896-c4d5-42e0-934f-1a36d759b2e2" (UID: "37673896-c4d5-42e0-934f-1a36d759b2e2"). InnerVolumeSpecName "kube-api-access-hjx6m". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 09 15:26:22 crc kubenswrapper[4762]: I1009 15:26:22.010162 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/37673896-c4d5-42e0-934f-1a36d759b2e2-tripleo-cleanup-combined-ca-bundle" (OuterVolumeSpecName: "tripleo-cleanup-combined-ca-bundle") pod "37673896-c4d5-42e0-934f-1a36d759b2e2" (UID: "37673896-c4d5-42e0-934f-1a36d759b2e2"). InnerVolumeSpecName "tripleo-cleanup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 09 15:26:22 crc kubenswrapper[4762]: I1009 15:26:22.032496 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/37673896-c4d5-42e0-934f-1a36d759b2e2-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "37673896-c4d5-42e0-934f-1a36d759b2e2" (UID: "37673896-c4d5-42e0-934f-1a36d759b2e2"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 09 15:26:22 crc kubenswrapper[4762]: I1009 15:26:22.032971 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/37673896-c4d5-42e0-934f-1a36d759b2e2-inventory" (OuterVolumeSpecName: "inventory") pod "37673896-c4d5-42e0-934f-1a36d759b2e2" (UID: "37673896-c4d5-42e0-934f-1a36d759b2e2"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 09 15:26:22 crc kubenswrapper[4762]: I1009 15:26:22.099498 4762 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/37673896-c4d5-42e0-934f-1a36d759b2e2-ceph\") on node \"crc\" DevicePath \"\""
Oct 09 15:26:22 crc kubenswrapper[4762]: I1009 15:26:22.099540 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hjx6m\" (UniqueName: \"kubernetes.io/projected/37673896-c4d5-42e0-934f-1a36d759b2e2-kube-api-access-hjx6m\") on node \"crc\" DevicePath \"\""
Oct 09 15:26:22 crc kubenswrapper[4762]: I1009 15:26:22.099556 4762 reconciler_common.go:293] "Volume detached for volume \"tripleo-cleanup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/37673896-c4d5-42e0-934f-1a36d759b2e2-tripleo-cleanup-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 09 15:26:22 crc kubenswrapper[4762]: I1009 15:26:22.099569 4762 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/37673896-c4d5-42e0-934f-1a36d759b2e2-inventory\") on node \"crc\" DevicePath \"\""
Oct 09 15:26:22 crc kubenswrapper[4762]: I1009 15:26:22.099581 4762 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/37673896-c4d5-42e0-934f-1a36d759b2e2-ssh-key\") on node \"crc\" DevicePath \"\""
Oct 09 15:26:22 crc kubenswrapper[4762]: I1009 15:26:22.396141 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-w7hq5" event={"ID":"37673896-c4d5-42e0-934f-1a36d759b2e2","Type":"ContainerDied","Data":"b3df50dfb6e6bf803329e362f0b2d82ea24538493584d22d7b316a853361f3b6"}
Oct 09 15:26:22 crc kubenswrapper[4762]: I1009 15:26:22.396193 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-w7hq5"
Need to start a new one" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-w7hq5" Oct 09 15:26:22 crc kubenswrapper[4762]: I1009 15:26:22.396211 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b3df50dfb6e6bf803329e362f0b2d82ea24538493584d22d7b316a853361f3b6" Oct 09 15:26:30 crc kubenswrapper[4762]: I1009 15:26:30.251168 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/bootstrap-openstack-openstack-cell1-ft7tq"] Oct 09 15:26:30 crc kubenswrapper[4762]: E1009 15:26:30.252401 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9352e39f-8f75-4103-a3cc-45e2cba809ed" containerName="registry-server" Oct 09 15:26:30 crc kubenswrapper[4762]: I1009 15:26:30.252417 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="9352e39f-8f75-4103-a3cc-45e2cba809ed" containerName="registry-server" Oct 09 15:26:30 crc kubenswrapper[4762]: E1009 15:26:30.252434 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9352e39f-8f75-4103-a3cc-45e2cba809ed" containerName="extract-content" Oct 09 15:26:30 crc kubenswrapper[4762]: I1009 15:26:30.252441 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="9352e39f-8f75-4103-a3cc-45e2cba809ed" containerName="extract-content" Oct 09 15:26:30 crc kubenswrapper[4762]: E1009 15:26:30.252490 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9352e39f-8f75-4103-a3cc-45e2cba809ed" containerName="extract-utilities" Oct 09 15:26:30 crc kubenswrapper[4762]: I1009 15:26:30.252500 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="9352e39f-8f75-4103-a3cc-45e2cba809ed" containerName="extract-utilities" Oct 09 15:26:30 crc kubenswrapper[4762]: E1009 15:26:30.252512 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="37673896-c4d5-42e0-934f-1a36d759b2e2" containerName="tripleo-cleanup-tripleo-cleanup-openstack-cell1" Oct 09 15:26:30 crc kubenswrapper[4762]: I1009 15:26:30.252521 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="37673896-c4d5-42e0-934f-1a36d759b2e2" containerName="tripleo-cleanup-tripleo-cleanup-openstack-cell1" Oct 09 15:26:30 crc kubenswrapper[4762]: I1009 15:26:30.252799 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="37673896-c4d5-42e0-934f-1a36d759b2e2" containerName="tripleo-cleanup-tripleo-cleanup-openstack-cell1" Oct 09 15:26:30 crc kubenswrapper[4762]: I1009 15:26:30.252819 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="9352e39f-8f75-4103-a3cc-45e2cba809ed" containerName="registry-server" Oct 09 15:26:30 crc kubenswrapper[4762]: I1009 15:26:30.253713 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-openstack-openstack-cell1-ft7tq" Oct 09 15:26:30 crc kubenswrapper[4762]: I1009 15:26:30.265866 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 09 15:26:30 crc kubenswrapper[4762]: I1009 15:26:30.266274 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Oct 09 15:26:30 crc kubenswrapper[4762]: I1009 15:26:30.266346 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Oct 09 15:26:30 crc kubenswrapper[4762]: I1009 15:26:30.266587 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-whcgt" Oct 09 15:26:30 crc kubenswrapper[4762]: I1009 15:26:30.283034 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-openstack-openstack-cell1-ft7tq"] Oct 09 15:26:30 crc kubenswrapper[4762]: I1009 15:26:30.396936 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gl6rd\" (UniqueName: \"kubernetes.io/projected/74f4464f-713c-4ace-9657-31e83a483ae7-kube-api-access-gl6rd\") pod \"bootstrap-openstack-openstack-cell1-ft7tq\" (UID: \"74f4464f-713c-4ace-9657-31e83a483ae7\") " pod="openstack/bootstrap-openstack-openstack-cell1-ft7tq" Oct 09 15:26:30 crc kubenswrapper[4762]: I1009 15:26:30.397011 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/74f4464f-713c-4ace-9657-31e83a483ae7-ssh-key\") pod \"bootstrap-openstack-openstack-cell1-ft7tq\" (UID: \"74f4464f-713c-4ace-9657-31e83a483ae7\") " pod="openstack/bootstrap-openstack-openstack-cell1-ft7tq" Oct 09 15:26:30 crc kubenswrapper[4762]: I1009 15:26:30.397691 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/74f4464f-713c-4ace-9657-31e83a483ae7-inventory\") pod \"bootstrap-openstack-openstack-cell1-ft7tq\" (UID: \"74f4464f-713c-4ace-9657-31e83a483ae7\") " pod="openstack/bootstrap-openstack-openstack-cell1-ft7tq" Oct 09 15:26:30 crc kubenswrapper[4762]: I1009 15:26:30.397866 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/74f4464f-713c-4ace-9657-31e83a483ae7-bootstrap-combined-ca-bundle\") pod \"bootstrap-openstack-openstack-cell1-ft7tq\" (UID: \"74f4464f-713c-4ace-9657-31e83a483ae7\") " pod="openstack/bootstrap-openstack-openstack-cell1-ft7tq" Oct 09 15:26:30 crc kubenswrapper[4762]: I1009 15:26:30.398109 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/74f4464f-713c-4ace-9657-31e83a483ae7-ceph\") pod \"bootstrap-openstack-openstack-cell1-ft7tq\" (UID: \"74f4464f-713c-4ace-9657-31e83a483ae7\") " pod="openstack/bootstrap-openstack-openstack-cell1-ft7tq" Oct 09 15:26:30 crc kubenswrapper[4762]: I1009 15:26:30.500517 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gl6rd\" (UniqueName: \"kubernetes.io/projected/74f4464f-713c-4ace-9657-31e83a483ae7-kube-api-access-gl6rd\") pod \"bootstrap-openstack-openstack-cell1-ft7tq\" (UID: \"74f4464f-713c-4ace-9657-31e83a483ae7\") " pod="openstack/bootstrap-openstack-openstack-cell1-ft7tq" Oct 09 
15:26:30 crc kubenswrapper[4762]: I1009 15:26:30.500596 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/74f4464f-713c-4ace-9657-31e83a483ae7-ssh-key\") pod \"bootstrap-openstack-openstack-cell1-ft7tq\" (UID: \"74f4464f-713c-4ace-9657-31e83a483ae7\") " pod="openstack/bootstrap-openstack-openstack-cell1-ft7tq" Oct 09 15:26:30 crc kubenswrapper[4762]: I1009 15:26:30.500673 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/74f4464f-713c-4ace-9657-31e83a483ae7-inventory\") pod \"bootstrap-openstack-openstack-cell1-ft7tq\" (UID: \"74f4464f-713c-4ace-9657-31e83a483ae7\") " pod="openstack/bootstrap-openstack-openstack-cell1-ft7tq" Oct 09 15:26:30 crc kubenswrapper[4762]: I1009 15:26:30.500739 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/74f4464f-713c-4ace-9657-31e83a483ae7-bootstrap-combined-ca-bundle\") pod \"bootstrap-openstack-openstack-cell1-ft7tq\" (UID: \"74f4464f-713c-4ace-9657-31e83a483ae7\") " pod="openstack/bootstrap-openstack-openstack-cell1-ft7tq" Oct 09 15:26:30 crc kubenswrapper[4762]: I1009 15:26:30.500845 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/74f4464f-713c-4ace-9657-31e83a483ae7-ceph\") pod \"bootstrap-openstack-openstack-cell1-ft7tq\" (UID: \"74f4464f-713c-4ace-9657-31e83a483ae7\") " pod="openstack/bootstrap-openstack-openstack-cell1-ft7tq" Oct 09 15:26:30 crc kubenswrapper[4762]: I1009 15:26:30.506695 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/74f4464f-713c-4ace-9657-31e83a483ae7-ssh-key\") pod \"bootstrap-openstack-openstack-cell1-ft7tq\" (UID: \"74f4464f-713c-4ace-9657-31e83a483ae7\") " pod="openstack/bootstrap-openstack-openstack-cell1-ft7tq" Oct 09 15:26:30 crc kubenswrapper[4762]: I1009 15:26:30.507623 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/74f4464f-713c-4ace-9657-31e83a483ae7-ceph\") pod \"bootstrap-openstack-openstack-cell1-ft7tq\" (UID: \"74f4464f-713c-4ace-9657-31e83a483ae7\") " pod="openstack/bootstrap-openstack-openstack-cell1-ft7tq" Oct 09 15:26:30 crc kubenswrapper[4762]: I1009 15:26:30.507933 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/74f4464f-713c-4ace-9657-31e83a483ae7-inventory\") pod \"bootstrap-openstack-openstack-cell1-ft7tq\" (UID: \"74f4464f-713c-4ace-9657-31e83a483ae7\") " pod="openstack/bootstrap-openstack-openstack-cell1-ft7tq" Oct 09 15:26:30 crc kubenswrapper[4762]: I1009 15:26:30.520464 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/74f4464f-713c-4ace-9657-31e83a483ae7-bootstrap-combined-ca-bundle\") pod \"bootstrap-openstack-openstack-cell1-ft7tq\" (UID: \"74f4464f-713c-4ace-9657-31e83a483ae7\") " pod="openstack/bootstrap-openstack-openstack-cell1-ft7tq" Oct 09 15:26:30 crc kubenswrapper[4762]: I1009 15:26:30.521331 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gl6rd\" (UniqueName: \"kubernetes.io/projected/74f4464f-713c-4ace-9657-31e83a483ae7-kube-api-access-gl6rd\") pod \"bootstrap-openstack-openstack-cell1-ft7tq\" (UID: 
\"74f4464f-713c-4ace-9657-31e83a483ae7\") " pod="openstack/bootstrap-openstack-openstack-cell1-ft7tq" Oct 09 15:26:30 crc kubenswrapper[4762]: I1009 15:26:30.581176 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-openstack-openstack-cell1-ft7tq" Oct 09 15:26:31 crc kubenswrapper[4762]: I1009 15:26:31.278234 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-openstack-openstack-cell1-ft7tq"] Oct 09 15:26:31 crc kubenswrapper[4762]: W1009 15:26:31.281706 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod74f4464f_713c_4ace_9657_31e83a483ae7.slice/crio-8b22130e2894253b02015e2a582eb420e692eb193fbc22edbe56a4b5cb827c6e WatchSource:0}: Error finding container 8b22130e2894253b02015e2a582eb420e692eb193fbc22edbe56a4b5cb827c6e: Status 404 returned error can't find the container with id 8b22130e2894253b02015e2a582eb420e692eb193fbc22edbe56a4b5cb827c6e Oct 09 15:26:31 crc kubenswrapper[4762]: I1009 15:26:31.286425 4762 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 09 15:26:31 crc kubenswrapper[4762]: I1009 15:26:31.485381 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-openstack-openstack-cell1-ft7tq" event={"ID":"74f4464f-713c-4ace-9657-31e83a483ae7","Type":"ContainerStarted","Data":"8b22130e2894253b02015e2a582eb420e692eb193fbc22edbe56a4b5cb827c6e"} Oct 09 15:26:32 crc kubenswrapper[4762]: I1009 15:26:32.497884 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-openstack-openstack-cell1-ft7tq" event={"ID":"74f4464f-713c-4ace-9657-31e83a483ae7","Type":"ContainerStarted","Data":"e8cf58caca86409ba7dc93df12fc3accf2f0407583d30daa06a176e34538cceb"} Oct 09 15:26:32 crc kubenswrapper[4762]: I1009 15:26:32.518847 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/bootstrap-openstack-openstack-cell1-ft7tq" podStartSLOduration=1.871036729 podStartE2EDuration="2.518824716s" podCreationTimestamp="2025-10-09 15:26:30 +0000 UTC" firstStartedPulling="2025-10-09 15:26:31.286179607 +0000 UTC m=+7267.059970646" lastFinishedPulling="2025-10-09 15:26:31.933967594 +0000 UTC m=+7267.707758633" observedRunningTime="2025-10-09 15:26:32.512831709 +0000 UTC m=+7268.286622748" watchObservedRunningTime="2025-10-09 15:26:32.518824716 +0000 UTC m=+7268.292615765" Oct 09 15:27:22 crc kubenswrapper[4762]: I1009 15:27:22.725559 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-g75md"] Oct 09 15:27:22 crc kubenswrapper[4762]: I1009 15:27:22.728602 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-g75md" Oct 09 15:27:22 crc kubenswrapper[4762]: I1009 15:27:22.736240 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-g75md"] Oct 09 15:27:22 crc kubenswrapper[4762]: I1009 15:27:22.755692 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6094cdc4-6233-4944-840b-7869554c9752-catalog-content\") pod \"redhat-marketplace-g75md\" (UID: \"6094cdc4-6233-4944-840b-7869554c9752\") " pod="openshift-marketplace/redhat-marketplace-g75md" Oct 09 15:27:22 crc kubenswrapper[4762]: I1009 15:27:22.755752 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8npm7\" (UniqueName: \"kubernetes.io/projected/6094cdc4-6233-4944-840b-7869554c9752-kube-api-access-8npm7\") pod \"redhat-marketplace-g75md\" (UID: \"6094cdc4-6233-4944-840b-7869554c9752\") " pod="openshift-marketplace/redhat-marketplace-g75md" Oct 09 15:27:22 crc kubenswrapper[4762]: I1009 15:27:22.756017 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6094cdc4-6233-4944-840b-7869554c9752-utilities\") pod \"redhat-marketplace-g75md\" (UID: \"6094cdc4-6233-4944-840b-7869554c9752\") " pod="openshift-marketplace/redhat-marketplace-g75md" Oct 09 15:27:22 crc kubenswrapper[4762]: I1009 15:27:22.858205 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6094cdc4-6233-4944-840b-7869554c9752-utilities\") pod \"redhat-marketplace-g75md\" (UID: \"6094cdc4-6233-4944-840b-7869554c9752\") " pod="openshift-marketplace/redhat-marketplace-g75md" Oct 09 15:27:22 crc kubenswrapper[4762]: I1009 15:27:22.858311 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6094cdc4-6233-4944-840b-7869554c9752-catalog-content\") pod \"redhat-marketplace-g75md\" (UID: \"6094cdc4-6233-4944-840b-7869554c9752\") " pod="openshift-marketplace/redhat-marketplace-g75md" Oct 09 15:27:22 crc kubenswrapper[4762]: I1009 15:27:22.858339 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8npm7\" (UniqueName: \"kubernetes.io/projected/6094cdc4-6233-4944-840b-7869554c9752-kube-api-access-8npm7\") pod \"redhat-marketplace-g75md\" (UID: \"6094cdc4-6233-4944-840b-7869554c9752\") " pod="openshift-marketplace/redhat-marketplace-g75md" Oct 09 15:27:22 crc kubenswrapper[4762]: I1009 15:27:22.859408 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6094cdc4-6233-4944-840b-7869554c9752-utilities\") pod \"redhat-marketplace-g75md\" (UID: \"6094cdc4-6233-4944-840b-7869554c9752\") " pod="openshift-marketplace/redhat-marketplace-g75md" Oct 09 15:27:22 crc kubenswrapper[4762]: I1009 15:27:22.859510 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6094cdc4-6233-4944-840b-7869554c9752-catalog-content\") pod \"redhat-marketplace-g75md\" (UID: \"6094cdc4-6233-4944-840b-7869554c9752\") " pod="openshift-marketplace/redhat-marketplace-g75md" Oct 09 15:27:22 crc kubenswrapper[4762]: I1009 15:27:22.880919 4762 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-8npm7\" (UniqueName: \"kubernetes.io/projected/6094cdc4-6233-4944-840b-7869554c9752-kube-api-access-8npm7\") pod \"redhat-marketplace-g75md\" (UID: \"6094cdc4-6233-4944-840b-7869554c9752\") " pod="openshift-marketplace/redhat-marketplace-g75md" Oct 09 15:27:23 crc kubenswrapper[4762]: I1009 15:27:23.067374 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-g75md" Oct 09 15:27:23 crc kubenswrapper[4762]: I1009 15:27:23.533715 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-g75md"] Oct 09 15:27:24 crc kubenswrapper[4762]: I1009 15:27:24.046921 4762 generic.go:334] "Generic (PLEG): container finished" podID="6094cdc4-6233-4944-840b-7869554c9752" containerID="e1345ae0fb2fbbef7c120ac754f195ec8f753aaefd155d0421ad7b3a42285d55" exitCode=0 Oct 09 15:27:24 crc kubenswrapper[4762]: I1009 15:27:24.048003 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g75md" event={"ID":"6094cdc4-6233-4944-840b-7869554c9752","Type":"ContainerDied","Data":"e1345ae0fb2fbbef7c120ac754f195ec8f753aaefd155d0421ad7b3a42285d55"} Oct 09 15:27:24 crc kubenswrapper[4762]: I1009 15:27:24.048046 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g75md" event={"ID":"6094cdc4-6233-4944-840b-7869554c9752","Type":"ContainerStarted","Data":"deb52e1c9bcd52e7b2685eae08d244b2a7a76897ea41d1cba9aa099f3f2c1ccf"} Oct 09 15:27:25 crc kubenswrapper[4762]: I1009 15:27:25.058151 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g75md" event={"ID":"6094cdc4-6233-4944-840b-7869554c9752","Type":"ContainerStarted","Data":"559f35a71cf7d8683987e0b2dd857a2f559c4ef24f4ad3d6d027397b27a6c25f"} Oct 09 15:27:26 crc kubenswrapper[4762]: I1009 15:27:26.069740 4762 generic.go:334] "Generic (PLEG): container finished" podID="6094cdc4-6233-4944-840b-7869554c9752" containerID="559f35a71cf7d8683987e0b2dd857a2f559c4ef24f4ad3d6d027397b27a6c25f" exitCode=0 Oct 09 15:27:26 crc kubenswrapper[4762]: I1009 15:27:26.069782 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g75md" event={"ID":"6094cdc4-6233-4944-840b-7869554c9752","Type":"ContainerDied","Data":"559f35a71cf7d8683987e0b2dd857a2f559c4ef24f4ad3d6d027397b27a6c25f"} Oct 09 15:27:27 crc kubenswrapper[4762]: I1009 15:27:27.082152 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g75md" event={"ID":"6094cdc4-6233-4944-840b-7869554c9752","Type":"ContainerStarted","Data":"53a30579e05e536183cd25c536adde7dd1b31463596611f604a16dfec62d23dc"} Oct 09 15:27:27 crc kubenswrapper[4762]: I1009 15:27:27.106106 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-g75md" podStartSLOduration=2.563167021 podStartE2EDuration="5.10608322s" podCreationTimestamp="2025-10-09 15:27:22 +0000 UTC" firstStartedPulling="2025-10-09 15:27:24.066864005 +0000 UTC m=+7319.840655044" lastFinishedPulling="2025-10-09 15:27:26.609780204 +0000 UTC m=+7322.383571243" observedRunningTime="2025-10-09 15:27:27.105922666 +0000 UTC m=+7322.879713705" watchObservedRunningTime="2025-10-09 15:27:27.10608322 +0000 UTC m=+7322.879874259" Oct 09 15:27:33 crc kubenswrapper[4762]: I1009 15:27:33.068088 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openshift-marketplace/redhat-marketplace-g75md" Oct 09 15:27:33 crc kubenswrapper[4762]: I1009 15:27:33.068735 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-g75md" Oct 09 15:27:33 crc kubenswrapper[4762]: I1009 15:27:33.133431 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-g75md" Oct 09 15:27:33 crc kubenswrapper[4762]: I1009 15:27:33.189236 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-g75md" Oct 09 15:27:33 crc kubenswrapper[4762]: I1009 15:27:33.371063 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-g75md"] Oct 09 15:27:35 crc kubenswrapper[4762]: I1009 15:27:35.150310 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-g75md" podUID="6094cdc4-6233-4944-840b-7869554c9752" containerName="registry-server" containerID="cri-o://53a30579e05e536183cd25c536adde7dd1b31463596611f604a16dfec62d23dc" gracePeriod=2 Oct 09 15:27:35 crc kubenswrapper[4762]: I1009 15:27:35.662788 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-g75md" Oct 09 15:27:35 crc kubenswrapper[4762]: I1009 15:27:35.840090 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8npm7\" (UniqueName: \"kubernetes.io/projected/6094cdc4-6233-4944-840b-7869554c9752-kube-api-access-8npm7\") pod \"6094cdc4-6233-4944-840b-7869554c9752\" (UID: \"6094cdc4-6233-4944-840b-7869554c9752\") " Oct 09 15:27:35 crc kubenswrapper[4762]: I1009 15:27:35.840563 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6094cdc4-6233-4944-840b-7869554c9752-catalog-content\") pod \"6094cdc4-6233-4944-840b-7869554c9752\" (UID: \"6094cdc4-6233-4944-840b-7869554c9752\") " Oct 09 15:27:35 crc kubenswrapper[4762]: I1009 15:27:35.841738 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6094cdc4-6233-4944-840b-7869554c9752-utilities\") pod \"6094cdc4-6233-4944-840b-7869554c9752\" (UID: \"6094cdc4-6233-4944-840b-7869554c9752\") " Oct 09 15:27:35 crc kubenswrapper[4762]: I1009 15:27:35.842776 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6094cdc4-6233-4944-840b-7869554c9752-utilities" (OuterVolumeSpecName: "utilities") pod "6094cdc4-6233-4944-840b-7869554c9752" (UID: "6094cdc4-6233-4944-840b-7869554c9752"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 15:27:35 crc kubenswrapper[4762]: I1009 15:27:35.844121 4762 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6094cdc4-6233-4944-840b-7869554c9752-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 15:27:35 crc kubenswrapper[4762]: I1009 15:27:35.852031 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6094cdc4-6233-4944-840b-7869554c9752-kube-api-access-8npm7" (OuterVolumeSpecName: "kube-api-access-8npm7") pod "6094cdc4-6233-4944-840b-7869554c9752" (UID: "6094cdc4-6233-4944-840b-7869554c9752"). InnerVolumeSpecName "kube-api-access-8npm7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 15:27:35 crc kubenswrapper[4762]: I1009 15:27:35.859073 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6094cdc4-6233-4944-840b-7869554c9752-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6094cdc4-6233-4944-840b-7869554c9752" (UID: "6094cdc4-6233-4944-840b-7869554c9752"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 15:27:35 crc kubenswrapper[4762]: I1009 15:27:35.945562 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8npm7\" (UniqueName: \"kubernetes.io/projected/6094cdc4-6233-4944-840b-7869554c9752-kube-api-access-8npm7\") on node \"crc\" DevicePath \"\"" Oct 09 15:27:35 crc kubenswrapper[4762]: I1009 15:27:35.945618 4762 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6094cdc4-6233-4944-840b-7869554c9752-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 15:27:36 crc kubenswrapper[4762]: I1009 15:27:36.164666 4762 generic.go:334] "Generic (PLEG): container finished" podID="6094cdc4-6233-4944-840b-7869554c9752" containerID="53a30579e05e536183cd25c536adde7dd1b31463596611f604a16dfec62d23dc" exitCode=0 Oct 09 15:27:36 crc kubenswrapper[4762]: I1009 15:27:36.164858 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g75md" event={"ID":"6094cdc4-6233-4944-840b-7869554c9752","Type":"ContainerDied","Data":"53a30579e05e536183cd25c536adde7dd1b31463596611f604a16dfec62d23dc"} Oct 09 15:27:36 crc kubenswrapper[4762]: I1009 15:27:36.165032 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g75md" event={"ID":"6094cdc4-6233-4944-840b-7869554c9752","Type":"ContainerDied","Data":"deb52e1c9bcd52e7b2685eae08d244b2a7a76897ea41d1cba9aa099f3f2c1ccf"} Oct 09 15:27:36 crc kubenswrapper[4762]: I1009 15:27:36.165058 4762 scope.go:117] "RemoveContainer" containerID="53a30579e05e536183cd25c536adde7dd1b31463596611f604a16dfec62d23dc" Oct 09 15:27:36 crc kubenswrapper[4762]: I1009 15:27:36.164923 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-g75md" Oct 09 15:27:36 crc kubenswrapper[4762]: I1009 15:27:36.199831 4762 scope.go:117] "RemoveContainer" containerID="559f35a71cf7d8683987e0b2dd857a2f559c4ef24f4ad3d6d027397b27a6c25f" Oct 09 15:27:36 crc kubenswrapper[4762]: I1009 15:27:36.206507 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-g75md"] Oct 09 15:27:36 crc kubenswrapper[4762]: I1009 15:27:36.231958 4762 scope.go:117] "RemoveContainer" containerID="e1345ae0fb2fbbef7c120ac754f195ec8f753aaefd155d0421ad7b3a42285d55" Oct 09 15:27:36 crc kubenswrapper[4762]: I1009 15:27:36.240063 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-g75md"] Oct 09 15:27:36 crc kubenswrapper[4762]: I1009 15:27:36.277258 4762 scope.go:117] "RemoveContainer" containerID="53a30579e05e536183cd25c536adde7dd1b31463596611f604a16dfec62d23dc" Oct 09 15:27:36 crc kubenswrapper[4762]: E1009 15:27:36.278002 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"53a30579e05e536183cd25c536adde7dd1b31463596611f604a16dfec62d23dc\": container with ID starting with 53a30579e05e536183cd25c536adde7dd1b31463596611f604a16dfec62d23dc not found: ID does not exist" containerID="53a30579e05e536183cd25c536adde7dd1b31463596611f604a16dfec62d23dc" Oct 09 15:27:36 crc kubenswrapper[4762]: I1009 15:27:36.278053 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"53a30579e05e536183cd25c536adde7dd1b31463596611f604a16dfec62d23dc"} err="failed to get container status \"53a30579e05e536183cd25c536adde7dd1b31463596611f604a16dfec62d23dc\": rpc error: code = NotFound desc = could not find container \"53a30579e05e536183cd25c536adde7dd1b31463596611f604a16dfec62d23dc\": container with ID starting with 53a30579e05e536183cd25c536adde7dd1b31463596611f604a16dfec62d23dc not found: ID does not exist" Oct 09 15:27:36 crc kubenswrapper[4762]: I1009 15:27:36.278078 4762 scope.go:117] "RemoveContainer" containerID="559f35a71cf7d8683987e0b2dd857a2f559c4ef24f4ad3d6d027397b27a6c25f" Oct 09 15:27:36 crc kubenswrapper[4762]: E1009 15:27:36.280208 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"559f35a71cf7d8683987e0b2dd857a2f559c4ef24f4ad3d6d027397b27a6c25f\": container with ID starting with 559f35a71cf7d8683987e0b2dd857a2f559c4ef24f4ad3d6d027397b27a6c25f not found: ID does not exist" containerID="559f35a71cf7d8683987e0b2dd857a2f559c4ef24f4ad3d6d027397b27a6c25f" Oct 09 15:27:36 crc kubenswrapper[4762]: I1009 15:27:36.280277 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"559f35a71cf7d8683987e0b2dd857a2f559c4ef24f4ad3d6d027397b27a6c25f"} err="failed to get container status \"559f35a71cf7d8683987e0b2dd857a2f559c4ef24f4ad3d6d027397b27a6c25f\": rpc error: code = NotFound desc = could not find container \"559f35a71cf7d8683987e0b2dd857a2f559c4ef24f4ad3d6d027397b27a6c25f\": container with ID starting with 559f35a71cf7d8683987e0b2dd857a2f559c4ef24f4ad3d6d027397b27a6c25f not found: ID does not exist" Oct 09 15:27:36 crc kubenswrapper[4762]: I1009 15:27:36.280293 4762 scope.go:117] "RemoveContainer" containerID="e1345ae0fb2fbbef7c120ac754f195ec8f753aaefd155d0421ad7b3a42285d55" Oct 09 15:27:36 crc kubenswrapper[4762]: E1009 15:27:36.280821 4762 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"e1345ae0fb2fbbef7c120ac754f195ec8f753aaefd155d0421ad7b3a42285d55\": container with ID starting with e1345ae0fb2fbbef7c120ac754f195ec8f753aaefd155d0421ad7b3a42285d55 not found: ID does not exist" containerID="e1345ae0fb2fbbef7c120ac754f195ec8f753aaefd155d0421ad7b3a42285d55" Oct 09 15:27:36 crc kubenswrapper[4762]: I1009 15:27:36.280880 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e1345ae0fb2fbbef7c120ac754f195ec8f753aaefd155d0421ad7b3a42285d55"} err="failed to get container status \"e1345ae0fb2fbbef7c120ac754f195ec8f753aaefd155d0421ad7b3a42285d55\": rpc error: code = NotFound desc = could not find container \"e1345ae0fb2fbbef7c120ac754f195ec8f753aaefd155d0421ad7b3a42285d55\": container with ID starting with e1345ae0fb2fbbef7c120ac754f195ec8f753aaefd155d0421ad7b3a42285d55 not found: ID does not exist" Oct 09 15:27:36 crc kubenswrapper[4762]: I1009 15:27:36.976604 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6094cdc4-6233-4944-840b-7869554c9752" path="/var/lib/kubelet/pods/6094cdc4-6233-4944-840b-7869554c9752/volumes" Oct 09 15:28:41 crc kubenswrapper[4762]: I1009 15:28:41.969021 4762 patch_prober.go:28] interesting pod/machine-config-daemon-5v6hv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 15:28:41 crc kubenswrapper[4762]: I1009 15:28:41.969744 4762 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 15:28:59 crc kubenswrapper[4762]: I1009 15:28:59.611735 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-c6b6b"] Oct 09 15:28:59 crc kubenswrapper[4762]: E1009 15:28:59.613316 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6094cdc4-6233-4944-840b-7869554c9752" containerName="extract-content" Oct 09 15:28:59 crc kubenswrapper[4762]: I1009 15:28:59.613346 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="6094cdc4-6233-4944-840b-7869554c9752" containerName="extract-content" Oct 09 15:28:59 crc kubenswrapper[4762]: E1009 15:28:59.613400 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6094cdc4-6233-4944-840b-7869554c9752" containerName="registry-server" Oct 09 15:28:59 crc kubenswrapper[4762]: I1009 15:28:59.613413 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="6094cdc4-6233-4944-840b-7869554c9752" containerName="registry-server" Oct 09 15:28:59 crc kubenswrapper[4762]: E1009 15:28:59.613442 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6094cdc4-6233-4944-840b-7869554c9752" containerName="extract-utilities" Oct 09 15:28:59 crc kubenswrapper[4762]: I1009 15:28:59.613458 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="6094cdc4-6233-4944-840b-7869554c9752" containerName="extract-utilities" Oct 09 15:28:59 crc kubenswrapper[4762]: I1009 15:28:59.614085 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="6094cdc4-6233-4944-840b-7869554c9752" containerName="registry-server" Oct 09 15:28:59 crc kubenswrapper[4762]: I1009 
15:28:59.616440 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-c6b6b" Oct 09 15:28:59 crc kubenswrapper[4762]: I1009 15:28:59.640940 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-c6b6b"] Oct 09 15:28:59 crc kubenswrapper[4762]: I1009 15:28:59.752886 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fl9sr\" (UniqueName: \"kubernetes.io/projected/f45a1590-a2b7-49aa-9af5-3aca15654e6a-kube-api-access-fl9sr\") pod \"redhat-operators-c6b6b\" (UID: \"f45a1590-a2b7-49aa-9af5-3aca15654e6a\") " pod="openshift-marketplace/redhat-operators-c6b6b" Oct 09 15:28:59 crc kubenswrapper[4762]: I1009 15:28:59.752948 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f45a1590-a2b7-49aa-9af5-3aca15654e6a-catalog-content\") pod \"redhat-operators-c6b6b\" (UID: \"f45a1590-a2b7-49aa-9af5-3aca15654e6a\") " pod="openshift-marketplace/redhat-operators-c6b6b" Oct 09 15:28:59 crc kubenswrapper[4762]: I1009 15:28:59.752992 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f45a1590-a2b7-49aa-9af5-3aca15654e6a-utilities\") pod \"redhat-operators-c6b6b\" (UID: \"f45a1590-a2b7-49aa-9af5-3aca15654e6a\") " pod="openshift-marketplace/redhat-operators-c6b6b" Oct 09 15:28:59 crc kubenswrapper[4762]: I1009 15:28:59.854949 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fl9sr\" (UniqueName: \"kubernetes.io/projected/f45a1590-a2b7-49aa-9af5-3aca15654e6a-kube-api-access-fl9sr\") pod \"redhat-operators-c6b6b\" (UID: \"f45a1590-a2b7-49aa-9af5-3aca15654e6a\") " pod="openshift-marketplace/redhat-operators-c6b6b" Oct 09 15:28:59 crc kubenswrapper[4762]: I1009 15:28:59.855022 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f45a1590-a2b7-49aa-9af5-3aca15654e6a-catalog-content\") pod \"redhat-operators-c6b6b\" (UID: \"f45a1590-a2b7-49aa-9af5-3aca15654e6a\") " pod="openshift-marketplace/redhat-operators-c6b6b" Oct 09 15:28:59 crc kubenswrapper[4762]: I1009 15:28:59.855069 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f45a1590-a2b7-49aa-9af5-3aca15654e6a-utilities\") pod \"redhat-operators-c6b6b\" (UID: \"f45a1590-a2b7-49aa-9af5-3aca15654e6a\") " pod="openshift-marketplace/redhat-operators-c6b6b" Oct 09 15:28:59 crc kubenswrapper[4762]: I1009 15:28:59.855794 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f45a1590-a2b7-49aa-9af5-3aca15654e6a-utilities\") pod \"redhat-operators-c6b6b\" (UID: \"f45a1590-a2b7-49aa-9af5-3aca15654e6a\") " pod="openshift-marketplace/redhat-operators-c6b6b" Oct 09 15:28:59 crc kubenswrapper[4762]: I1009 15:28:59.856298 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f45a1590-a2b7-49aa-9af5-3aca15654e6a-catalog-content\") pod \"redhat-operators-c6b6b\" (UID: \"f45a1590-a2b7-49aa-9af5-3aca15654e6a\") " pod="openshift-marketplace/redhat-operators-c6b6b" Oct 09 15:28:59 crc kubenswrapper[4762]: I1009 15:28:59.910438 4762 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fl9sr\" (UniqueName: \"kubernetes.io/projected/f45a1590-a2b7-49aa-9af5-3aca15654e6a-kube-api-access-fl9sr\") pod \"redhat-operators-c6b6b\" (UID: \"f45a1590-a2b7-49aa-9af5-3aca15654e6a\") " pod="openshift-marketplace/redhat-operators-c6b6b" Oct 09 15:28:59 crc kubenswrapper[4762]: I1009 15:28:59.946238 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-c6b6b" Oct 09 15:29:00 crc kubenswrapper[4762]: I1009 15:29:00.526630 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-c6b6b"] Oct 09 15:29:01 crc kubenswrapper[4762]: I1009 15:29:01.068086 4762 generic.go:334] "Generic (PLEG): container finished" podID="f45a1590-a2b7-49aa-9af5-3aca15654e6a" containerID="0fe91274eb4a812fcedc54ec3b487a2dee1a5cafe2090ad871a616c10561a615" exitCode=0 Oct 09 15:29:01 crc kubenswrapper[4762]: I1009 15:29:01.068415 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-c6b6b" event={"ID":"f45a1590-a2b7-49aa-9af5-3aca15654e6a","Type":"ContainerDied","Data":"0fe91274eb4a812fcedc54ec3b487a2dee1a5cafe2090ad871a616c10561a615"} Oct 09 15:29:01 crc kubenswrapper[4762]: I1009 15:29:01.068442 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-c6b6b" event={"ID":"f45a1590-a2b7-49aa-9af5-3aca15654e6a","Type":"ContainerStarted","Data":"354de83a7a9b2856ff4b5e0fd5e6d1e3eb9be6cdda6c64dc9bc9657e5aef9f45"} Oct 09 15:29:02 crc kubenswrapper[4762]: I1009 15:29:02.078906 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-c6b6b" event={"ID":"f45a1590-a2b7-49aa-9af5-3aca15654e6a","Type":"ContainerStarted","Data":"e58bdd8d3a0eab6084236691970501fcb0c81509e7f34c2da40e65518d2f5acf"} Oct 09 15:29:08 crc kubenswrapper[4762]: I1009 15:29:08.131799 4762 generic.go:334] "Generic (PLEG): container finished" podID="f45a1590-a2b7-49aa-9af5-3aca15654e6a" containerID="e58bdd8d3a0eab6084236691970501fcb0c81509e7f34c2da40e65518d2f5acf" exitCode=0 Oct 09 15:29:08 crc kubenswrapper[4762]: I1009 15:29:08.131868 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-c6b6b" event={"ID":"f45a1590-a2b7-49aa-9af5-3aca15654e6a","Type":"ContainerDied","Data":"e58bdd8d3a0eab6084236691970501fcb0c81509e7f34c2da40e65518d2f5acf"} Oct 09 15:29:09 crc kubenswrapper[4762]: I1009 15:29:09.169107 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-c6b6b" event={"ID":"f45a1590-a2b7-49aa-9af5-3aca15654e6a","Type":"ContainerStarted","Data":"fa5669a6c9ed7c6a356147f376cc28c5fcfc4c83cf63484b80d84bbe793f23d2"} Oct 09 15:29:09 crc kubenswrapper[4762]: I1009 15:29:09.195244 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-c6b6b" podStartSLOduration=2.608458398 podStartE2EDuration="10.195216858s" podCreationTimestamp="2025-10-09 15:28:59 +0000 UTC" firstStartedPulling="2025-10-09 15:29:01.069993848 +0000 UTC m=+7416.843784887" lastFinishedPulling="2025-10-09 15:29:08.656752308 +0000 UTC m=+7424.430543347" observedRunningTime="2025-10-09 15:29:09.188300087 +0000 UTC m=+7424.962091136" watchObservedRunningTime="2025-10-09 15:29:09.195216858 +0000 UTC m=+7424.969007897" Oct 09 15:29:09 crc kubenswrapper[4762]: I1009 15:29:09.948787 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="unhealthy" pod="openshift-marketplace/redhat-operators-c6b6b" Oct 09 15:29:09 crc kubenswrapper[4762]: I1009 15:29:09.949141 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-c6b6b" Oct 09 15:29:11 crc kubenswrapper[4762]: I1009 15:29:11.005103 4762 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-c6b6b" podUID="f45a1590-a2b7-49aa-9af5-3aca15654e6a" containerName="registry-server" probeResult="failure" output=< Oct 09 15:29:11 crc kubenswrapper[4762]: timeout: failed to connect service ":50051" within 1s Oct 09 15:29:11 crc kubenswrapper[4762]: > Oct 09 15:29:11 crc kubenswrapper[4762]: I1009 15:29:11.969539 4762 patch_prober.go:28] interesting pod/machine-config-daemon-5v6hv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 15:29:11 crc kubenswrapper[4762]: I1009 15:29:11.970527 4762 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 15:29:20 crc kubenswrapper[4762]: I1009 15:29:20.001931 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-c6b6b" Oct 09 15:29:20 crc kubenswrapper[4762]: I1009 15:29:20.057426 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-c6b6b" Oct 09 15:29:20 crc kubenswrapper[4762]: I1009 15:29:20.241227 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-c6b6b"] Oct 09 15:29:21 crc kubenswrapper[4762]: I1009 15:29:21.273580 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-c6b6b" podUID="f45a1590-a2b7-49aa-9af5-3aca15654e6a" containerName="registry-server" containerID="cri-o://fa5669a6c9ed7c6a356147f376cc28c5fcfc4c83cf63484b80d84bbe793f23d2" gracePeriod=2 Oct 09 15:29:21 crc kubenswrapper[4762]: I1009 15:29:21.829503 4762 util.go:48] "No ready sandbox for pod can be found. 
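The startup-probe failure above reports 'timeout: failed to connect service ":50051" within 1s' — the marketplace registry-server containers serve gRPC on port 50051, and the probe gives up if the port cannot be reached within one second. Below is a minimal Go sketch of that kind of dial-with-deadline check; it illustrates the failure mode only and is not the actual probe binary shipped in the catalog image, and the 127.0.0.1 address is an assumption (the log output shows only ":50051").

    // probe_sketch.go -- a dial-with-deadline check in the spirit of the
    // failed startup probe above. Illustrative only; not the real probe.
    package main

    import (
        "fmt"
        "net"
        "os"
        "time"
    )

    func main() {
        addr := "127.0.0.1:50051" // assumed; the log output shows only ":50051"
        conn, err := net.DialTimeout("tcp", addr, 1*time.Second)
        if err != nil {
            fmt.Fprintf(os.Stderr, "timeout: failed to connect service %q within 1s\n", addr)
            os.Exit(1) // a non-zero exit is what marks the probe attempt as failed
        }
        conn.Close()
        fmt.Println("service reachable")
    }

Note that the pod recovers on its own at 15:29:20 once the catalog has finished loading, which is why the startup probe (rather than a liveness probe) absorbs this delay.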
Need to start a new one" pod="openshift-marketplace/redhat-operators-c6b6b" Oct 09 15:29:21 crc kubenswrapper[4762]: I1009 15:29:21.944419 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f45a1590-a2b7-49aa-9af5-3aca15654e6a-utilities\") pod \"f45a1590-a2b7-49aa-9af5-3aca15654e6a\" (UID: \"f45a1590-a2b7-49aa-9af5-3aca15654e6a\") " Oct 09 15:29:21 crc kubenswrapper[4762]: I1009 15:29:21.944648 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fl9sr\" (UniqueName: \"kubernetes.io/projected/f45a1590-a2b7-49aa-9af5-3aca15654e6a-kube-api-access-fl9sr\") pod \"f45a1590-a2b7-49aa-9af5-3aca15654e6a\" (UID: \"f45a1590-a2b7-49aa-9af5-3aca15654e6a\") " Oct 09 15:29:21 crc kubenswrapper[4762]: I1009 15:29:21.944722 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f45a1590-a2b7-49aa-9af5-3aca15654e6a-catalog-content\") pod \"f45a1590-a2b7-49aa-9af5-3aca15654e6a\" (UID: \"f45a1590-a2b7-49aa-9af5-3aca15654e6a\") " Oct 09 15:29:21 crc kubenswrapper[4762]: I1009 15:29:21.945736 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f45a1590-a2b7-49aa-9af5-3aca15654e6a-utilities" (OuterVolumeSpecName: "utilities") pod "f45a1590-a2b7-49aa-9af5-3aca15654e6a" (UID: "f45a1590-a2b7-49aa-9af5-3aca15654e6a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 15:29:21 crc kubenswrapper[4762]: I1009 15:29:21.954974 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f45a1590-a2b7-49aa-9af5-3aca15654e6a-kube-api-access-fl9sr" (OuterVolumeSpecName: "kube-api-access-fl9sr") pod "f45a1590-a2b7-49aa-9af5-3aca15654e6a" (UID: "f45a1590-a2b7-49aa-9af5-3aca15654e6a"). InnerVolumeSpecName "kube-api-access-fl9sr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 15:29:22 crc kubenswrapper[4762]: I1009 15:29:22.032254 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f45a1590-a2b7-49aa-9af5-3aca15654e6a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f45a1590-a2b7-49aa-9af5-3aca15654e6a" (UID: "f45a1590-a2b7-49aa-9af5-3aca15654e6a"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 15:29:22 crc kubenswrapper[4762]: I1009 15:29:22.047527 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fl9sr\" (UniqueName: \"kubernetes.io/projected/f45a1590-a2b7-49aa-9af5-3aca15654e6a-kube-api-access-fl9sr\") on node \"crc\" DevicePath \"\"" Oct 09 15:29:22 crc kubenswrapper[4762]: I1009 15:29:22.047563 4762 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f45a1590-a2b7-49aa-9af5-3aca15654e6a-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 15:29:22 crc kubenswrapper[4762]: I1009 15:29:22.047575 4762 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f45a1590-a2b7-49aa-9af5-3aca15654e6a-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 15:29:22 crc kubenswrapper[4762]: I1009 15:29:22.285426 4762 generic.go:334] "Generic (PLEG): container finished" podID="f45a1590-a2b7-49aa-9af5-3aca15654e6a" containerID="fa5669a6c9ed7c6a356147f376cc28c5fcfc4c83cf63484b80d84bbe793f23d2" exitCode=0 Oct 09 15:29:22 crc kubenswrapper[4762]: I1009 15:29:22.285486 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-c6b6b" Oct 09 15:29:22 crc kubenswrapper[4762]: I1009 15:29:22.285521 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-c6b6b" event={"ID":"f45a1590-a2b7-49aa-9af5-3aca15654e6a","Type":"ContainerDied","Data":"fa5669a6c9ed7c6a356147f376cc28c5fcfc4c83cf63484b80d84bbe793f23d2"} Oct 09 15:29:22 crc kubenswrapper[4762]: I1009 15:29:22.286919 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-c6b6b" event={"ID":"f45a1590-a2b7-49aa-9af5-3aca15654e6a","Type":"ContainerDied","Data":"354de83a7a9b2856ff4b5e0fd5e6d1e3eb9be6cdda6c64dc9bc9657e5aef9f45"} Oct 09 15:29:22 crc kubenswrapper[4762]: I1009 15:29:22.286945 4762 scope.go:117] "RemoveContainer" containerID="fa5669a6c9ed7c6a356147f376cc28c5fcfc4c83cf63484b80d84bbe793f23d2" Oct 09 15:29:22 crc kubenswrapper[4762]: I1009 15:29:22.326496 4762 scope.go:117] "RemoveContainer" containerID="e58bdd8d3a0eab6084236691970501fcb0c81509e7f34c2da40e65518d2f5acf" Oct 09 15:29:22 crc kubenswrapper[4762]: I1009 15:29:22.334326 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-c6b6b"] Oct 09 15:29:22 crc kubenswrapper[4762]: I1009 15:29:22.347256 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-c6b6b"] Oct 09 15:29:22 crc kubenswrapper[4762]: I1009 15:29:22.360248 4762 scope.go:117] "RemoveContainer" containerID="0fe91274eb4a812fcedc54ec3b487a2dee1a5cafe2090ad871a616c10561a615" Oct 09 15:29:22 crc kubenswrapper[4762]: I1009 15:29:22.409709 4762 scope.go:117] "RemoveContainer" containerID="fa5669a6c9ed7c6a356147f376cc28c5fcfc4c83cf63484b80d84bbe793f23d2" Oct 09 15:29:22 crc kubenswrapper[4762]: E1009 15:29:22.410208 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fa5669a6c9ed7c6a356147f376cc28c5fcfc4c83cf63484b80d84bbe793f23d2\": container with ID starting with fa5669a6c9ed7c6a356147f376cc28c5fcfc4c83cf63484b80d84bbe793f23d2 not found: ID does not exist" containerID="fa5669a6c9ed7c6a356147f376cc28c5fcfc4c83cf63484b80d84bbe793f23d2" Oct 09 15:29:22 crc kubenswrapper[4762]: I1009 15:29:22.410263 4762 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fa5669a6c9ed7c6a356147f376cc28c5fcfc4c83cf63484b80d84bbe793f23d2"} err="failed to get container status \"fa5669a6c9ed7c6a356147f376cc28c5fcfc4c83cf63484b80d84bbe793f23d2\": rpc error: code = NotFound desc = could not find container \"fa5669a6c9ed7c6a356147f376cc28c5fcfc4c83cf63484b80d84bbe793f23d2\": container with ID starting with fa5669a6c9ed7c6a356147f376cc28c5fcfc4c83cf63484b80d84bbe793f23d2 not found: ID does not exist" Oct 09 15:29:22 crc kubenswrapper[4762]: I1009 15:29:22.410289 4762 scope.go:117] "RemoveContainer" containerID="e58bdd8d3a0eab6084236691970501fcb0c81509e7f34c2da40e65518d2f5acf" Oct 09 15:29:22 crc kubenswrapper[4762]: E1009 15:29:22.410748 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e58bdd8d3a0eab6084236691970501fcb0c81509e7f34c2da40e65518d2f5acf\": container with ID starting with e58bdd8d3a0eab6084236691970501fcb0c81509e7f34c2da40e65518d2f5acf not found: ID does not exist" containerID="e58bdd8d3a0eab6084236691970501fcb0c81509e7f34c2da40e65518d2f5acf" Oct 09 15:29:22 crc kubenswrapper[4762]: I1009 15:29:22.410779 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e58bdd8d3a0eab6084236691970501fcb0c81509e7f34c2da40e65518d2f5acf"} err="failed to get container status \"e58bdd8d3a0eab6084236691970501fcb0c81509e7f34c2da40e65518d2f5acf\": rpc error: code = NotFound desc = could not find container \"e58bdd8d3a0eab6084236691970501fcb0c81509e7f34c2da40e65518d2f5acf\": container with ID starting with e58bdd8d3a0eab6084236691970501fcb0c81509e7f34c2da40e65518d2f5acf not found: ID does not exist" Oct 09 15:29:22 crc kubenswrapper[4762]: I1009 15:29:22.410799 4762 scope.go:117] "RemoveContainer" containerID="0fe91274eb4a812fcedc54ec3b487a2dee1a5cafe2090ad871a616c10561a615" Oct 09 15:29:22 crc kubenswrapper[4762]: E1009 15:29:22.411050 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0fe91274eb4a812fcedc54ec3b487a2dee1a5cafe2090ad871a616c10561a615\": container with ID starting with 0fe91274eb4a812fcedc54ec3b487a2dee1a5cafe2090ad871a616c10561a615 not found: ID does not exist" containerID="0fe91274eb4a812fcedc54ec3b487a2dee1a5cafe2090ad871a616c10561a615" Oct 09 15:29:22 crc kubenswrapper[4762]: I1009 15:29:22.411091 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0fe91274eb4a812fcedc54ec3b487a2dee1a5cafe2090ad871a616c10561a615"} err="failed to get container status \"0fe91274eb4a812fcedc54ec3b487a2dee1a5cafe2090ad871a616c10561a615\": rpc error: code = NotFound desc = could not find container \"0fe91274eb4a812fcedc54ec3b487a2dee1a5cafe2090ad871a616c10561a615\": container with ID starting with 0fe91274eb4a812fcedc54ec3b487a2dee1a5cafe2090ad871a616c10561a615 not found: ID does not exist" Oct 09 15:29:22 crc kubenswrapper[4762]: I1009 15:29:22.977383 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f45a1590-a2b7-49aa-9af5-3aca15654e6a" path="/var/lib/kubelet/pods/f45a1590-a2b7-49aa-9af5-3aca15654e6a/volumes" Oct 09 15:29:40 crc kubenswrapper[4762]: I1009 15:29:40.467900 4762 generic.go:334] "Generic (PLEG): container finished" podID="74f4464f-713c-4ace-9657-31e83a483ae7" containerID="e8cf58caca86409ba7dc93df12fc3accf2f0407583d30daa06a176e34538cceb" exitCode=0 Oct 09 15:29:40 crc kubenswrapper[4762]: I1009 
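The burst of "ContainerStatus from runtime service failed ... NotFound" and "DeleteContainer returned error" entries above is a benign race: the kubelet re-issues RemoveContainer for container IDs the runtime has already purged, and CRI surfaces that as a gRPC NotFound status (visible verbatim in the "rpc error: code = NotFound" text). A sketch of the tolerant-delete pattern this implies follows; removeFn and removeIfPresent are hypothetical names for illustration, not kubelet or CRI API.

    // remove_sketch.go -- treating NotFound as "already removed", the race
    // recorded in the log above. Hypothetical helper, not kubelet code.
    package main

    import (
        "fmt"

        "google.golang.org/grpc/codes"
        "google.golang.org/grpc/status"
    )

    // removeFn stands in for a CRI RemoveContainer call.
    type removeFn func(containerID string) error

    // removeIfPresent succeeds when the container is deleted or already gone.
    func removeIfPresent(remove removeFn, id string) error {
        if err := remove(id); status.Code(err) != codes.OK && status.Code(err) != codes.NotFound {
            return err
        }
        return nil
    }

    func main() {
        alreadyGone := func(id string) error {
            return status.Error(codes.NotFound, "could not find container "+id)
        }
        fmt.Println(removeIfPresent(alreadyGone, "fa5669a6c9ed7c6a")) // <nil>
    }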
15:29:40.467997 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-openstack-openstack-cell1-ft7tq" event={"ID":"74f4464f-713c-4ace-9657-31e83a483ae7","Type":"ContainerDied","Data":"e8cf58caca86409ba7dc93df12fc3accf2f0407583d30daa06a176e34538cceb"} Oct 09 15:29:41 crc kubenswrapper[4762]: I1009 15:29:41.901286 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-openstack-openstack-cell1-ft7tq" Oct 09 15:29:41 crc kubenswrapper[4762]: I1009 15:29:41.968967 4762 patch_prober.go:28] interesting pod/machine-config-daemon-5v6hv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 15:29:41 crc kubenswrapper[4762]: I1009 15:29:41.969016 4762 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 15:29:41 crc kubenswrapper[4762]: I1009 15:29:41.969066 4762 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" Oct 09 15:29:41 crc kubenswrapper[4762]: I1009 15:29:41.970253 4762 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"da3bd6add501ed77419b80f68b04419592f8477497ce7284b22aa3129677ebf6"} pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 09 15:29:41 crc kubenswrapper[4762]: I1009 15:29:41.970335 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" containerID="cri-o://da3bd6add501ed77419b80f68b04419592f8477497ce7284b22aa3129677ebf6" gracePeriod=600 Oct 09 15:29:42 crc kubenswrapper[4762]: I1009 15:29:42.004193 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/74f4464f-713c-4ace-9657-31e83a483ae7-bootstrap-combined-ca-bundle\") pod \"74f4464f-713c-4ace-9657-31e83a483ae7\" (UID: \"74f4464f-713c-4ace-9657-31e83a483ae7\") " Oct 09 15:29:42 crc kubenswrapper[4762]: I1009 15:29:42.004306 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gl6rd\" (UniqueName: \"kubernetes.io/projected/74f4464f-713c-4ace-9657-31e83a483ae7-kube-api-access-gl6rd\") pod \"74f4464f-713c-4ace-9657-31e83a483ae7\" (UID: \"74f4464f-713c-4ace-9657-31e83a483ae7\") " Oct 09 15:29:42 crc kubenswrapper[4762]: I1009 15:29:42.004383 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/74f4464f-713c-4ace-9657-31e83a483ae7-inventory\") pod \"74f4464f-713c-4ace-9657-31e83a483ae7\" (UID: \"74f4464f-713c-4ace-9657-31e83a483ae7\") " Oct 09 15:29:42 crc kubenswrapper[4762]: I1009 15:29:42.004456 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: 
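The liveness failure recorded above is a plain HTTP check: GET http://127.0.0.1:8798/health is refused, so the kubelet marks machine-config-daemon unhealthy ("failed liveness probe, will be restarted") and kills it with a 600s grace period. A self-contained Go sketch of such a check follows; the one-second timeout and the 2xx/3xx success rule are assumptions about a typical HTTP probe, not values read from this pod's spec.

    // health_sketch.go -- an HTTP liveness-style check mirroring the probe
    // output above. Timeout and success range are assumed, not from the pod spec.
    package main

    import (
        "fmt"
        "net/http"
        "os"
        "time"
    )

    func main() {
        client := &http.Client{Timeout: 1 * time.Second}
        resp, err := client.Get("http://127.0.0.1:8798/health")
        if err != nil {
            // e.g. "dial tcp 127.0.0.1:8798: connect: connection refused"
            fmt.Fprintln(os.Stderr, err)
            os.Exit(1)
        }
        defer resp.Body.Close()
        if resp.StatusCode < 200 || resp.StatusCode >= 400 {
            os.Exit(1) // non-2xx/3xx counts as unhealthy
        }
        fmt.Println("healthy:", resp.Status)
    }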
\"kubernetes.io/secret/74f4464f-713c-4ace-9657-31e83a483ae7-ceph\") pod \"74f4464f-713c-4ace-9657-31e83a483ae7\" (UID: \"74f4464f-713c-4ace-9657-31e83a483ae7\") " Oct 09 15:29:42 crc kubenswrapper[4762]: I1009 15:29:42.004553 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/74f4464f-713c-4ace-9657-31e83a483ae7-ssh-key\") pod \"74f4464f-713c-4ace-9657-31e83a483ae7\" (UID: \"74f4464f-713c-4ace-9657-31e83a483ae7\") " Oct 09 15:29:42 crc kubenswrapper[4762]: I1009 15:29:42.013390 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/74f4464f-713c-4ace-9657-31e83a483ae7-kube-api-access-gl6rd" (OuterVolumeSpecName: "kube-api-access-gl6rd") pod "74f4464f-713c-4ace-9657-31e83a483ae7" (UID: "74f4464f-713c-4ace-9657-31e83a483ae7"). InnerVolumeSpecName "kube-api-access-gl6rd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 15:29:42 crc kubenswrapper[4762]: I1009 15:29:42.013905 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/74f4464f-713c-4ace-9657-31e83a483ae7-ceph" (OuterVolumeSpecName: "ceph") pod "74f4464f-713c-4ace-9657-31e83a483ae7" (UID: "74f4464f-713c-4ace-9657-31e83a483ae7"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:29:42 crc kubenswrapper[4762]: I1009 15:29:42.026349 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/74f4464f-713c-4ace-9657-31e83a483ae7-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "74f4464f-713c-4ace-9657-31e83a483ae7" (UID: "74f4464f-713c-4ace-9657-31e83a483ae7"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:29:42 crc kubenswrapper[4762]: I1009 15:29:42.037095 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/74f4464f-713c-4ace-9657-31e83a483ae7-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "74f4464f-713c-4ace-9657-31e83a483ae7" (UID: "74f4464f-713c-4ace-9657-31e83a483ae7"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:29:42 crc kubenswrapper[4762]: I1009 15:29:42.037526 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/74f4464f-713c-4ace-9657-31e83a483ae7-inventory" (OuterVolumeSpecName: "inventory") pod "74f4464f-713c-4ace-9657-31e83a483ae7" (UID: "74f4464f-713c-4ace-9657-31e83a483ae7"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:29:42 crc kubenswrapper[4762]: E1009 15:29:42.093082 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:29:42 crc kubenswrapper[4762]: I1009 15:29:42.106602 4762 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/74f4464f-713c-4ace-9657-31e83a483ae7-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 15:29:42 crc kubenswrapper[4762]: I1009 15:29:42.106651 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gl6rd\" (UniqueName: \"kubernetes.io/projected/74f4464f-713c-4ace-9657-31e83a483ae7-kube-api-access-gl6rd\") on node \"crc\" DevicePath \"\"" Oct 09 15:29:42 crc kubenswrapper[4762]: I1009 15:29:42.106661 4762 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/74f4464f-713c-4ace-9657-31e83a483ae7-inventory\") on node \"crc\" DevicePath \"\"" Oct 09 15:29:42 crc kubenswrapper[4762]: I1009 15:29:42.106669 4762 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/74f4464f-713c-4ace-9657-31e83a483ae7-ceph\") on node \"crc\" DevicePath \"\"" Oct 09 15:29:42 crc kubenswrapper[4762]: I1009 15:29:42.106678 4762 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/74f4464f-713c-4ace-9657-31e83a483ae7-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 09 15:29:42 crc kubenswrapper[4762]: I1009 15:29:42.487000 4762 generic.go:334] "Generic (PLEG): container finished" podID="366049a3-acf6-488c-9f93-4557528d6d14" containerID="da3bd6add501ed77419b80f68b04419592f8477497ce7284b22aa3129677ebf6" exitCode=0 Oct 09 15:29:42 crc kubenswrapper[4762]: I1009 15:29:42.487085 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" event={"ID":"366049a3-acf6-488c-9f93-4557528d6d14","Type":"ContainerDied","Data":"da3bd6add501ed77419b80f68b04419592f8477497ce7284b22aa3129677ebf6"} Oct 09 15:29:42 crc kubenswrapper[4762]: I1009 15:29:42.487142 4762 scope.go:117] "RemoveContainer" containerID="b44086fbd997526ffb6ace45e20f557776d1bda26bb1a653f927fc2b22f9e57d" Oct 09 15:29:42 crc kubenswrapper[4762]: I1009 15:29:42.487917 4762 scope.go:117] "RemoveContainer" containerID="da3bd6add501ed77419b80f68b04419592f8477497ce7284b22aa3129677ebf6" Oct 09 15:29:42 crc kubenswrapper[4762]: E1009 15:29:42.488241 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:29:42 crc kubenswrapper[4762]: I1009 15:29:42.489710 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-openstack-openstack-cell1-ft7tq" 
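The CrashLoopBackOff error above ("back-off 5m0s restarting failed container") shows the kubelet's restart throttle at its ceiling: to the best of my knowledge the delay starts around 10s and doubles per crash until capped at 5m, which matches the repeated 5m0s messages through the rest of this log. A sketch of that doubling schedule, with the base and cap stated as assumptions:

    // backoff_sketch.go -- the doubling restart back-off that produces the
    // "back-off 5m0s" cap above. The 10s base and 5m cap are assumed kubelet
    // defaults, not values read from this log.
    package main

    import (
        "fmt"
        "time"
    )

    func main() {
        const (
            baseDelay = 10 * time.Second
            maxDelay  = 5 * time.Minute
        )
        delay := baseDelay
        for restart := 1; restart <= 7; restart++ {
            fmt.Printf("restart %d: back-off %s\n", restart, delay)
            delay *= 2
            if delay > maxDelay {
                delay = maxDelay // reported as CrashLoopBackOff "back-off 5m0s"
            }
        }
    }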
event={"ID":"74f4464f-713c-4ace-9657-31e83a483ae7","Type":"ContainerDied","Data":"8b22130e2894253b02015e2a582eb420e692eb193fbc22edbe56a4b5cb827c6e"} Oct 09 15:29:42 crc kubenswrapper[4762]: I1009 15:29:42.489738 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8b22130e2894253b02015e2a582eb420e692eb193fbc22edbe56a4b5cb827c6e" Oct 09 15:29:42 crc kubenswrapper[4762]: I1009 15:29:42.489782 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-openstack-openstack-cell1-ft7tq" Oct 09 15:29:42 crc kubenswrapper[4762]: I1009 15:29:42.586107 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/download-cache-openstack-openstack-cell1-4dtq9"] Oct 09 15:29:42 crc kubenswrapper[4762]: E1009 15:29:42.589932 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f45a1590-a2b7-49aa-9af5-3aca15654e6a" containerName="registry-server" Oct 09 15:29:42 crc kubenswrapper[4762]: I1009 15:29:42.589974 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="f45a1590-a2b7-49aa-9af5-3aca15654e6a" containerName="registry-server" Oct 09 15:29:42 crc kubenswrapper[4762]: E1009 15:29:42.589992 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f45a1590-a2b7-49aa-9af5-3aca15654e6a" containerName="extract-utilities" Oct 09 15:29:42 crc kubenswrapper[4762]: I1009 15:29:42.590001 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="f45a1590-a2b7-49aa-9af5-3aca15654e6a" containerName="extract-utilities" Oct 09 15:29:42 crc kubenswrapper[4762]: E1009 15:29:42.590020 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="74f4464f-713c-4ace-9657-31e83a483ae7" containerName="bootstrap-openstack-openstack-cell1" Oct 09 15:29:42 crc kubenswrapper[4762]: I1009 15:29:42.590029 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="74f4464f-713c-4ace-9657-31e83a483ae7" containerName="bootstrap-openstack-openstack-cell1" Oct 09 15:29:42 crc kubenswrapper[4762]: E1009 15:29:42.590050 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f45a1590-a2b7-49aa-9af5-3aca15654e6a" containerName="extract-content" Oct 09 15:29:42 crc kubenswrapper[4762]: I1009 15:29:42.590059 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="f45a1590-a2b7-49aa-9af5-3aca15654e6a" containerName="extract-content" Oct 09 15:29:42 crc kubenswrapper[4762]: I1009 15:29:42.590869 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="74f4464f-713c-4ace-9657-31e83a483ae7" containerName="bootstrap-openstack-openstack-cell1" Oct 09 15:29:42 crc kubenswrapper[4762]: I1009 15:29:42.590897 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="f45a1590-a2b7-49aa-9af5-3aca15654e6a" containerName="registry-server" Oct 09 15:29:42 crc kubenswrapper[4762]: I1009 15:29:42.593817 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/download-cache-openstack-openstack-cell1-4dtq9" Oct 09 15:29:42 crc kubenswrapper[4762]: I1009 15:29:42.597427 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-openstack-openstack-cell1-4dtq9"] Oct 09 15:29:42 crc kubenswrapper[4762]: I1009 15:29:42.603483 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Oct 09 15:29:42 crc kubenswrapper[4762]: I1009 15:29:42.603957 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Oct 09 15:29:42 crc kubenswrapper[4762]: I1009 15:29:42.603949 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 09 15:29:42 crc kubenswrapper[4762]: I1009 15:29:42.604091 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-whcgt" Oct 09 15:29:42 crc kubenswrapper[4762]: I1009 15:29:42.720102 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/53168398-1447-4747-9b52-02f61f828508-ssh-key\") pod \"download-cache-openstack-openstack-cell1-4dtq9\" (UID: \"53168398-1447-4747-9b52-02f61f828508\") " pod="openstack/download-cache-openstack-openstack-cell1-4dtq9" Oct 09 15:29:42 crc kubenswrapper[4762]: I1009 15:29:42.720285 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/53168398-1447-4747-9b52-02f61f828508-inventory\") pod \"download-cache-openstack-openstack-cell1-4dtq9\" (UID: \"53168398-1447-4747-9b52-02f61f828508\") " pod="openstack/download-cache-openstack-openstack-cell1-4dtq9" Oct 09 15:29:42 crc kubenswrapper[4762]: I1009 15:29:42.720429 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/53168398-1447-4747-9b52-02f61f828508-ceph\") pod \"download-cache-openstack-openstack-cell1-4dtq9\" (UID: \"53168398-1447-4747-9b52-02f61f828508\") " pod="openstack/download-cache-openstack-openstack-cell1-4dtq9" Oct 09 15:29:42 crc kubenswrapper[4762]: I1009 15:29:42.720494 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j2gbq\" (UniqueName: \"kubernetes.io/projected/53168398-1447-4747-9b52-02f61f828508-kube-api-access-j2gbq\") pod \"download-cache-openstack-openstack-cell1-4dtq9\" (UID: \"53168398-1447-4747-9b52-02f61f828508\") " pod="openstack/download-cache-openstack-openstack-cell1-4dtq9" Oct 09 15:29:42 crc kubenswrapper[4762]: I1009 15:29:42.821910 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/53168398-1447-4747-9b52-02f61f828508-ssh-key\") pod \"download-cache-openstack-openstack-cell1-4dtq9\" (UID: \"53168398-1447-4747-9b52-02f61f828508\") " pod="openstack/download-cache-openstack-openstack-cell1-4dtq9" Oct 09 15:29:42 crc kubenswrapper[4762]: I1009 15:29:42.822001 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/53168398-1447-4747-9b52-02f61f828508-inventory\") pod \"download-cache-openstack-openstack-cell1-4dtq9\" (UID: \"53168398-1447-4747-9b52-02f61f828508\") " pod="openstack/download-cache-openstack-openstack-cell1-4dtq9" Oct 09 15:29:42 crc 
kubenswrapper[4762]: I1009 15:29:42.822117 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/53168398-1447-4747-9b52-02f61f828508-ceph\") pod \"download-cache-openstack-openstack-cell1-4dtq9\" (UID: \"53168398-1447-4747-9b52-02f61f828508\") " pod="openstack/download-cache-openstack-openstack-cell1-4dtq9" Oct 09 15:29:42 crc kubenswrapper[4762]: I1009 15:29:42.822245 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j2gbq\" (UniqueName: \"kubernetes.io/projected/53168398-1447-4747-9b52-02f61f828508-kube-api-access-j2gbq\") pod \"download-cache-openstack-openstack-cell1-4dtq9\" (UID: \"53168398-1447-4747-9b52-02f61f828508\") " pod="openstack/download-cache-openstack-openstack-cell1-4dtq9" Oct 09 15:29:42 crc kubenswrapper[4762]: I1009 15:29:42.826269 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/53168398-1447-4747-9b52-02f61f828508-inventory\") pod \"download-cache-openstack-openstack-cell1-4dtq9\" (UID: \"53168398-1447-4747-9b52-02f61f828508\") " pod="openstack/download-cache-openstack-openstack-cell1-4dtq9" Oct 09 15:29:42 crc kubenswrapper[4762]: I1009 15:29:42.827303 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/53168398-1447-4747-9b52-02f61f828508-ssh-key\") pod \"download-cache-openstack-openstack-cell1-4dtq9\" (UID: \"53168398-1447-4747-9b52-02f61f828508\") " pod="openstack/download-cache-openstack-openstack-cell1-4dtq9" Oct 09 15:29:42 crc kubenswrapper[4762]: I1009 15:29:42.840437 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/53168398-1447-4747-9b52-02f61f828508-ceph\") pod \"download-cache-openstack-openstack-cell1-4dtq9\" (UID: \"53168398-1447-4747-9b52-02f61f828508\") " pod="openstack/download-cache-openstack-openstack-cell1-4dtq9" Oct 09 15:29:42 crc kubenswrapper[4762]: I1009 15:29:42.841901 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j2gbq\" (UniqueName: \"kubernetes.io/projected/53168398-1447-4747-9b52-02f61f828508-kube-api-access-j2gbq\") pod \"download-cache-openstack-openstack-cell1-4dtq9\" (UID: \"53168398-1447-4747-9b52-02f61f828508\") " pod="openstack/download-cache-openstack-openstack-cell1-4dtq9" Oct 09 15:29:42 crc kubenswrapper[4762]: I1009 15:29:42.928301 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/download-cache-openstack-openstack-cell1-4dtq9" Oct 09 15:29:43 crc kubenswrapper[4762]: I1009 15:29:43.481659 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-openstack-openstack-cell1-4dtq9"] Oct 09 15:29:43 crc kubenswrapper[4762]: I1009 15:29:43.507131 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-openstack-openstack-cell1-4dtq9" event={"ID":"53168398-1447-4747-9b52-02f61f828508","Type":"ContainerStarted","Data":"a77fe79fea099f91492d2ae86377632b2e0d7012b5e1b74d05425cc55681fbdf"} Oct 09 15:29:44 crc kubenswrapper[4762]: I1009 15:29:44.517688 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-openstack-openstack-cell1-4dtq9" event={"ID":"53168398-1447-4747-9b52-02f61f828508","Type":"ContainerStarted","Data":"5cd31fe1a7c2f00c3baedff8ea5d8efe09a0ed8e9d6f7757df50e2fba494f933"} Oct 09 15:29:44 crc kubenswrapper[4762]: I1009 15:29:44.540143 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/download-cache-openstack-openstack-cell1-4dtq9" podStartSLOduration=2.057864913 podStartE2EDuration="2.540125014s" podCreationTimestamp="2025-10-09 15:29:42 +0000 UTC" firstStartedPulling="2025-10-09 15:29:43.492331557 +0000 UTC m=+7459.266122596" lastFinishedPulling="2025-10-09 15:29:43.974591658 +0000 UTC m=+7459.748382697" observedRunningTime="2025-10-09 15:29:44.536586992 +0000 UTC m=+7460.310378031" watchObservedRunningTime="2025-10-09 15:29:44.540125014 +0000 UTC m=+7460.313916053" Oct 09 15:29:53 crc kubenswrapper[4762]: I1009 15:29:53.965221 4762 scope.go:117] "RemoveContainer" containerID="da3bd6add501ed77419b80f68b04419592f8477497ce7284b22aa3129677ebf6" Oct 09 15:29:53 crc kubenswrapper[4762]: E1009 15:29:53.966009 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:30:00 crc kubenswrapper[4762]: I1009 15:30:00.136739 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29333730-cdgz7"] Oct 09 15:30:00 crc kubenswrapper[4762]: I1009 15:30:00.139274 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29333730-cdgz7" Oct 09 15:30:00 crc kubenswrapper[4762]: I1009 15:30:00.143018 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 09 15:30:00 crc kubenswrapper[4762]: I1009 15:30:00.143282 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 09 15:30:00 crc kubenswrapper[4762]: I1009 15:30:00.188097 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29333730-cdgz7"] Oct 09 15:30:00 crc kubenswrapper[4762]: I1009 15:30:00.195435 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/74c6b48b-c330-41fa-b422-0f80defc94d2-secret-volume\") pod \"collect-profiles-29333730-cdgz7\" (UID: \"74c6b48b-c330-41fa-b422-0f80defc94d2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333730-cdgz7" Oct 09 15:30:00 crc kubenswrapper[4762]: I1009 15:30:00.195500 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/74c6b48b-c330-41fa-b422-0f80defc94d2-config-volume\") pod \"collect-profiles-29333730-cdgz7\" (UID: \"74c6b48b-c330-41fa-b422-0f80defc94d2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333730-cdgz7" Oct 09 15:30:00 crc kubenswrapper[4762]: I1009 15:30:00.195609 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f89vr\" (UniqueName: \"kubernetes.io/projected/74c6b48b-c330-41fa-b422-0f80defc94d2-kube-api-access-f89vr\") pod \"collect-profiles-29333730-cdgz7\" (UID: \"74c6b48b-c330-41fa-b422-0f80defc94d2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333730-cdgz7" Oct 09 15:30:00 crc kubenswrapper[4762]: I1009 15:30:00.297627 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/74c6b48b-c330-41fa-b422-0f80defc94d2-secret-volume\") pod \"collect-profiles-29333730-cdgz7\" (UID: \"74c6b48b-c330-41fa-b422-0f80defc94d2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333730-cdgz7" Oct 09 15:30:00 crc kubenswrapper[4762]: I1009 15:30:00.298079 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/74c6b48b-c330-41fa-b422-0f80defc94d2-config-volume\") pod \"collect-profiles-29333730-cdgz7\" (UID: \"74c6b48b-c330-41fa-b422-0f80defc94d2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333730-cdgz7" Oct 09 15:30:00 crc kubenswrapper[4762]: I1009 15:30:00.298245 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f89vr\" (UniqueName: \"kubernetes.io/projected/74c6b48b-c330-41fa-b422-0f80defc94d2-kube-api-access-f89vr\") pod \"collect-profiles-29333730-cdgz7\" (UID: \"74c6b48b-c330-41fa-b422-0f80defc94d2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333730-cdgz7" Oct 09 15:30:00 crc kubenswrapper[4762]: I1009 15:30:00.298952 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/74c6b48b-c330-41fa-b422-0f80defc94d2-config-volume\") pod 
\"collect-profiles-29333730-cdgz7\" (UID: \"74c6b48b-c330-41fa-b422-0f80defc94d2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333730-cdgz7" Oct 09 15:30:00 crc kubenswrapper[4762]: I1009 15:30:00.303916 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/74c6b48b-c330-41fa-b422-0f80defc94d2-secret-volume\") pod \"collect-profiles-29333730-cdgz7\" (UID: \"74c6b48b-c330-41fa-b422-0f80defc94d2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333730-cdgz7" Oct 09 15:30:00 crc kubenswrapper[4762]: I1009 15:30:00.315677 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f89vr\" (UniqueName: \"kubernetes.io/projected/74c6b48b-c330-41fa-b422-0f80defc94d2-kube-api-access-f89vr\") pod \"collect-profiles-29333730-cdgz7\" (UID: \"74c6b48b-c330-41fa-b422-0f80defc94d2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333730-cdgz7" Oct 09 15:30:00 crc kubenswrapper[4762]: I1009 15:30:00.489993 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29333730-cdgz7" Oct 09 15:30:01 crc kubenswrapper[4762]: I1009 15:30:01.014798 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29333730-cdgz7"] Oct 09 15:30:01 crc kubenswrapper[4762]: I1009 15:30:01.676980 4762 generic.go:334] "Generic (PLEG): container finished" podID="74c6b48b-c330-41fa-b422-0f80defc94d2" containerID="d23d2de335d69e0846d8b82920880fde6142785994d5d77ed400bb5c2210122d" exitCode=0 Oct 09 15:30:01 crc kubenswrapper[4762]: I1009 15:30:01.677189 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29333730-cdgz7" event={"ID":"74c6b48b-c330-41fa-b422-0f80defc94d2","Type":"ContainerDied","Data":"d23d2de335d69e0846d8b82920880fde6142785994d5d77ed400bb5c2210122d"} Oct 09 15:30:01 crc kubenswrapper[4762]: I1009 15:30:01.677661 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29333730-cdgz7" event={"ID":"74c6b48b-c330-41fa-b422-0f80defc94d2","Type":"ContainerStarted","Data":"8e0e3ea75500cc590131c0b27ec335d6d85aa785aace1c5eeddaa84510273016"} Oct 09 15:30:03 crc kubenswrapper[4762]: I1009 15:30:03.097862 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29333730-cdgz7" Oct 09 15:30:03 crc kubenswrapper[4762]: I1009 15:30:03.163407 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f89vr\" (UniqueName: \"kubernetes.io/projected/74c6b48b-c330-41fa-b422-0f80defc94d2-kube-api-access-f89vr\") pod \"74c6b48b-c330-41fa-b422-0f80defc94d2\" (UID: \"74c6b48b-c330-41fa-b422-0f80defc94d2\") " Oct 09 15:30:03 crc kubenswrapper[4762]: I1009 15:30:03.163691 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/74c6b48b-c330-41fa-b422-0f80defc94d2-config-volume\") pod \"74c6b48b-c330-41fa-b422-0f80defc94d2\" (UID: \"74c6b48b-c330-41fa-b422-0f80defc94d2\") " Oct 09 15:30:03 crc kubenswrapper[4762]: I1009 15:30:03.163873 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/74c6b48b-c330-41fa-b422-0f80defc94d2-secret-volume\") pod \"74c6b48b-c330-41fa-b422-0f80defc94d2\" (UID: \"74c6b48b-c330-41fa-b422-0f80defc94d2\") " Oct 09 15:30:03 crc kubenswrapper[4762]: I1009 15:30:03.164381 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/74c6b48b-c330-41fa-b422-0f80defc94d2-config-volume" (OuterVolumeSpecName: "config-volume") pod "74c6b48b-c330-41fa-b422-0f80defc94d2" (UID: "74c6b48b-c330-41fa-b422-0f80defc94d2"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 15:30:03 crc kubenswrapper[4762]: I1009 15:30:03.164672 4762 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/74c6b48b-c330-41fa-b422-0f80defc94d2-config-volume\") on node \"crc\" DevicePath \"\"" Oct 09 15:30:03 crc kubenswrapper[4762]: I1009 15:30:03.170615 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/74c6b48b-c330-41fa-b422-0f80defc94d2-kube-api-access-f89vr" (OuterVolumeSpecName: "kube-api-access-f89vr") pod "74c6b48b-c330-41fa-b422-0f80defc94d2" (UID: "74c6b48b-c330-41fa-b422-0f80defc94d2"). InnerVolumeSpecName "kube-api-access-f89vr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 15:30:03 crc kubenswrapper[4762]: I1009 15:30:03.170665 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/74c6b48b-c330-41fa-b422-0f80defc94d2-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "74c6b48b-c330-41fa-b422-0f80defc94d2" (UID: "74c6b48b-c330-41fa-b422-0f80defc94d2"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:30:03 crc kubenswrapper[4762]: I1009 15:30:03.267211 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f89vr\" (UniqueName: \"kubernetes.io/projected/74c6b48b-c330-41fa-b422-0f80defc94d2-kube-api-access-f89vr\") on node \"crc\" DevicePath \"\"" Oct 09 15:30:03 crc kubenswrapper[4762]: I1009 15:30:03.268265 4762 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/74c6b48b-c330-41fa-b422-0f80defc94d2-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 09 15:30:03 crc kubenswrapper[4762]: I1009 15:30:03.697144 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29333730-cdgz7" event={"ID":"74c6b48b-c330-41fa-b422-0f80defc94d2","Type":"ContainerDied","Data":"8e0e3ea75500cc590131c0b27ec335d6d85aa785aace1c5eeddaa84510273016"} Oct 09 15:30:03 crc kubenswrapper[4762]: I1009 15:30:03.697180 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8e0e3ea75500cc590131c0b27ec335d6d85aa785aace1c5eeddaa84510273016" Oct 09 15:30:03 crc kubenswrapper[4762]: I1009 15:30:03.697240 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29333730-cdgz7" Oct 09 15:30:04 crc kubenswrapper[4762]: I1009 15:30:04.169173 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29333685-flqvl"] Oct 09 15:30:04 crc kubenswrapper[4762]: I1009 15:30:04.179133 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29333685-flqvl"] Oct 09 15:30:04 crc kubenswrapper[4762]: I1009 15:30:04.973277 4762 scope.go:117] "RemoveContainer" containerID="da3bd6add501ed77419b80f68b04419592f8477497ce7284b22aa3129677ebf6" Oct 09 15:30:04 crc kubenswrapper[4762]: E1009 15:30:04.973578 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:30:04 crc kubenswrapper[4762]: I1009 15:30:04.982865 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d426004b-92b4-4193-bdf8-b40d9e48d018" path="/var/lib/kubelet/pods/d426004b-92b4-4193-bdf8-b40d9e48d018/volumes" Oct 09 15:30:15 crc kubenswrapper[4762]: I1009 15:30:15.416928 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-mv2z8"] Oct 09 15:30:15 crc kubenswrapper[4762]: E1009 15:30:15.418121 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="74c6b48b-c330-41fa-b422-0f80defc94d2" containerName="collect-profiles" Oct 09 15:30:15 crc kubenswrapper[4762]: I1009 15:30:15.418139 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="74c6b48b-c330-41fa-b422-0f80defc94d2" containerName="collect-profiles" Oct 09 15:30:15 crc kubenswrapper[4762]: I1009 15:30:15.418446 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="74c6b48b-c330-41fa-b422-0f80defc94d2" containerName="collect-profiles" Oct 09 15:30:15 crc kubenswrapper[4762]: I1009 15:30:15.424815 4762 util.go:30] "No sandbox 
for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mv2z8" Oct 09 15:30:15 crc kubenswrapper[4762]: I1009 15:30:15.459373 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-mv2z8"] Oct 09 15:30:15 crc kubenswrapper[4762]: I1009 15:30:15.548685 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gfthx\" (UniqueName: \"kubernetes.io/projected/b6733753-b079-4895-893e-043ac2af7296-kube-api-access-gfthx\") pod \"certified-operators-mv2z8\" (UID: \"b6733753-b079-4895-893e-043ac2af7296\") " pod="openshift-marketplace/certified-operators-mv2z8" Oct 09 15:30:15 crc kubenswrapper[4762]: I1009 15:30:15.548752 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b6733753-b079-4895-893e-043ac2af7296-utilities\") pod \"certified-operators-mv2z8\" (UID: \"b6733753-b079-4895-893e-043ac2af7296\") " pod="openshift-marketplace/certified-operators-mv2z8" Oct 09 15:30:15 crc kubenswrapper[4762]: I1009 15:30:15.548836 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b6733753-b079-4895-893e-043ac2af7296-catalog-content\") pod \"certified-operators-mv2z8\" (UID: \"b6733753-b079-4895-893e-043ac2af7296\") " pod="openshift-marketplace/certified-operators-mv2z8" Oct 09 15:30:15 crc kubenswrapper[4762]: I1009 15:30:15.650893 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gfthx\" (UniqueName: \"kubernetes.io/projected/b6733753-b079-4895-893e-043ac2af7296-kube-api-access-gfthx\") pod \"certified-operators-mv2z8\" (UID: \"b6733753-b079-4895-893e-043ac2af7296\") " pod="openshift-marketplace/certified-operators-mv2z8" Oct 09 15:30:15 crc kubenswrapper[4762]: I1009 15:30:15.651152 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b6733753-b079-4895-893e-043ac2af7296-utilities\") pod \"certified-operators-mv2z8\" (UID: \"b6733753-b079-4895-893e-043ac2af7296\") " pod="openshift-marketplace/certified-operators-mv2z8" Oct 09 15:30:15 crc kubenswrapper[4762]: I1009 15:30:15.651194 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b6733753-b079-4895-893e-043ac2af7296-catalog-content\") pod \"certified-operators-mv2z8\" (UID: \"b6733753-b079-4895-893e-043ac2af7296\") " pod="openshift-marketplace/certified-operators-mv2z8" Oct 09 15:30:15 crc kubenswrapper[4762]: I1009 15:30:15.651941 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b6733753-b079-4895-893e-043ac2af7296-catalog-content\") pod \"certified-operators-mv2z8\" (UID: \"b6733753-b079-4895-893e-043ac2af7296\") " pod="openshift-marketplace/certified-operators-mv2z8" Oct 09 15:30:15 crc kubenswrapper[4762]: I1009 15:30:15.652541 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b6733753-b079-4895-893e-043ac2af7296-utilities\") pod \"certified-operators-mv2z8\" (UID: \"b6733753-b079-4895-893e-043ac2af7296\") " pod="openshift-marketplace/certified-operators-mv2z8" Oct 09 15:30:15 crc kubenswrapper[4762]: I1009 15:30:15.673157 4762 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gfthx\" (UniqueName: \"kubernetes.io/projected/b6733753-b079-4895-893e-043ac2af7296-kube-api-access-gfthx\") pod \"certified-operators-mv2z8\" (UID: \"b6733753-b079-4895-893e-043ac2af7296\") " pod="openshift-marketplace/certified-operators-mv2z8" Oct 09 15:30:15 crc kubenswrapper[4762]: I1009 15:30:15.772273 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mv2z8" Oct 09 15:30:15 crc kubenswrapper[4762]: I1009 15:30:15.966368 4762 scope.go:117] "RemoveContainer" containerID="da3bd6add501ed77419b80f68b04419592f8477497ce7284b22aa3129677ebf6" Oct 09 15:30:15 crc kubenswrapper[4762]: E1009 15:30:15.966688 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:30:16 crc kubenswrapper[4762]: I1009 15:30:16.269274 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-mv2z8"] Oct 09 15:30:16 crc kubenswrapper[4762]: I1009 15:30:16.833136 4762 generic.go:334] "Generic (PLEG): container finished" podID="b6733753-b079-4895-893e-043ac2af7296" containerID="c82972ff702ecae65d962fd3eeb9e82728563c9dddeaea2b25a094f58e6dd569" exitCode=0 Oct 09 15:30:16 crc kubenswrapper[4762]: I1009 15:30:16.833205 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mv2z8" event={"ID":"b6733753-b079-4895-893e-043ac2af7296","Type":"ContainerDied","Data":"c82972ff702ecae65d962fd3eeb9e82728563c9dddeaea2b25a094f58e6dd569"} Oct 09 15:30:16 crc kubenswrapper[4762]: I1009 15:30:16.833414 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mv2z8" event={"ID":"b6733753-b079-4895-893e-043ac2af7296","Type":"ContainerStarted","Data":"928fa1091ec9a258ee582e35f27823d5470128f77504c164a17c79dd3a22d4b3"} Oct 09 15:30:18 crc kubenswrapper[4762]: I1009 15:30:18.853131 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mv2z8" event={"ID":"b6733753-b079-4895-893e-043ac2af7296","Type":"ContainerStarted","Data":"b9a2a390acb9cc99b8e84f0f06643fbe425e92fe8238a9b27d62dd0d8f9ad93a"} Oct 09 15:30:19 crc kubenswrapper[4762]: I1009 15:30:19.876448 4762 generic.go:334] "Generic (PLEG): container finished" podID="b6733753-b079-4895-893e-043ac2af7296" containerID="b9a2a390acb9cc99b8e84f0f06643fbe425e92fe8238a9b27d62dd0d8f9ad93a" exitCode=0 Oct 09 15:30:19 crc kubenswrapper[4762]: I1009 15:30:19.877547 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mv2z8" event={"ID":"b6733753-b079-4895-893e-043ac2af7296","Type":"ContainerDied","Data":"b9a2a390acb9cc99b8e84f0f06643fbe425e92fe8238a9b27d62dd0d8f9ad93a"} Oct 09 15:30:20 crc kubenswrapper[4762]: I1009 15:30:20.891662 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mv2z8" event={"ID":"b6733753-b079-4895-893e-043ac2af7296","Type":"ContainerStarted","Data":"018af8841887eb00bddd02007167dbb812c2ccc01906d03debd290d07f71fdad"} Oct 09 15:30:20 crc kubenswrapper[4762]: I1009 
15:30:20.915255 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-mv2z8" podStartSLOduration=2.374623904 podStartE2EDuration="5.91523349s" podCreationTimestamp="2025-10-09 15:30:15 +0000 UTC" firstStartedPulling="2025-10-09 15:30:16.835082767 +0000 UTC m=+7492.608873806" lastFinishedPulling="2025-10-09 15:30:20.375692353 +0000 UTC m=+7496.149483392" observedRunningTime="2025-10-09 15:30:20.910102406 +0000 UTC m=+7496.683893455" watchObservedRunningTime="2025-10-09 15:30:20.91523349 +0000 UTC m=+7496.689024529" Oct 09 15:30:25 crc kubenswrapper[4762]: I1009 15:30:25.772626 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-mv2z8" Oct 09 15:30:25 crc kubenswrapper[4762]: I1009 15:30:25.773255 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-mv2z8" Oct 09 15:30:25 crc kubenswrapper[4762]: I1009 15:30:25.823759 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-mv2z8" Oct 09 15:30:25 crc kubenswrapper[4762]: I1009 15:30:25.982490 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-mv2z8" Oct 09 15:30:26 crc kubenswrapper[4762]: I1009 15:30:26.056677 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-mv2z8"] Oct 09 15:30:26 crc kubenswrapper[4762]: I1009 15:30:26.965919 4762 scope.go:117] "RemoveContainer" containerID="da3bd6add501ed77419b80f68b04419592f8477497ce7284b22aa3129677ebf6" Oct 09 15:30:26 crc kubenswrapper[4762]: E1009 15:30:26.966442 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:30:27 crc kubenswrapper[4762]: I1009 15:30:27.948866 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-mv2z8" podUID="b6733753-b079-4895-893e-043ac2af7296" containerName="registry-server" containerID="cri-o://018af8841887eb00bddd02007167dbb812c2ccc01906d03debd290d07f71fdad" gracePeriod=2 Oct 09 15:30:28 crc kubenswrapper[4762]: I1009 15:30:28.442752 4762 util.go:48] "No ready sandbox for pod can be found. 
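The pod_startup_latency_tracker entry above encodes a checkable relationship: podStartE2EDuration is watchObservedRunningTime minus podCreationTimestamp, and podStartSLOduration is that figure minus the image-pull window (lastFinishedPulling minus firstStartedPulling). Recomputing with the exact timestamps from this certified-operators-mv2z8 entry yields 5.91523349s and 2.374623904s, matching the logged values:

    // slo_sketch.go -- recomputes the startup-latency fields from the entry
    // above. All timestamps are copied verbatim from the log line.
    package main

    import (
        "fmt"
        "time"
    )

    func main() {
        const layout = "2006-01-02 15:04:05.999999999 -0700 MST"
        parse := func(s string) time.Time {
            t, err := time.Parse(layout, s)
            if err != nil {
                panic(err)
            }
            return t
        }
        created := parse("2025-10-09 15:30:15 +0000 UTC")             // podCreationTimestamp
        firstPull := parse("2025-10-09 15:30:16.835082767 +0000 UTC") // firstStartedPulling
        lastPull := parse("2025-10-09 15:30:20.375692353 +0000 UTC")  // lastFinishedPulling
        running := parse("2025-10-09 15:30:20.91523349 +0000 UTC")    // watchObservedRunningTime

        e2e := running.Sub(created)          // 5.91523349s == podStartE2EDuration
        slo := e2e - lastPull.Sub(firstPull) // 2.374623904s == podStartSLOduration
        fmt.Println(e2e, slo)
    }

The same identity holds for the redhat-operators-c6b6b entry earlier in this section (10.195216858s end-to-end minus a 7.58675846s pull window gives the logged 2.608458398s).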
Need to start a new one" pod="openshift-marketplace/certified-operators-mv2z8" Oct 09 15:30:28 crc kubenswrapper[4762]: I1009 15:30:28.532087 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b6733753-b079-4895-893e-043ac2af7296-utilities\") pod \"b6733753-b079-4895-893e-043ac2af7296\" (UID: \"b6733753-b079-4895-893e-043ac2af7296\") " Oct 09 15:30:28 crc kubenswrapper[4762]: I1009 15:30:28.532226 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b6733753-b079-4895-893e-043ac2af7296-catalog-content\") pod \"b6733753-b079-4895-893e-043ac2af7296\" (UID: \"b6733753-b079-4895-893e-043ac2af7296\") " Oct 09 15:30:28 crc kubenswrapper[4762]: I1009 15:30:28.532320 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gfthx\" (UniqueName: \"kubernetes.io/projected/b6733753-b079-4895-893e-043ac2af7296-kube-api-access-gfthx\") pod \"b6733753-b079-4895-893e-043ac2af7296\" (UID: \"b6733753-b079-4895-893e-043ac2af7296\") " Oct 09 15:30:28 crc kubenswrapper[4762]: I1009 15:30:28.533106 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b6733753-b079-4895-893e-043ac2af7296-utilities" (OuterVolumeSpecName: "utilities") pod "b6733753-b079-4895-893e-043ac2af7296" (UID: "b6733753-b079-4895-893e-043ac2af7296"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 15:30:28 crc kubenswrapper[4762]: I1009 15:30:28.537806 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6733753-b079-4895-893e-043ac2af7296-kube-api-access-gfthx" (OuterVolumeSpecName: "kube-api-access-gfthx") pod "b6733753-b079-4895-893e-043ac2af7296" (UID: "b6733753-b079-4895-893e-043ac2af7296"). InnerVolumeSpecName "kube-api-access-gfthx". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 15:30:28 crc kubenswrapper[4762]: I1009 15:30:28.582797 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b6733753-b079-4895-893e-043ac2af7296-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b6733753-b079-4895-893e-043ac2af7296" (UID: "b6733753-b079-4895-893e-043ac2af7296"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 15:30:28 crc kubenswrapper[4762]: I1009 15:30:28.634423 4762 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b6733753-b079-4895-893e-043ac2af7296-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 15:30:28 crc kubenswrapper[4762]: I1009 15:30:28.634468 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gfthx\" (UniqueName: \"kubernetes.io/projected/b6733753-b079-4895-893e-043ac2af7296-kube-api-access-gfthx\") on node \"crc\" DevicePath \"\"" Oct 09 15:30:28 crc kubenswrapper[4762]: I1009 15:30:28.634485 4762 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b6733753-b079-4895-893e-043ac2af7296-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 15:30:28 crc kubenswrapper[4762]: I1009 15:30:28.960703 4762 generic.go:334] "Generic (PLEG): container finished" podID="b6733753-b079-4895-893e-043ac2af7296" containerID="018af8841887eb00bddd02007167dbb812c2ccc01906d03debd290d07f71fdad" exitCode=0 Oct 09 15:30:28 crc kubenswrapper[4762]: I1009 15:30:28.960720 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mv2z8" Oct 09 15:30:28 crc kubenswrapper[4762]: I1009 15:30:28.960749 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mv2z8" event={"ID":"b6733753-b079-4895-893e-043ac2af7296","Type":"ContainerDied","Data":"018af8841887eb00bddd02007167dbb812c2ccc01906d03debd290d07f71fdad"} Oct 09 15:30:28 crc kubenswrapper[4762]: I1009 15:30:28.961533 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mv2z8" event={"ID":"b6733753-b079-4895-893e-043ac2af7296","Type":"ContainerDied","Data":"928fa1091ec9a258ee582e35f27823d5470128f77504c164a17c79dd3a22d4b3"} Oct 09 15:30:28 crc kubenswrapper[4762]: I1009 15:30:28.961672 4762 scope.go:117] "RemoveContainer" containerID="018af8841887eb00bddd02007167dbb812c2ccc01906d03debd290d07f71fdad" Oct 09 15:30:29 crc kubenswrapper[4762]: I1009 15:30:29.004110 4762 scope.go:117] "RemoveContainer" containerID="b9a2a390acb9cc99b8e84f0f06643fbe425e92fe8238a9b27d62dd0d8f9ad93a" Oct 09 15:30:29 crc kubenswrapper[4762]: I1009 15:30:29.006661 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-mv2z8"] Oct 09 15:30:29 crc kubenswrapper[4762]: I1009 15:30:29.015488 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-mv2z8"] Oct 09 15:30:29 crc kubenswrapper[4762]: I1009 15:30:29.037509 4762 scope.go:117] "RemoveContainer" containerID="c82972ff702ecae65d962fd3eeb9e82728563c9dddeaea2b25a094f58e6dd569" Oct 09 15:30:29 crc kubenswrapper[4762]: I1009 15:30:29.080190 4762 scope.go:117] "RemoveContainer" containerID="018af8841887eb00bddd02007167dbb812c2ccc01906d03debd290d07f71fdad" Oct 09 15:30:29 crc kubenswrapper[4762]: E1009 15:30:29.080717 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"018af8841887eb00bddd02007167dbb812c2ccc01906d03debd290d07f71fdad\": container with ID starting with 018af8841887eb00bddd02007167dbb812c2ccc01906d03debd290d07f71fdad not found: ID does not exist" containerID="018af8841887eb00bddd02007167dbb812c2ccc01906d03debd290d07f71fdad" Oct 09 15:30:29 crc kubenswrapper[4762]: I1009 15:30:29.080753 
4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"018af8841887eb00bddd02007167dbb812c2ccc01906d03debd290d07f71fdad"} err="failed to get container status \"018af8841887eb00bddd02007167dbb812c2ccc01906d03debd290d07f71fdad\": rpc error: code = NotFound desc = could not find container \"018af8841887eb00bddd02007167dbb812c2ccc01906d03debd290d07f71fdad\": container with ID starting with 018af8841887eb00bddd02007167dbb812c2ccc01906d03debd290d07f71fdad not found: ID does not exist" Oct 09 15:30:29 crc kubenswrapper[4762]: I1009 15:30:29.080775 4762 scope.go:117] "RemoveContainer" containerID="b9a2a390acb9cc99b8e84f0f06643fbe425e92fe8238a9b27d62dd0d8f9ad93a" Oct 09 15:30:29 crc kubenswrapper[4762]: E1009 15:30:29.081133 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b9a2a390acb9cc99b8e84f0f06643fbe425e92fe8238a9b27d62dd0d8f9ad93a\": container with ID starting with b9a2a390acb9cc99b8e84f0f06643fbe425e92fe8238a9b27d62dd0d8f9ad93a not found: ID does not exist" containerID="b9a2a390acb9cc99b8e84f0f06643fbe425e92fe8238a9b27d62dd0d8f9ad93a" Oct 09 15:30:29 crc kubenswrapper[4762]: I1009 15:30:29.081162 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b9a2a390acb9cc99b8e84f0f06643fbe425e92fe8238a9b27d62dd0d8f9ad93a"} err="failed to get container status \"b9a2a390acb9cc99b8e84f0f06643fbe425e92fe8238a9b27d62dd0d8f9ad93a\": rpc error: code = NotFound desc = could not find container \"b9a2a390acb9cc99b8e84f0f06643fbe425e92fe8238a9b27d62dd0d8f9ad93a\": container with ID starting with b9a2a390acb9cc99b8e84f0f06643fbe425e92fe8238a9b27d62dd0d8f9ad93a not found: ID does not exist" Oct 09 15:30:29 crc kubenswrapper[4762]: I1009 15:30:29.081180 4762 scope.go:117] "RemoveContainer" containerID="c82972ff702ecae65d962fd3eeb9e82728563c9dddeaea2b25a094f58e6dd569" Oct 09 15:30:29 crc kubenswrapper[4762]: E1009 15:30:29.081527 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c82972ff702ecae65d962fd3eeb9e82728563c9dddeaea2b25a094f58e6dd569\": container with ID starting with c82972ff702ecae65d962fd3eeb9e82728563c9dddeaea2b25a094f58e6dd569 not found: ID does not exist" containerID="c82972ff702ecae65d962fd3eeb9e82728563c9dddeaea2b25a094f58e6dd569" Oct 09 15:30:29 crc kubenswrapper[4762]: I1009 15:30:29.081559 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c82972ff702ecae65d962fd3eeb9e82728563c9dddeaea2b25a094f58e6dd569"} err="failed to get container status \"c82972ff702ecae65d962fd3eeb9e82728563c9dddeaea2b25a094f58e6dd569\": rpc error: code = NotFound desc = could not find container \"c82972ff702ecae65d962fd3eeb9e82728563c9dddeaea2b25a094f58e6dd569\": container with ID starting with c82972ff702ecae65d962fd3eeb9e82728563c9dddeaea2b25a094f58e6dd569 not found: ID does not exist" Oct 09 15:30:30 crc kubenswrapper[4762]: I1009 15:30:30.978576 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6733753-b079-4895-893e-043ac2af7296" path="/var/lib/kubelet/pods/b6733753-b079-4895-893e-043ac2af7296/volumes" Oct 09 15:30:38 crc kubenswrapper[4762]: I1009 15:30:38.451375 4762 scope.go:117] "RemoveContainer" containerID="c4c3f741550a42f3ac98a464227145e1d291dd39ee396e1157df5a809a7f4b36" Oct 09 15:30:40 crc kubenswrapper[4762]: I1009 15:30:40.965517 4762 scope.go:117] "RemoveContainer" 
containerID="da3bd6add501ed77419b80f68b04419592f8477497ce7284b22aa3129677ebf6" Oct 09 15:30:40 crc kubenswrapper[4762]: E1009 15:30:40.966437 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:30:55 crc kubenswrapper[4762]: I1009 15:30:55.965956 4762 scope.go:117] "RemoveContainer" containerID="da3bd6add501ed77419b80f68b04419592f8477497ce7284b22aa3129677ebf6" Oct 09 15:30:55 crc kubenswrapper[4762]: E1009 15:30:55.966912 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:31:08 crc kubenswrapper[4762]: I1009 15:31:08.967061 4762 scope.go:117] "RemoveContainer" containerID="da3bd6add501ed77419b80f68b04419592f8477497ce7284b22aa3129677ebf6" Oct 09 15:31:08 crc kubenswrapper[4762]: E1009 15:31:08.967943 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:31:12 crc kubenswrapper[4762]: I1009 15:31:12.832537 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-mx6vn"] Oct 09 15:31:12 crc kubenswrapper[4762]: E1009 15:31:12.834368 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b6733753-b079-4895-893e-043ac2af7296" containerName="registry-server" Oct 09 15:31:12 crc kubenswrapper[4762]: I1009 15:31:12.834451 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="b6733753-b079-4895-893e-043ac2af7296" containerName="registry-server" Oct 09 15:31:12 crc kubenswrapper[4762]: E1009 15:31:12.834518 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b6733753-b079-4895-893e-043ac2af7296" containerName="extract-utilities" Oct 09 15:31:12 crc kubenswrapper[4762]: I1009 15:31:12.834581 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="b6733753-b079-4895-893e-043ac2af7296" containerName="extract-utilities" Oct 09 15:31:12 crc kubenswrapper[4762]: E1009 15:31:12.834673 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b6733753-b079-4895-893e-043ac2af7296" containerName="extract-content" Oct 09 15:31:12 crc kubenswrapper[4762]: I1009 15:31:12.834735 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="b6733753-b079-4895-893e-043ac2af7296" containerName="extract-content" Oct 09 15:31:12 crc kubenswrapper[4762]: I1009 15:31:12.835059 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="b6733753-b079-4895-893e-043ac2af7296" containerName="registry-server" Oct 09 15:31:12 crc kubenswrapper[4762]: I1009 15:31:12.836926 4762 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-mx6vn" Oct 09 15:31:12 crc kubenswrapper[4762]: I1009 15:31:12.860320 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-mx6vn"] Oct 09 15:31:12 crc kubenswrapper[4762]: I1009 15:31:12.992197 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dth7j\" (UniqueName: \"kubernetes.io/projected/284e37a9-b998-4b1e-8089-ae2c7dbf7e62-kube-api-access-dth7j\") pod \"community-operators-mx6vn\" (UID: \"284e37a9-b998-4b1e-8089-ae2c7dbf7e62\") " pod="openshift-marketplace/community-operators-mx6vn" Oct 09 15:31:12 crc kubenswrapper[4762]: I1009 15:31:12.992669 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/284e37a9-b998-4b1e-8089-ae2c7dbf7e62-utilities\") pod \"community-operators-mx6vn\" (UID: \"284e37a9-b998-4b1e-8089-ae2c7dbf7e62\") " pod="openshift-marketplace/community-operators-mx6vn" Oct 09 15:31:12 crc kubenswrapper[4762]: I1009 15:31:12.992768 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/284e37a9-b998-4b1e-8089-ae2c7dbf7e62-catalog-content\") pod \"community-operators-mx6vn\" (UID: \"284e37a9-b998-4b1e-8089-ae2c7dbf7e62\") " pod="openshift-marketplace/community-operators-mx6vn" Oct 09 15:31:13 crc kubenswrapper[4762]: I1009 15:31:13.094610 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/284e37a9-b998-4b1e-8089-ae2c7dbf7e62-utilities\") pod \"community-operators-mx6vn\" (UID: \"284e37a9-b998-4b1e-8089-ae2c7dbf7e62\") " pod="openshift-marketplace/community-operators-mx6vn" Oct 09 15:31:13 crc kubenswrapper[4762]: I1009 15:31:13.094687 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/284e37a9-b998-4b1e-8089-ae2c7dbf7e62-catalog-content\") pod \"community-operators-mx6vn\" (UID: \"284e37a9-b998-4b1e-8089-ae2c7dbf7e62\") " pod="openshift-marketplace/community-operators-mx6vn" Oct 09 15:31:13 crc kubenswrapper[4762]: I1009 15:31:13.094782 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dth7j\" (UniqueName: \"kubernetes.io/projected/284e37a9-b998-4b1e-8089-ae2c7dbf7e62-kube-api-access-dth7j\") pod \"community-operators-mx6vn\" (UID: \"284e37a9-b998-4b1e-8089-ae2c7dbf7e62\") " pod="openshift-marketplace/community-operators-mx6vn" Oct 09 15:31:13 crc kubenswrapper[4762]: I1009 15:31:13.095624 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/284e37a9-b998-4b1e-8089-ae2c7dbf7e62-catalog-content\") pod \"community-operators-mx6vn\" (UID: \"284e37a9-b998-4b1e-8089-ae2c7dbf7e62\") " pod="openshift-marketplace/community-operators-mx6vn" Oct 09 15:31:13 crc kubenswrapper[4762]: I1009 15:31:13.095792 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/284e37a9-b998-4b1e-8089-ae2c7dbf7e62-utilities\") pod \"community-operators-mx6vn\" (UID: \"284e37a9-b998-4b1e-8089-ae2c7dbf7e62\") " pod="openshift-marketplace/community-operators-mx6vn" Oct 09 15:31:13 crc kubenswrapper[4762]: I1009 
15:31:13.122286 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dth7j\" (UniqueName: \"kubernetes.io/projected/284e37a9-b998-4b1e-8089-ae2c7dbf7e62-kube-api-access-dth7j\") pod \"community-operators-mx6vn\" (UID: \"284e37a9-b998-4b1e-8089-ae2c7dbf7e62\") " pod="openshift-marketplace/community-operators-mx6vn" Oct 09 15:31:13 crc kubenswrapper[4762]: I1009 15:31:13.168791 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-mx6vn" Oct 09 15:31:13 crc kubenswrapper[4762]: I1009 15:31:13.757850 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-mx6vn"] Oct 09 15:31:14 crc kubenswrapper[4762]: I1009 15:31:14.409502 4762 generic.go:334] "Generic (PLEG): container finished" podID="284e37a9-b998-4b1e-8089-ae2c7dbf7e62" containerID="841d9312fac3b9bd7c4b174b54135cd85d0d952e597a104391b7c06add26b720" exitCode=0 Oct 09 15:31:14 crc kubenswrapper[4762]: I1009 15:31:14.409627 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mx6vn" event={"ID":"284e37a9-b998-4b1e-8089-ae2c7dbf7e62","Type":"ContainerDied","Data":"841d9312fac3b9bd7c4b174b54135cd85d0d952e597a104391b7c06add26b720"} Oct 09 15:31:14 crc kubenswrapper[4762]: I1009 15:31:14.409803 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mx6vn" event={"ID":"284e37a9-b998-4b1e-8089-ae2c7dbf7e62","Type":"ContainerStarted","Data":"5608ee832743191532b1c4ee652568c86498462ac187799fa2f3dbb448cff8f8"} Oct 09 15:31:15 crc kubenswrapper[4762]: I1009 15:31:15.422105 4762 generic.go:334] "Generic (PLEG): container finished" podID="53168398-1447-4747-9b52-02f61f828508" containerID="5cd31fe1a7c2f00c3baedff8ea5d8efe09a0ed8e9d6f7757df50e2fba494f933" exitCode=0 Oct 09 15:31:15 crc kubenswrapper[4762]: I1009 15:31:15.422209 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-openstack-openstack-cell1-4dtq9" event={"ID":"53168398-1447-4747-9b52-02f61f828508","Type":"ContainerDied","Data":"5cd31fe1a7c2f00c3baedff8ea5d8efe09a0ed8e9d6f7757df50e2fba494f933"} Oct 09 15:31:16 crc kubenswrapper[4762]: I1009 15:31:16.432729 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mx6vn" event={"ID":"284e37a9-b998-4b1e-8089-ae2c7dbf7e62","Type":"ContainerStarted","Data":"41b6d822ee1550d5d3c670874b1e672ca0000c548775d8ba5d74d1df70629d45"} Oct 09 15:31:16 crc kubenswrapper[4762]: I1009 15:31:16.919967 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/download-cache-openstack-openstack-cell1-4dtq9" Oct 09 15:31:17 crc kubenswrapper[4762]: I1009 15:31:17.080691 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/53168398-1447-4747-9b52-02f61f828508-inventory\") pod \"53168398-1447-4747-9b52-02f61f828508\" (UID: \"53168398-1447-4747-9b52-02f61f828508\") " Oct 09 15:31:17 crc kubenswrapper[4762]: I1009 15:31:17.080740 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j2gbq\" (UniqueName: \"kubernetes.io/projected/53168398-1447-4747-9b52-02f61f828508-kube-api-access-j2gbq\") pod \"53168398-1447-4747-9b52-02f61f828508\" (UID: \"53168398-1447-4747-9b52-02f61f828508\") " Oct 09 15:31:17 crc kubenswrapper[4762]: I1009 15:31:17.080758 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/53168398-1447-4747-9b52-02f61f828508-ceph\") pod \"53168398-1447-4747-9b52-02f61f828508\" (UID: \"53168398-1447-4747-9b52-02f61f828508\") " Oct 09 15:31:17 crc kubenswrapper[4762]: I1009 15:31:17.080862 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/53168398-1447-4747-9b52-02f61f828508-ssh-key\") pod \"53168398-1447-4747-9b52-02f61f828508\" (UID: \"53168398-1447-4747-9b52-02f61f828508\") " Oct 09 15:31:17 crc kubenswrapper[4762]: I1009 15:31:17.088348 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/53168398-1447-4747-9b52-02f61f828508-ceph" (OuterVolumeSpecName: "ceph") pod "53168398-1447-4747-9b52-02f61f828508" (UID: "53168398-1447-4747-9b52-02f61f828508"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:31:17 crc kubenswrapper[4762]: I1009 15:31:17.091098 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/53168398-1447-4747-9b52-02f61f828508-kube-api-access-j2gbq" (OuterVolumeSpecName: "kube-api-access-j2gbq") pod "53168398-1447-4747-9b52-02f61f828508" (UID: "53168398-1447-4747-9b52-02f61f828508"). InnerVolumeSpecName "kube-api-access-j2gbq". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 15:31:17 crc kubenswrapper[4762]: I1009 15:31:17.116820 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/53168398-1447-4747-9b52-02f61f828508-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "53168398-1447-4747-9b52-02f61f828508" (UID: "53168398-1447-4747-9b52-02f61f828508"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:31:17 crc kubenswrapper[4762]: I1009 15:31:17.129537 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/53168398-1447-4747-9b52-02f61f828508-inventory" (OuterVolumeSpecName: "inventory") pod "53168398-1447-4747-9b52-02f61f828508" (UID: "53168398-1447-4747-9b52-02f61f828508"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:31:17 crc kubenswrapper[4762]: I1009 15:31:17.183707 4762 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/53168398-1447-4747-9b52-02f61f828508-inventory\") on node \"crc\" DevicePath \"\"" Oct 09 15:31:17 crc kubenswrapper[4762]: I1009 15:31:17.183755 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j2gbq\" (UniqueName: \"kubernetes.io/projected/53168398-1447-4747-9b52-02f61f828508-kube-api-access-j2gbq\") on node \"crc\" DevicePath \"\"" Oct 09 15:31:17 crc kubenswrapper[4762]: I1009 15:31:17.183769 4762 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/53168398-1447-4747-9b52-02f61f828508-ceph\") on node \"crc\" DevicePath \"\"" Oct 09 15:31:17 crc kubenswrapper[4762]: I1009 15:31:17.183780 4762 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/53168398-1447-4747-9b52-02f61f828508-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 09 15:31:17 crc kubenswrapper[4762]: I1009 15:31:17.442301 4762 generic.go:334] "Generic (PLEG): container finished" podID="284e37a9-b998-4b1e-8089-ae2c7dbf7e62" containerID="41b6d822ee1550d5d3c670874b1e672ca0000c548775d8ba5d74d1df70629d45" exitCode=0 Oct 09 15:31:17 crc kubenswrapper[4762]: I1009 15:31:17.442379 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mx6vn" event={"ID":"284e37a9-b998-4b1e-8089-ae2c7dbf7e62","Type":"ContainerDied","Data":"41b6d822ee1550d5d3c670874b1e672ca0000c548775d8ba5d74d1df70629d45"} Oct 09 15:31:17 crc kubenswrapper[4762]: I1009 15:31:17.443747 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-openstack-openstack-cell1-4dtq9" event={"ID":"53168398-1447-4747-9b52-02f61f828508","Type":"ContainerDied","Data":"a77fe79fea099f91492d2ae86377632b2e0d7012b5e1b74d05425cc55681fbdf"} Oct 09 15:31:17 crc kubenswrapper[4762]: I1009 15:31:17.443786 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a77fe79fea099f91492d2ae86377632b2e0d7012b5e1b74d05425cc55681fbdf" Oct 09 15:31:17 crc kubenswrapper[4762]: I1009 15:31:17.443826 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-openstack-openstack-cell1-4dtq9" Oct 09 15:31:17 crc kubenswrapper[4762]: I1009 15:31:17.530847 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-network-openstack-openstack-cell1-4mhld"] Oct 09 15:31:17 crc kubenswrapper[4762]: E1009 15:31:17.531342 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="53168398-1447-4747-9b52-02f61f828508" containerName="download-cache-openstack-openstack-cell1" Oct 09 15:31:17 crc kubenswrapper[4762]: I1009 15:31:17.531361 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="53168398-1447-4747-9b52-02f61f828508" containerName="download-cache-openstack-openstack-cell1" Oct 09 15:31:17 crc kubenswrapper[4762]: I1009 15:31:17.531659 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="53168398-1447-4747-9b52-02f61f828508" containerName="download-cache-openstack-openstack-cell1" Oct 09 15:31:17 crc kubenswrapper[4762]: I1009 15:31:17.532549 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-openstack-openstack-cell1-4mhld" Oct 09 15:31:17 crc kubenswrapper[4762]: I1009 15:31:17.534675 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 09 15:31:17 crc kubenswrapper[4762]: I1009 15:31:17.534920 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-whcgt" Oct 09 15:31:17 crc kubenswrapper[4762]: I1009 15:31:17.536659 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Oct 09 15:31:17 crc kubenswrapper[4762]: I1009 15:31:17.541938 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Oct 09 15:31:17 crc kubenswrapper[4762]: I1009 15:31:17.566210 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-openstack-openstack-cell1-4mhld"] Oct 09 15:31:17 crc kubenswrapper[4762]: I1009 15:31:17.694771 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3e25d87c-a7f5-4c9d-b1e4-552517e15174-ssh-key\") pod \"configure-network-openstack-openstack-cell1-4mhld\" (UID: \"3e25d87c-a7f5-4c9d-b1e4-552517e15174\") " pod="openstack/configure-network-openstack-openstack-cell1-4mhld" Oct 09 15:31:17 crc kubenswrapper[4762]: I1009 15:31:17.694952 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j7kkt\" (UniqueName: \"kubernetes.io/projected/3e25d87c-a7f5-4c9d-b1e4-552517e15174-kube-api-access-j7kkt\") pod \"configure-network-openstack-openstack-cell1-4mhld\" (UID: \"3e25d87c-a7f5-4c9d-b1e4-552517e15174\") " pod="openstack/configure-network-openstack-openstack-cell1-4mhld" Oct 09 15:31:17 crc kubenswrapper[4762]: I1009 15:31:17.695185 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3e25d87c-a7f5-4c9d-b1e4-552517e15174-inventory\") pod \"configure-network-openstack-openstack-cell1-4mhld\" (UID: \"3e25d87c-a7f5-4c9d-b1e4-552517e15174\") " pod="openstack/configure-network-openstack-openstack-cell1-4mhld" Oct 09 15:31:17 crc kubenswrapper[4762]: I1009 15:31:17.695246 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/3e25d87c-a7f5-4c9d-b1e4-552517e15174-ceph\") pod \"configure-network-openstack-openstack-cell1-4mhld\" (UID: \"3e25d87c-a7f5-4c9d-b1e4-552517e15174\") " pod="openstack/configure-network-openstack-openstack-cell1-4mhld" Oct 09 15:31:17 crc kubenswrapper[4762]: I1009 15:31:17.797486 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3e25d87c-a7f5-4c9d-b1e4-552517e15174-inventory\") pod \"configure-network-openstack-openstack-cell1-4mhld\" (UID: \"3e25d87c-a7f5-4c9d-b1e4-552517e15174\") " pod="openstack/configure-network-openstack-openstack-cell1-4mhld" Oct 09 15:31:17 crc kubenswrapper[4762]: I1009 15:31:17.797532 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/3e25d87c-a7f5-4c9d-b1e4-552517e15174-ceph\") pod \"configure-network-openstack-openstack-cell1-4mhld\" (UID: \"3e25d87c-a7f5-4c9d-b1e4-552517e15174\") " 
pod="openstack/configure-network-openstack-openstack-cell1-4mhld" Oct 09 15:31:17 crc kubenswrapper[4762]: I1009 15:31:17.797566 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3e25d87c-a7f5-4c9d-b1e4-552517e15174-ssh-key\") pod \"configure-network-openstack-openstack-cell1-4mhld\" (UID: \"3e25d87c-a7f5-4c9d-b1e4-552517e15174\") " pod="openstack/configure-network-openstack-openstack-cell1-4mhld" Oct 09 15:31:17 crc kubenswrapper[4762]: I1009 15:31:17.797723 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j7kkt\" (UniqueName: \"kubernetes.io/projected/3e25d87c-a7f5-4c9d-b1e4-552517e15174-kube-api-access-j7kkt\") pod \"configure-network-openstack-openstack-cell1-4mhld\" (UID: \"3e25d87c-a7f5-4c9d-b1e4-552517e15174\") " pod="openstack/configure-network-openstack-openstack-cell1-4mhld" Oct 09 15:31:17 crc kubenswrapper[4762]: I1009 15:31:17.803028 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3e25d87c-a7f5-4c9d-b1e4-552517e15174-ssh-key\") pod \"configure-network-openstack-openstack-cell1-4mhld\" (UID: \"3e25d87c-a7f5-4c9d-b1e4-552517e15174\") " pod="openstack/configure-network-openstack-openstack-cell1-4mhld" Oct 09 15:31:17 crc kubenswrapper[4762]: I1009 15:31:17.814240 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/3e25d87c-a7f5-4c9d-b1e4-552517e15174-ceph\") pod \"configure-network-openstack-openstack-cell1-4mhld\" (UID: \"3e25d87c-a7f5-4c9d-b1e4-552517e15174\") " pod="openstack/configure-network-openstack-openstack-cell1-4mhld" Oct 09 15:31:17 crc kubenswrapper[4762]: I1009 15:31:17.814536 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j7kkt\" (UniqueName: \"kubernetes.io/projected/3e25d87c-a7f5-4c9d-b1e4-552517e15174-kube-api-access-j7kkt\") pod \"configure-network-openstack-openstack-cell1-4mhld\" (UID: \"3e25d87c-a7f5-4c9d-b1e4-552517e15174\") " pod="openstack/configure-network-openstack-openstack-cell1-4mhld" Oct 09 15:31:17 crc kubenswrapper[4762]: I1009 15:31:17.821437 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3e25d87c-a7f5-4c9d-b1e4-552517e15174-inventory\") pod \"configure-network-openstack-openstack-cell1-4mhld\" (UID: \"3e25d87c-a7f5-4c9d-b1e4-552517e15174\") " pod="openstack/configure-network-openstack-openstack-cell1-4mhld" Oct 09 15:31:17 crc kubenswrapper[4762]: I1009 15:31:17.859040 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-openstack-openstack-cell1-4mhld" Oct 09 15:31:18 crc kubenswrapper[4762]: I1009 15:31:18.391566 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-openstack-openstack-cell1-4mhld"] Oct 09 15:31:18 crc kubenswrapper[4762]: I1009 15:31:18.456553 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mx6vn" event={"ID":"284e37a9-b998-4b1e-8089-ae2c7dbf7e62","Type":"ContainerStarted","Data":"ac692e17833603e4598066991c448fe9b137c180ca0ff9c47d069d0724d7da93"} Oct 09 15:31:18 crc kubenswrapper[4762]: I1009 15:31:18.458407 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-openstack-openstack-cell1-4mhld" event={"ID":"3e25d87c-a7f5-4c9d-b1e4-552517e15174","Type":"ContainerStarted","Data":"1d572ff1bbe5aebef53c47a792159b717de2074a213d9e46db46137a1a3c5cca"} Oct 09 15:31:18 crc kubenswrapper[4762]: I1009 15:31:18.480179 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-mx6vn" podStartSLOduration=2.782598623 podStartE2EDuration="6.480161813s" podCreationTimestamp="2025-10-09 15:31:12 +0000 UTC" firstStartedPulling="2025-10-09 15:31:14.411768867 +0000 UTC m=+7550.185559916" lastFinishedPulling="2025-10-09 15:31:18.109332067 +0000 UTC m=+7553.883123106" observedRunningTime="2025-10-09 15:31:18.475294785 +0000 UTC m=+7554.249085824" watchObservedRunningTime="2025-10-09 15:31:18.480161813 +0000 UTC m=+7554.253952852" Oct 09 15:31:19 crc kubenswrapper[4762]: I1009 15:31:19.467909 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-openstack-openstack-cell1-4mhld" event={"ID":"3e25d87c-a7f5-4c9d-b1e4-552517e15174","Type":"ContainerStarted","Data":"06b168a48eb1d7c4edf1919ce9b8e963251047af03ec6e6df2be104b70dc5374"} Oct 09 15:31:19 crc kubenswrapper[4762]: I1009 15:31:19.492055 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-network-openstack-openstack-cell1-4mhld" podStartSLOduration=1.9393191669999998 podStartE2EDuration="2.492034999s" podCreationTimestamp="2025-10-09 15:31:17 +0000 UTC" firstStartedPulling="2025-10-09 15:31:18.402368408 +0000 UTC m=+7554.176159447" lastFinishedPulling="2025-10-09 15:31:18.95508425 +0000 UTC m=+7554.728875279" observedRunningTime="2025-10-09 15:31:19.483923578 +0000 UTC m=+7555.257714617" watchObservedRunningTime="2025-10-09 15:31:19.492034999 +0000 UTC m=+7555.265826038" Oct 09 15:31:21 crc kubenswrapper[4762]: I1009 15:31:21.965768 4762 scope.go:117] "RemoveContainer" containerID="da3bd6add501ed77419b80f68b04419592f8477497ce7284b22aa3129677ebf6" Oct 09 15:31:21 crc kubenswrapper[4762]: E1009 15:31:21.967888 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:31:23 crc kubenswrapper[4762]: I1009 15:31:23.169440 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-mx6vn" Oct 09 15:31:23 crc kubenswrapper[4762]: I1009 15:31:23.169625 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="unhealthy" pod="openshift-marketplace/community-operators-mx6vn" Oct 09 15:31:23 crc kubenswrapper[4762]: I1009 15:31:23.225225 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-mx6vn" Oct 09 15:31:23 crc kubenswrapper[4762]: I1009 15:31:23.563547 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-mx6vn" Oct 09 15:31:23 crc kubenswrapper[4762]: I1009 15:31:23.607597 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-mx6vn"] Oct 09 15:31:25 crc kubenswrapper[4762]: I1009 15:31:25.537626 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-mx6vn" podUID="284e37a9-b998-4b1e-8089-ae2c7dbf7e62" containerName="registry-server" containerID="cri-o://ac692e17833603e4598066991c448fe9b137c180ca0ff9c47d069d0724d7da93" gracePeriod=2 Oct 09 15:31:26 crc kubenswrapper[4762]: I1009 15:31:26.049261 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-mx6vn" Oct 09 15:31:26 crc kubenswrapper[4762]: I1009 15:31:26.176725 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/284e37a9-b998-4b1e-8089-ae2c7dbf7e62-catalog-content\") pod \"284e37a9-b998-4b1e-8089-ae2c7dbf7e62\" (UID: \"284e37a9-b998-4b1e-8089-ae2c7dbf7e62\") " Oct 09 15:31:26 crc kubenswrapper[4762]: I1009 15:31:26.176954 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/284e37a9-b998-4b1e-8089-ae2c7dbf7e62-utilities\") pod \"284e37a9-b998-4b1e-8089-ae2c7dbf7e62\" (UID: \"284e37a9-b998-4b1e-8089-ae2c7dbf7e62\") " Oct 09 15:31:26 crc kubenswrapper[4762]: I1009 15:31:26.177065 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dth7j\" (UniqueName: \"kubernetes.io/projected/284e37a9-b998-4b1e-8089-ae2c7dbf7e62-kube-api-access-dth7j\") pod \"284e37a9-b998-4b1e-8089-ae2c7dbf7e62\" (UID: \"284e37a9-b998-4b1e-8089-ae2c7dbf7e62\") " Oct 09 15:31:26 crc kubenswrapper[4762]: I1009 15:31:26.179438 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/284e37a9-b998-4b1e-8089-ae2c7dbf7e62-utilities" (OuterVolumeSpecName: "utilities") pod "284e37a9-b998-4b1e-8089-ae2c7dbf7e62" (UID: "284e37a9-b998-4b1e-8089-ae2c7dbf7e62"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 15:31:26 crc kubenswrapper[4762]: I1009 15:31:26.190763 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/284e37a9-b998-4b1e-8089-ae2c7dbf7e62-kube-api-access-dth7j" (OuterVolumeSpecName: "kube-api-access-dth7j") pod "284e37a9-b998-4b1e-8089-ae2c7dbf7e62" (UID: "284e37a9-b998-4b1e-8089-ae2c7dbf7e62"). InnerVolumeSpecName "kube-api-access-dth7j". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 15:31:26 crc kubenswrapper[4762]: I1009 15:31:26.280552 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dth7j\" (UniqueName: \"kubernetes.io/projected/284e37a9-b998-4b1e-8089-ae2c7dbf7e62-kube-api-access-dth7j\") on node \"crc\" DevicePath \"\"" Oct 09 15:31:26 crc kubenswrapper[4762]: I1009 15:31:26.280593 4762 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/284e37a9-b998-4b1e-8089-ae2c7dbf7e62-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 15:31:26 crc kubenswrapper[4762]: I1009 15:31:26.549036 4762 generic.go:334] "Generic (PLEG): container finished" podID="284e37a9-b998-4b1e-8089-ae2c7dbf7e62" containerID="ac692e17833603e4598066991c448fe9b137c180ca0ff9c47d069d0724d7da93" exitCode=0 Oct 09 15:31:26 crc kubenswrapper[4762]: I1009 15:31:26.549090 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mx6vn" event={"ID":"284e37a9-b998-4b1e-8089-ae2c7dbf7e62","Type":"ContainerDied","Data":"ac692e17833603e4598066991c448fe9b137c180ca0ff9c47d069d0724d7da93"} Oct 09 15:31:26 crc kubenswrapper[4762]: I1009 15:31:26.549113 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-mx6vn" Oct 09 15:31:26 crc kubenswrapper[4762]: I1009 15:31:26.549124 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mx6vn" event={"ID":"284e37a9-b998-4b1e-8089-ae2c7dbf7e62","Type":"ContainerDied","Data":"5608ee832743191532b1c4ee652568c86498462ac187799fa2f3dbb448cff8f8"} Oct 09 15:31:26 crc kubenswrapper[4762]: I1009 15:31:26.549146 4762 scope.go:117] "RemoveContainer" containerID="ac692e17833603e4598066991c448fe9b137c180ca0ff9c47d069d0724d7da93" Oct 09 15:31:26 crc kubenswrapper[4762]: I1009 15:31:26.571553 4762 scope.go:117] "RemoveContainer" containerID="41b6d822ee1550d5d3c670874b1e672ca0000c548775d8ba5d74d1df70629d45" Oct 09 15:31:26 crc kubenswrapper[4762]: I1009 15:31:26.595170 4762 scope.go:117] "RemoveContainer" containerID="841d9312fac3b9bd7c4b174b54135cd85d0d952e597a104391b7c06add26b720" Oct 09 15:31:26 crc kubenswrapper[4762]: I1009 15:31:26.647024 4762 scope.go:117] "RemoveContainer" containerID="ac692e17833603e4598066991c448fe9b137c180ca0ff9c47d069d0724d7da93" Oct 09 15:31:26 crc kubenswrapper[4762]: E1009 15:31:26.647828 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ac692e17833603e4598066991c448fe9b137c180ca0ff9c47d069d0724d7da93\": container with ID starting with ac692e17833603e4598066991c448fe9b137c180ca0ff9c47d069d0724d7da93 not found: ID does not exist" containerID="ac692e17833603e4598066991c448fe9b137c180ca0ff9c47d069d0724d7da93" Oct 09 15:31:26 crc kubenswrapper[4762]: I1009 15:31:26.647936 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ac692e17833603e4598066991c448fe9b137c180ca0ff9c47d069d0724d7da93"} err="failed to get container status \"ac692e17833603e4598066991c448fe9b137c180ca0ff9c47d069d0724d7da93\": rpc error: code = NotFound desc = could not find container \"ac692e17833603e4598066991c448fe9b137c180ca0ff9c47d069d0724d7da93\": container with ID starting with ac692e17833603e4598066991c448fe9b137c180ca0ff9c47d069d0724d7da93 not found: ID does not exist" Oct 09 15:31:26 crc kubenswrapper[4762]: I1009 15:31:26.648029 4762 scope.go:117] 
"RemoveContainer" containerID="41b6d822ee1550d5d3c670874b1e672ca0000c548775d8ba5d74d1df70629d45" Oct 09 15:31:26 crc kubenswrapper[4762]: E1009 15:31:26.648589 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"41b6d822ee1550d5d3c670874b1e672ca0000c548775d8ba5d74d1df70629d45\": container with ID starting with 41b6d822ee1550d5d3c670874b1e672ca0000c548775d8ba5d74d1df70629d45 not found: ID does not exist" containerID="41b6d822ee1550d5d3c670874b1e672ca0000c548775d8ba5d74d1df70629d45" Oct 09 15:31:26 crc kubenswrapper[4762]: I1009 15:31:26.648750 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"41b6d822ee1550d5d3c670874b1e672ca0000c548775d8ba5d74d1df70629d45"} err="failed to get container status \"41b6d822ee1550d5d3c670874b1e672ca0000c548775d8ba5d74d1df70629d45\": rpc error: code = NotFound desc = could not find container \"41b6d822ee1550d5d3c670874b1e672ca0000c548775d8ba5d74d1df70629d45\": container with ID starting with 41b6d822ee1550d5d3c670874b1e672ca0000c548775d8ba5d74d1df70629d45 not found: ID does not exist" Oct 09 15:31:26 crc kubenswrapper[4762]: I1009 15:31:26.648798 4762 scope.go:117] "RemoveContainer" containerID="841d9312fac3b9bd7c4b174b54135cd85d0d952e597a104391b7c06add26b720" Oct 09 15:31:26 crc kubenswrapper[4762]: E1009 15:31:26.649067 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"841d9312fac3b9bd7c4b174b54135cd85d0d952e597a104391b7c06add26b720\": container with ID starting with 841d9312fac3b9bd7c4b174b54135cd85d0d952e597a104391b7c06add26b720 not found: ID does not exist" containerID="841d9312fac3b9bd7c4b174b54135cd85d0d952e597a104391b7c06add26b720" Oct 09 15:31:26 crc kubenswrapper[4762]: I1009 15:31:26.649102 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"841d9312fac3b9bd7c4b174b54135cd85d0d952e597a104391b7c06add26b720"} err="failed to get container status \"841d9312fac3b9bd7c4b174b54135cd85d0d952e597a104391b7c06add26b720\": rpc error: code = NotFound desc = could not find container \"841d9312fac3b9bd7c4b174b54135cd85d0d952e597a104391b7c06add26b720\": container with ID starting with 841d9312fac3b9bd7c4b174b54135cd85d0d952e597a104391b7c06add26b720 not found: ID does not exist" Oct 09 15:31:26 crc kubenswrapper[4762]: I1009 15:31:26.747759 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/284e37a9-b998-4b1e-8089-ae2c7dbf7e62-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "284e37a9-b998-4b1e-8089-ae2c7dbf7e62" (UID: "284e37a9-b998-4b1e-8089-ae2c7dbf7e62"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 15:31:26 crc kubenswrapper[4762]: I1009 15:31:26.798326 4762 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/284e37a9-b998-4b1e-8089-ae2c7dbf7e62-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 15:31:26 crc kubenswrapper[4762]: I1009 15:31:26.889692 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-mx6vn"] Oct 09 15:31:26 crc kubenswrapper[4762]: I1009 15:31:26.901753 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-mx6vn"] Oct 09 15:31:26 crc kubenswrapper[4762]: I1009 15:31:26.981487 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="284e37a9-b998-4b1e-8089-ae2c7dbf7e62" path="/var/lib/kubelet/pods/284e37a9-b998-4b1e-8089-ae2c7dbf7e62/volumes" Oct 09 15:31:36 crc kubenswrapper[4762]: I1009 15:31:36.966024 4762 scope.go:117] "RemoveContainer" containerID="da3bd6add501ed77419b80f68b04419592f8477497ce7284b22aa3129677ebf6" Oct 09 15:31:36 crc kubenswrapper[4762]: E1009 15:31:36.967296 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:31:50 crc kubenswrapper[4762]: I1009 15:31:50.965315 4762 scope.go:117] "RemoveContainer" containerID="da3bd6add501ed77419b80f68b04419592f8477497ce7284b22aa3129677ebf6" Oct 09 15:31:50 crc kubenswrapper[4762]: E1009 15:31:50.966334 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:32:02 crc kubenswrapper[4762]: I1009 15:32:02.966068 4762 scope.go:117] "RemoveContainer" containerID="da3bd6add501ed77419b80f68b04419592f8477497ce7284b22aa3129677ebf6" Oct 09 15:32:02 crc kubenswrapper[4762]: E1009 15:32:02.966926 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:32:17 crc kubenswrapper[4762]: I1009 15:32:17.966681 4762 scope.go:117] "RemoveContainer" containerID="da3bd6add501ed77419b80f68b04419592f8477497ce7284b22aa3129677ebf6" Oct 09 15:32:17 crc kubenswrapper[4762]: E1009 15:32:17.967658 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:32:32 crc kubenswrapper[4762]: I1009 15:32:32.966254 4762 scope.go:117] "RemoveContainer" containerID="da3bd6add501ed77419b80f68b04419592f8477497ce7284b22aa3129677ebf6" Oct 09 15:32:32 crc kubenswrapper[4762]: E1009 15:32:32.967728 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:32:38 crc kubenswrapper[4762]: I1009 15:32:38.279585 4762 generic.go:334] "Generic (PLEG): container finished" podID="3e25d87c-a7f5-4c9d-b1e4-552517e15174" containerID="06b168a48eb1d7c4edf1919ce9b8e963251047af03ec6e6df2be104b70dc5374" exitCode=0 Oct 09 15:32:38 crc kubenswrapper[4762]: I1009 15:32:38.279740 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-openstack-openstack-cell1-4mhld" event={"ID":"3e25d87c-a7f5-4c9d-b1e4-552517e15174","Type":"ContainerDied","Data":"06b168a48eb1d7c4edf1919ce9b8e963251047af03ec6e6df2be104b70dc5374"} Oct 09 15:32:39 crc kubenswrapper[4762]: I1009 15:32:39.777109 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-openstack-openstack-cell1-4mhld" Oct 09 15:32:39 crc kubenswrapper[4762]: I1009 15:32:39.818084 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3e25d87c-a7f5-4c9d-b1e4-552517e15174-inventory\") pod \"3e25d87c-a7f5-4c9d-b1e4-552517e15174\" (UID: \"3e25d87c-a7f5-4c9d-b1e4-552517e15174\") " Oct 09 15:32:39 crc kubenswrapper[4762]: I1009 15:32:39.818301 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/3e25d87c-a7f5-4c9d-b1e4-552517e15174-ceph\") pod \"3e25d87c-a7f5-4c9d-b1e4-552517e15174\" (UID: \"3e25d87c-a7f5-4c9d-b1e4-552517e15174\") " Oct 09 15:32:39 crc kubenswrapper[4762]: I1009 15:32:39.818377 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3e25d87c-a7f5-4c9d-b1e4-552517e15174-ssh-key\") pod \"3e25d87c-a7f5-4c9d-b1e4-552517e15174\" (UID: \"3e25d87c-a7f5-4c9d-b1e4-552517e15174\") " Oct 09 15:32:39 crc kubenswrapper[4762]: I1009 15:32:39.818412 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j7kkt\" (UniqueName: \"kubernetes.io/projected/3e25d87c-a7f5-4c9d-b1e4-552517e15174-kube-api-access-j7kkt\") pod \"3e25d87c-a7f5-4c9d-b1e4-552517e15174\" (UID: \"3e25d87c-a7f5-4c9d-b1e4-552517e15174\") " Oct 09 15:32:39 crc kubenswrapper[4762]: I1009 15:32:39.823973 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3e25d87c-a7f5-4c9d-b1e4-552517e15174-ceph" (OuterVolumeSpecName: "ceph") pod "3e25d87c-a7f5-4c9d-b1e4-552517e15174" (UID: "3e25d87c-a7f5-4c9d-b1e4-552517e15174"). InnerVolumeSpecName "ceph". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:32:39 crc kubenswrapper[4762]: I1009 15:32:39.824241 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3e25d87c-a7f5-4c9d-b1e4-552517e15174-kube-api-access-j7kkt" (OuterVolumeSpecName: "kube-api-access-j7kkt") pod "3e25d87c-a7f5-4c9d-b1e4-552517e15174" (UID: "3e25d87c-a7f5-4c9d-b1e4-552517e15174"). InnerVolumeSpecName "kube-api-access-j7kkt". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 15:32:39 crc kubenswrapper[4762]: I1009 15:32:39.858063 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3e25d87c-a7f5-4c9d-b1e4-552517e15174-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "3e25d87c-a7f5-4c9d-b1e4-552517e15174" (UID: "3e25d87c-a7f5-4c9d-b1e4-552517e15174"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:32:39 crc kubenswrapper[4762]: I1009 15:32:39.859453 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3e25d87c-a7f5-4c9d-b1e4-552517e15174-inventory" (OuterVolumeSpecName: "inventory") pod "3e25d87c-a7f5-4c9d-b1e4-552517e15174" (UID: "3e25d87c-a7f5-4c9d-b1e4-552517e15174"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:32:39 crc kubenswrapper[4762]: I1009 15:32:39.920452 4762 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3e25d87c-a7f5-4c9d-b1e4-552517e15174-inventory\") on node \"crc\" DevicePath \"\"" Oct 09 15:32:39 crc kubenswrapper[4762]: I1009 15:32:39.920478 4762 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/3e25d87c-a7f5-4c9d-b1e4-552517e15174-ceph\") on node \"crc\" DevicePath \"\"" Oct 09 15:32:39 crc kubenswrapper[4762]: I1009 15:32:39.920486 4762 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3e25d87c-a7f5-4c9d-b1e4-552517e15174-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 09 15:32:39 crc kubenswrapper[4762]: I1009 15:32:39.920495 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j7kkt\" (UniqueName: \"kubernetes.io/projected/3e25d87c-a7f5-4c9d-b1e4-552517e15174-kube-api-access-j7kkt\") on node \"crc\" DevicePath \"\"" Oct 09 15:32:40 crc kubenswrapper[4762]: I1009 15:32:40.299775 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-openstack-openstack-cell1-4mhld" event={"ID":"3e25d87c-a7f5-4c9d-b1e4-552517e15174","Type":"ContainerDied","Data":"1d572ff1bbe5aebef53c47a792159b717de2074a213d9e46db46137a1a3c5cca"} Oct 09 15:32:40 crc kubenswrapper[4762]: I1009 15:32:40.299827 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1d572ff1bbe5aebef53c47a792159b717de2074a213d9e46db46137a1a3c5cca" Oct 09 15:32:40 crc kubenswrapper[4762]: I1009 15:32:40.299938 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-openstack-openstack-cell1-4mhld" Oct 09 15:32:40 crc kubenswrapper[4762]: I1009 15:32:40.385372 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/validate-network-openstack-openstack-cell1-882r6"] Oct 09 15:32:40 crc kubenswrapper[4762]: E1009 15:32:40.386033 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="284e37a9-b998-4b1e-8089-ae2c7dbf7e62" containerName="extract-content" Oct 09 15:32:40 crc kubenswrapper[4762]: I1009 15:32:40.386057 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="284e37a9-b998-4b1e-8089-ae2c7dbf7e62" containerName="extract-content" Oct 09 15:32:40 crc kubenswrapper[4762]: E1009 15:32:40.386066 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e25d87c-a7f5-4c9d-b1e4-552517e15174" containerName="configure-network-openstack-openstack-cell1" Oct 09 15:32:40 crc kubenswrapper[4762]: I1009 15:32:40.386077 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e25d87c-a7f5-4c9d-b1e4-552517e15174" containerName="configure-network-openstack-openstack-cell1" Oct 09 15:32:40 crc kubenswrapper[4762]: E1009 15:32:40.386097 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="284e37a9-b998-4b1e-8089-ae2c7dbf7e62" containerName="registry-server" Oct 09 15:32:40 crc kubenswrapper[4762]: I1009 15:32:40.386106 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="284e37a9-b998-4b1e-8089-ae2c7dbf7e62" containerName="registry-server" Oct 09 15:32:40 crc kubenswrapper[4762]: E1009 15:32:40.386126 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="284e37a9-b998-4b1e-8089-ae2c7dbf7e62" containerName="extract-utilities" Oct 09 15:32:40 crc kubenswrapper[4762]: I1009 15:32:40.386135 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="284e37a9-b998-4b1e-8089-ae2c7dbf7e62" containerName="extract-utilities" Oct 09 15:32:40 crc kubenswrapper[4762]: I1009 15:32:40.386434 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="284e37a9-b998-4b1e-8089-ae2c7dbf7e62" containerName="registry-server" Oct 09 15:32:40 crc kubenswrapper[4762]: I1009 15:32:40.386464 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="3e25d87c-a7f5-4c9d-b1e4-552517e15174" containerName="configure-network-openstack-openstack-cell1" Oct 09 15:32:40 crc kubenswrapper[4762]: I1009 15:32:40.387464 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-openstack-openstack-cell1-882r6" Oct 09 15:32:40 crc kubenswrapper[4762]: I1009 15:32:40.389442 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-whcgt" Oct 09 15:32:40 crc kubenswrapper[4762]: I1009 15:32:40.389716 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 09 15:32:40 crc kubenswrapper[4762]: I1009 15:32:40.390584 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Oct 09 15:32:40 crc kubenswrapper[4762]: I1009 15:32:40.390743 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Oct 09 15:32:40 crc kubenswrapper[4762]: I1009 15:32:40.396476 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-openstack-openstack-cell1-882r6"] Oct 09 15:32:40 crc kubenswrapper[4762]: I1009 15:32:40.430222 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/11a15b07-738a-401b-a0c2-28c664777750-ssh-key\") pod \"validate-network-openstack-openstack-cell1-882r6\" (UID: \"11a15b07-738a-401b-a0c2-28c664777750\") " pod="openstack/validate-network-openstack-openstack-cell1-882r6" Oct 09 15:32:40 crc kubenswrapper[4762]: I1009 15:32:40.430376 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/11a15b07-738a-401b-a0c2-28c664777750-inventory\") pod \"validate-network-openstack-openstack-cell1-882r6\" (UID: \"11a15b07-738a-401b-a0c2-28c664777750\") " pod="openstack/validate-network-openstack-openstack-cell1-882r6" Oct 09 15:32:40 crc kubenswrapper[4762]: I1009 15:32:40.430408 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/11a15b07-738a-401b-a0c2-28c664777750-ceph\") pod \"validate-network-openstack-openstack-cell1-882r6\" (UID: \"11a15b07-738a-401b-a0c2-28c664777750\") " pod="openstack/validate-network-openstack-openstack-cell1-882r6" Oct 09 15:32:40 crc kubenswrapper[4762]: I1009 15:32:40.430449 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v4nvn\" (UniqueName: \"kubernetes.io/projected/11a15b07-738a-401b-a0c2-28c664777750-kube-api-access-v4nvn\") pod \"validate-network-openstack-openstack-cell1-882r6\" (UID: \"11a15b07-738a-401b-a0c2-28c664777750\") " pod="openstack/validate-network-openstack-openstack-cell1-882r6" Oct 09 15:32:40 crc kubenswrapper[4762]: I1009 15:32:40.531778 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v4nvn\" (UniqueName: \"kubernetes.io/projected/11a15b07-738a-401b-a0c2-28c664777750-kube-api-access-v4nvn\") pod \"validate-network-openstack-openstack-cell1-882r6\" (UID: \"11a15b07-738a-401b-a0c2-28c664777750\") " pod="openstack/validate-network-openstack-openstack-cell1-882r6" Oct 09 15:32:40 crc kubenswrapper[4762]: I1009 15:32:40.531868 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/11a15b07-738a-401b-a0c2-28c664777750-ssh-key\") pod \"validate-network-openstack-openstack-cell1-882r6\" (UID: \"11a15b07-738a-401b-a0c2-28c664777750\") " 
pod="openstack/validate-network-openstack-openstack-cell1-882r6" Oct 09 15:32:40 crc kubenswrapper[4762]: I1009 15:32:40.531996 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/11a15b07-738a-401b-a0c2-28c664777750-inventory\") pod \"validate-network-openstack-openstack-cell1-882r6\" (UID: \"11a15b07-738a-401b-a0c2-28c664777750\") " pod="openstack/validate-network-openstack-openstack-cell1-882r6" Oct 09 15:32:40 crc kubenswrapper[4762]: I1009 15:32:40.532025 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/11a15b07-738a-401b-a0c2-28c664777750-ceph\") pod \"validate-network-openstack-openstack-cell1-882r6\" (UID: \"11a15b07-738a-401b-a0c2-28c664777750\") " pod="openstack/validate-network-openstack-openstack-cell1-882r6" Oct 09 15:32:40 crc kubenswrapper[4762]: I1009 15:32:40.537931 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/11a15b07-738a-401b-a0c2-28c664777750-inventory\") pod \"validate-network-openstack-openstack-cell1-882r6\" (UID: \"11a15b07-738a-401b-a0c2-28c664777750\") " pod="openstack/validate-network-openstack-openstack-cell1-882r6" Oct 09 15:32:40 crc kubenswrapper[4762]: I1009 15:32:40.539574 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/11a15b07-738a-401b-a0c2-28c664777750-ssh-key\") pod \"validate-network-openstack-openstack-cell1-882r6\" (UID: \"11a15b07-738a-401b-a0c2-28c664777750\") " pod="openstack/validate-network-openstack-openstack-cell1-882r6" Oct 09 15:32:40 crc kubenswrapper[4762]: I1009 15:32:40.547285 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/11a15b07-738a-401b-a0c2-28c664777750-ceph\") pod \"validate-network-openstack-openstack-cell1-882r6\" (UID: \"11a15b07-738a-401b-a0c2-28c664777750\") " pod="openstack/validate-network-openstack-openstack-cell1-882r6" Oct 09 15:32:40 crc kubenswrapper[4762]: I1009 15:32:40.555872 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v4nvn\" (UniqueName: \"kubernetes.io/projected/11a15b07-738a-401b-a0c2-28c664777750-kube-api-access-v4nvn\") pod \"validate-network-openstack-openstack-cell1-882r6\" (UID: \"11a15b07-738a-401b-a0c2-28c664777750\") " pod="openstack/validate-network-openstack-openstack-cell1-882r6" Oct 09 15:32:40 crc kubenswrapper[4762]: I1009 15:32:40.704721 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-openstack-openstack-cell1-882r6" Oct 09 15:32:41 crc kubenswrapper[4762]: I1009 15:32:41.261001 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-openstack-openstack-cell1-882r6"] Oct 09 15:32:41 crc kubenswrapper[4762]: I1009 15:32:41.264772 4762 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 09 15:32:41 crc kubenswrapper[4762]: I1009 15:32:41.312016 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-openstack-openstack-cell1-882r6" event={"ID":"11a15b07-738a-401b-a0c2-28c664777750","Type":"ContainerStarted","Data":"f2a94c2acb509bce045e1cae3d59c9388a4b36ac484ec6592b94e7829e051068"} Oct 09 15:32:42 crc kubenswrapper[4762]: I1009 15:32:42.324374 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-openstack-openstack-cell1-882r6" event={"ID":"11a15b07-738a-401b-a0c2-28c664777750","Type":"ContainerStarted","Data":"185829fc157920ba3dc31467506338bde5c7d1dff14f887d34de97f467fc5349"} Oct 09 15:32:42 crc kubenswrapper[4762]: I1009 15:32:42.351573 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/validate-network-openstack-openstack-cell1-882r6" podStartSLOduration=1.8217430239999999 podStartE2EDuration="2.351551678s" podCreationTimestamp="2025-10-09 15:32:40 +0000 UTC" firstStartedPulling="2025-10-09 15:32:41.264493624 +0000 UTC m=+7637.038284663" lastFinishedPulling="2025-10-09 15:32:41.794302278 +0000 UTC m=+7637.568093317" observedRunningTime="2025-10-09 15:32:42.344618036 +0000 UTC m=+7638.118409095" watchObservedRunningTime="2025-10-09 15:32:42.351551678 +0000 UTC m=+7638.125342727" Oct 09 15:32:45 crc kubenswrapper[4762]: I1009 15:32:45.965663 4762 scope.go:117] "RemoveContainer" containerID="da3bd6add501ed77419b80f68b04419592f8477497ce7284b22aa3129677ebf6" Oct 09 15:32:45 crc kubenswrapper[4762]: E1009 15:32:45.966179 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:32:47 crc kubenswrapper[4762]: I1009 15:32:47.372523 4762 generic.go:334] "Generic (PLEG): container finished" podID="11a15b07-738a-401b-a0c2-28c664777750" containerID="185829fc157920ba3dc31467506338bde5c7d1dff14f887d34de97f467fc5349" exitCode=0 Oct 09 15:32:47 crc kubenswrapper[4762]: I1009 15:32:47.372601 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-openstack-openstack-cell1-882r6" event={"ID":"11a15b07-738a-401b-a0c2-28c664777750","Type":"ContainerDied","Data":"185829fc157920ba3dc31467506338bde5c7d1dff14f887d34de97f467fc5349"} Oct 09 15:32:48 crc kubenswrapper[4762]: I1009 15:32:48.840564 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-openstack-openstack-cell1-882r6" Oct 09 15:32:48 crc kubenswrapper[4762]: I1009 15:32:48.905029 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/11a15b07-738a-401b-a0c2-28c664777750-ssh-key\") pod \"11a15b07-738a-401b-a0c2-28c664777750\" (UID: \"11a15b07-738a-401b-a0c2-28c664777750\") " Oct 09 15:32:48 crc kubenswrapper[4762]: I1009 15:32:48.905694 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v4nvn\" (UniqueName: \"kubernetes.io/projected/11a15b07-738a-401b-a0c2-28c664777750-kube-api-access-v4nvn\") pod \"11a15b07-738a-401b-a0c2-28c664777750\" (UID: \"11a15b07-738a-401b-a0c2-28c664777750\") " Oct 09 15:32:48 crc kubenswrapper[4762]: I1009 15:32:48.905781 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/11a15b07-738a-401b-a0c2-28c664777750-ceph\") pod \"11a15b07-738a-401b-a0c2-28c664777750\" (UID: \"11a15b07-738a-401b-a0c2-28c664777750\") " Oct 09 15:32:48 crc kubenswrapper[4762]: I1009 15:32:48.905803 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/11a15b07-738a-401b-a0c2-28c664777750-inventory\") pod \"11a15b07-738a-401b-a0c2-28c664777750\" (UID: \"11a15b07-738a-401b-a0c2-28c664777750\") " Oct 09 15:32:48 crc kubenswrapper[4762]: I1009 15:32:48.911982 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/11a15b07-738a-401b-a0c2-28c664777750-kube-api-access-v4nvn" (OuterVolumeSpecName: "kube-api-access-v4nvn") pod "11a15b07-738a-401b-a0c2-28c664777750" (UID: "11a15b07-738a-401b-a0c2-28c664777750"). InnerVolumeSpecName "kube-api-access-v4nvn". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 15:32:48 crc kubenswrapper[4762]: I1009 15:32:48.912021 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/11a15b07-738a-401b-a0c2-28c664777750-ceph" (OuterVolumeSpecName: "ceph") pod "11a15b07-738a-401b-a0c2-28c664777750" (UID: "11a15b07-738a-401b-a0c2-28c664777750"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:32:48 crc kubenswrapper[4762]: I1009 15:32:48.946885 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/11a15b07-738a-401b-a0c2-28c664777750-inventory" (OuterVolumeSpecName: "inventory") pod "11a15b07-738a-401b-a0c2-28c664777750" (UID: "11a15b07-738a-401b-a0c2-28c664777750"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:32:48 crc kubenswrapper[4762]: I1009 15:32:48.954932 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/11a15b07-738a-401b-a0c2-28c664777750-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "11a15b07-738a-401b-a0c2-28c664777750" (UID: "11a15b07-738a-401b-a0c2-28c664777750"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:32:49 crc kubenswrapper[4762]: I1009 15:32:49.007735 4762 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/11a15b07-738a-401b-a0c2-28c664777750-ceph\") on node \"crc\" DevicePath \"\"" Oct 09 15:32:49 crc kubenswrapper[4762]: I1009 15:32:49.007763 4762 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/11a15b07-738a-401b-a0c2-28c664777750-inventory\") on node \"crc\" DevicePath \"\"" Oct 09 15:32:49 crc kubenswrapper[4762]: I1009 15:32:49.007772 4762 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/11a15b07-738a-401b-a0c2-28c664777750-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 09 15:32:49 crc kubenswrapper[4762]: I1009 15:32:49.007781 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v4nvn\" (UniqueName: \"kubernetes.io/projected/11a15b07-738a-401b-a0c2-28c664777750-kube-api-access-v4nvn\") on node \"crc\" DevicePath \"\"" Oct 09 15:32:49 crc kubenswrapper[4762]: I1009 15:32:49.393101 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-openstack-openstack-cell1-882r6" event={"ID":"11a15b07-738a-401b-a0c2-28c664777750","Type":"ContainerDied","Data":"f2a94c2acb509bce045e1cae3d59c9388a4b36ac484ec6592b94e7829e051068"} Oct 09 15:32:49 crc kubenswrapper[4762]: I1009 15:32:49.393142 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f2a94c2acb509bce045e1cae3d59c9388a4b36ac484ec6592b94e7829e051068" Oct 09 15:32:49 crc kubenswrapper[4762]: I1009 15:32:49.393272 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-openstack-openstack-cell1-882r6" Oct 09 15:32:49 crc kubenswrapper[4762]: I1009 15:32:49.452719 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-os-openstack-openstack-cell1-vxnnh"] Oct 09 15:32:49 crc kubenswrapper[4762]: E1009 15:32:49.453468 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="11a15b07-738a-401b-a0c2-28c664777750" containerName="validate-network-openstack-openstack-cell1" Oct 09 15:32:49 crc kubenswrapper[4762]: I1009 15:32:49.453581 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="11a15b07-738a-401b-a0c2-28c664777750" containerName="validate-network-openstack-openstack-cell1" Oct 09 15:32:49 crc kubenswrapper[4762]: I1009 15:32:49.453934 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="11a15b07-738a-401b-a0c2-28c664777750" containerName="validate-network-openstack-openstack-cell1" Oct 09 15:32:49 crc kubenswrapper[4762]: I1009 15:32:49.454782 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-openstack-openstack-cell1-vxnnh" Oct 09 15:32:49 crc kubenswrapper[4762]: I1009 15:32:49.462532 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 09 15:32:49 crc kubenswrapper[4762]: I1009 15:32:49.463094 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-whcgt" Oct 09 15:32:49 crc kubenswrapper[4762]: I1009 15:32:49.462824 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Oct 09 15:32:49 crc kubenswrapper[4762]: I1009 15:32:49.462910 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Oct 09 15:32:49 crc kubenswrapper[4762]: I1009 15:32:49.489072 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-openstack-openstack-cell1-vxnnh"] Oct 09 15:32:49 crc kubenswrapper[4762]: I1009 15:32:49.620560 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8nqqr\" (UniqueName: \"kubernetes.io/projected/f4249fbb-12c1-4788-8fb6-5915e85139c1-kube-api-access-8nqqr\") pod \"install-os-openstack-openstack-cell1-vxnnh\" (UID: \"f4249fbb-12c1-4788-8fb6-5915e85139c1\") " pod="openstack/install-os-openstack-openstack-cell1-vxnnh" Oct 09 15:32:49 crc kubenswrapper[4762]: I1009 15:32:49.621238 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f4249fbb-12c1-4788-8fb6-5915e85139c1-ssh-key\") pod \"install-os-openstack-openstack-cell1-vxnnh\" (UID: \"f4249fbb-12c1-4788-8fb6-5915e85139c1\") " pod="openstack/install-os-openstack-openstack-cell1-vxnnh" Oct 09 15:32:49 crc kubenswrapper[4762]: I1009 15:32:49.621432 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f4249fbb-12c1-4788-8fb6-5915e85139c1-inventory\") pod \"install-os-openstack-openstack-cell1-vxnnh\" (UID: \"f4249fbb-12c1-4788-8fb6-5915e85139c1\") " pod="openstack/install-os-openstack-openstack-cell1-vxnnh" Oct 09 15:32:49 crc kubenswrapper[4762]: I1009 15:32:49.621591 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f4249fbb-12c1-4788-8fb6-5915e85139c1-ceph\") pod \"install-os-openstack-openstack-cell1-vxnnh\" (UID: \"f4249fbb-12c1-4788-8fb6-5915e85139c1\") " pod="openstack/install-os-openstack-openstack-cell1-vxnnh" Oct 09 15:32:49 crc kubenswrapper[4762]: I1009 15:32:49.723988 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f4249fbb-12c1-4788-8fb6-5915e85139c1-ssh-key\") pod \"install-os-openstack-openstack-cell1-vxnnh\" (UID: \"f4249fbb-12c1-4788-8fb6-5915e85139c1\") " pod="openstack/install-os-openstack-openstack-cell1-vxnnh" Oct 09 15:32:49 crc kubenswrapper[4762]: I1009 15:32:49.724085 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f4249fbb-12c1-4788-8fb6-5915e85139c1-inventory\") pod \"install-os-openstack-openstack-cell1-vxnnh\" (UID: \"f4249fbb-12c1-4788-8fb6-5915e85139c1\") " pod="openstack/install-os-openstack-openstack-cell1-vxnnh" Oct 09 15:32:49 crc kubenswrapper[4762]: I1009 15:32:49.724114 4762 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f4249fbb-12c1-4788-8fb6-5915e85139c1-ceph\") pod \"install-os-openstack-openstack-cell1-vxnnh\" (UID: \"f4249fbb-12c1-4788-8fb6-5915e85139c1\") " pod="openstack/install-os-openstack-openstack-cell1-vxnnh" Oct 09 15:32:49 crc kubenswrapper[4762]: I1009 15:32:49.724209 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8nqqr\" (UniqueName: \"kubernetes.io/projected/f4249fbb-12c1-4788-8fb6-5915e85139c1-kube-api-access-8nqqr\") pod \"install-os-openstack-openstack-cell1-vxnnh\" (UID: \"f4249fbb-12c1-4788-8fb6-5915e85139c1\") " pod="openstack/install-os-openstack-openstack-cell1-vxnnh" Oct 09 15:32:49 crc kubenswrapper[4762]: I1009 15:32:49.730566 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f4249fbb-12c1-4788-8fb6-5915e85139c1-ssh-key\") pod \"install-os-openstack-openstack-cell1-vxnnh\" (UID: \"f4249fbb-12c1-4788-8fb6-5915e85139c1\") " pod="openstack/install-os-openstack-openstack-cell1-vxnnh" Oct 09 15:32:49 crc kubenswrapper[4762]: I1009 15:32:49.730843 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f4249fbb-12c1-4788-8fb6-5915e85139c1-ceph\") pod \"install-os-openstack-openstack-cell1-vxnnh\" (UID: \"f4249fbb-12c1-4788-8fb6-5915e85139c1\") " pod="openstack/install-os-openstack-openstack-cell1-vxnnh" Oct 09 15:32:49 crc kubenswrapper[4762]: I1009 15:32:49.731376 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f4249fbb-12c1-4788-8fb6-5915e85139c1-inventory\") pod \"install-os-openstack-openstack-cell1-vxnnh\" (UID: \"f4249fbb-12c1-4788-8fb6-5915e85139c1\") " pod="openstack/install-os-openstack-openstack-cell1-vxnnh" Oct 09 15:32:49 crc kubenswrapper[4762]: I1009 15:32:49.745032 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8nqqr\" (UniqueName: \"kubernetes.io/projected/f4249fbb-12c1-4788-8fb6-5915e85139c1-kube-api-access-8nqqr\") pod \"install-os-openstack-openstack-cell1-vxnnh\" (UID: \"f4249fbb-12c1-4788-8fb6-5915e85139c1\") " pod="openstack/install-os-openstack-openstack-cell1-vxnnh" Oct 09 15:32:49 crc kubenswrapper[4762]: I1009 15:32:49.827345 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-openstack-openstack-cell1-vxnnh" Oct 09 15:32:50 crc kubenswrapper[4762]: I1009 15:32:50.478348 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-openstack-openstack-cell1-vxnnh"] Oct 09 15:32:51 crc kubenswrapper[4762]: I1009 15:32:51.421170 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-openstack-openstack-cell1-vxnnh" event={"ID":"f4249fbb-12c1-4788-8fb6-5915e85139c1","Type":"ContainerStarted","Data":"d4f0961b4af9555af8039ab57ca367f9c014003a85268df199a77567ecd3b9c2"} Oct 09 15:32:51 crc kubenswrapper[4762]: I1009 15:32:51.421818 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-openstack-openstack-cell1-vxnnh" event={"ID":"f4249fbb-12c1-4788-8fb6-5915e85139c1","Type":"ContainerStarted","Data":"3dcf685d75deda2435a153f96753f3586f2d494789e9223dfc1a8f2c508f7bb4"} Oct 09 15:32:51 crc kubenswrapper[4762]: I1009 15:32:51.441328 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-os-openstack-openstack-cell1-vxnnh" podStartSLOduration=2.044630115 podStartE2EDuration="2.441307646s" podCreationTimestamp="2025-10-09 15:32:49 +0000 UTC" firstStartedPulling="2025-10-09 15:32:50.481441738 +0000 UTC m=+7646.255232777" lastFinishedPulling="2025-10-09 15:32:50.878119269 +0000 UTC m=+7646.651910308" observedRunningTime="2025-10-09 15:32:51.435017111 +0000 UTC m=+7647.208808150" watchObservedRunningTime="2025-10-09 15:32:51.441307646 +0000 UTC m=+7647.215098685" Oct 09 15:32:56 crc kubenswrapper[4762]: I1009 15:32:56.965959 4762 scope.go:117] "RemoveContainer" containerID="da3bd6add501ed77419b80f68b04419592f8477497ce7284b22aa3129677ebf6" Oct 09 15:32:56 crc kubenswrapper[4762]: E1009 15:32:56.967039 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:33:11 crc kubenswrapper[4762]: I1009 15:33:11.965559 4762 scope.go:117] "RemoveContainer" containerID="da3bd6add501ed77419b80f68b04419592f8477497ce7284b22aa3129677ebf6" Oct 09 15:33:11 crc kubenswrapper[4762]: E1009 15:33:11.966430 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:33:22 crc kubenswrapper[4762]: I1009 15:33:22.965977 4762 scope.go:117] "RemoveContainer" containerID="da3bd6add501ed77419b80f68b04419592f8477497ce7284b22aa3129677ebf6" Oct 09 15:33:22 crc kubenswrapper[4762]: E1009 15:33:22.966854 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" 
podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:33:35 crc kubenswrapper[4762]: I1009 15:33:35.832283 4762 generic.go:334] "Generic (PLEG): container finished" podID="f4249fbb-12c1-4788-8fb6-5915e85139c1" containerID="d4f0961b4af9555af8039ab57ca367f9c014003a85268df199a77567ecd3b9c2" exitCode=0 Oct 09 15:33:35 crc kubenswrapper[4762]: I1009 15:33:35.832378 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-openstack-openstack-cell1-vxnnh" event={"ID":"f4249fbb-12c1-4788-8fb6-5915e85139c1","Type":"ContainerDied","Data":"d4f0961b4af9555af8039ab57ca367f9c014003a85268df199a77567ecd3b9c2"} Oct 09 15:33:35 crc kubenswrapper[4762]: I1009 15:33:35.964972 4762 scope.go:117] "RemoveContainer" containerID="da3bd6add501ed77419b80f68b04419592f8477497ce7284b22aa3129677ebf6" Oct 09 15:33:35 crc kubenswrapper[4762]: E1009 15:33:35.965253 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:33:37 crc kubenswrapper[4762]: I1009 15:33:37.314409 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-openstack-openstack-cell1-vxnnh" Oct 09 15:33:37 crc kubenswrapper[4762]: I1009 15:33:37.451817 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f4249fbb-12c1-4788-8fb6-5915e85139c1-inventory\") pod \"f4249fbb-12c1-4788-8fb6-5915e85139c1\" (UID: \"f4249fbb-12c1-4788-8fb6-5915e85139c1\") " Oct 09 15:33:37 crc kubenswrapper[4762]: I1009 15:33:37.452489 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f4249fbb-12c1-4788-8fb6-5915e85139c1-ceph\") pod \"f4249fbb-12c1-4788-8fb6-5915e85139c1\" (UID: \"f4249fbb-12c1-4788-8fb6-5915e85139c1\") " Oct 09 15:33:37 crc kubenswrapper[4762]: I1009 15:33:37.452782 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f4249fbb-12c1-4788-8fb6-5915e85139c1-ssh-key\") pod \"f4249fbb-12c1-4788-8fb6-5915e85139c1\" (UID: \"f4249fbb-12c1-4788-8fb6-5915e85139c1\") " Oct 09 15:33:37 crc kubenswrapper[4762]: I1009 15:33:37.453121 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8nqqr\" (UniqueName: \"kubernetes.io/projected/f4249fbb-12c1-4788-8fb6-5915e85139c1-kube-api-access-8nqqr\") pod \"f4249fbb-12c1-4788-8fb6-5915e85139c1\" (UID: \"f4249fbb-12c1-4788-8fb6-5915e85139c1\") " Oct 09 15:33:37 crc kubenswrapper[4762]: I1009 15:33:37.464430 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f4249fbb-12c1-4788-8fb6-5915e85139c1-ceph" (OuterVolumeSpecName: "ceph") pod "f4249fbb-12c1-4788-8fb6-5915e85139c1" (UID: "f4249fbb-12c1-4788-8fb6-5915e85139c1"). InnerVolumeSpecName "ceph". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:33:37 crc kubenswrapper[4762]: I1009 15:33:37.464729 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f4249fbb-12c1-4788-8fb6-5915e85139c1-kube-api-access-8nqqr" (OuterVolumeSpecName: "kube-api-access-8nqqr") pod "f4249fbb-12c1-4788-8fb6-5915e85139c1" (UID: "f4249fbb-12c1-4788-8fb6-5915e85139c1"). InnerVolumeSpecName "kube-api-access-8nqqr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 15:33:37 crc kubenswrapper[4762]: I1009 15:33:37.483218 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f4249fbb-12c1-4788-8fb6-5915e85139c1-inventory" (OuterVolumeSpecName: "inventory") pod "f4249fbb-12c1-4788-8fb6-5915e85139c1" (UID: "f4249fbb-12c1-4788-8fb6-5915e85139c1"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:33:37 crc kubenswrapper[4762]: I1009 15:33:37.483657 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f4249fbb-12c1-4788-8fb6-5915e85139c1-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "f4249fbb-12c1-4788-8fb6-5915e85139c1" (UID: "f4249fbb-12c1-4788-8fb6-5915e85139c1"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:33:37 crc kubenswrapper[4762]: I1009 15:33:37.555298 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8nqqr\" (UniqueName: \"kubernetes.io/projected/f4249fbb-12c1-4788-8fb6-5915e85139c1-kube-api-access-8nqqr\") on node \"crc\" DevicePath \"\"" Oct 09 15:33:37 crc kubenswrapper[4762]: I1009 15:33:37.555335 4762 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f4249fbb-12c1-4788-8fb6-5915e85139c1-inventory\") on node \"crc\" DevicePath \"\"" Oct 09 15:33:37 crc kubenswrapper[4762]: I1009 15:33:37.555346 4762 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f4249fbb-12c1-4788-8fb6-5915e85139c1-ceph\") on node \"crc\" DevicePath \"\"" Oct 09 15:33:37 crc kubenswrapper[4762]: I1009 15:33:37.555357 4762 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f4249fbb-12c1-4788-8fb6-5915e85139c1-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 09 15:33:37 crc kubenswrapper[4762]: I1009 15:33:37.856305 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-openstack-openstack-cell1-vxnnh" event={"ID":"f4249fbb-12c1-4788-8fb6-5915e85139c1","Type":"ContainerDied","Data":"3dcf685d75deda2435a153f96753f3586f2d494789e9223dfc1a8f2c508f7bb4"} Oct 09 15:33:37 crc kubenswrapper[4762]: I1009 15:33:37.856352 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3dcf685d75deda2435a153f96753f3586f2d494789e9223dfc1a8f2c508f7bb4" Oct 09 15:33:37 crc kubenswrapper[4762]: I1009 15:33:37.856444 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-openstack-openstack-cell1-vxnnh" Oct 09 15:33:37 crc kubenswrapper[4762]: I1009 15:33:37.931473 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-openstack-openstack-cell1-zfcmt"] Oct 09 15:33:37 crc kubenswrapper[4762]: E1009 15:33:37.932417 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4249fbb-12c1-4788-8fb6-5915e85139c1" containerName="install-os-openstack-openstack-cell1" Oct 09 15:33:37 crc kubenswrapper[4762]: I1009 15:33:37.932480 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4249fbb-12c1-4788-8fb6-5915e85139c1" containerName="install-os-openstack-openstack-cell1" Oct 09 15:33:37 crc kubenswrapper[4762]: I1009 15:33:37.932820 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4249fbb-12c1-4788-8fb6-5915e85139c1" containerName="install-os-openstack-openstack-cell1" Oct 09 15:33:37 crc kubenswrapper[4762]: I1009 15:33:37.933942 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-openstack-openstack-cell1-zfcmt" Oct 09 15:33:37 crc kubenswrapper[4762]: I1009 15:33:37.936199 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Oct 09 15:33:37 crc kubenswrapper[4762]: I1009 15:33:37.937188 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Oct 09 15:33:37 crc kubenswrapper[4762]: I1009 15:33:37.937352 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-whcgt" Oct 09 15:33:37 crc kubenswrapper[4762]: I1009 15:33:37.938219 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 09 15:33:37 crc kubenswrapper[4762]: I1009 15:33:37.943526 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-openstack-openstack-cell1-zfcmt"] Oct 09 15:33:38 crc kubenswrapper[4762]: I1009 15:33:38.065142 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0b649b90-58af-4419-8433-f83b4e793b9f-ssh-key\") pod \"configure-os-openstack-openstack-cell1-zfcmt\" (UID: \"0b649b90-58af-4419-8433-f83b4e793b9f\") " pod="openstack/configure-os-openstack-openstack-cell1-zfcmt" Oct 09 15:33:38 crc kubenswrapper[4762]: I1009 15:33:38.065556 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-62945\" (UniqueName: \"kubernetes.io/projected/0b649b90-58af-4419-8433-f83b4e793b9f-kube-api-access-62945\") pod \"configure-os-openstack-openstack-cell1-zfcmt\" (UID: \"0b649b90-58af-4419-8433-f83b4e793b9f\") " pod="openstack/configure-os-openstack-openstack-cell1-zfcmt" Oct 09 15:33:38 crc kubenswrapper[4762]: I1009 15:33:38.065670 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/0b649b90-58af-4419-8433-f83b4e793b9f-ceph\") pod \"configure-os-openstack-openstack-cell1-zfcmt\" (UID: \"0b649b90-58af-4419-8433-f83b4e793b9f\") " pod="openstack/configure-os-openstack-openstack-cell1-zfcmt" Oct 09 15:33:38 crc kubenswrapper[4762]: I1009 15:33:38.065713 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: 
\"kubernetes.io/secret/0b649b90-58af-4419-8433-f83b4e793b9f-inventory\") pod \"configure-os-openstack-openstack-cell1-zfcmt\" (UID: \"0b649b90-58af-4419-8433-f83b4e793b9f\") " pod="openstack/configure-os-openstack-openstack-cell1-zfcmt" Oct 09 15:33:38 crc kubenswrapper[4762]: I1009 15:33:38.167936 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0b649b90-58af-4419-8433-f83b4e793b9f-ssh-key\") pod \"configure-os-openstack-openstack-cell1-zfcmt\" (UID: \"0b649b90-58af-4419-8433-f83b4e793b9f\") " pod="openstack/configure-os-openstack-openstack-cell1-zfcmt" Oct 09 15:33:38 crc kubenswrapper[4762]: I1009 15:33:38.168267 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-62945\" (UniqueName: \"kubernetes.io/projected/0b649b90-58af-4419-8433-f83b4e793b9f-kube-api-access-62945\") pod \"configure-os-openstack-openstack-cell1-zfcmt\" (UID: \"0b649b90-58af-4419-8433-f83b4e793b9f\") " pod="openstack/configure-os-openstack-openstack-cell1-zfcmt" Oct 09 15:33:38 crc kubenswrapper[4762]: I1009 15:33:38.168459 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/0b649b90-58af-4419-8433-f83b4e793b9f-ceph\") pod \"configure-os-openstack-openstack-cell1-zfcmt\" (UID: \"0b649b90-58af-4419-8433-f83b4e793b9f\") " pod="openstack/configure-os-openstack-openstack-cell1-zfcmt" Oct 09 15:33:38 crc kubenswrapper[4762]: I1009 15:33:38.168570 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0b649b90-58af-4419-8433-f83b4e793b9f-inventory\") pod \"configure-os-openstack-openstack-cell1-zfcmt\" (UID: \"0b649b90-58af-4419-8433-f83b4e793b9f\") " pod="openstack/configure-os-openstack-openstack-cell1-zfcmt" Oct 09 15:33:38 crc kubenswrapper[4762]: I1009 15:33:38.175228 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0b649b90-58af-4419-8433-f83b4e793b9f-ssh-key\") pod \"configure-os-openstack-openstack-cell1-zfcmt\" (UID: \"0b649b90-58af-4419-8433-f83b4e793b9f\") " pod="openstack/configure-os-openstack-openstack-cell1-zfcmt" Oct 09 15:33:38 crc kubenswrapper[4762]: I1009 15:33:38.175275 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0b649b90-58af-4419-8433-f83b4e793b9f-inventory\") pod \"configure-os-openstack-openstack-cell1-zfcmt\" (UID: \"0b649b90-58af-4419-8433-f83b4e793b9f\") " pod="openstack/configure-os-openstack-openstack-cell1-zfcmt" Oct 09 15:33:38 crc kubenswrapper[4762]: I1009 15:33:38.176457 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/0b649b90-58af-4419-8433-f83b4e793b9f-ceph\") pod \"configure-os-openstack-openstack-cell1-zfcmt\" (UID: \"0b649b90-58af-4419-8433-f83b4e793b9f\") " pod="openstack/configure-os-openstack-openstack-cell1-zfcmt" Oct 09 15:33:38 crc kubenswrapper[4762]: I1009 15:33:38.184272 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-62945\" (UniqueName: \"kubernetes.io/projected/0b649b90-58af-4419-8433-f83b4e793b9f-kube-api-access-62945\") pod \"configure-os-openstack-openstack-cell1-zfcmt\" (UID: \"0b649b90-58af-4419-8433-f83b4e793b9f\") " pod="openstack/configure-os-openstack-openstack-cell1-zfcmt" Oct 09 15:33:38 crc kubenswrapper[4762]: I1009 
15:33:38.262266 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-openstack-openstack-cell1-zfcmt" Oct 09 15:33:39 crc kubenswrapper[4762]: I1009 15:33:38.804958 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-openstack-openstack-cell1-zfcmt"] Oct 09 15:33:39 crc kubenswrapper[4762]: I1009 15:33:38.883508 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-openstack-openstack-cell1-zfcmt" event={"ID":"0b649b90-58af-4419-8433-f83b4e793b9f","Type":"ContainerStarted","Data":"7582e2dd9fca7b701bb9da0dc5c00d648788284938763e9c559a6251c0aca549"} Oct 09 15:33:39 crc kubenswrapper[4762]: I1009 15:33:39.893409 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-openstack-openstack-cell1-zfcmt" event={"ID":"0b649b90-58af-4419-8433-f83b4e793b9f","Type":"ContainerStarted","Data":"c3fded52038e5b3350f1e9a36c340b70517eede24ccc1976bfa95d68b9ca0058"} Oct 09 15:33:39 crc kubenswrapper[4762]: I1009 15:33:39.925933 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-openstack-openstack-cell1-zfcmt" podStartSLOduration=2.356712724 podStartE2EDuration="2.925913017s" podCreationTimestamp="2025-10-09 15:33:37 +0000 UTC" firstStartedPulling="2025-10-09 15:33:38.815726529 +0000 UTC m=+7694.589517568" lastFinishedPulling="2025-10-09 15:33:39.384926822 +0000 UTC m=+7695.158717861" observedRunningTime="2025-10-09 15:33:39.914194811 +0000 UTC m=+7695.687985860" watchObservedRunningTime="2025-10-09 15:33:39.925913017 +0000 UTC m=+7695.699704076" Oct 09 15:33:50 crc kubenswrapper[4762]: I1009 15:33:50.965889 4762 scope.go:117] "RemoveContainer" containerID="da3bd6add501ed77419b80f68b04419592f8477497ce7284b22aa3129677ebf6" Oct 09 15:33:50 crc kubenswrapper[4762]: E1009 15:33:50.966809 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:34:03 crc kubenswrapper[4762]: I1009 15:34:03.966252 4762 scope.go:117] "RemoveContainer" containerID="da3bd6add501ed77419b80f68b04419592f8477497ce7284b22aa3129677ebf6" Oct 09 15:34:03 crc kubenswrapper[4762]: E1009 15:34:03.967139 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:34:14 crc kubenswrapper[4762]: I1009 15:34:14.973014 4762 scope.go:117] "RemoveContainer" containerID="da3bd6add501ed77419b80f68b04419592f8477497ce7284b22aa3129677ebf6" Oct 09 15:34:14 crc kubenswrapper[4762]: E1009 15:34:14.973817 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:34:23 crc kubenswrapper[4762]: I1009 15:34:23.286332 4762 generic.go:334] "Generic (PLEG): container finished" podID="0b649b90-58af-4419-8433-f83b4e793b9f" containerID="c3fded52038e5b3350f1e9a36c340b70517eede24ccc1976bfa95d68b9ca0058" exitCode=0 Oct 09 15:34:23 crc kubenswrapper[4762]: I1009 15:34:23.286403 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-openstack-openstack-cell1-zfcmt" event={"ID":"0b649b90-58af-4419-8433-f83b4e793b9f","Type":"ContainerDied","Data":"c3fded52038e5b3350f1e9a36c340b70517eede24ccc1976bfa95d68b9ca0058"} Oct 09 15:34:24 crc kubenswrapper[4762]: I1009 15:34:24.798438 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-openstack-openstack-cell1-zfcmt" Oct 09 15:34:24 crc kubenswrapper[4762]: I1009 15:34:24.977918 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/0b649b90-58af-4419-8433-f83b4e793b9f-ceph\") pod \"0b649b90-58af-4419-8433-f83b4e793b9f\" (UID: \"0b649b90-58af-4419-8433-f83b4e793b9f\") " Oct 09 15:34:24 crc kubenswrapper[4762]: I1009 15:34:24.978033 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0b649b90-58af-4419-8433-f83b4e793b9f-ssh-key\") pod \"0b649b90-58af-4419-8433-f83b4e793b9f\" (UID: \"0b649b90-58af-4419-8433-f83b4e793b9f\") " Oct 09 15:34:24 crc kubenswrapper[4762]: I1009 15:34:24.978154 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-62945\" (UniqueName: \"kubernetes.io/projected/0b649b90-58af-4419-8433-f83b4e793b9f-kube-api-access-62945\") pod \"0b649b90-58af-4419-8433-f83b4e793b9f\" (UID: \"0b649b90-58af-4419-8433-f83b4e793b9f\") " Oct 09 15:34:24 crc kubenswrapper[4762]: I1009 15:34:24.978182 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0b649b90-58af-4419-8433-f83b4e793b9f-inventory\") pod \"0b649b90-58af-4419-8433-f83b4e793b9f\" (UID: \"0b649b90-58af-4419-8433-f83b4e793b9f\") " Oct 09 15:34:24 crc kubenswrapper[4762]: I1009 15:34:24.984509 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b649b90-58af-4419-8433-f83b4e793b9f-ceph" (OuterVolumeSpecName: "ceph") pod "0b649b90-58af-4419-8433-f83b4e793b9f" (UID: "0b649b90-58af-4419-8433-f83b4e793b9f"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:34:24 crc kubenswrapper[4762]: I1009 15:34:24.985027 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b649b90-58af-4419-8433-f83b4e793b9f-kube-api-access-62945" (OuterVolumeSpecName: "kube-api-access-62945") pod "0b649b90-58af-4419-8433-f83b4e793b9f" (UID: "0b649b90-58af-4419-8433-f83b4e793b9f"). InnerVolumeSpecName "kube-api-access-62945". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 15:34:25 crc kubenswrapper[4762]: I1009 15:34:25.007790 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b649b90-58af-4419-8433-f83b4e793b9f-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "0b649b90-58af-4419-8433-f83b4e793b9f" (UID: "0b649b90-58af-4419-8433-f83b4e793b9f"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:34:25 crc kubenswrapper[4762]: I1009 15:34:25.009116 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b649b90-58af-4419-8433-f83b4e793b9f-inventory" (OuterVolumeSpecName: "inventory") pod "0b649b90-58af-4419-8433-f83b4e793b9f" (UID: "0b649b90-58af-4419-8433-f83b4e793b9f"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:34:25 crc kubenswrapper[4762]: I1009 15:34:25.080429 4762 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/0b649b90-58af-4419-8433-f83b4e793b9f-ceph\") on node \"crc\" DevicePath \"\"" Oct 09 15:34:25 crc kubenswrapper[4762]: I1009 15:34:25.080477 4762 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0b649b90-58af-4419-8433-f83b4e793b9f-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 09 15:34:25 crc kubenswrapper[4762]: I1009 15:34:25.080489 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-62945\" (UniqueName: \"kubernetes.io/projected/0b649b90-58af-4419-8433-f83b4e793b9f-kube-api-access-62945\") on node \"crc\" DevicePath \"\"" Oct 09 15:34:25 crc kubenswrapper[4762]: I1009 15:34:25.080498 4762 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0b649b90-58af-4419-8433-f83b4e793b9f-inventory\") on node \"crc\" DevicePath \"\"" Oct 09 15:34:25 crc kubenswrapper[4762]: I1009 15:34:25.307230 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-openstack-openstack-cell1-zfcmt" event={"ID":"0b649b90-58af-4419-8433-f83b4e793b9f","Type":"ContainerDied","Data":"7582e2dd9fca7b701bb9da0dc5c00d648788284938763e9c559a6251c0aca549"} Oct 09 15:34:25 crc kubenswrapper[4762]: I1009 15:34:25.307271 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7582e2dd9fca7b701bb9da0dc5c00d648788284938763e9c559a6251c0aca549" Oct 09 15:34:25 crc kubenswrapper[4762]: I1009 15:34:25.307299 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-openstack-openstack-cell1-zfcmt" Oct 09 15:34:25 crc kubenswrapper[4762]: I1009 15:34:25.391304 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ssh-known-hosts-openstack-lmprt"] Oct 09 15:34:25 crc kubenswrapper[4762]: E1009 15:34:25.391898 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0b649b90-58af-4419-8433-f83b4e793b9f" containerName="configure-os-openstack-openstack-cell1" Oct 09 15:34:25 crc kubenswrapper[4762]: I1009 15:34:25.391922 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="0b649b90-58af-4419-8433-f83b4e793b9f" containerName="configure-os-openstack-openstack-cell1" Oct 09 15:34:25 crc kubenswrapper[4762]: I1009 15:34:25.392207 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="0b649b90-58af-4419-8433-f83b4e793b9f" containerName="configure-os-openstack-openstack-cell1" Oct 09 15:34:25 crc kubenswrapper[4762]: I1009 15:34:25.393258 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-openstack-lmprt" Oct 09 15:34:25 crc kubenswrapper[4762]: I1009 15:34:25.395915 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Oct 09 15:34:25 crc kubenswrapper[4762]: I1009 15:34:25.397683 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 09 15:34:25 crc kubenswrapper[4762]: I1009 15:34:25.397713 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Oct 09 15:34:25 crc kubenswrapper[4762]: I1009 15:34:25.397818 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-whcgt" Oct 09 15:34:25 crc kubenswrapper[4762]: I1009 15:34:25.413543 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-openstack-lmprt"] Oct 09 15:34:25 crc kubenswrapper[4762]: I1009 15:34:25.489310 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/5c072459-9381-4dc8-901d-1c530af82240-ssh-key-openstack-cell1\") pod \"ssh-known-hosts-openstack-lmprt\" (UID: \"5c072459-9381-4dc8-901d-1c530af82240\") " pod="openstack/ssh-known-hosts-openstack-lmprt" Oct 09 15:34:25 crc kubenswrapper[4762]: I1009 15:34:25.489500 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/5c072459-9381-4dc8-901d-1c530af82240-ceph\") pod \"ssh-known-hosts-openstack-lmprt\" (UID: \"5c072459-9381-4dc8-901d-1c530af82240\") " pod="openstack/ssh-known-hosts-openstack-lmprt" Oct 09 15:34:25 crc kubenswrapper[4762]: I1009 15:34:25.489755 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/5c072459-9381-4dc8-901d-1c530af82240-inventory-0\") pod \"ssh-known-hosts-openstack-lmprt\" (UID: \"5c072459-9381-4dc8-901d-1c530af82240\") " pod="openstack/ssh-known-hosts-openstack-lmprt" Oct 09 15:34:25 crc kubenswrapper[4762]: I1009 15:34:25.489878 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k8c5c\" (UniqueName: \"kubernetes.io/projected/5c072459-9381-4dc8-901d-1c530af82240-kube-api-access-k8c5c\") pod \"ssh-known-hosts-openstack-lmprt\" (UID: \"5c072459-9381-4dc8-901d-1c530af82240\") " pod="openstack/ssh-known-hosts-openstack-lmprt" Oct 09 15:34:25 crc kubenswrapper[4762]: I1009 15:34:25.592245 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/5c072459-9381-4dc8-901d-1c530af82240-ssh-key-openstack-cell1\") pod \"ssh-known-hosts-openstack-lmprt\" (UID: \"5c072459-9381-4dc8-901d-1c530af82240\") " pod="openstack/ssh-known-hosts-openstack-lmprt" Oct 09 15:34:25 crc kubenswrapper[4762]: I1009 15:34:25.592291 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/5c072459-9381-4dc8-901d-1c530af82240-ceph\") pod \"ssh-known-hosts-openstack-lmprt\" (UID: \"5c072459-9381-4dc8-901d-1c530af82240\") " pod="openstack/ssh-known-hosts-openstack-lmprt" Oct 09 15:34:25 crc kubenswrapper[4762]: I1009 15:34:25.592335 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory-0\" (UniqueName: 
\"kubernetes.io/secret/5c072459-9381-4dc8-901d-1c530af82240-inventory-0\") pod \"ssh-known-hosts-openstack-lmprt\" (UID: \"5c072459-9381-4dc8-901d-1c530af82240\") " pod="openstack/ssh-known-hosts-openstack-lmprt" Oct 09 15:34:25 crc kubenswrapper[4762]: I1009 15:34:25.592376 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k8c5c\" (UniqueName: \"kubernetes.io/projected/5c072459-9381-4dc8-901d-1c530af82240-kube-api-access-k8c5c\") pod \"ssh-known-hosts-openstack-lmprt\" (UID: \"5c072459-9381-4dc8-901d-1c530af82240\") " pod="openstack/ssh-known-hosts-openstack-lmprt" Oct 09 15:34:25 crc kubenswrapper[4762]: I1009 15:34:25.596200 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/5c072459-9381-4dc8-901d-1c530af82240-ceph\") pod \"ssh-known-hosts-openstack-lmprt\" (UID: \"5c072459-9381-4dc8-901d-1c530af82240\") " pod="openstack/ssh-known-hosts-openstack-lmprt" Oct 09 15:34:25 crc kubenswrapper[4762]: I1009 15:34:25.601102 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/5c072459-9381-4dc8-901d-1c530af82240-inventory-0\") pod \"ssh-known-hosts-openstack-lmprt\" (UID: \"5c072459-9381-4dc8-901d-1c530af82240\") " pod="openstack/ssh-known-hosts-openstack-lmprt" Oct 09 15:34:25 crc kubenswrapper[4762]: I1009 15:34:25.601310 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/5c072459-9381-4dc8-901d-1c530af82240-ssh-key-openstack-cell1\") pod \"ssh-known-hosts-openstack-lmprt\" (UID: \"5c072459-9381-4dc8-901d-1c530af82240\") " pod="openstack/ssh-known-hosts-openstack-lmprt" Oct 09 15:34:25 crc kubenswrapper[4762]: I1009 15:34:25.612677 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k8c5c\" (UniqueName: \"kubernetes.io/projected/5c072459-9381-4dc8-901d-1c530af82240-kube-api-access-k8c5c\") pod \"ssh-known-hosts-openstack-lmprt\" (UID: \"5c072459-9381-4dc8-901d-1c530af82240\") " pod="openstack/ssh-known-hosts-openstack-lmprt" Oct 09 15:34:25 crc kubenswrapper[4762]: I1009 15:34:25.718701 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-openstack-lmprt" Oct 09 15:34:26 crc kubenswrapper[4762]: I1009 15:34:26.259166 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-openstack-lmprt"] Oct 09 15:34:26 crc kubenswrapper[4762]: I1009 15:34:26.320124 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-openstack-lmprt" event={"ID":"5c072459-9381-4dc8-901d-1c530af82240","Type":"ContainerStarted","Data":"b2beb9f8349cf9c893cd20593a4081548e4c19cf01d5160656f4e9213b3435e3"} Oct 09 15:34:27 crc kubenswrapper[4762]: I1009 15:34:27.340609 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-openstack-lmprt" event={"ID":"5c072459-9381-4dc8-901d-1c530af82240","Type":"ContainerStarted","Data":"243c8c6b0462995f25834ab76ca9354149de286d0e2a479f56747cc30e8f2fe4"} Oct 09 15:34:27 crc kubenswrapper[4762]: I1009 15:34:27.369117 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ssh-known-hosts-openstack-lmprt" podStartSLOduration=1.8475282100000001 podStartE2EDuration="2.369083637s" podCreationTimestamp="2025-10-09 15:34:25 +0000 UTC" firstStartedPulling="2025-10-09 15:34:26.26655841 +0000 UTC m=+7742.040349449" lastFinishedPulling="2025-10-09 15:34:26.788113837 +0000 UTC m=+7742.561904876" observedRunningTime="2025-10-09 15:34:27.362565527 +0000 UTC m=+7743.136356576" watchObservedRunningTime="2025-10-09 15:34:27.369083637 +0000 UTC m=+7743.142874666" Oct 09 15:34:27 crc kubenswrapper[4762]: I1009 15:34:27.965901 4762 scope.go:117] "RemoveContainer" containerID="da3bd6add501ed77419b80f68b04419592f8477497ce7284b22aa3129677ebf6" Oct 09 15:34:27 crc kubenswrapper[4762]: E1009 15:34:27.969169 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:34:35 crc kubenswrapper[4762]: I1009 15:34:35.414858 4762 generic.go:334] "Generic (PLEG): container finished" podID="5c072459-9381-4dc8-901d-1c530af82240" containerID="243c8c6b0462995f25834ab76ca9354149de286d0e2a479f56747cc30e8f2fe4" exitCode=0 Oct 09 15:34:35 crc kubenswrapper[4762]: I1009 15:34:35.414970 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-openstack-lmprt" event={"ID":"5c072459-9381-4dc8-901d-1c530af82240","Type":"ContainerDied","Data":"243c8c6b0462995f25834ab76ca9354149de286d0e2a479f56747cc30e8f2fe4"} Oct 09 15:34:36 crc kubenswrapper[4762]: I1009 15:34:36.834467 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-openstack-lmprt" Oct 09 15:34:37 crc kubenswrapper[4762]: I1009 15:34:37.031702 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k8c5c\" (UniqueName: \"kubernetes.io/projected/5c072459-9381-4dc8-901d-1c530af82240-kube-api-access-k8c5c\") pod \"5c072459-9381-4dc8-901d-1c530af82240\" (UID: \"5c072459-9381-4dc8-901d-1c530af82240\") " Oct 09 15:34:37 crc kubenswrapper[4762]: I1009 15:34:37.031772 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/5c072459-9381-4dc8-901d-1c530af82240-ssh-key-openstack-cell1\") pod \"5c072459-9381-4dc8-901d-1c530af82240\" (UID: \"5c072459-9381-4dc8-901d-1c530af82240\") " Oct 09 15:34:37 crc kubenswrapper[4762]: I1009 15:34:37.031881 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/5c072459-9381-4dc8-901d-1c530af82240-ceph\") pod \"5c072459-9381-4dc8-901d-1c530af82240\" (UID: \"5c072459-9381-4dc8-901d-1c530af82240\") " Oct 09 15:34:37 crc kubenswrapper[4762]: I1009 15:34:37.031930 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/5c072459-9381-4dc8-901d-1c530af82240-inventory-0\") pod \"5c072459-9381-4dc8-901d-1c530af82240\" (UID: \"5c072459-9381-4dc8-901d-1c530af82240\") " Oct 09 15:34:37 crc kubenswrapper[4762]: I1009 15:34:37.038396 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5c072459-9381-4dc8-901d-1c530af82240-ceph" (OuterVolumeSpecName: "ceph") pod "5c072459-9381-4dc8-901d-1c530af82240" (UID: "5c072459-9381-4dc8-901d-1c530af82240"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:34:37 crc kubenswrapper[4762]: I1009 15:34:37.039464 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5c072459-9381-4dc8-901d-1c530af82240-kube-api-access-k8c5c" (OuterVolumeSpecName: "kube-api-access-k8c5c") pod "5c072459-9381-4dc8-901d-1c530af82240" (UID: "5c072459-9381-4dc8-901d-1c530af82240"). InnerVolumeSpecName "kube-api-access-k8c5c". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 15:34:37 crc kubenswrapper[4762]: I1009 15:34:37.066492 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5c072459-9381-4dc8-901d-1c530af82240-inventory-0" (OuterVolumeSpecName: "inventory-0") pod "5c072459-9381-4dc8-901d-1c530af82240" (UID: "5c072459-9381-4dc8-901d-1c530af82240"). InnerVolumeSpecName "inventory-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:34:37 crc kubenswrapper[4762]: I1009 15:34:37.073597 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5c072459-9381-4dc8-901d-1c530af82240-ssh-key-openstack-cell1" (OuterVolumeSpecName: "ssh-key-openstack-cell1") pod "5c072459-9381-4dc8-901d-1c530af82240" (UID: "5c072459-9381-4dc8-901d-1c530af82240"). InnerVolumeSpecName "ssh-key-openstack-cell1". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:34:37 crc kubenswrapper[4762]: I1009 15:34:37.134543 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k8c5c\" (UniqueName: \"kubernetes.io/projected/5c072459-9381-4dc8-901d-1c530af82240-kube-api-access-k8c5c\") on node \"crc\" DevicePath \"\"" Oct 09 15:34:37 crc kubenswrapper[4762]: I1009 15:34:37.134581 4762 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/5c072459-9381-4dc8-901d-1c530af82240-ssh-key-openstack-cell1\") on node \"crc\" DevicePath \"\"" Oct 09 15:34:37 crc kubenswrapper[4762]: I1009 15:34:37.134590 4762 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/5c072459-9381-4dc8-901d-1c530af82240-ceph\") on node \"crc\" DevicePath \"\"" Oct 09 15:34:37 crc kubenswrapper[4762]: I1009 15:34:37.134599 4762 reconciler_common.go:293] "Volume detached for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/5c072459-9381-4dc8-901d-1c530af82240-inventory-0\") on node \"crc\" DevicePath \"\"" Oct 09 15:34:37 crc kubenswrapper[4762]: I1009 15:34:37.437379 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-openstack-lmprt" event={"ID":"5c072459-9381-4dc8-901d-1c530af82240","Type":"ContainerDied","Data":"b2beb9f8349cf9c893cd20593a4081548e4c19cf01d5160656f4e9213b3435e3"} Oct 09 15:34:37 crc kubenswrapper[4762]: I1009 15:34:37.437439 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b2beb9f8349cf9c893cd20593a4081548e4c19cf01d5160656f4e9213b3435e3" Oct 09 15:34:37 crc kubenswrapper[4762]: I1009 15:34:37.437452 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-openstack-lmprt" Oct 09 15:34:37 crc kubenswrapper[4762]: I1009 15:34:37.507237 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/run-os-openstack-openstack-cell1-w2xrt"] Oct 09 15:34:37 crc kubenswrapper[4762]: E1009 15:34:37.507772 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c072459-9381-4dc8-901d-1c530af82240" containerName="ssh-known-hosts-openstack" Oct 09 15:34:37 crc kubenswrapper[4762]: I1009 15:34:37.507793 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c072459-9381-4dc8-901d-1c530af82240" containerName="ssh-known-hosts-openstack" Oct 09 15:34:37 crc kubenswrapper[4762]: I1009 15:34:37.508024 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="5c072459-9381-4dc8-901d-1c530af82240" containerName="ssh-known-hosts-openstack" Oct 09 15:34:37 crc kubenswrapper[4762]: I1009 15:34:37.508902 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-openstack-openstack-cell1-w2xrt" Oct 09 15:34:37 crc kubenswrapper[4762]: I1009 15:34:37.512585 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-whcgt" Oct 09 15:34:37 crc kubenswrapper[4762]: I1009 15:34:37.512719 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Oct 09 15:34:37 crc kubenswrapper[4762]: I1009 15:34:37.512915 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Oct 09 15:34:37 crc kubenswrapper[4762]: I1009 15:34:37.512595 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 09 15:34:37 crc kubenswrapper[4762]: I1009 15:34:37.534284 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-openstack-openstack-cell1-w2xrt"] Oct 09 15:34:37 crc kubenswrapper[4762]: I1009 15:34:37.646189 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g729r\" (UniqueName: \"kubernetes.io/projected/34250f79-9b18-46b1-9d68-07c18c78c268-kube-api-access-g729r\") pod \"run-os-openstack-openstack-cell1-w2xrt\" (UID: \"34250f79-9b18-46b1-9d68-07c18c78c268\") " pod="openstack/run-os-openstack-openstack-cell1-w2xrt" Oct 09 15:34:37 crc kubenswrapper[4762]: I1009 15:34:37.646236 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/34250f79-9b18-46b1-9d68-07c18c78c268-inventory\") pod \"run-os-openstack-openstack-cell1-w2xrt\" (UID: \"34250f79-9b18-46b1-9d68-07c18c78c268\") " pod="openstack/run-os-openstack-openstack-cell1-w2xrt" Oct 09 15:34:37 crc kubenswrapper[4762]: I1009 15:34:37.646356 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/34250f79-9b18-46b1-9d68-07c18c78c268-ceph\") pod \"run-os-openstack-openstack-cell1-w2xrt\" (UID: \"34250f79-9b18-46b1-9d68-07c18c78c268\") " pod="openstack/run-os-openstack-openstack-cell1-w2xrt" Oct 09 15:34:37 crc kubenswrapper[4762]: I1009 15:34:37.646404 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/34250f79-9b18-46b1-9d68-07c18c78c268-ssh-key\") pod \"run-os-openstack-openstack-cell1-w2xrt\" (UID: \"34250f79-9b18-46b1-9d68-07c18c78c268\") " pod="openstack/run-os-openstack-openstack-cell1-w2xrt" Oct 09 15:34:37 crc kubenswrapper[4762]: I1009 15:34:37.747829 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/34250f79-9b18-46b1-9d68-07c18c78c268-ssh-key\") pod \"run-os-openstack-openstack-cell1-w2xrt\" (UID: \"34250f79-9b18-46b1-9d68-07c18c78c268\") " pod="openstack/run-os-openstack-openstack-cell1-w2xrt" Oct 09 15:34:37 crc kubenswrapper[4762]: I1009 15:34:37.748282 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g729r\" (UniqueName: \"kubernetes.io/projected/34250f79-9b18-46b1-9d68-07c18c78c268-kube-api-access-g729r\") pod \"run-os-openstack-openstack-cell1-w2xrt\" (UID: \"34250f79-9b18-46b1-9d68-07c18c78c268\") " pod="openstack/run-os-openstack-openstack-cell1-w2xrt" Oct 09 15:34:37 crc kubenswrapper[4762]: I1009 15:34:37.748376 4762 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/34250f79-9b18-46b1-9d68-07c18c78c268-inventory\") pod \"run-os-openstack-openstack-cell1-w2xrt\" (UID: \"34250f79-9b18-46b1-9d68-07c18c78c268\") " pod="openstack/run-os-openstack-openstack-cell1-w2xrt" Oct 09 15:34:37 crc kubenswrapper[4762]: I1009 15:34:37.748543 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/34250f79-9b18-46b1-9d68-07c18c78c268-ceph\") pod \"run-os-openstack-openstack-cell1-w2xrt\" (UID: \"34250f79-9b18-46b1-9d68-07c18c78c268\") " pod="openstack/run-os-openstack-openstack-cell1-w2xrt" Oct 09 15:34:37 crc kubenswrapper[4762]: I1009 15:34:37.753172 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/34250f79-9b18-46b1-9d68-07c18c78c268-inventory\") pod \"run-os-openstack-openstack-cell1-w2xrt\" (UID: \"34250f79-9b18-46b1-9d68-07c18c78c268\") " pod="openstack/run-os-openstack-openstack-cell1-w2xrt" Oct 09 15:34:37 crc kubenswrapper[4762]: I1009 15:34:37.754089 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/34250f79-9b18-46b1-9d68-07c18c78c268-ssh-key\") pod \"run-os-openstack-openstack-cell1-w2xrt\" (UID: \"34250f79-9b18-46b1-9d68-07c18c78c268\") " pod="openstack/run-os-openstack-openstack-cell1-w2xrt" Oct 09 15:34:37 crc kubenswrapper[4762]: I1009 15:34:37.757876 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/34250f79-9b18-46b1-9d68-07c18c78c268-ceph\") pod \"run-os-openstack-openstack-cell1-w2xrt\" (UID: \"34250f79-9b18-46b1-9d68-07c18c78c268\") " pod="openstack/run-os-openstack-openstack-cell1-w2xrt" Oct 09 15:34:37 crc kubenswrapper[4762]: I1009 15:34:37.776958 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g729r\" (UniqueName: \"kubernetes.io/projected/34250f79-9b18-46b1-9d68-07c18c78c268-kube-api-access-g729r\") pod \"run-os-openstack-openstack-cell1-w2xrt\" (UID: \"34250f79-9b18-46b1-9d68-07c18c78c268\") " pod="openstack/run-os-openstack-openstack-cell1-w2xrt" Oct 09 15:34:37 crc kubenswrapper[4762]: I1009 15:34:37.829568 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-openstack-openstack-cell1-w2xrt" Oct 09 15:34:38 crc kubenswrapper[4762]: I1009 15:34:38.350270 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-openstack-openstack-cell1-w2xrt"] Oct 09 15:34:38 crc kubenswrapper[4762]: I1009 15:34:38.447763 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-openstack-openstack-cell1-w2xrt" event={"ID":"34250f79-9b18-46b1-9d68-07c18c78c268","Type":"ContainerStarted","Data":"a4a175bc6e24a0488a68f1dfca57af3b973a75a9dc21f38982dd2a0807ef2ea1"} Oct 09 15:34:38 crc kubenswrapper[4762]: I1009 15:34:38.966655 4762 scope.go:117] "RemoveContainer" containerID="da3bd6add501ed77419b80f68b04419592f8477497ce7284b22aa3129677ebf6" Oct 09 15:34:38 crc kubenswrapper[4762]: E1009 15:34:38.967185 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:34:39 crc kubenswrapper[4762]: I1009 15:34:39.459031 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-openstack-openstack-cell1-w2xrt" event={"ID":"34250f79-9b18-46b1-9d68-07c18c78c268","Type":"ContainerStarted","Data":"f605dfe9f24cfb62c323e21be46b9b59a1e18f3db92e4675fa017b56e12e97ce"} Oct 09 15:34:39 crc kubenswrapper[4762]: I1009 15:34:39.482176 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/run-os-openstack-openstack-cell1-w2xrt" podStartSLOduration=2.058397476 podStartE2EDuration="2.482150436s" podCreationTimestamp="2025-10-09 15:34:37 +0000 UTC" firstStartedPulling="2025-10-09 15:34:38.35560664 +0000 UTC m=+7754.129397679" lastFinishedPulling="2025-10-09 15:34:38.7793596 +0000 UTC m=+7754.553150639" observedRunningTime="2025-10-09 15:34:39.472221107 +0000 UTC m=+7755.246012166" watchObservedRunningTime="2025-10-09 15:34:39.482150436 +0000 UTC m=+7755.255941475" Oct 09 15:34:47 crc kubenswrapper[4762]: I1009 15:34:47.528330 4762 generic.go:334] "Generic (PLEG): container finished" podID="34250f79-9b18-46b1-9d68-07c18c78c268" containerID="f605dfe9f24cfb62c323e21be46b9b59a1e18f3db92e4675fa017b56e12e97ce" exitCode=0 Oct 09 15:34:47 crc kubenswrapper[4762]: I1009 15:34:47.528406 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-openstack-openstack-cell1-w2xrt" event={"ID":"34250f79-9b18-46b1-9d68-07c18c78c268","Type":"ContainerDied","Data":"f605dfe9f24cfb62c323e21be46b9b59a1e18f3db92e4675fa017b56e12e97ce"} Oct 09 15:34:48 crc kubenswrapper[4762]: I1009 15:34:48.960316 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-openstack-openstack-cell1-w2xrt" Oct 09 15:34:49 crc kubenswrapper[4762]: I1009 15:34:49.085365 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g729r\" (UniqueName: \"kubernetes.io/projected/34250f79-9b18-46b1-9d68-07c18c78c268-kube-api-access-g729r\") pod \"34250f79-9b18-46b1-9d68-07c18c78c268\" (UID: \"34250f79-9b18-46b1-9d68-07c18c78c268\") " Oct 09 15:34:49 crc kubenswrapper[4762]: I1009 15:34:49.085520 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/34250f79-9b18-46b1-9d68-07c18c78c268-ceph\") pod \"34250f79-9b18-46b1-9d68-07c18c78c268\" (UID: \"34250f79-9b18-46b1-9d68-07c18c78c268\") " Oct 09 15:34:49 crc kubenswrapper[4762]: I1009 15:34:49.085545 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/34250f79-9b18-46b1-9d68-07c18c78c268-ssh-key\") pod \"34250f79-9b18-46b1-9d68-07c18c78c268\" (UID: \"34250f79-9b18-46b1-9d68-07c18c78c268\") " Oct 09 15:34:49 crc kubenswrapper[4762]: I1009 15:34:49.085681 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/34250f79-9b18-46b1-9d68-07c18c78c268-inventory\") pod \"34250f79-9b18-46b1-9d68-07c18c78c268\" (UID: \"34250f79-9b18-46b1-9d68-07c18c78c268\") " Oct 09 15:34:49 crc kubenswrapper[4762]: I1009 15:34:49.090911 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/34250f79-9b18-46b1-9d68-07c18c78c268-ceph" (OuterVolumeSpecName: "ceph") pod "34250f79-9b18-46b1-9d68-07c18c78c268" (UID: "34250f79-9b18-46b1-9d68-07c18c78c268"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:34:49 crc kubenswrapper[4762]: I1009 15:34:49.091782 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/34250f79-9b18-46b1-9d68-07c18c78c268-kube-api-access-g729r" (OuterVolumeSpecName: "kube-api-access-g729r") pod "34250f79-9b18-46b1-9d68-07c18c78c268" (UID: "34250f79-9b18-46b1-9d68-07c18c78c268"). InnerVolumeSpecName "kube-api-access-g729r". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 15:34:49 crc kubenswrapper[4762]: I1009 15:34:49.114943 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/34250f79-9b18-46b1-9d68-07c18c78c268-inventory" (OuterVolumeSpecName: "inventory") pod "34250f79-9b18-46b1-9d68-07c18c78c268" (UID: "34250f79-9b18-46b1-9d68-07c18c78c268"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:34:49 crc kubenswrapper[4762]: I1009 15:34:49.125121 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/34250f79-9b18-46b1-9d68-07c18c78c268-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "34250f79-9b18-46b1-9d68-07c18c78c268" (UID: "34250f79-9b18-46b1-9d68-07c18c78c268"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:34:49 crc kubenswrapper[4762]: I1009 15:34:49.187366 4762 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/34250f79-9b18-46b1-9d68-07c18c78c268-inventory\") on node \"crc\" DevicePath \"\"" Oct 09 15:34:49 crc kubenswrapper[4762]: I1009 15:34:49.187400 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g729r\" (UniqueName: \"kubernetes.io/projected/34250f79-9b18-46b1-9d68-07c18c78c268-kube-api-access-g729r\") on node \"crc\" DevicePath \"\"" Oct 09 15:34:49 crc kubenswrapper[4762]: I1009 15:34:49.187411 4762 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/34250f79-9b18-46b1-9d68-07c18c78c268-ceph\") on node \"crc\" DevicePath \"\"" Oct 09 15:34:49 crc kubenswrapper[4762]: I1009 15:34:49.187419 4762 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/34250f79-9b18-46b1-9d68-07c18c78c268-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 09 15:34:49 crc kubenswrapper[4762]: I1009 15:34:49.549457 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-openstack-openstack-cell1-w2xrt" event={"ID":"34250f79-9b18-46b1-9d68-07c18c78c268","Type":"ContainerDied","Data":"a4a175bc6e24a0488a68f1dfca57af3b973a75a9dc21f38982dd2a0807ef2ea1"} Oct 09 15:34:49 crc kubenswrapper[4762]: I1009 15:34:49.549848 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a4a175bc6e24a0488a68f1dfca57af3b973a75a9dc21f38982dd2a0807ef2ea1" Oct 09 15:34:49 crc kubenswrapper[4762]: I1009 15:34:49.549508 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-openstack-openstack-cell1-w2xrt" Oct 09 15:34:49 crc kubenswrapper[4762]: I1009 15:34:49.628447 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/reboot-os-openstack-openstack-cell1-djn8r"] Oct 09 15:34:49 crc kubenswrapper[4762]: E1009 15:34:49.629020 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="34250f79-9b18-46b1-9d68-07c18c78c268" containerName="run-os-openstack-openstack-cell1" Oct 09 15:34:49 crc kubenswrapper[4762]: I1009 15:34:49.629045 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="34250f79-9b18-46b1-9d68-07c18c78c268" containerName="run-os-openstack-openstack-cell1" Oct 09 15:34:49 crc kubenswrapper[4762]: I1009 15:34:49.629399 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="34250f79-9b18-46b1-9d68-07c18c78c268" containerName="run-os-openstack-openstack-cell1" Oct 09 15:34:49 crc kubenswrapper[4762]: I1009 15:34:49.630346 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-openstack-openstack-cell1-djn8r" Oct 09 15:34:49 crc kubenswrapper[4762]: I1009 15:34:49.633145 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 09 15:34:49 crc kubenswrapper[4762]: I1009 15:34:49.633551 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-whcgt" Oct 09 15:34:49 crc kubenswrapper[4762]: I1009 15:34:49.633776 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Oct 09 15:34:49 crc kubenswrapper[4762]: I1009 15:34:49.633854 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Oct 09 15:34:49 crc kubenswrapper[4762]: I1009 15:34:49.641545 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-openstack-openstack-cell1-djn8r"] Oct 09 15:34:49 crc kubenswrapper[4762]: I1009 15:34:49.799767 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/53f7fd79-a0c6-4da7-adce-674efdca56ec-ssh-key\") pod \"reboot-os-openstack-openstack-cell1-djn8r\" (UID: \"53f7fd79-a0c6-4da7-adce-674efdca56ec\") " pod="openstack/reboot-os-openstack-openstack-cell1-djn8r" Oct 09 15:34:49 crc kubenswrapper[4762]: I1009 15:34:49.799952 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/53f7fd79-a0c6-4da7-adce-674efdca56ec-ceph\") pod \"reboot-os-openstack-openstack-cell1-djn8r\" (UID: \"53f7fd79-a0c6-4da7-adce-674efdca56ec\") " pod="openstack/reboot-os-openstack-openstack-cell1-djn8r" Oct 09 15:34:49 crc kubenswrapper[4762]: I1009 15:34:49.800048 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/53f7fd79-a0c6-4da7-adce-674efdca56ec-inventory\") pod \"reboot-os-openstack-openstack-cell1-djn8r\" (UID: \"53f7fd79-a0c6-4da7-adce-674efdca56ec\") " pod="openstack/reboot-os-openstack-openstack-cell1-djn8r" Oct 09 15:34:49 crc kubenswrapper[4762]: I1009 15:34:49.800096 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4xqhn\" (UniqueName: \"kubernetes.io/projected/53f7fd79-a0c6-4da7-adce-674efdca56ec-kube-api-access-4xqhn\") pod \"reboot-os-openstack-openstack-cell1-djn8r\" (UID: \"53f7fd79-a0c6-4da7-adce-674efdca56ec\") " pod="openstack/reboot-os-openstack-openstack-cell1-djn8r" Oct 09 15:34:49 crc kubenswrapper[4762]: I1009 15:34:49.901787 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/53f7fd79-a0c6-4da7-adce-674efdca56ec-ssh-key\") pod \"reboot-os-openstack-openstack-cell1-djn8r\" (UID: \"53f7fd79-a0c6-4da7-adce-674efdca56ec\") " pod="openstack/reboot-os-openstack-openstack-cell1-djn8r" Oct 09 15:34:49 crc kubenswrapper[4762]: I1009 15:34:49.901919 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/53f7fd79-a0c6-4da7-adce-674efdca56ec-ceph\") pod \"reboot-os-openstack-openstack-cell1-djn8r\" (UID: \"53f7fd79-a0c6-4da7-adce-674efdca56ec\") " pod="openstack/reboot-os-openstack-openstack-cell1-djn8r" Oct 09 15:34:49 crc kubenswrapper[4762]: I1009 15:34:49.901995 4762 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/53f7fd79-a0c6-4da7-adce-674efdca56ec-inventory\") pod \"reboot-os-openstack-openstack-cell1-djn8r\" (UID: \"53f7fd79-a0c6-4da7-adce-674efdca56ec\") " pod="openstack/reboot-os-openstack-openstack-cell1-djn8r" Oct 09 15:34:49 crc kubenswrapper[4762]: I1009 15:34:49.902035 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4xqhn\" (UniqueName: \"kubernetes.io/projected/53f7fd79-a0c6-4da7-adce-674efdca56ec-kube-api-access-4xqhn\") pod \"reboot-os-openstack-openstack-cell1-djn8r\" (UID: \"53f7fd79-a0c6-4da7-adce-674efdca56ec\") " pod="openstack/reboot-os-openstack-openstack-cell1-djn8r" Oct 09 15:34:49 crc kubenswrapper[4762]: I1009 15:34:49.908846 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/53f7fd79-a0c6-4da7-adce-674efdca56ec-ssh-key\") pod \"reboot-os-openstack-openstack-cell1-djn8r\" (UID: \"53f7fd79-a0c6-4da7-adce-674efdca56ec\") " pod="openstack/reboot-os-openstack-openstack-cell1-djn8r" Oct 09 15:34:49 crc kubenswrapper[4762]: I1009 15:34:49.909098 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/53f7fd79-a0c6-4da7-adce-674efdca56ec-inventory\") pod \"reboot-os-openstack-openstack-cell1-djn8r\" (UID: \"53f7fd79-a0c6-4da7-adce-674efdca56ec\") " pod="openstack/reboot-os-openstack-openstack-cell1-djn8r" Oct 09 15:34:49 crc kubenswrapper[4762]: I1009 15:34:49.916029 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/53f7fd79-a0c6-4da7-adce-674efdca56ec-ceph\") pod \"reboot-os-openstack-openstack-cell1-djn8r\" (UID: \"53f7fd79-a0c6-4da7-adce-674efdca56ec\") " pod="openstack/reboot-os-openstack-openstack-cell1-djn8r" Oct 09 15:34:49 crc kubenswrapper[4762]: I1009 15:34:49.930545 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4xqhn\" (UniqueName: \"kubernetes.io/projected/53f7fd79-a0c6-4da7-adce-674efdca56ec-kube-api-access-4xqhn\") pod \"reboot-os-openstack-openstack-cell1-djn8r\" (UID: \"53f7fd79-a0c6-4da7-adce-674efdca56ec\") " pod="openstack/reboot-os-openstack-openstack-cell1-djn8r" Oct 09 15:34:49 crc kubenswrapper[4762]: I1009 15:34:49.950919 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-openstack-openstack-cell1-djn8r" Oct 09 15:34:50 crc kubenswrapper[4762]: I1009 15:34:50.590390 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-openstack-openstack-cell1-djn8r"] Oct 09 15:34:50 crc kubenswrapper[4762]: I1009 15:34:50.965928 4762 scope.go:117] "RemoveContainer" containerID="da3bd6add501ed77419b80f68b04419592f8477497ce7284b22aa3129677ebf6" Oct 09 15:34:51 crc kubenswrapper[4762]: I1009 15:34:51.568609 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-openstack-openstack-cell1-djn8r" event={"ID":"53f7fd79-a0c6-4da7-adce-674efdca56ec","Type":"ContainerStarted","Data":"c1c76678f4b8860695a3de9914865dc10963779c3e2892c8f1b009435369b1fc"} Oct 09 15:34:51 crc kubenswrapper[4762]: I1009 15:34:51.571128 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" event={"ID":"366049a3-acf6-488c-9f93-4557528d6d14","Type":"ContainerStarted","Data":"140ab4dce1cda567b18618f3538dba93b49b9c190259abd3ff24f25be678f05d"} Oct 09 15:34:52 crc kubenswrapper[4762]: I1009 15:34:52.583919 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-openstack-openstack-cell1-djn8r" event={"ID":"53f7fd79-a0c6-4da7-adce-674efdca56ec","Type":"ContainerStarted","Data":"7864440f558ef796455389be1b37302689b5095bcad9e768c069be793e2cc8a3"} Oct 09 15:34:52 crc kubenswrapper[4762]: I1009 15:34:52.606987 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/reboot-os-openstack-openstack-cell1-djn8r" podStartSLOduration=2.87371646 podStartE2EDuration="3.606967376s" podCreationTimestamp="2025-10-09 15:34:49 +0000 UTC" firstStartedPulling="2025-10-09 15:34:50.597068168 +0000 UTC m=+7766.370859207" lastFinishedPulling="2025-10-09 15:34:51.330319084 +0000 UTC m=+7767.104110123" observedRunningTime="2025-10-09 15:34:52.60027778 +0000 UTC m=+7768.374068819" watchObservedRunningTime="2025-10-09 15:34:52.606967376 +0000 UTC m=+7768.380758415" Oct 09 15:35:07 crc kubenswrapper[4762]: I1009 15:35:07.739541 4762 generic.go:334] "Generic (PLEG): container finished" podID="53f7fd79-a0c6-4da7-adce-674efdca56ec" containerID="7864440f558ef796455389be1b37302689b5095bcad9e768c069be793e2cc8a3" exitCode=0 Oct 09 15:35:07 crc kubenswrapper[4762]: I1009 15:35:07.739701 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-openstack-openstack-cell1-djn8r" event={"ID":"53f7fd79-a0c6-4da7-adce-674efdca56ec","Type":"ContainerDied","Data":"7864440f558ef796455389be1b37302689b5095bcad9e768c069be793e2cc8a3"} Oct 09 15:35:09 crc kubenswrapper[4762]: I1009 15:35:09.292190 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-openstack-openstack-cell1-djn8r" Oct 09 15:35:09 crc kubenswrapper[4762]: I1009 15:35:09.422631 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4xqhn\" (UniqueName: \"kubernetes.io/projected/53f7fd79-a0c6-4da7-adce-674efdca56ec-kube-api-access-4xqhn\") pod \"53f7fd79-a0c6-4da7-adce-674efdca56ec\" (UID: \"53f7fd79-a0c6-4da7-adce-674efdca56ec\") " Oct 09 15:35:09 crc kubenswrapper[4762]: I1009 15:35:09.422796 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/53f7fd79-a0c6-4da7-adce-674efdca56ec-inventory\") pod \"53f7fd79-a0c6-4da7-adce-674efdca56ec\" (UID: \"53f7fd79-a0c6-4da7-adce-674efdca56ec\") " Oct 09 15:35:09 crc kubenswrapper[4762]: I1009 15:35:09.422857 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/53f7fd79-a0c6-4da7-adce-674efdca56ec-ceph\") pod \"53f7fd79-a0c6-4da7-adce-674efdca56ec\" (UID: \"53f7fd79-a0c6-4da7-adce-674efdca56ec\") " Oct 09 15:35:09 crc kubenswrapper[4762]: I1009 15:35:09.422878 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/53f7fd79-a0c6-4da7-adce-674efdca56ec-ssh-key\") pod \"53f7fd79-a0c6-4da7-adce-674efdca56ec\" (UID: \"53f7fd79-a0c6-4da7-adce-674efdca56ec\") " Oct 09 15:35:09 crc kubenswrapper[4762]: I1009 15:35:09.436501 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/53f7fd79-a0c6-4da7-adce-674efdca56ec-kube-api-access-4xqhn" (OuterVolumeSpecName: "kube-api-access-4xqhn") pod "53f7fd79-a0c6-4da7-adce-674efdca56ec" (UID: "53f7fd79-a0c6-4da7-adce-674efdca56ec"). InnerVolumeSpecName "kube-api-access-4xqhn". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 15:35:09 crc kubenswrapper[4762]: I1009 15:35:09.436594 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/53f7fd79-a0c6-4da7-adce-674efdca56ec-ceph" (OuterVolumeSpecName: "ceph") pod "53f7fd79-a0c6-4da7-adce-674efdca56ec" (UID: "53f7fd79-a0c6-4da7-adce-674efdca56ec"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:35:09 crc kubenswrapper[4762]: I1009 15:35:09.455198 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/53f7fd79-a0c6-4da7-adce-674efdca56ec-inventory" (OuterVolumeSpecName: "inventory") pod "53f7fd79-a0c6-4da7-adce-674efdca56ec" (UID: "53f7fd79-a0c6-4da7-adce-674efdca56ec"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:35:09 crc kubenswrapper[4762]: I1009 15:35:09.457758 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/53f7fd79-a0c6-4da7-adce-674efdca56ec-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "53f7fd79-a0c6-4da7-adce-674efdca56ec" (UID: "53f7fd79-a0c6-4da7-adce-674efdca56ec"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:35:09 crc kubenswrapper[4762]: I1009 15:35:09.527309 4762 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/53f7fd79-a0c6-4da7-adce-674efdca56ec-inventory\") on node \"crc\" DevicePath \"\"" Oct 09 15:35:09 crc kubenswrapper[4762]: I1009 15:35:09.527348 4762 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/53f7fd79-a0c6-4da7-adce-674efdca56ec-ceph\") on node \"crc\" DevicePath \"\"" Oct 09 15:35:09 crc kubenswrapper[4762]: I1009 15:35:09.527361 4762 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/53f7fd79-a0c6-4da7-adce-674efdca56ec-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 09 15:35:09 crc kubenswrapper[4762]: I1009 15:35:09.527374 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4xqhn\" (UniqueName: \"kubernetes.io/projected/53f7fd79-a0c6-4da7-adce-674efdca56ec-kube-api-access-4xqhn\") on node \"crc\" DevicePath \"\"" Oct 09 15:35:09 crc kubenswrapper[4762]: I1009 15:35:09.767660 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-openstack-openstack-cell1-djn8r" event={"ID":"53f7fd79-a0c6-4da7-adce-674efdca56ec","Type":"ContainerDied","Data":"c1c76678f4b8860695a3de9914865dc10963779c3e2892c8f1b009435369b1fc"} Oct 09 15:35:09 crc kubenswrapper[4762]: I1009 15:35:09.768006 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c1c76678f4b8860695a3de9914865dc10963779c3e2892c8f1b009435369b1fc" Oct 09 15:35:09 crc kubenswrapper[4762]: I1009 15:35:09.767760 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-openstack-openstack-cell1-djn8r" Oct 09 15:35:09 crc kubenswrapper[4762]: I1009 15:35:09.860457 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-certs-openstack-openstack-cell1-sbxmm"] Oct 09 15:35:09 crc kubenswrapper[4762]: E1009 15:35:09.860978 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="53f7fd79-a0c6-4da7-adce-674efdca56ec" containerName="reboot-os-openstack-openstack-cell1" Oct 09 15:35:09 crc kubenswrapper[4762]: I1009 15:35:09.861006 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="53f7fd79-a0c6-4da7-adce-674efdca56ec" containerName="reboot-os-openstack-openstack-cell1" Oct 09 15:35:09 crc kubenswrapper[4762]: I1009 15:35:09.861287 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="53f7fd79-a0c6-4da7-adce-674efdca56ec" containerName="reboot-os-openstack-openstack-cell1" Oct 09 15:35:09 crc kubenswrapper[4762]: I1009 15:35:09.862611 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-openstack-openstack-cell1-sbxmm" Oct 09 15:35:09 crc kubenswrapper[4762]: I1009 15:35:09.865666 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Oct 09 15:35:09 crc kubenswrapper[4762]: I1009 15:35:09.867514 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 09 15:35:09 crc kubenswrapper[4762]: I1009 15:35:09.870295 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-whcgt" Oct 09 15:35:09 crc kubenswrapper[4762]: I1009 15:35:09.870704 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Oct 09 15:35:09 crc kubenswrapper[4762]: I1009 15:35:09.879392 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-openstack-openstack-cell1-sbxmm"] Oct 09 15:35:09 crc kubenswrapper[4762]: I1009 15:35:09.936457 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2d7c4de-054b-4396-984f-a0e55657c9d2-neutron-metadata-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-sbxmm\" (UID: \"d2d7c4de-054b-4396-984f-a0e55657c9d2\") " pod="openstack/install-certs-openstack-openstack-cell1-sbxmm" Oct 09 15:35:09 crc kubenswrapper[4762]: I1009 15:35:09.936510 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2d7c4de-054b-4396-984f-a0e55657c9d2-nova-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-sbxmm\" (UID: \"d2d7c4de-054b-4396-984f-a0e55657c9d2\") " pod="openstack/install-certs-openstack-openstack-cell1-sbxmm" Oct 09 15:35:09 crc kubenswrapper[4762]: I1009 15:35:09.936577 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2d7c4de-054b-4396-984f-a0e55657c9d2-neutron-sriov-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-sbxmm\" (UID: \"d2d7c4de-054b-4396-984f-a0e55657c9d2\") " pod="openstack/install-certs-openstack-openstack-cell1-sbxmm" Oct 09 15:35:09 crc kubenswrapper[4762]: I1009 15:35:09.936602 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2d7c4de-054b-4396-984f-a0e55657c9d2-bootstrap-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-sbxmm\" (UID: \"d2d7c4de-054b-4396-984f-a0e55657c9d2\") " pod="openstack/install-certs-openstack-openstack-cell1-sbxmm" Oct 09 15:35:09 crc kubenswrapper[4762]: I1009 15:35:09.936698 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2d7c4de-054b-4396-984f-a0e55657c9d2-libvirt-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-sbxmm\" (UID: \"d2d7c4de-054b-4396-984f-a0e55657c9d2\") " pod="openstack/install-certs-openstack-openstack-cell1-sbxmm" Oct 09 15:35:09 crc kubenswrapper[4762]: I1009 15:35:09.936852 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c8d74\" (UniqueName: 
\"kubernetes.io/projected/d2d7c4de-054b-4396-984f-a0e55657c9d2-kube-api-access-c8d74\") pod \"install-certs-openstack-openstack-cell1-sbxmm\" (UID: \"d2d7c4de-054b-4396-984f-a0e55657c9d2\") " pod="openstack/install-certs-openstack-openstack-cell1-sbxmm" Oct 09 15:35:09 crc kubenswrapper[4762]: I1009 15:35:09.937121 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2d7c4de-054b-4396-984f-a0e55657c9d2-telemetry-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-sbxmm\" (UID: \"d2d7c4de-054b-4396-984f-a0e55657c9d2\") " pod="openstack/install-certs-openstack-openstack-cell1-sbxmm" Oct 09 15:35:09 crc kubenswrapper[4762]: I1009 15:35:09.937245 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2d7c4de-054b-4396-984f-a0e55657c9d2-neutron-dhcp-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-sbxmm\" (UID: \"d2d7c4de-054b-4396-984f-a0e55657c9d2\") " pod="openstack/install-certs-openstack-openstack-cell1-sbxmm" Oct 09 15:35:09 crc kubenswrapper[4762]: I1009 15:35:09.937339 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d2d7c4de-054b-4396-984f-a0e55657c9d2-inventory\") pod \"install-certs-openstack-openstack-cell1-sbxmm\" (UID: \"d2d7c4de-054b-4396-984f-a0e55657c9d2\") " pod="openstack/install-certs-openstack-openstack-cell1-sbxmm" Oct 09 15:35:09 crc kubenswrapper[4762]: I1009 15:35:09.937405 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d2d7c4de-054b-4396-984f-a0e55657c9d2-ssh-key\") pod \"install-certs-openstack-openstack-cell1-sbxmm\" (UID: \"d2d7c4de-054b-4396-984f-a0e55657c9d2\") " pod="openstack/install-certs-openstack-openstack-cell1-sbxmm" Oct 09 15:35:09 crc kubenswrapper[4762]: I1009 15:35:09.937443 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2d7c4de-054b-4396-984f-a0e55657c9d2-ovn-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-sbxmm\" (UID: \"d2d7c4de-054b-4396-984f-a0e55657c9d2\") " pod="openstack/install-certs-openstack-openstack-cell1-sbxmm" Oct 09 15:35:09 crc kubenswrapper[4762]: I1009 15:35:09.937484 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/d2d7c4de-054b-4396-984f-a0e55657c9d2-ceph\") pod \"install-certs-openstack-openstack-cell1-sbxmm\" (UID: \"d2d7c4de-054b-4396-984f-a0e55657c9d2\") " pod="openstack/install-certs-openstack-openstack-cell1-sbxmm" Oct 09 15:35:10 crc kubenswrapper[4762]: I1009 15:35:10.039053 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c8d74\" (UniqueName: \"kubernetes.io/projected/d2d7c4de-054b-4396-984f-a0e55657c9d2-kube-api-access-c8d74\") pod \"install-certs-openstack-openstack-cell1-sbxmm\" (UID: \"d2d7c4de-054b-4396-984f-a0e55657c9d2\") " pod="openstack/install-certs-openstack-openstack-cell1-sbxmm" Oct 09 15:35:10 crc kubenswrapper[4762]: I1009 15:35:10.039190 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/d2d7c4de-054b-4396-984f-a0e55657c9d2-telemetry-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-sbxmm\" (UID: \"d2d7c4de-054b-4396-984f-a0e55657c9d2\") " pod="openstack/install-certs-openstack-openstack-cell1-sbxmm" Oct 09 15:35:10 crc kubenswrapper[4762]: I1009 15:35:10.039265 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2d7c4de-054b-4396-984f-a0e55657c9d2-neutron-dhcp-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-sbxmm\" (UID: \"d2d7c4de-054b-4396-984f-a0e55657c9d2\") " pod="openstack/install-certs-openstack-openstack-cell1-sbxmm" Oct 09 15:35:10 crc kubenswrapper[4762]: I1009 15:35:10.039323 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d2d7c4de-054b-4396-984f-a0e55657c9d2-inventory\") pod \"install-certs-openstack-openstack-cell1-sbxmm\" (UID: \"d2d7c4de-054b-4396-984f-a0e55657c9d2\") " pod="openstack/install-certs-openstack-openstack-cell1-sbxmm" Oct 09 15:35:10 crc kubenswrapper[4762]: I1009 15:35:10.039353 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d2d7c4de-054b-4396-984f-a0e55657c9d2-ssh-key\") pod \"install-certs-openstack-openstack-cell1-sbxmm\" (UID: \"d2d7c4de-054b-4396-984f-a0e55657c9d2\") " pod="openstack/install-certs-openstack-openstack-cell1-sbxmm" Oct 09 15:35:10 crc kubenswrapper[4762]: I1009 15:35:10.039374 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2d7c4de-054b-4396-984f-a0e55657c9d2-ovn-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-sbxmm\" (UID: \"d2d7c4de-054b-4396-984f-a0e55657c9d2\") " pod="openstack/install-certs-openstack-openstack-cell1-sbxmm" Oct 09 15:35:10 crc kubenswrapper[4762]: I1009 15:35:10.039405 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/d2d7c4de-054b-4396-984f-a0e55657c9d2-ceph\") pod \"install-certs-openstack-openstack-cell1-sbxmm\" (UID: \"d2d7c4de-054b-4396-984f-a0e55657c9d2\") " pod="openstack/install-certs-openstack-openstack-cell1-sbxmm" Oct 09 15:35:10 crc kubenswrapper[4762]: I1009 15:35:10.039488 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2d7c4de-054b-4396-984f-a0e55657c9d2-neutron-metadata-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-sbxmm\" (UID: \"d2d7c4de-054b-4396-984f-a0e55657c9d2\") " pod="openstack/install-certs-openstack-openstack-cell1-sbxmm" Oct 09 15:35:10 crc kubenswrapper[4762]: I1009 15:35:10.039530 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2d7c4de-054b-4396-984f-a0e55657c9d2-nova-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-sbxmm\" (UID: \"d2d7c4de-054b-4396-984f-a0e55657c9d2\") " pod="openstack/install-certs-openstack-openstack-cell1-sbxmm" Oct 09 15:35:10 crc kubenswrapper[4762]: I1009 15:35:10.039685 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2d7c4de-054b-4396-984f-a0e55657c9d2-neutron-sriov-combined-ca-bundle\") 
pod \"install-certs-openstack-openstack-cell1-sbxmm\" (UID: \"d2d7c4de-054b-4396-984f-a0e55657c9d2\") " pod="openstack/install-certs-openstack-openstack-cell1-sbxmm" Oct 09 15:35:10 crc kubenswrapper[4762]: I1009 15:35:10.039726 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2d7c4de-054b-4396-984f-a0e55657c9d2-bootstrap-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-sbxmm\" (UID: \"d2d7c4de-054b-4396-984f-a0e55657c9d2\") " pod="openstack/install-certs-openstack-openstack-cell1-sbxmm" Oct 09 15:35:10 crc kubenswrapper[4762]: I1009 15:35:10.039746 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2d7c4de-054b-4396-984f-a0e55657c9d2-libvirt-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-sbxmm\" (UID: \"d2d7c4de-054b-4396-984f-a0e55657c9d2\") " pod="openstack/install-certs-openstack-openstack-cell1-sbxmm" Oct 09 15:35:10 crc kubenswrapper[4762]: I1009 15:35:10.044341 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d2d7c4de-054b-4396-984f-a0e55657c9d2-inventory\") pod \"install-certs-openstack-openstack-cell1-sbxmm\" (UID: \"d2d7c4de-054b-4396-984f-a0e55657c9d2\") " pod="openstack/install-certs-openstack-openstack-cell1-sbxmm" Oct 09 15:35:10 crc kubenswrapper[4762]: I1009 15:35:10.044408 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2d7c4de-054b-4396-984f-a0e55657c9d2-nova-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-sbxmm\" (UID: \"d2d7c4de-054b-4396-984f-a0e55657c9d2\") " pod="openstack/install-certs-openstack-openstack-cell1-sbxmm" Oct 09 15:35:10 crc kubenswrapper[4762]: I1009 15:35:10.045509 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d2d7c4de-054b-4396-984f-a0e55657c9d2-ssh-key\") pod \"install-certs-openstack-openstack-cell1-sbxmm\" (UID: \"d2d7c4de-054b-4396-984f-a0e55657c9d2\") " pod="openstack/install-certs-openstack-openstack-cell1-sbxmm" Oct 09 15:35:10 crc kubenswrapper[4762]: I1009 15:35:10.045687 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2d7c4de-054b-4396-984f-a0e55657c9d2-neutron-metadata-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-sbxmm\" (UID: \"d2d7c4de-054b-4396-984f-a0e55657c9d2\") " pod="openstack/install-certs-openstack-openstack-cell1-sbxmm" Oct 09 15:35:10 crc kubenswrapper[4762]: I1009 15:35:10.045753 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2d7c4de-054b-4396-984f-a0e55657c9d2-telemetry-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-sbxmm\" (UID: \"d2d7c4de-054b-4396-984f-a0e55657c9d2\") " pod="openstack/install-certs-openstack-openstack-cell1-sbxmm" Oct 09 15:35:10 crc kubenswrapper[4762]: I1009 15:35:10.047092 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/d2d7c4de-054b-4396-984f-a0e55657c9d2-ceph\") pod \"install-certs-openstack-openstack-cell1-sbxmm\" (UID: \"d2d7c4de-054b-4396-984f-a0e55657c9d2\") " 
pod="openstack/install-certs-openstack-openstack-cell1-sbxmm" Oct 09 15:35:10 crc kubenswrapper[4762]: I1009 15:35:10.047505 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2d7c4de-054b-4396-984f-a0e55657c9d2-ovn-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-sbxmm\" (UID: \"d2d7c4de-054b-4396-984f-a0e55657c9d2\") " pod="openstack/install-certs-openstack-openstack-cell1-sbxmm" Oct 09 15:35:10 crc kubenswrapper[4762]: I1009 15:35:10.053118 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2d7c4de-054b-4396-984f-a0e55657c9d2-bootstrap-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-sbxmm\" (UID: \"d2d7c4de-054b-4396-984f-a0e55657c9d2\") " pod="openstack/install-certs-openstack-openstack-cell1-sbxmm" Oct 09 15:35:10 crc kubenswrapper[4762]: I1009 15:35:10.055394 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2d7c4de-054b-4396-984f-a0e55657c9d2-libvirt-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-sbxmm\" (UID: \"d2d7c4de-054b-4396-984f-a0e55657c9d2\") " pod="openstack/install-certs-openstack-openstack-cell1-sbxmm" Oct 09 15:35:10 crc kubenswrapper[4762]: I1009 15:35:10.055453 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2d7c4de-054b-4396-984f-a0e55657c9d2-neutron-sriov-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-sbxmm\" (UID: \"d2d7c4de-054b-4396-984f-a0e55657c9d2\") " pod="openstack/install-certs-openstack-openstack-cell1-sbxmm" Oct 09 15:35:10 crc kubenswrapper[4762]: I1009 15:35:10.056248 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2d7c4de-054b-4396-984f-a0e55657c9d2-neutron-dhcp-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-sbxmm\" (UID: \"d2d7c4de-054b-4396-984f-a0e55657c9d2\") " pod="openstack/install-certs-openstack-openstack-cell1-sbxmm" Oct 09 15:35:10 crc kubenswrapper[4762]: I1009 15:35:10.072248 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c8d74\" (UniqueName: \"kubernetes.io/projected/d2d7c4de-054b-4396-984f-a0e55657c9d2-kube-api-access-c8d74\") pod \"install-certs-openstack-openstack-cell1-sbxmm\" (UID: \"d2d7c4de-054b-4396-984f-a0e55657c9d2\") " pod="openstack/install-certs-openstack-openstack-cell1-sbxmm" Oct 09 15:35:10 crc kubenswrapper[4762]: I1009 15:35:10.190621 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-openstack-openstack-cell1-sbxmm" Oct 09 15:35:10 crc kubenswrapper[4762]: I1009 15:35:10.744851 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-openstack-openstack-cell1-sbxmm"] Oct 09 15:35:10 crc kubenswrapper[4762]: W1009 15:35:10.767855 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd2d7c4de_054b_4396_984f_a0e55657c9d2.slice/crio-74aee8c814fa624d4fe7a219b5f5275eb796d205e5264ef0f3687e72edebe3bd WatchSource:0}: Error finding container 74aee8c814fa624d4fe7a219b5f5275eb796d205e5264ef0f3687e72edebe3bd: Status 404 returned error can't find the container with id 74aee8c814fa624d4fe7a219b5f5275eb796d205e5264ef0f3687e72edebe3bd Oct 09 15:35:10 crc kubenswrapper[4762]: I1009 15:35:10.778717 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-openstack-openstack-cell1-sbxmm" event={"ID":"d2d7c4de-054b-4396-984f-a0e55657c9d2","Type":"ContainerStarted","Data":"74aee8c814fa624d4fe7a219b5f5275eb796d205e5264ef0f3687e72edebe3bd"} Oct 09 15:35:13 crc kubenswrapper[4762]: I1009 15:35:13.810388 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-openstack-openstack-cell1-sbxmm" event={"ID":"d2d7c4de-054b-4396-984f-a0e55657c9d2","Type":"ContainerStarted","Data":"80e11beb7020e0824fc6975548ded507d712b42e8bcdcecbdffc6ee698667beb"} Oct 09 15:35:13 crc kubenswrapper[4762]: I1009 15:35:13.830103 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-certs-openstack-openstack-cell1-sbxmm" podStartSLOduration=3.256580993 podStartE2EDuration="4.830082853s" podCreationTimestamp="2025-10-09 15:35:09 +0000 UTC" firstStartedPulling="2025-10-09 15:35:10.769638125 +0000 UTC m=+7786.543429164" lastFinishedPulling="2025-10-09 15:35:12.343139985 +0000 UTC m=+7788.116931024" observedRunningTime="2025-10-09 15:35:13.825877103 +0000 UTC m=+7789.599668142" watchObservedRunningTime="2025-10-09 15:35:13.830082853 +0000 UTC m=+7789.603873892" Oct 09 15:35:31 crc kubenswrapper[4762]: I1009 15:35:31.977986 4762 generic.go:334] "Generic (PLEG): container finished" podID="d2d7c4de-054b-4396-984f-a0e55657c9d2" containerID="80e11beb7020e0824fc6975548ded507d712b42e8bcdcecbdffc6ee698667beb" exitCode=0 Oct 09 15:35:31 crc kubenswrapper[4762]: I1009 15:35:31.978049 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-openstack-openstack-cell1-sbxmm" event={"ID":"d2d7c4de-054b-4396-984f-a0e55657c9d2","Type":"ContainerDied","Data":"80e11beb7020e0824fc6975548ded507d712b42e8bcdcecbdffc6ee698667beb"} Oct 09 15:35:33 crc kubenswrapper[4762]: I1009 15:35:33.443151 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-openstack-openstack-cell1-sbxmm" Oct 09 15:35:33 crc kubenswrapper[4762]: I1009 15:35:33.561771 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/d2d7c4de-054b-4396-984f-a0e55657c9d2-ceph\") pod \"d2d7c4de-054b-4396-984f-a0e55657c9d2\" (UID: \"d2d7c4de-054b-4396-984f-a0e55657c9d2\") " Oct 09 15:35:33 crc kubenswrapper[4762]: I1009 15:35:33.561876 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d2d7c4de-054b-4396-984f-a0e55657c9d2-inventory\") pod \"d2d7c4de-054b-4396-984f-a0e55657c9d2\" (UID: \"d2d7c4de-054b-4396-984f-a0e55657c9d2\") " Oct 09 15:35:33 crc kubenswrapper[4762]: I1009 15:35:33.561929 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2d7c4de-054b-4396-984f-a0e55657c9d2-libvirt-combined-ca-bundle\") pod \"d2d7c4de-054b-4396-984f-a0e55657c9d2\" (UID: \"d2d7c4de-054b-4396-984f-a0e55657c9d2\") " Oct 09 15:35:33 crc kubenswrapper[4762]: I1009 15:35:33.561980 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d2d7c4de-054b-4396-984f-a0e55657c9d2-ssh-key\") pod \"d2d7c4de-054b-4396-984f-a0e55657c9d2\" (UID: \"d2d7c4de-054b-4396-984f-a0e55657c9d2\") " Oct 09 15:35:33 crc kubenswrapper[4762]: I1009 15:35:33.562042 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2d7c4de-054b-4396-984f-a0e55657c9d2-telemetry-combined-ca-bundle\") pod \"d2d7c4de-054b-4396-984f-a0e55657c9d2\" (UID: \"d2d7c4de-054b-4396-984f-a0e55657c9d2\") " Oct 09 15:35:33 crc kubenswrapper[4762]: I1009 15:35:33.562118 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2d7c4de-054b-4396-984f-a0e55657c9d2-bootstrap-combined-ca-bundle\") pod \"d2d7c4de-054b-4396-984f-a0e55657c9d2\" (UID: \"d2d7c4de-054b-4396-984f-a0e55657c9d2\") " Oct 09 15:35:33 crc kubenswrapper[4762]: I1009 15:35:33.562174 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2d7c4de-054b-4396-984f-a0e55657c9d2-neutron-metadata-combined-ca-bundle\") pod \"d2d7c4de-054b-4396-984f-a0e55657c9d2\" (UID: \"d2d7c4de-054b-4396-984f-a0e55657c9d2\") " Oct 09 15:35:33 crc kubenswrapper[4762]: I1009 15:35:33.562192 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2d7c4de-054b-4396-984f-a0e55657c9d2-ovn-combined-ca-bundle\") pod \"d2d7c4de-054b-4396-984f-a0e55657c9d2\" (UID: \"d2d7c4de-054b-4396-984f-a0e55657c9d2\") " Oct 09 15:35:33 crc kubenswrapper[4762]: I1009 15:35:33.562214 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c8d74\" (UniqueName: \"kubernetes.io/projected/d2d7c4de-054b-4396-984f-a0e55657c9d2-kube-api-access-c8d74\") pod \"d2d7c4de-054b-4396-984f-a0e55657c9d2\" (UID: \"d2d7c4de-054b-4396-984f-a0e55657c9d2\") " Oct 09 15:35:33 crc kubenswrapper[4762]: I1009 15:35:33.562255 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/d2d7c4de-054b-4396-984f-a0e55657c9d2-neutron-dhcp-combined-ca-bundle\") pod \"d2d7c4de-054b-4396-984f-a0e55657c9d2\" (UID: \"d2d7c4de-054b-4396-984f-a0e55657c9d2\") " Oct 09 15:35:33 crc kubenswrapper[4762]: I1009 15:35:33.562301 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2d7c4de-054b-4396-984f-a0e55657c9d2-nova-combined-ca-bundle\") pod \"d2d7c4de-054b-4396-984f-a0e55657c9d2\" (UID: \"d2d7c4de-054b-4396-984f-a0e55657c9d2\") " Oct 09 15:35:33 crc kubenswrapper[4762]: I1009 15:35:33.562319 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2d7c4de-054b-4396-984f-a0e55657c9d2-neutron-sriov-combined-ca-bundle\") pod \"d2d7c4de-054b-4396-984f-a0e55657c9d2\" (UID: \"d2d7c4de-054b-4396-984f-a0e55657c9d2\") " Oct 09 15:35:33 crc kubenswrapper[4762]: I1009 15:35:33.568295 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d2d7c4de-054b-4396-984f-a0e55657c9d2-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "d2d7c4de-054b-4396-984f-a0e55657c9d2" (UID: "d2d7c4de-054b-4396-984f-a0e55657c9d2"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:35:33 crc kubenswrapper[4762]: I1009 15:35:33.568338 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d2d7c4de-054b-4396-984f-a0e55657c9d2-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "d2d7c4de-054b-4396-984f-a0e55657c9d2" (UID: "d2d7c4de-054b-4396-984f-a0e55657c9d2"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:35:33 crc kubenswrapper[4762]: I1009 15:35:33.568896 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d2d7c4de-054b-4396-984f-a0e55657c9d2-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "d2d7c4de-054b-4396-984f-a0e55657c9d2" (UID: "d2d7c4de-054b-4396-984f-a0e55657c9d2"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:35:33 crc kubenswrapper[4762]: I1009 15:35:33.569175 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d2d7c4de-054b-4396-984f-a0e55657c9d2-kube-api-access-c8d74" (OuterVolumeSpecName: "kube-api-access-c8d74") pod "d2d7c4de-054b-4396-984f-a0e55657c9d2" (UID: "d2d7c4de-054b-4396-984f-a0e55657c9d2"). InnerVolumeSpecName "kube-api-access-c8d74". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 15:35:33 crc kubenswrapper[4762]: I1009 15:35:33.569400 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d2d7c4de-054b-4396-984f-a0e55657c9d2-ceph" (OuterVolumeSpecName: "ceph") pod "d2d7c4de-054b-4396-984f-a0e55657c9d2" (UID: "d2d7c4de-054b-4396-984f-a0e55657c9d2"). InnerVolumeSpecName "ceph". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:35:33 crc kubenswrapper[4762]: I1009 15:35:33.570781 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d2d7c4de-054b-4396-984f-a0e55657c9d2-neutron-sriov-combined-ca-bundle" (OuterVolumeSpecName: "neutron-sriov-combined-ca-bundle") pod "d2d7c4de-054b-4396-984f-a0e55657c9d2" (UID: "d2d7c4de-054b-4396-984f-a0e55657c9d2"). InnerVolumeSpecName "neutron-sriov-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:35:33 crc kubenswrapper[4762]: I1009 15:35:33.574765 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d2d7c4de-054b-4396-984f-a0e55657c9d2-neutron-dhcp-combined-ca-bundle" (OuterVolumeSpecName: "neutron-dhcp-combined-ca-bundle") pod "d2d7c4de-054b-4396-984f-a0e55657c9d2" (UID: "d2d7c4de-054b-4396-984f-a0e55657c9d2"). InnerVolumeSpecName "neutron-dhcp-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:35:33 crc kubenswrapper[4762]: I1009 15:35:33.582903 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d2d7c4de-054b-4396-984f-a0e55657c9d2-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "d2d7c4de-054b-4396-984f-a0e55657c9d2" (UID: "d2d7c4de-054b-4396-984f-a0e55657c9d2"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:35:33 crc kubenswrapper[4762]: I1009 15:35:33.585769 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d2d7c4de-054b-4396-984f-a0e55657c9d2-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "d2d7c4de-054b-4396-984f-a0e55657c9d2" (UID: "d2d7c4de-054b-4396-984f-a0e55657c9d2"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:35:33 crc kubenswrapper[4762]: I1009 15:35:33.585865 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d2d7c4de-054b-4396-984f-a0e55657c9d2-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "d2d7c4de-054b-4396-984f-a0e55657c9d2" (UID: "d2d7c4de-054b-4396-984f-a0e55657c9d2"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:35:33 crc kubenswrapper[4762]: I1009 15:35:33.595352 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d2d7c4de-054b-4396-984f-a0e55657c9d2-inventory" (OuterVolumeSpecName: "inventory") pod "d2d7c4de-054b-4396-984f-a0e55657c9d2" (UID: "d2d7c4de-054b-4396-984f-a0e55657c9d2"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:35:33 crc kubenswrapper[4762]: I1009 15:35:33.595710 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d2d7c4de-054b-4396-984f-a0e55657c9d2-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "d2d7c4de-054b-4396-984f-a0e55657c9d2" (UID: "d2d7c4de-054b-4396-984f-a0e55657c9d2"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:35:33 crc kubenswrapper[4762]: I1009 15:35:33.665539 4762 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d2d7c4de-054b-4396-984f-a0e55657c9d2-inventory\") on node \"crc\" DevicePath \"\"" Oct 09 15:35:33 crc kubenswrapper[4762]: I1009 15:35:33.665620 4762 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2d7c4de-054b-4396-984f-a0e55657c9d2-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 15:35:33 crc kubenswrapper[4762]: I1009 15:35:33.665654 4762 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d2d7c4de-054b-4396-984f-a0e55657c9d2-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 09 15:35:33 crc kubenswrapper[4762]: I1009 15:35:33.665666 4762 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2d7c4de-054b-4396-984f-a0e55657c9d2-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 15:35:33 crc kubenswrapper[4762]: I1009 15:35:33.665678 4762 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2d7c4de-054b-4396-984f-a0e55657c9d2-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 15:35:33 crc kubenswrapper[4762]: I1009 15:35:33.665689 4762 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2d7c4de-054b-4396-984f-a0e55657c9d2-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 15:35:33 crc kubenswrapper[4762]: I1009 15:35:33.665702 4762 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2d7c4de-054b-4396-984f-a0e55657c9d2-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 15:35:33 crc kubenswrapper[4762]: I1009 15:35:33.665714 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c8d74\" (UniqueName: \"kubernetes.io/projected/d2d7c4de-054b-4396-984f-a0e55657c9d2-kube-api-access-c8d74\") on node \"crc\" DevicePath \"\"" Oct 09 15:35:33 crc kubenswrapper[4762]: I1009 15:35:33.665724 4762 reconciler_common.go:293] "Volume detached for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2d7c4de-054b-4396-984f-a0e55657c9d2-neutron-dhcp-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 15:35:33 crc kubenswrapper[4762]: I1009 15:35:33.665738 4762 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2d7c4de-054b-4396-984f-a0e55657c9d2-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 15:35:33 crc kubenswrapper[4762]: I1009 15:35:33.665751 4762 reconciler_common.go:293] "Volume detached for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2d7c4de-054b-4396-984f-a0e55657c9d2-neutron-sriov-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 15:35:33 crc kubenswrapper[4762]: I1009 15:35:33.665763 4762 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/d2d7c4de-054b-4396-984f-a0e55657c9d2-ceph\") on node \"crc\" DevicePath \"\"" Oct 09 15:35:34 crc kubenswrapper[4762]: I1009 15:35:34.000690 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/install-certs-openstack-openstack-cell1-sbxmm" event={"ID":"d2d7c4de-054b-4396-984f-a0e55657c9d2","Type":"ContainerDied","Data":"74aee8c814fa624d4fe7a219b5f5275eb796d205e5264ef0f3687e72edebe3bd"} Oct 09 15:35:34 crc kubenswrapper[4762]: I1009 15:35:34.000749 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="74aee8c814fa624d4fe7a219b5f5275eb796d205e5264ef0f3687e72edebe3bd" Oct 09 15:35:34 crc kubenswrapper[4762]: I1009 15:35:34.000817 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-openstack-openstack-cell1-sbxmm" Oct 09 15:35:34 crc kubenswrapper[4762]: I1009 15:35:34.081818 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceph-client-openstack-openstack-cell1-gp9tb"] Oct 09 15:35:34 crc kubenswrapper[4762]: E1009 15:35:34.082331 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d2d7c4de-054b-4396-984f-a0e55657c9d2" containerName="install-certs-openstack-openstack-cell1" Oct 09 15:35:34 crc kubenswrapper[4762]: I1009 15:35:34.082355 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="d2d7c4de-054b-4396-984f-a0e55657c9d2" containerName="install-certs-openstack-openstack-cell1" Oct 09 15:35:34 crc kubenswrapper[4762]: I1009 15:35:34.082574 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="d2d7c4de-054b-4396-984f-a0e55657c9d2" containerName="install-certs-openstack-openstack-cell1" Oct 09 15:35:34 crc kubenswrapper[4762]: I1009 15:35:34.083527 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceph-client-openstack-openstack-cell1-gp9tb" Oct 09 15:35:34 crc kubenswrapper[4762]: I1009 15:35:34.086995 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 09 15:35:34 crc kubenswrapper[4762]: I1009 15:35:34.087077 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Oct 09 15:35:34 crc kubenswrapper[4762]: I1009 15:35:34.087229 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-whcgt" Oct 09 15:35:34 crc kubenswrapper[4762]: I1009 15:35:34.089724 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Oct 09 15:35:34 crc kubenswrapper[4762]: I1009 15:35:34.094702 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceph-client-openstack-openstack-cell1-gp9tb"] Oct 09 15:35:34 crc kubenswrapper[4762]: I1009 15:35:34.176709 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9aea25b2-f748-46ba-9f78-821c7d1a5451-ssh-key\") pod \"ceph-client-openstack-openstack-cell1-gp9tb\" (UID: \"9aea25b2-f748-46ba-9f78-821c7d1a5451\") " pod="openstack/ceph-client-openstack-openstack-cell1-gp9tb" Oct 09 15:35:34 crc kubenswrapper[4762]: I1009 15:35:34.176811 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/9aea25b2-f748-46ba-9f78-821c7d1a5451-ceph\") pod \"ceph-client-openstack-openstack-cell1-gp9tb\" (UID: \"9aea25b2-f748-46ba-9f78-821c7d1a5451\") " pod="openstack/ceph-client-openstack-openstack-cell1-gp9tb" Oct 09 15:35:34 crc kubenswrapper[4762]: I1009 15:35:34.177095 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"kube-api-access-mwx9q\" (UniqueName: \"kubernetes.io/projected/9aea25b2-f748-46ba-9f78-821c7d1a5451-kube-api-access-mwx9q\") pod \"ceph-client-openstack-openstack-cell1-gp9tb\" (UID: \"9aea25b2-f748-46ba-9f78-821c7d1a5451\") " pod="openstack/ceph-client-openstack-openstack-cell1-gp9tb" Oct 09 15:35:34 crc kubenswrapper[4762]: I1009 15:35:34.177262 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9aea25b2-f748-46ba-9f78-821c7d1a5451-inventory\") pod \"ceph-client-openstack-openstack-cell1-gp9tb\" (UID: \"9aea25b2-f748-46ba-9f78-821c7d1a5451\") " pod="openstack/ceph-client-openstack-openstack-cell1-gp9tb" Oct 09 15:35:34 crc kubenswrapper[4762]: I1009 15:35:34.279591 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9aea25b2-f748-46ba-9f78-821c7d1a5451-ssh-key\") pod \"ceph-client-openstack-openstack-cell1-gp9tb\" (UID: \"9aea25b2-f748-46ba-9f78-821c7d1a5451\") " pod="openstack/ceph-client-openstack-openstack-cell1-gp9tb" Oct 09 15:35:34 crc kubenswrapper[4762]: I1009 15:35:34.279707 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/9aea25b2-f748-46ba-9f78-821c7d1a5451-ceph\") pod \"ceph-client-openstack-openstack-cell1-gp9tb\" (UID: \"9aea25b2-f748-46ba-9f78-821c7d1a5451\") " pod="openstack/ceph-client-openstack-openstack-cell1-gp9tb" Oct 09 15:35:34 crc kubenswrapper[4762]: I1009 15:35:34.279752 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mwx9q\" (UniqueName: \"kubernetes.io/projected/9aea25b2-f748-46ba-9f78-821c7d1a5451-kube-api-access-mwx9q\") pod \"ceph-client-openstack-openstack-cell1-gp9tb\" (UID: \"9aea25b2-f748-46ba-9f78-821c7d1a5451\") " pod="openstack/ceph-client-openstack-openstack-cell1-gp9tb" Oct 09 15:35:34 crc kubenswrapper[4762]: I1009 15:35:34.279785 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9aea25b2-f748-46ba-9f78-821c7d1a5451-inventory\") pod \"ceph-client-openstack-openstack-cell1-gp9tb\" (UID: \"9aea25b2-f748-46ba-9f78-821c7d1a5451\") " pod="openstack/ceph-client-openstack-openstack-cell1-gp9tb" Oct 09 15:35:34 crc kubenswrapper[4762]: I1009 15:35:34.284305 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/9aea25b2-f748-46ba-9f78-821c7d1a5451-ceph\") pod \"ceph-client-openstack-openstack-cell1-gp9tb\" (UID: \"9aea25b2-f748-46ba-9f78-821c7d1a5451\") " pod="openstack/ceph-client-openstack-openstack-cell1-gp9tb" Oct 09 15:35:34 crc kubenswrapper[4762]: I1009 15:35:34.284729 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9aea25b2-f748-46ba-9f78-821c7d1a5451-ssh-key\") pod \"ceph-client-openstack-openstack-cell1-gp9tb\" (UID: \"9aea25b2-f748-46ba-9f78-821c7d1a5451\") " pod="openstack/ceph-client-openstack-openstack-cell1-gp9tb" Oct 09 15:35:34 crc kubenswrapper[4762]: I1009 15:35:34.286622 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9aea25b2-f748-46ba-9f78-821c7d1a5451-inventory\") pod \"ceph-client-openstack-openstack-cell1-gp9tb\" (UID: \"9aea25b2-f748-46ba-9f78-821c7d1a5451\") " pod="openstack/ceph-client-openstack-openstack-cell1-gp9tb" Oct 09 15:35:34 
crc kubenswrapper[4762]: I1009 15:35:34.294860 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mwx9q\" (UniqueName: \"kubernetes.io/projected/9aea25b2-f748-46ba-9f78-821c7d1a5451-kube-api-access-mwx9q\") pod \"ceph-client-openstack-openstack-cell1-gp9tb\" (UID: \"9aea25b2-f748-46ba-9f78-821c7d1a5451\") " pod="openstack/ceph-client-openstack-openstack-cell1-gp9tb" Oct 09 15:35:34 crc kubenswrapper[4762]: I1009 15:35:34.402260 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceph-client-openstack-openstack-cell1-gp9tb" Oct 09 15:35:34 crc kubenswrapper[4762]: I1009 15:35:34.938017 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceph-client-openstack-openstack-cell1-gp9tb"] Oct 09 15:35:35 crc kubenswrapper[4762]: I1009 15:35:35.024542 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-client-openstack-openstack-cell1-gp9tb" event={"ID":"9aea25b2-f748-46ba-9f78-821c7d1a5451","Type":"ContainerStarted","Data":"5c7ea8a284df3953560f9dfb96f9d2fd3d414fa19690d668f6fcfbc9ead34182"} Oct 09 15:35:36 crc kubenswrapper[4762]: I1009 15:35:36.037189 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-client-openstack-openstack-cell1-gp9tb" event={"ID":"9aea25b2-f748-46ba-9f78-821c7d1a5451","Type":"ContainerStarted","Data":"c8f2db5cec188511da6394f0fb67ada16784eb72ee705d7a154b0ca092c86ac8"} Oct 09 15:35:36 crc kubenswrapper[4762]: I1009 15:35:36.060478 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceph-client-openstack-openstack-cell1-gp9tb" podStartSLOduration=1.497288249 podStartE2EDuration="2.060461423s" podCreationTimestamp="2025-10-09 15:35:34 +0000 UTC" firstStartedPulling="2025-10-09 15:35:34.949846447 +0000 UTC m=+7810.723637496" lastFinishedPulling="2025-10-09 15:35:35.513019641 +0000 UTC m=+7811.286810670" observedRunningTime="2025-10-09 15:35:36.05654171 +0000 UTC m=+7811.830332749" watchObservedRunningTime="2025-10-09 15:35:36.060461423 +0000 UTC m=+7811.834252462" Oct 09 15:35:40 crc kubenswrapper[4762]: E1009 15:35:40.975899 4762 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9aea25b2_f748_46ba_9f78_821c7d1a5451.slice/crio-conmon-c8f2db5cec188511da6394f0fb67ada16784eb72ee705d7a154b0ca092c86ac8.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9aea25b2_f748_46ba_9f78_821c7d1a5451.slice/crio-c8f2db5cec188511da6394f0fb67ada16784eb72ee705d7a154b0ca092c86ac8.scope\": RecentStats: unable to find data in memory cache]" Oct 09 15:35:41 crc kubenswrapper[4762]: I1009 15:35:41.086504 4762 generic.go:334] "Generic (PLEG): container finished" podID="9aea25b2-f748-46ba-9f78-821c7d1a5451" containerID="c8f2db5cec188511da6394f0fb67ada16784eb72ee705d7a154b0ca092c86ac8" exitCode=0 Oct 09 15:35:41 crc kubenswrapper[4762]: I1009 15:35:41.086545 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-client-openstack-openstack-cell1-gp9tb" event={"ID":"9aea25b2-f748-46ba-9f78-821c7d1a5451","Type":"ContainerDied","Data":"c8f2db5cec188511da6394f0fb67ada16784eb72ee705d7a154b0ca092c86ac8"} Oct 09 15:35:42 crc kubenswrapper[4762]: I1009 15:35:42.558945 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceph-client-openstack-openstack-cell1-gp9tb" Oct 09 15:35:42 crc kubenswrapper[4762]: I1009 15:35:42.656408 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9aea25b2-f748-46ba-9f78-821c7d1a5451-ssh-key\") pod \"9aea25b2-f748-46ba-9f78-821c7d1a5451\" (UID: \"9aea25b2-f748-46ba-9f78-821c7d1a5451\") " Oct 09 15:35:42 crc kubenswrapper[4762]: I1009 15:35:42.656522 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9aea25b2-f748-46ba-9f78-821c7d1a5451-inventory\") pod \"9aea25b2-f748-46ba-9f78-821c7d1a5451\" (UID: \"9aea25b2-f748-46ba-9f78-821c7d1a5451\") " Oct 09 15:35:42 crc kubenswrapper[4762]: I1009 15:35:42.656607 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/9aea25b2-f748-46ba-9f78-821c7d1a5451-ceph\") pod \"9aea25b2-f748-46ba-9f78-821c7d1a5451\" (UID: \"9aea25b2-f748-46ba-9f78-821c7d1a5451\") " Oct 09 15:35:42 crc kubenswrapper[4762]: I1009 15:35:42.656875 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mwx9q\" (UniqueName: \"kubernetes.io/projected/9aea25b2-f748-46ba-9f78-821c7d1a5451-kube-api-access-mwx9q\") pod \"9aea25b2-f748-46ba-9f78-821c7d1a5451\" (UID: \"9aea25b2-f748-46ba-9f78-821c7d1a5451\") " Oct 09 15:35:42 crc kubenswrapper[4762]: I1009 15:35:42.666024 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9aea25b2-f748-46ba-9f78-821c7d1a5451-kube-api-access-mwx9q" (OuterVolumeSpecName: "kube-api-access-mwx9q") pod "9aea25b2-f748-46ba-9f78-821c7d1a5451" (UID: "9aea25b2-f748-46ba-9f78-821c7d1a5451"). InnerVolumeSpecName "kube-api-access-mwx9q". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 15:35:42 crc kubenswrapper[4762]: I1009 15:35:42.666426 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9aea25b2-f748-46ba-9f78-821c7d1a5451-ceph" (OuterVolumeSpecName: "ceph") pod "9aea25b2-f748-46ba-9f78-821c7d1a5451" (UID: "9aea25b2-f748-46ba-9f78-821c7d1a5451"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:35:42 crc kubenswrapper[4762]: I1009 15:35:42.695226 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9aea25b2-f748-46ba-9f78-821c7d1a5451-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "9aea25b2-f748-46ba-9f78-821c7d1a5451" (UID: "9aea25b2-f748-46ba-9f78-821c7d1a5451"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:35:42 crc kubenswrapper[4762]: I1009 15:35:42.696600 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9aea25b2-f748-46ba-9f78-821c7d1a5451-inventory" (OuterVolumeSpecName: "inventory") pod "9aea25b2-f748-46ba-9f78-821c7d1a5451" (UID: "9aea25b2-f748-46ba-9f78-821c7d1a5451"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:35:42 crc kubenswrapper[4762]: I1009 15:35:42.758949 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mwx9q\" (UniqueName: \"kubernetes.io/projected/9aea25b2-f748-46ba-9f78-821c7d1a5451-kube-api-access-mwx9q\") on node \"crc\" DevicePath \"\"" Oct 09 15:35:42 crc kubenswrapper[4762]: I1009 15:35:42.758983 4762 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9aea25b2-f748-46ba-9f78-821c7d1a5451-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 09 15:35:42 crc kubenswrapper[4762]: I1009 15:35:42.758994 4762 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9aea25b2-f748-46ba-9f78-821c7d1a5451-inventory\") on node \"crc\" DevicePath \"\"" Oct 09 15:35:42 crc kubenswrapper[4762]: I1009 15:35:42.759003 4762 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/9aea25b2-f748-46ba-9f78-821c7d1a5451-ceph\") on node \"crc\" DevicePath \"\"" Oct 09 15:35:43 crc kubenswrapper[4762]: I1009 15:35:43.118818 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-client-openstack-openstack-cell1-gp9tb" event={"ID":"9aea25b2-f748-46ba-9f78-821c7d1a5451","Type":"ContainerDied","Data":"5c7ea8a284df3953560f9dfb96f9d2fd3d414fa19690d668f6fcfbc9ead34182"} Oct 09 15:35:43 crc kubenswrapper[4762]: I1009 15:35:43.118860 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5c7ea8a284df3953560f9dfb96f9d2fd3d414fa19690d668f6fcfbc9ead34182" Oct 09 15:35:43 crc kubenswrapper[4762]: I1009 15:35:43.118875 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceph-client-openstack-openstack-cell1-gp9tb" Oct 09 15:35:43 crc kubenswrapper[4762]: I1009 15:35:43.199420 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-openstack-openstack-cell1-c98nl"] Oct 09 15:35:43 crc kubenswrapper[4762]: E1009 15:35:43.200266 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9aea25b2-f748-46ba-9f78-821c7d1a5451" containerName="ceph-client-openstack-openstack-cell1" Oct 09 15:35:43 crc kubenswrapper[4762]: I1009 15:35:43.200292 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="9aea25b2-f748-46ba-9f78-821c7d1a5451" containerName="ceph-client-openstack-openstack-cell1" Oct 09 15:35:43 crc kubenswrapper[4762]: I1009 15:35:43.200802 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="9aea25b2-f748-46ba-9f78-821c7d1a5451" containerName="ceph-client-openstack-openstack-cell1" Oct 09 15:35:43 crc kubenswrapper[4762]: I1009 15:35:43.202174 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-openstack-openstack-cell1-c98nl" Oct 09 15:35:43 crc kubenswrapper[4762]: I1009 15:35:43.206137 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 09 15:35:43 crc kubenswrapper[4762]: I1009 15:35:43.206359 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-whcgt" Oct 09 15:35:43 crc kubenswrapper[4762]: I1009 15:35:43.206616 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-config" Oct 09 15:35:43 crc kubenswrapper[4762]: I1009 15:35:43.206741 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Oct 09 15:35:43 crc kubenswrapper[4762]: I1009 15:35:43.206813 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Oct 09 15:35:43 crc kubenswrapper[4762]: I1009 15:35:43.221684 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-openstack-openstack-cell1-c98nl"] Oct 09 15:35:43 crc kubenswrapper[4762]: I1009 15:35:43.269455 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d18893bd-fa3b-45d1-a8ec-99d7c1daebe9-inventory\") pod \"ovn-openstack-openstack-cell1-c98nl\" (UID: \"d18893bd-fa3b-45d1-a8ec-99d7c1daebe9\") " pod="openstack/ovn-openstack-openstack-cell1-c98nl" Oct 09 15:35:43 crc kubenswrapper[4762]: I1009 15:35:43.269551 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/d18893bd-fa3b-45d1-a8ec-99d7c1daebe9-ovncontroller-config-0\") pod \"ovn-openstack-openstack-cell1-c98nl\" (UID: \"d18893bd-fa3b-45d1-a8ec-99d7c1daebe9\") " pod="openstack/ovn-openstack-openstack-cell1-c98nl" Oct 09 15:35:43 crc kubenswrapper[4762]: I1009 15:35:43.269582 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d18893bd-fa3b-45d1-a8ec-99d7c1daebe9-ssh-key\") pod \"ovn-openstack-openstack-cell1-c98nl\" (UID: \"d18893bd-fa3b-45d1-a8ec-99d7c1daebe9\") " pod="openstack/ovn-openstack-openstack-cell1-c98nl" Oct 09 15:35:43 crc kubenswrapper[4762]: I1009 15:35:43.269604 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/d18893bd-fa3b-45d1-a8ec-99d7c1daebe9-ceph\") pod \"ovn-openstack-openstack-cell1-c98nl\" (UID: \"d18893bd-fa3b-45d1-a8ec-99d7c1daebe9\") " pod="openstack/ovn-openstack-openstack-cell1-c98nl" Oct 09 15:35:43 crc kubenswrapper[4762]: I1009 15:35:43.269919 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9r74l\" (UniqueName: \"kubernetes.io/projected/d18893bd-fa3b-45d1-a8ec-99d7c1daebe9-kube-api-access-9r74l\") pod \"ovn-openstack-openstack-cell1-c98nl\" (UID: \"d18893bd-fa3b-45d1-a8ec-99d7c1daebe9\") " pod="openstack/ovn-openstack-openstack-cell1-c98nl" Oct 09 15:35:43 crc kubenswrapper[4762]: I1009 15:35:43.270044 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d18893bd-fa3b-45d1-a8ec-99d7c1daebe9-ovn-combined-ca-bundle\") pod \"ovn-openstack-openstack-cell1-c98nl\" (UID: 
\"d18893bd-fa3b-45d1-a8ec-99d7c1daebe9\") " pod="openstack/ovn-openstack-openstack-cell1-c98nl" Oct 09 15:35:43 crc kubenswrapper[4762]: I1009 15:35:43.373232 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/d18893bd-fa3b-45d1-a8ec-99d7c1daebe9-ovncontroller-config-0\") pod \"ovn-openstack-openstack-cell1-c98nl\" (UID: \"d18893bd-fa3b-45d1-a8ec-99d7c1daebe9\") " pod="openstack/ovn-openstack-openstack-cell1-c98nl" Oct 09 15:35:43 crc kubenswrapper[4762]: I1009 15:35:43.373383 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d18893bd-fa3b-45d1-a8ec-99d7c1daebe9-ssh-key\") pod \"ovn-openstack-openstack-cell1-c98nl\" (UID: \"d18893bd-fa3b-45d1-a8ec-99d7c1daebe9\") " pod="openstack/ovn-openstack-openstack-cell1-c98nl" Oct 09 15:35:43 crc kubenswrapper[4762]: I1009 15:35:43.373445 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/d18893bd-fa3b-45d1-a8ec-99d7c1daebe9-ceph\") pod \"ovn-openstack-openstack-cell1-c98nl\" (UID: \"d18893bd-fa3b-45d1-a8ec-99d7c1daebe9\") " pod="openstack/ovn-openstack-openstack-cell1-c98nl" Oct 09 15:35:43 crc kubenswrapper[4762]: I1009 15:35:43.373546 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9r74l\" (UniqueName: \"kubernetes.io/projected/d18893bd-fa3b-45d1-a8ec-99d7c1daebe9-kube-api-access-9r74l\") pod \"ovn-openstack-openstack-cell1-c98nl\" (UID: \"d18893bd-fa3b-45d1-a8ec-99d7c1daebe9\") " pod="openstack/ovn-openstack-openstack-cell1-c98nl" Oct 09 15:35:43 crc kubenswrapper[4762]: I1009 15:35:43.373687 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d18893bd-fa3b-45d1-a8ec-99d7c1daebe9-ovn-combined-ca-bundle\") pod \"ovn-openstack-openstack-cell1-c98nl\" (UID: \"d18893bd-fa3b-45d1-a8ec-99d7c1daebe9\") " pod="openstack/ovn-openstack-openstack-cell1-c98nl" Oct 09 15:35:43 crc kubenswrapper[4762]: I1009 15:35:43.374155 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d18893bd-fa3b-45d1-a8ec-99d7c1daebe9-inventory\") pod \"ovn-openstack-openstack-cell1-c98nl\" (UID: \"d18893bd-fa3b-45d1-a8ec-99d7c1daebe9\") " pod="openstack/ovn-openstack-openstack-cell1-c98nl" Oct 09 15:35:43 crc kubenswrapper[4762]: I1009 15:35:43.375797 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/d18893bd-fa3b-45d1-a8ec-99d7c1daebe9-ovncontroller-config-0\") pod \"ovn-openstack-openstack-cell1-c98nl\" (UID: \"d18893bd-fa3b-45d1-a8ec-99d7c1daebe9\") " pod="openstack/ovn-openstack-openstack-cell1-c98nl" Oct 09 15:35:43 crc kubenswrapper[4762]: I1009 15:35:43.378936 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d18893bd-fa3b-45d1-a8ec-99d7c1daebe9-inventory\") pod \"ovn-openstack-openstack-cell1-c98nl\" (UID: \"d18893bd-fa3b-45d1-a8ec-99d7c1daebe9\") " pod="openstack/ovn-openstack-openstack-cell1-c98nl" Oct 09 15:35:43 crc kubenswrapper[4762]: I1009 15:35:43.379482 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d18893bd-fa3b-45d1-a8ec-99d7c1daebe9-ssh-key\") pod 
\"ovn-openstack-openstack-cell1-c98nl\" (UID: \"d18893bd-fa3b-45d1-a8ec-99d7c1daebe9\") " pod="openstack/ovn-openstack-openstack-cell1-c98nl" Oct 09 15:35:43 crc kubenswrapper[4762]: I1009 15:35:43.379544 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/d18893bd-fa3b-45d1-a8ec-99d7c1daebe9-ceph\") pod \"ovn-openstack-openstack-cell1-c98nl\" (UID: \"d18893bd-fa3b-45d1-a8ec-99d7c1daebe9\") " pod="openstack/ovn-openstack-openstack-cell1-c98nl" Oct 09 15:35:43 crc kubenswrapper[4762]: I1009 15:35:43.391398 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9r74l\" (UniqueName: \"kubernetes.io/projected/d18893bd-fa3b-45d1-a8ec-99d7c1daebe9-kube-api-access-9r74l\") pod \"ovn-openstack-openstack-cell1-c98nl\" (UID: \"d18893bd-fa3b-45d1-a8ec-99d7c1daebe9\") " pod="openstack/ovn-openstack-openstack-cell1-c98nl" Oct 09 15:35:43 crc kubenswrapper[4762]: I1009 15:35:43.392747 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d18893bd-fa3b-45d1-a8ec-99d7c1daebe9-ovn-combined-ca-bundle\") pod \"ovn-openstack-openstack-cell1-c98nl\" (UID: \"d18893bd-fa3b-45d1-a8ec-99d7c1daebe9\") " pod="openstack/ovn-openstack-openstack-cell1-c98nl" Oct 09 15:35:43 crc kubenswrapper[4762]: I1009 15:35:43.554990 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-openstack-openstack-cell1-c98nl" Oct 09 15:35:44 crc kubenswrapper[4762]: I1009 15:35:44.109406 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-openstack-openstack-cell1-c98nl"] Oct 09 15:35:44 crc kubenswrapper[4762]: I1009 15:35:44.129687 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-openstack-openstack-cell1-c98nl" event={"ID":"d18893bd-fa3b-45d1-a8ec-99d7c1daebe9","Type":"ContainerStarted","Data":"cfa4db3c2cd174a817616e00383ec3a479ce77de5b7f1975496a5fe052fc67bb"} Oct 09 15:35:45 crc kubenswrapper[4762]: I1009 15:35:45.141055 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-openstack-openstack-cell1-c98nl" event={"ID":"d18893bd-fa3b-45d1-a8ec-99d7c1daebe9","Type":"ContainerStarted","Data":"d1e0dcad43f8fc3012cfc813924e8d00b57ecb9ed817c9035f5f94fd3b747b9f"} Oct 09 15:35:45 crc kubenswrapper[4762]: I1009 15:35:45.159710 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-openstack-openstack-cell1-c98nl" podStartSLOduration=1.7102476709999999 podStartE2EDuration="2.159691332s" podCreationTimestamp="2025-10-09 15:35:43 +0000 UTC" firstStartedPulling="2025-10-09 15:35:44.113184627 +0000 UTC m=+7819.886975666" lastFinishedPulling="2025-10-09 15:35:44.562628288 +0000 UTC m=+7820.336419327" observedRunningTime="2025-10-09 15:35:45.156660121 +0000 UTC m=+7820.930451160" watchObservedRunningTime="2025-10-09 15:35:45.159691332 +0000 UTC m=+7820.933482371" Oct 09 15:36:50 crc kubenswrapper[4762]: I1009 15:36:50.769561 4762 generic.go:334] "Generic (PLEG): container finished" podID="d18893bd-fa3b-45d1-a8ec-99d7c1daebe9" containerID="d1e0dcad43f8fc3012cfc813924e8d00b57ecb9ed817c9035f5f94fd3b747b9f" exitCode=0 Oct 09 15:36:50 crc kubenswrapper[4762]: I1009 15:36:50.769658 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-openstack-openstack-cell1-c98nl" event={"ID":"d18893bd-fa3b-45d1-a8ec-99d7c1daebe9","Type":"ContainerDied","Data":"d1e0dcad43f8fc3012cfc813924e8d00b57ecb9ed817c9035f5f94fd3b747b9f"} 
Oct 09 15:36:52 crc kubenswrapper[4762]: I1009 15:36:52.270060 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-openstack-openstack-cell1-c98nl" Oct 09 15:36:52 crc kubenswrapper[4762]: I1009 15:36:52.381343 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/d18893bd-fa3b-45d1-a8ec-99d7c1daebe9-ovncontroller-config-0\") pod \"d18893bd-fa3b-45d1-a8ec-99d7c1daebe9\" (UID: \"d18893bd-fa3b-45d1-a8ec-99d7c1daebe9\") " Oct 09 15:36:52 crc kubenswrapper[4762]: I1009 15:36:52.381401 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/d18893bd-fa3b-45d1-a8ec-99d7c1daebe9-ceph\") pod \"d18893bd-fa3b-45d1-a8ec-99d7c1daebe9\" (UID: \"d18893bd-fa3b-45d1-a8ec-99d7c1daebe9\") " Oct 09 15:36:52 crc kubenswrapper[4762]: I1009 15:36:52.381457 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d18893bd-fa3b-45d1-a8ec-99d7c1daebe9-inventory\") pod \"d18893bd-fa3b-45d1-a8ec-99d7c1daebe9\" (UID: \"d18893bd-fa3b-45d1-a8ec-99d7c1daebe9\") " Oct 09 15:36:52 crc kubenswrapper[4762]: I1009 15:36:52.381473 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d18893bd-fa3b-45d1-a8ec-99d7c1daebe9-ssh-key\") pod \"d18893bd-fa3b-45d1-a8ec-99d7c1daebe9\" (UID: \"d18893bd-fa3b-45d1-a8ec-99d7c1daebe9\") " Oct 09 15:36:52 crc kubenswrapper[4762]: I1009 15:36:52.381530 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9r74l\" (UniqueName: \"kubernetes.io/projected/d18893bd-fa3b-45d1-a8ec-99d7c1daebe9-kube-api-access-9r74l\") pod \"d18893bd-fa3b-45d1-a8ec-99d7c1daebe9\" (UID: \"d18893bd-fa3b-45d1-a8ec-99d7c1daebe9\") " Oct 09 15:36:52 crc kubenswrapper[4762]: I1009 15:36:52.381602 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d18893bd-fa3b-45d1-a8ec-99d7c1daebe9-ovn-combined-ca-bundle\") pod \"d18893bd-fa3b-45d1-a8ec-99d7c1daebe9\" (UID: \"d18893bd-fa3b-45d1-a8ec-99d7c1daebe9\") " Oct 09 15:36:52 crc kubenswrapper[4762]: I1009 15:36:52.389361 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d18893bd-fa3b-45d1-a8ec-99d7c1daebe9-kube-api-access-9r74l" (OuterVolumeSpecName: "kube-api-access-9r74l") pod "d18893bd-fa3b-45d1-a8ec-99d7c1daebe9" (UID: "d18893bd-fa3b-45d1-a8ec-99d7c1daebe9"). InnerVolumeSpecName "kube-api-access-9r74l". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 15:36:52 crc kubenswrapper[4762]: I1009 15:36:52.389832 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d18893bd-fa3b-45d1-a8ec-99d7c1daebe9-ceph" (OuterVolumeSpecName: "ceph") pod "d18893bd-fa3b-45d1-a8ec-99d7c1daebe9" (UID: "d18893bd-fa3b-45d1-a8ec-99d7c1daebe9"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:36:52 crc kubenswrapper[4762]: I1009 15:36:52.394517 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d18893bd-fa3b-45d1-a8ec-99d7c1daebe9-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "d18893bd-fa3b-45d1-a8ec-99d7c1daebe9" (UID: "d18893bd-fa3b-45d1-a8ec-99d7c1daebe9"). 
InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:36:52 crc kubenswrapper[4762]: I1009 15:36:52.416582 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d18893bd-fa3b-45d1-a8ec-99d7c1daebe9-ovncontroller-config-0" (OuterVolumeSpecName: "ovncontroller-config-0") pod "d18893bd-fa3b-45d1-a8ec-99d7c1daebe9" (UID: "d18893bd-fa3b-45d1-a8ec-99d7c1daebe9"). InnerVolumeSpecName "ovncontroller-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 15:36:52 crc kubenswrapper[4762]: I1009 15:36:52.421867 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d18893bd-fa3b-45d1-a8ec-99d7c1daebe9-inventory" (OuterVolumeSpecName: "inventory") pod "d18893bd-fa3b-45d1-a8ec-99d7c1daebe9" (UID: "d18893bd-fa3b-45d1-a8ec-99d7c1daebe9"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:36:52 crc kubenswrapper[4762]: I1009 15:36:52.426536 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d18893bd-fa3b-45d1-a8ec-99d7c1daebe9-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "d18893bd-fa3b-45d1-a8ec-99d7c1daebe9" (UID: "d18893bd-fa3b-45d1-a8ec-99d7c1daebe9"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:36:52 crc kubenswrapper[4762]: I1009 15:36:52.484885 4762 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d18893bd-fa3b-45d1-a8ec-99d7c1daebe9-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 15:36:52 crc kubenswrapper[4762]: I1009 15:36:52.485320 4762 reconciler_common.go:293] "Volume detached for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/d18893bd-fa3b-45d1-a8ec-99d7c1daebe9-ovncontroller-config-0\") on node \"crc\" DevicePath \"\"" Oct 09 15:36:52 crc kubenswrapper[4762]: I1009 15:36:52.485334 4762 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/d18893bd-fa3b-45d1-a8ec-99d7c1daebe9-ceph\") on node \"crc\" DevicePath \"\"" Oct 09 15:36:52 crc kubenswrapper[4762]: I1009 15:36:52.485350 4762 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d18893bd-fa3b-45d1-a8ec-99d7c1daebe9-inventory\") on node \"crc\" DevicePath \"\"" Oct 09 15:36:52 crc kubenswrapper[4762]: I1009 15:36:52.485362 4762 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d18893bd-fa3b-45d1-a8ec-99d7c1daebe9-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 09 15:36:52 crc kubenswrapper[4762]: I1009 15:36:52.485375 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9r74l\" (UniqueName: \"kubernetes.io/projected/d18893bd-fa3b-45d1-a8ec-99d7c1daebe9-kube-api-access-9r74l\") on node \"crc\" DevicePath \"\"" Oct 09 15:36:52 crc kubenswrapper[4762]: I1009 15:36:52.789370 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-openstack-openstack-cell1-c98nl" event={"ID":"d18893bd-fa3b-45d1-a8ec-99d7c1daebe9","Type":"ContainerDied","Data":"cfa4db3c2cd174a817616e00383ec3a479ce77de5b7f1975496a5fe052fc67bb"} Oct 09 15:36:52 crc kubenswrapper[4762]: I1009 15:36:52.789418 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cfa4db3c2cd174a817616e00383ec3a479ce77de5b7f1975496a5fe052fc67bb" Oct 09 15:36:52 
crc kubenswrapper[4762]: I1009 15:36:52.789470 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-openstack-openstack-cell1-c98nl" Oct 09 15:36:52 crc kubenswrapper[4762]: I1009 15:36:52.945625 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-metadata-openstack-openstack-cell1-gn57g"] Oct 09 15:36:52 crc kubenswrapper[4762]: E1009 15:36:52.952267 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d18893bd-fa3b-45d1-a8ec-99d7c1daebe9" containerName="ovn-openstack-openstack-cell1" Oct 09 15:36:52 crc kubenswrapper[4762]: I1009 15:36:52.952295 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="d18893bd-fa3b-45d1-a8ec-99d7c1daebe9" containerName="ovn-openstack-openstack-cell1" Oct 09 15:36:52 crc kubenswrapper[4762]: I1009 15:36:52.952624 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="d18893bd-fa3b-45d1-a8ec-99d7c1daebe9" containerName="ovn-openstack-openstack-cell1" Oct 09 15:36:52 crc kubenswrapper[4762]: I1009 15:36:52.953667 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-openstack-openstack-cell1-gn57g" Oct 09 15:36:52 crc kubenswrapper[4762]: I1009 15:36:52.958649 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-ovn-metadata-agent-neutron-config" Oct 09 15:36:52 crc kubenswrapper[4762]: I1009 15:36:52.958891 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 09 15:36:52 crc kubenswrapper[4762]: I1009 15:36:52.959083 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Oct 09 15:36:52 crc kubenswrapper[4762]: I1009 15:36:52.959233 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-neutron-config" Oct 09 15:36:52 crc kubenswrapper[4762]: I1009 15:36:52.959361 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Oct 09 15:36:52 crc kubenswrapper[4762]: I1009 15:36:52.977071 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-openstack-openstack-cell1-gn57g"] Oct 09 15:36:52 crc kubenswrapper[4762]: I1009 15:36:52.988329 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-whcgt" Oct 09 15:36:53 crc kubenswrapper[4762]: I1009 15:36:53.097445 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xlbzd\" (UniqueName: \"kubernetes.io/projected/4958a251-a361-4786-ad42-3474a924f291-kube-api-access-xlbzd\") pod \"neutron-metadata-openstack-openstack-cell1-gn57g\" (UID: \"4958a251-a361-4786-ad42-3474a924f291\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-gn57g" Oct 09 15:36:53 crc kubenswrapper[4762]: I1009 15:36:53.097537 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/4958a251-a361-4786-ad42-3474a924f291-nova-metadata-neutron-config-0\") pod \"neutron-metadata-openstack-openstack-cell1-gn57g\" (UID: \"4958a251-a361-4786-ad42-3474a924f291\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-gn57g" Oct 09 15:36:53 crc kubenswrapper[4762]: I1009 15:36:53.097573 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"ssh-key\" (UniqueName: \"kubernetes.io/secret/4958a251-a361-4786-ad42-3474a924f291-ssh-key\") pod \"neutron-metadata-openstack-openstack-cell1-gn57g\" (UID: \"4958a251-a361-4786-ad42-3474a924f291\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-gn57g" Oct 09 15:36:53 crc kubenswrapper[4762]: I1009 15:36:53.097625 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/4958a251-a361-4786-ad42-3474a924f291-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-openstack-openstack-cell1-gn57g\" (UID: \"4958a251-a361-4786-ad42-3474a924f291\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-gn57g" Oct 09 15:36:53 crc kubenswrapper[4762]: I1009 15:36:53.097686 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4958a251-a361-4786-ad42-3474a924f291-inventory\") pod \"neutron-metadata-openstack-openstack-cell1-gn57g\" (UID: \"4958a251-a361-4786-ad42-3474a924f291\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-gn57g" Oct 09 15:36:53 crc kubenswrapper[4762]: I1009 15:36:53.097740 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4958a251-a361-4786-ad42-3474a924f291-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-openstack-openstack-cell1-gn57g\" (UID: \"4958a251-a361-4786-ad42-3474a924f291\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-gn57g" Oct 09 15:36:53 crc kubenswrapper[4762]: I1009 15:36:53.097803 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/4958a251-a361-4786-ad42-3474a924f291-ceph\") pod \"neutron-metadata-openstack-openstack-cell1-gn57g\" (UID: \"4958a251-a361-4786-ad42-3474a924f291\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-gn57g" Oct 09 15:36:53 crc kubenswrapper[4762]: E1009 15:36:53.127969 4762 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd18893bd_fa3b_45d1_a8ec_99d7c1daebe9.slice/crio-cfa4db3c2cd174a817616e00383ec3a479ce77de5b7f1975496a5fe052fc67bb\": RecentStats: unable to find data in memory cache]" Oct 09 15:36:53 crc kubenswrapper[4762]: I1009 15:36:53.200756 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/4958a251-a361-4786-ad42-3474a924f291-ceph\") pod \"neutron-metadata-openstack-openstack-cell1-gn57g\" (UID: \"4958a251-a361-4786-ad42-3474a924f291\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-gn57g" Oct 09 15:36:53 crc kubenswrapper[4762]: I1009 15:36:53.201526 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xlbzd\" (UniqueName: \"kubernetes.io/projected/4958a251-a361-4786-ad42-3474a924f291-kube-api-access-xlbzd\") pod \"neutron-metadata-openstack-openstack-cell1-gn57g\" (UID: \"4958a251-a361-4786-ad42-3474a924f291\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-gn57g" Oct 09 15:36:53 crc kubenswrapper[4762]: I1009 15:36:53.201632 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-neutron-config-0\" 
(UniqueName: \"kubernetes.io/secret/4958a251-a361-4786-ad42-3474a924f291-nova-metadata-neutron-config-0\") pod \"neutron-metadata-openstack-openstack-cell1-gn57g\" (UID: \"4958a251-a361-4786-ad42-3474a924f291\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-gn57g" Oct 09 15:36:53 crc kubenswrapper[4762]: I1009 15:36:53.201705 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4958a251-a361-4786-ad42-3474a924f291-ssh-key\") pod \"neutron-metadata-openstack-openstack-cell1-gn57g\" (UID: \"4958a251-a361-4786-ad42-3474a924f291\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-gn57g" Oct 09 15:36:53 crc kubenswrapper[4762]: I1009 15:36:53.201807 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/4958a251-a361-4786-ad42-3474a924f291-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-openstack-openstack-cell1-gn57g\" (UID: \"4958a251-a361-4786-ad42-3474a924f291\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-gn57g" Oct 09 15:36:53 crc kubenswrapper[4762]: I1009 15:36:53.201885 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4958a251-a361-4786-ad42-3474a924f291-inventory\") pod \"neutron-metadata-openstack-openstack-cell1-gn57g\" (UID: \"4958a251-a361-4786-ad42-3474a924f291\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-gn57g" Oct 09 15:36:53 crc kubenswrapper[4762]: I1009 15:36:53.202032 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4958a251-a361-4786-ad42-3474a924f291-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-openstack-openstack-cell1-gn57g\" (UID: \"4958a251-a361-4786-ad42-3474a924f291\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-gn57g" Oct 09 15:36:53 crc kubenswrapper[4762]: I1009 15:36:53.207681 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/4958a251-a361-4786-ad42-3474a924f291-ceph\") pod \"neutron-metadata-openstack-openstack-cell1-gn57g\" (UID: \"4958a251-a361-4786-ad42-3474a924f291\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-gn57g" Oct 09 15:36:53 crc kubenswrapper[4762]: I1009 15:36:53.207826 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4958a251-a361-4786-ad42-3474a924f291-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-openstack-openstack-cell1-gn57g\" (UID: \"4958a251-a361-4786-ad42-3474a924f291\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-gn57g" Oct 09 15:36:53 crc kubenswrapper[4762]: I1009 15:36:53.208164 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4958a251-a361-4786-ad42-3474a924f291-inventory\") pod \"neutron-metadata-openstack-openstack-cell1-gn57g\" (UID: \"4958a251-a361-4786-ad42-3474a924f291\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-gn57g" Oct 09 15:36:53 crc kubenswrapper[4762]: I1009 15:36:53.208338 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-neutron-config-0\" (UniqueName: 
\"kubernetes.io/secret/4958a251-a361-4786-ad42-3474a924f291-nova-metadata-neutron-config-0\") pod \"neutron-metadata-openstack-openstack-cell1-gn57g\" (UID: \"4958a251-a361-4786-ad42-3474a924f291\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-gn57g" Oct 09 15:36:53 crc kubenswrapper[4762]: I1009 15:36:53.208345 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/4958a251-a361-4786-ad42-3474a924f291-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-openstack-openstack-cell1-gn57g\" (UID: \"4958a251-a361-4786-ad42-3474a924f291\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-gn57g" Oct 09 15:36:53 crc kubenswrapper[4762]: I1009 15:36:53.208559 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4958a251-a361-4786-ad42-3474a924f291-ssh-key\") pod \"neutron-metadata-openstack-openstack-cell1-gn57g\" (UID: \"4958a251-a361-4786-ad42-3474a924f291\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-gn57g" Oct 09 15:36:53 crc kubenswrapper[4762]: I1009 15:36:53.218475 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xlbzd\" (UniqueName: \"kubernetes.io/projected/4958a251-a361-4786-ad42-3474a924f291-kube-api-access-xlbzd\") pod \"neutron-metadata-openstack-openstack-cell1-gn57g\" (UID: \"4958a251-a361-4786-ad42-3474a924f291\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-gn57g" Oct 09 15:36:53 crc kubenswrapper[4762]: I1009 15:36:53.319695 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-openstack-openstack-cell1-gn57g" Oct 09 15:36:53 crc kubenswrapper[4762]: I1009 15:36:53.862614 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-openstack-openstack-cell1-gn57g"] Oct 09 15:36:54 crc kubenswrapper[4762]: I1009 15:36:54.822850 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-openstack-openstack-cell1-gn57g" event={"ID":"4958a251-a361-4786-ad42-3474a924f291","Type":"ContainerStarted","Data":"ec3cf04df687f79ada965d682a75354442673c729035fd87ee8ac8c3fb4a6902"} Oct 09 15:36:55 crc kubenswrapper[4762]: I1009 15:36:55.836913 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-openstack-openstack-cell1-gn57g" event={"ID":"4958a251-a361-4786-ad42-3474a924f291","Type":"ContainerStarted","Data":"523c11fd10e15daa7043c9eb8fc78e4fb604b0b1ab6b79d879f6bd84559ccf8e"} Oct 09 15:36:55 crc kubenswrapper[4762]: I1009 15:36:55.858864 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-metadata-openstack-openstack-cell1-gn57g" podStartSLOduration=2.978119833 podStartE2EDuration="3.858844398s" podCreationTimestamp="2025-10-09 15:36:52 +0000 UTC" firstStartedPulling="2025-10-09 15:36:53.865704099 +0000 UTC m=+7889.639495138" lastFinishedPulling="2025-10-09 15:36:54.746428664 +0000 UTC m=+7890.520219703" observedRunningTime="2025-10-09 15:36:55.857299937 +0000 UTC m=+7891.631090996" watchObservedRunningTime="2025-10-09 15:36:55.858844398 +0000 UTC m=+7891.632635437" Oct 09 15:37:11 crc kubenswrapper[4762]: I1009 15:37:11.969402 4762 patch_prober.go:28] interesting pod/machine-config-daemon-5v6hv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get 
\"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 15:37:11 crc kubenswrapper[4762]: I1009 15:37:11.969960 4762 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 15:37:41 crc kubenswrapper[4762]: I1009 15:37:41.969553 4762 patch_prober.go:28] interesting pod/machine-config-daemon-5v6hv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 15:37:41 crc kubenswrapper[4762]: I1009 15:37:41.970121 4762 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 15:37:48 crc kubenswrapper[4762]: I1009 15:37:48.352924 4762 generic.go:334] "Generic (PLEG): container finished" podID="4958a251-a361-4786-ad42-3474a924f291" containerID="523c11fd10e15daa7043c9eb8fc78e4fb604b0b1ab6b79d879f6bd84559ccf8e" exitCode=0 Oct 09 15:37:48 crc kubenswrapper[4762]: I1009 15:37:48.353006 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-openstack-openstack-cell1-gn57g" event={"ID":"4958a251-a361-4786-ad42-3474a924f291","Type":"ContainerDied","Data":"523c11fd10e15daa7043c9eb8fc78e4fb604b0b1ab6b79d879f6bd84559ccf8e"} Oct 09 15:37:49 crc kubenswrapper[4762]: I1009 15:37:49.820537 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-openstack-openstack-cell1-gn57g" Oct 09 15:37:49 crc kubenswrapper[4762]: I1009 15:37:49.945414 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4958a251-a361-4786-ad42-3474a924f291-neutron-metadata-combined-ca-bundle\") pod \"4958a251-a361-4786-ad42-3474a924f291\" (UID: \"4958a251-a361-4786-ad42-3474a924f291\") " Oct 09 15:37:49 crc kubenswrapper[4762]: I1009 15:37:49.945478 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xlbzd\" (UniqueName: \"kubernetes.io/projected/4958a251-a361-4786-ad42-3474a924f291-kube-api-access-xlbzd\") pod \"4958a251-a361-4786-ad42-3474a924f291\" (UID: \"4958a251-a361-4786-ad42-3474a924f291\") " Oct 09 15:37:49 crc kubenswrapper[4762]: I1009 15:37:49.945534 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4958a251-a361-4786-ad42-3474a924f291-ssh-key\") pod \"4958a251-a361-4786-ad42-3474a924f291\" (UID: \"4958a251-a361-4786-ad42-3474a924f291\") " Oct 09 15:37:49 crc kubenswrapper[4762]: I1009 15:37:49.945575 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/4958a251-a361-4786-ad42-3474a924f291-ceph\") pod \"4958a251-a361-4786-ad42-3474a924f291\" (UID: \"4958a251-a361-4786-ad42-3474a924f291\") " Oct 09 15:37:49 crc kubenswrapper[4762]: I1009 15:37:49.945617 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/4958a251-a361-4786-ad42-3474a924f291-nova-metadata-neutron-config-0\") pod \"4958a251-a361-4786-ad42-3474a924f291\" (UID: \"4958a251-a361-4786-ad42-3474a924f291\") " Oct 09 15:37:49 crc kubenswrapper[4762]: I1009 15:37:49.945717 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/4958a251-a361-4786-ad42-3474a924f291-neutron-ovn-metadata-agent-neutron-config-0\") pod \"4958a251-a361-4786-ad42-3474a924f291\" (UID: \"4958a251-a361-4786-ad42-3474a924f291\") " Oct 09 15:37:49 crc kubenswrapper[4762]: I1009 15:37:49.945870 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4958a251-a361-4786-ad42-3474a924f291-inventory\") pod \"4958a251-a361-4786-ad42-3474a924f291\" (UID: \"4958a251-a361-4786-ad42-3474a924f291\") " Oct 09 15:37:49 crc kubenswrapper[4762]: I1009 15:37:49.951825 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4958a251-a361-4786-ad42-3474a924f291-ceph" (OuterVolumeSpecName: "ceph") pod "4958a251-a361-4786-ad42-3474a924f291" (UID: "4958a251-a361-4786-ad42-3474a924f291"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:37:49 crc kubenswrapper[4762]: I1009 15:37:49.952137 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4958a251-a361-4786-ad42-3474a924f291-kube-api-access-xlbzd" (OuterVolumeSpecName: "kube-api-access-xlbzd") pod "4958a251-a361-4786-ad42-3474a924f291" (UID: "4958a251-a361-4786-ad42-3474a924f291"). InnerVolumeSpecName "kube-api-access-xlbzd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 15:37:50 crc kubenswrapper[4762]: I1009 15:37:50.037737 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4958a251-a361-4786-ad42-3474a924f291-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "4958a251-a361-4786-ad42-3474a924f291" (UID: "4958a251-a361-4786-ad42-3474a924f291"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:37:50 crc kubenswrapper[4762]: I1009 15:37:50.049088 4762 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4958a251-a361-4786-ad42-3474a924f291-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 15:37:50 crc kubenswrapper[4762]: I1009 15:37:50.049154 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xlbzd\" (UniqueName: \"kubernetes.io/projected/4958a251-a361-4786-ad42-3474a924f291-kube-api-access-xlbzd\") on node \"crc\" DevicePath \"\"" Oct 09 15:37:50 crc kubenswrapper[4762]: I1009 15:37:50.049169 4762 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/4958a251-a361-4786-ad42-3474a924f291-ceph\") on node \"crc\" DevicePath \"\"" Oct 09 15:37:50 crc kubenswrapper[4762]: I1009 15:37:50.061846 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4958a251-a361-4786-ad42-3474a924f291-inventory" (OuterVolumeSpecName: "inventory") pod "4958a251-a361-4786-ad42-3474a924f291" (UID: "4958a251-a361-4786-ad42-3474a924f291"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:37:50 crc kubenswrapper[4762]: I1009 15:37:50.068759 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4958a251-a361-4786-ad42-3474a924f291-nova-metadata-neutron-config-0" (OuterVolumeSpecName: "nova-metadata-neutron-config-0") pod "4958a251-a361-4786-ad42-3474a924f291" (UID: "4958a251-a361-4786-ad42-3474a924f291"). InnerVolumeSpecName "nova-metadata-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:37:50 crc kubenswrapper[4762]: I1009 15:37:50.068782 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4958a251-a361-4786-ad42-3474a924f291-neutron-ovn-metadata-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-ovn-metadata-agent-neutron-config-0") pod "4958a251-a361-4786-ad42-3474a924f291" (UID: "4958a251-a361-4786-ad42-3474a924f291"). InnerVolumeSpecName "neutron-ovn-metadata-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:37:50 crc kubenswrapper[4762]: I1009 15:37:50.072022 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4958a251-a361-4786-ad42-3474a924f291-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "4958a251-a361-4786-ad42-3474a924f291" (UID: "4958a251-a361-4786-ad42-3474a924f291"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:37:50 crc kubenswrapper[4762]: I1009 15:37:50.151035 4762 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4958a251-a361-4786-ad42-3474a924f291-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 09 15:37:50 crc kubenswrapper[4762]: I1009 15:37:50.151078 4762 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/4958a251-a361-4786-ad42-3474a924f291-nova-metadata-neutron-config-0\") on node \"crc\" DevicePath \"\"" Oct 09 15:37:50 crc kubenswrapper[4762]: I1009 15:37:50.151095 4762 reconciler_common.go:293] "Volume detached for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/4958a251-a361-4786-ad42-3474a924f291-neutron-ovn-metadata-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Oct 09 15:37:50 crc kubenswrapper[4762]: I1009 15:37:50.151109 4762 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4958a251-a361-4786-ad42-3474a924f291-inventory\") on node \"crc\" DevicePath \"\"" Oct 09 15:37:50 crc kubenswrapper[4762]: I1009 15:37:50.374391 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-openstack-openstack-cell1-gn57g" event={"ID":"4958a251-a361-4786-ad42-3474a924f291","Type":"ContainerDied","Data":"ec3cf04df687f79ada965d682a75354442673c729035fd87ee8ac8c3fb4a6902"} Oct 09 15:37:50 crc kubenswrapper[4762]: I1009 15:37:50.374704 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ec3cf04df687f79ada965d682a75354442673c729035fd87ee8ac8c3fb4a6902" Oct 09 15:37:50 crc kubenswrapper[4762]: I1009 15:37:50.374515 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-openstack-openstack-cell1-gn57g" Oct 09 15:37:50 crc kubenswrapper[4762]: I1009 15:37:50.480849 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/libvirt-openstack-openstack-cell1-cczp4"] Oct 09 15:37:50 crc kubenswrapper[4762]: E1009 15:37:50.481548 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4958a251-a361-4786-ad42-3474a924f291" containerName="neutron-metadata-openstack-openstack-cell1" Oct 09 15:37:50 crc kubenswrapper[4762]: I1009 15:37:50.481623 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="4958a251-a361-4786-ad42-3474a924f291" containerName="neutron-metadata-openstack-openstack-cell1" Oct 09 15:37:50 crc kubenswrapper[4762]: I1009 15:37:50.482028 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="4958a251-a361-4786-ad42-3474a924f291" containerName="neutron-metadata-openstack-openstack-cell1" Oct 09 15:37:50 crc kubenswrapper[4762]: I1009 15:37:50.482922 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-openstack-openstack-cell1-cczp4" Oct 09 15:37:50 crc kubenswrapper[4762]: I1009 15:37:50.485258 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Oct 09 15:37:50 crc kubenswrapper[4762]: I1009 15:37:50.485478 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Oct 09 15:37:50 crc kubenswrapper[4762]: I1009 15:37:50.485610 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-whcgt" Oct 09 15:37:50 crc kubenswrapper[4762]: I1009 15:37:50.485740 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 09 15:37:50 crc kubenswrapper[4762]: I1009 15:37:50.485906 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"libvirt-secret" Oct 09 15:37:50 crc kubenswrapper[4762]: I1009 15:37:50.495618 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-openstack-openstack-cell1-cczp4"] Oct 09 15:37:50 crc kubenswrapper[4762]: I1009 15:37:50.558930 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d4bed23-7613-448e-937a-c4d0467c3e57-libvirt-combined-ca-bundle\") pod \"libvirt-openstack-openstack-cell1-cczp4\" (UID: \"6d4bed23-7613-448e-937a-c4d0467c3e57\") " pod="openstack/libvirt-openstack-openstack-cell1-cczp4" Oct 09 15:37:50 crc kubenswrapper[4762]: I1009 15:37:50.559010 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/6d4bed23-7613-448e-937a-c4d0467c3e57-libvirt-secret-0\") pod \"libvirt-openstack-openstack-cell1-cczp4\" (UID: \"6d4bed23-7613-448e-937a-c4d0467c3e57\") " pod="openstack/libvirt-openstack-openstack-cell1-cczp4" Oct 09 15:37:50 crc kubenswrapper[4762]: I1009 15:37:50.559131 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5h2pq\" (UniqueName: \"kubernetes.io/projected/6d4bed23-7613-448e-937a-c4d0467c3e57-kube-api-access-5h2pq\") pod \"libvirt-openstack-openstack-cell1-cczp4\" (UID: \"6d4bed23-7613-448e-937a-c4d0467c3e57\") " pod="openstack/libvirt-openstack-openstack-cell1-cczp4" Oct 09 15:37:50 crc kubenswrapper[4762]: I1009 15:37:50.559156 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6d4bed23-7613-448e-937a-c4d0467c3e57-ssh-key\") pod \"libvirt-openstack-openstack-cell1-cczp4\" (UID: \"6d4bed23-7613-448e-937a-c4d0467c3e57\") " pod="openstack/libvirt-openstack-openstack-cell1-cczp4" Oct 09 15:37:50 crc kubenswrapper[4762]: I1009 15:37:50.559265 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6d4bed23-7613-448e-937a-c4d0467c3e57-inventory\") pod \"libvirt-openstack-openstack-cell1-cczp4\" (UID: \"6d4bed23-7613-448e-937a-c4d0467c3e57\") " pod="openstack/libvirt-openstack-openstack-cell1-cczp4" Oct 09 15:37:50 crc kubenswrapper[4762]: I1009 15:37:50.559314 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/6d4bed23-7613-448e-937a-c4d0467c3e57-ceph\") pod 
\"libvirt-openstack-openstack-cell1-cczp4\" (UID: \"6d4bed23-7613-448e-937a-c4d0467c3e57\") " pod="openstack/libvirt-openstack-openstack-cell1-cczp4" Oct 09 15:37:50 crc kubenswrapper[4762]: I1009 15:37:50.661617 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5h2pq\" (UniqueName: \"kubernetes.io/projected/6d4bed23-7613-448e-937a-c4d0467c3e57-kube-api-access-5h2pq\") pod \"libvirt-openstack-openstack-cell1-cczp4\" (UID: \"6d4bed23-7613-448e-937a-c4d0467c3e57\") " pod="openstack/libvirt-openstack-openstack-cell1-cczp4" Oct 09 15:37:50 crc kubenswrapper[4762]: I1009 15:37:50.661684 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6d4bed23-7613-448e-937a-c4d0467c3e57-ssh-key\") pod \"libvirt-openstack-openstack-cell1-cczp4\" (UID: \"6d4bed23-7613-448e-937a-c4d0467c3e57\") " pod="openstack/libvirt-openstack-openstack-cell1-cczp4" Oct 09 15:37:50 crc kubenswrapper[4762]: I1009 15:37:50.661785 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6d4bed23-7613-448e-937a-c4d0467c3e57-inventory\") pod \"libvirt-openstack-openstack-cell1-cczp4\" (UID: \"6d4bed23-7613-448e-937a-c4d0467c3e57\") " pod="openstack/libvirt-openstack-openstack-cell1-cczp4" Oct 09 15:37:50 crc kubenswrapper[4762]: I1009 15:37:50.661867 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/6d4bed23-7613-448e-937a-c4d0467c3e57-ceph\") pod \"libvirt-openstack-openstack-cell1-cczp4\" (UID: \"6d4bed23-7613-448e-937a-c4d0467c3e57\") " pod="openstack/libvirt-openstack-openstack-cell1-cczp4" Oct 09 15:37:50 crc kubenswrapper[4762]: I1009 15:37:50.661912 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d4bed23-7613-448e-937a-c4d0467c3e57-libvirt-combined-ca-bundle\") pod \"libvirt-openstack-openstack-cell1-cczp4\" (UID: \"6d4bed23-7613-448e-937a-c4d0467c3e57\") " pod="openstack/libvirt-openstack-openstack-cell1-cczp4" Oct 09 15:37:50 crc kubenswrapper[4762]: I1009 15:37:50.661950 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/6d4bed23-7613-448e-937a-c4d0467c3e57-libvirt-secret-0\") pod \"libvirt-openstack-openstack-cell1-cczp4\" (UID: \"6d4bed23-7613-448e-937a-c4d0467c3e57\") " pod="openstack/libvirt-openstack-openstack-cell1-cczp4" Oct 09 15:37:50 crc kubenswrapper[4762]: I1009 15:37:50.666376 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/6d4bed23-7613-448e-937a-c4d0467c3e57-ceph\") pod \"libvirt-openstack-openstack-cell1-cczp4\" (UID: \"6d4bed23-7613-448e-937a-c4d0467c3e57\") " pod="openstack/libvirt-openstack-openstack-cell1-cczp4" Oct 09 15:37:50 crc kubenswrapper[4762]: I1009 15:37:50.666916 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6d4bed23-7613-448e-937a-c4d0467c3e57-ssh-key\") pod \"libvirt-openstack-openstack-cell1-cczp4\" (UID: \"6d4bed23-7613-448e-937a-c4d0467c3e57\") " pod="openstack/libvirt-openstack-openstack-cell1-cczp4" Oct 09 15:37:50 crc kubenswrapper[4762]: I1009 15:37:50.667078 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-secret-0\" (UniqueName: 
\"kubernetes.io/secret/6d4bed23-7613-448e-937a-c4d0467c3e57-libvirt-secret-0\") pod \"libvirt-openstack-openstack-cell1-cczp4\" (UID: \"6d4bed23-7613-448e-937a-c4d0467c3e57\") " pod="openstack/libvirt-openstack-openstack-cell1-cczp4" Oct 09 15:37:50 crc kubenswrapper[4762]: I1009 15:37:50.667300 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d4bed23-7613-448e-937a-c4d0467c3e57-libvirt-combined-ca-bundle\") pod \"libvirt-openstack-openstack-cell1-cczp4\" (UID: \"6d4bed23-7613-448e-937a-c4d0467c3e57\") " pod="openstack/libvirt-openstack-openstack-cell1-cczp4" Oct 09 15:37:50 crc kubenswrapper[4762]: I1009 15:37:50.667506 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6d4bed23-7613-448e-937a-c4d0467c3e57-inventory\") pod \"libvirt-openstack-openstack-cell1-cczp4\" (UID: \"6d4bed23-7613-448e-937a-c4d0467c3e57\") " pod="openstack/libvirt-openstack-openstack-cell1-cczp4" Oct 09 15:37:50 crc kubenswrapper[4762]: I1009 15:37:50.680778 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5h2pq\" (UniqueName: \"kubernetes.io/projected/6d4bed23-7613-448e-937a-c4d0467c3e57-kube-api-access-5h2pq\") pod \"libvirt-openstack-openstack-cell1-cczp4\" (UID: \"6d4bed23-7613-448e-937a-c4d0467c3e57\") " pod="openstack/libvirt-openstack-openstack-cell1-cczp4" Oct 09 15:37:50 crc kubenswrapper[4762]: I1009 15:37:50.805577 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-openstack-openstack-cell1-cczp4" Oct 09 15:37:51 crc kubenswrapper[4762]: I1009 15:37:51.391113 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-openstack-openstack-cell1-cczp4"] Oct 09 15:37:51 crc kubenswrapper[4762]: I1009 15:37:51.398776 4762 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 09 15:37:52 crc kubenswrapper[4762]: I1009 15:37:52.399608 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-openstack-openstack-cell1-cczp4" event={"ID":"6d4bed23-7613-448e-937a-c4d0467c3e57","Type":"ContainerStarted","Data":"4a9695cbf764e0e577eb8b32178f0c10201d3ea74ad1eaefccb86231a2088403"} Oct 09 15:37:52 crc kubenswrapper[4762]: I1009 15:37:52.399974 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-openstack-openstack-cell1-cczp4" event={"ID":"6d4bed23-7613-448e-937a-c4d0467c3e57","Type":"ContainerStarted","Data":"6f72fb4c10feea83077f64ec8afb9e7aac4ceb4a497fe526a855d69173344810"} Oct 09 15:38:11 crc kubenswrapper[4762]: I1009 15:38:11.969901 4762 patch_prober.go:28] interesting pod/machine-config-daemon-5v6hv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 15:38:11 crc kubenswrapper[4762]: I1009 15:38:11.970421 4762 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 15:38:11 crc kubenswrapper[4762]: I1009 15:38:11.970477 4762 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" 
pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" Oct 09 15:38:11 crc kubenswrapper[4762]: I1009 15:38:11.971352 4762 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"140ab4dce1cda567b18618f3538dba93b49b9c190259abd3ff24f25be678f05d"} pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 09 15:38:11 crc kubenswrapper[4762]: I1009 15:38:11.971407 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" containerID="cri-o://140ab4dce1cda567b18618f3538dba93b49b9c190259abd3ff24f25be678f05d" gracePeriod=600 Oct 09 15:38:12 crc kubenswrapper[4762]: I1009 15:38:12.618849 4762 generic.go:334] "Generic (PLEG): container finished" podID="366049a3-acf6-488c-9f93-4557528d6d14" containerID="140ab4dce1cda567b18618f3538dba93b49b9c190259abd3ff24f25be678f05d" exitCode=0 Oct 09 15:38:12 crc kubenswrapper[4762]: I1009 15:38:12.618923 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" event={"ID":"366049a3-acf6-488c-9f93-4557528d6d14","Type":"ContainerDied","Data":"140ab4dce1cda567b18618f3538dba93b49b9c190259abd3ff24f25be678f05d"} Oct 09 15:38:12 crc kubenswrapper[4762]: I1009 15:38:12.619302 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" event={"ID":"366049a3-acf6-488c-9f93-4557528d6d14","Type":"ContainerStarted","Data":"207079ce18565c7a249f5d4caa9c694e48f23ecdfd20db7075fdc36cd1ceb1b7"} Oct 09 15:38:12 crc kubenswrapper[4762]: I1009 15:38:12.619325 4762 scope.go:117] "RemoveContainer" containerID="da3bd6add501ed77419b80f68b04419592f8477497ce7284b22aa3129677ebf6" Oct 09 15:38:12 crc kubenswrapper[4762]: I1009 15:38:12.638674 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/libvirt-openstack-openstack-cell1-cczp4" podStartSLOduration=22.151572536 podStartE2EDuration="22.638631133s" podCreationTimestamp="2025-10-09 15:37:50 +0000 UTC" firstStartedPulling="2025-10-09 15:37:51.398502691 +0000 UTC m=+7947.172293730" lastFinishedPulling="2025-10-09 15:37:51.885561288 +0000 UTC m=+7947.659352327" observedRunningTime="2025-10-09 15:37:52.417129093 +0000 UTC m=+7948.190920132" watchObservedRunningTime="2025-10-09 15:38:12.638631133 +0000 UTC m=+7968.412422182" Oct 09 15:40:19 crc kubenswrapper[4762]: I1009 15:40:19.085947 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-7zwb6"] Oct 09 15:40:19 crc kubenswrapper[4762]: I1009 15:40:19.091998 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-7zwb6" Oct 09 15:40:19 crc kubenswrapper[4762]: I1009 15:40:19.096252 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-7zwb6"] Oct 09 15:40:19 crc kubenswrapper[4762]: I1009 15:40:19.224728 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sbpzf\" (UniqueName: \"kubernetes.io/projected/5723b80a-e9a3-458c-a735-2768820e466d-kube-api-access-sbpzf\") pod \"redhat-operators-7zwb6\" (UID: \"5723b80a-e9a3-458c-a735-2768820e466d\") " pod="openshift-marketplace/redhat-operators-7zwb6" Oct 09 15:40:19 crc kubenswrapper[4762]: I1009 15:40:19.225108 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5723b80a-e9a3-458c-a735-2768820e466d-utilities\") pod \"redhat-operators-7zwb6\" (UID: \"5723b80a-e9a3-458c-a735-2768820e466d\") " pod="openshift-marketplace/redhat-operators-7zwb6" Oct 09 15:40:19 crc kubenswrapper[4762]: I1009 15:40:19.225375 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5723b80a-e9a3-458c-a735-2768820e466d-catalog-content\") pod \"redhat-operators-7zwb6\" (UID: \"5723b80a-e9a3-458c-a735-2768820e466d\") " pod="openshift-marketplace/redhat-operators-7zwb6" Oct 09 15:40:19 crc kubenswrapper[4762]: I1009 15:40:19.327853 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sbpzf\" (UniqueName: \"kubernetes.io/projected/5723b80a-e9a3-458c-a735-2768820e466d-kube-api-access-sbpzf\") pod \"redhat-operators-7zwb6\" (UID: \"5723b80a-e9a3-458c-a735-2768820e466d\") " pod="openshift-marketplace/redhat-operators-7zwb6" Oct 09 15:40:19 crc kubenswrapper[4762]: I1009 15:40:19.327989 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5723b80a-e9a3-458c-a735-2768820e466d-utilities\") pod \"redhat-operators-7zwb6\" (UID: \"5723b80a-e9a3-458c-a735-2768820e466d\") " pod="openshift-marketplace/redhat-operators-7zwb6" Oct 09 15:40:19 crc kubenswrapper[4762]: I1009 15:40:19.328083 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5723b80a-e9a3-458c-a735-2768820e466d-catalog-content\") pod \"redhat-operators-7zwb6\" (UID: \"5723b80a-e9a3-458c-a735-2768820e466d\") " pod="openshift-marketplace/redhat-operators-7zwb6" Oct 09 15:40:19 crc kubenswrapper[4762]: I1009 15:40:19.328619 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5723b80a-e9a3-458c-a735-2768820e466d-utilities\") pod \"redhat-operators-7zwb6\" (UID: \"5723b80a-e9a3-458c-a735-2768820e466d\") " pod="openshift-marketplace/redhat-operators-7zwb6" Oct 09 15:40:19 crc kubenswrapper[4762]: I1009 15:40:19.328764 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5723b80a-e9a3-458c-a735-2768820e466d-catalog-content\") pod \"redhat-operators-7zwb6\" (UID: \"5723b80a-e9a3-458c-a735-2768820e466d\") " pod="openshift-marketplace/redhat-operators-7zwb6" Oct 09 15:40:19 crc kubenswrapper[4762]: I1009 15:40:19.353473 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-sbpzf\" (UniqueName: \"kubernetes.io/projected/5723b80a-e9a3-458c-a735-2768820e466d-kube-api-access-sbpzf\") pod \"redhat-operators-7zwb6\" (UID: \"5723b80a-e9a3-458c-a735-2768820e466d\") " pod="openshift-marketplace/redhat-operators-7zwb6" Oct 09 15:40:19 crc kubenswrapper[4762]: I1009 15:40:19.414922 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-7zwb6" Oct 09 15:40:19 crc kubenswrapper[4762]: I1009 15:40:19.911212 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-7zwb6"] Oct 09 15:40:20 crc kubenswrapper[4762]: I1009 15:40:20.925123 4762 generic.go:334] "Generic (PLEG): container finished" podID="5723b80a-e9a3-458c-a735-2768820e466d" containerID="7fd1f1995b72bb9d2a96bae4feaa6a7669623bc864dfae4af0651224afc525f0" exitCode=0 Oct 09 15:40:20 crc kubenswrapper[4762]: I1009 15:40:20.925365 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7zwb6" event={"ID":"5723b80a-e9a3-458c-a735-2768820e466d","Type":"ContainerDied","Data":"7fd1f1995b72bb9d2a96bae4feaa6a7669623bc864dfae4af0651224afc525f0"} Oct 09 15:40:20 crc kubenswrapper[4762]: I1009 15:40:20.925804 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7zwb6" event={"ID":"5723b80a-e9a3-458c-a735-2768820e466d","Type":"ContainerStarted","Data":"6e822e239fdb764890056e8a6717d5790846aefb44386ea14a5e71cd86f963dc"} Oct 09 15:40:22 crc kubenswrapper[4762]: I1009 15:40:22.949067 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7zwb6" event={"ID":"5723b80a-e9a3-458c-a735-2768820e466d","Type":"ContainerStarted","Data":"65c7d4ee1049233bec6f26daa3b64fb294dc6d4cd8dcaaaca17310c87b3f136d"} Oct 09 15:40:25 crc kubenswrapper[4762]: I1009 15:40:25.984066 4762 generic.go:334] "Generic (PLEG): container finished" podID="5723b80a-e9a3-458c-a735-2768820e466d" containerID="65c7d4ee1049233bec6f26daa3b64fb294dc6d4cd8dcaaaca17310c87b3f136d" exitCode=0 Oct 09 15:40:25 crc kubenswrapper[4762]: I1009 15:40:25.984145 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7zwb6" event={"ID":"5723b80a-e9a3-458c-a735-2768820e466d","Type":"ContainerDied","Data":"65c7d4ee1049233bec6f26daa3b64fb294dc6d4cd8dcaaaca17310c87b3f136d"} Oct 09 15:40:26 crc kubenswrapper[4762]: I1009 15:40:26.999012 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7zwb6" event={"ID":"5723b80a-e9a3-458c-a735-2768820e466d","Type":"ContainerStarted","Data":"d531c44133ff9dabe88487986fcec9affb053a5e95f3f241f77d04e42f12ef66"} Oct 09 15:40:27 crc kubenswrapper[4762]: I1009 15:40:27.026965 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-7zwb6" podStartSLOduration=2.265309026 podStartE2EDuration="8.026941376s" podCreationTimestamp="2025-10-09 15:40:19 +0000 UTC" firstStartedPulling="2025-10-09 15:40:20.927251777 +0000 UTC m=+8096.701042816" lastFinishedPulling="2025-10-09 15:40:26.688884127 +0000 UTC m=+8102.462675166" observedRunningTime="2025-10-09 15:40:27.017288563 +0000 UTC m=+8102.791079602" watchObservedRunningTime="2025-10-09 15:40:27.026941376 +0000 UTC m=+8102.800732415" Oct 09 15:40:29 crc kubenswrapper[4762]: I1009 15:40:29.416167 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-7zwb6" 
Oct 09 15:40:29 crc kubenswrapper[4762]: I1009 15:40:29.416779 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-7zwb6" Oct 09 15:40:30 crc kubenswrapper[4762]: I1009 15:40:30.481164 4762 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-7zwb6" podUID="5723b80a-e9a3-458c-a735-2768820e466d" containerName="registry-server" probeResult="failure" output=< Oct 09 15:40:30 crc kubenswrapper[4762]: timeout: failed to connect service ":50051" within 1s Oct 09 15:40:30 crc kubenswrapper[4762]: > Oct 09 15:40:36 crc kubenswrapper[4762]: I1009 15:40:36.054594 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-m67xs"] Oct 09 15:40:36 crc kubenswrapper[4762]: I1009 15:40:36.061205 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-m67xs" Oct 09 15:40:36 crc kubenswrapper[4762]: I1009 15:40:36.081508 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-m67xs"] Oct 09 15:40:36 crc kubenswrapper[4762]: I1009 15:40:36.108209 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hs72p\" (UniqueName: \"kubernetes.io/projected/c8be5ce1-ba13-42bf-8294-b877d295c395-kube-api-access-hs72p\") pod \"redhat-marketplace-m67xs\" (UID: \"c8be5ce1-ba13-42bf-8294-b877d295c395\") " pod="openshift-marketplace/redhat-marketplace-m67xs" Oct 09 15:40:36 crc kubenswrapper[4762]: I1009 15:40:36.108274 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c8be5ce1-ba13-42bf-8294-b877d295c395-catalog-content\") pod \"redhat-marketplace-m67xs\" (UID: \"c8be5ce1-ba13-42bf-8294-b877d295c395\") " pod="openshift-marketplace/redhat-marketplace-m67xs" Oct 09 15:40:36 crc kubenswrapper[4762]: I1009 15:40:36.108445 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c8be5ce1-ba13-42bf-8294-b877d295c395-utilities\") pod \"redhat-marketplace-m67xs\" (UID: \"c8be5ce1-ba13-42bf-8294-b877d295c395\") " pod="openshift-marketplace/redhat-marketplace-m67xs" Oct 09 15:40:36 crc kubenswrapper[4762]: I1009 15:40:36.210854 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c8be5ce1-ba13-42bf-8294-b877d295c395-utilities\") pod \"redhat-marketplace-m67xs\" (UID: \"c8be5ce1-ba13-42bf-8294-b877d295c395\") " pod="openshift-marketplace/redhat-marketplace-m67xs" Oct 09 15:40:36 crc kubenswrapper[4762]: I1009 15:40:36.210931 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hs72p\" (UniqueName: \"kubernetes.io/projected/c8be5ce1-ba13-42bf-8294-b877d295c395-kube-api-access-hs72p\") pod \"redhat-marketplace-m67xs\" (UID: \"c8be5ce1-ba13-42bf-8294-b877d295c395\") " pod="openshift-marketplace/redhat-marketplace-m67xs" Oct 09 15:40:36 crc kubenswrapper[4762]: I1009 15:40:36.210960 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c8be5ce1-ba13-42bf-8294-b877d295c395-catalog-content\") pod \"redhat-marketplace-m67xs\" (UID: \"c8be5ce1-ba13-42bf-8294-b877d295c395\") " 
pod="openshift-marketplace/redhat-marketplace-m67xs" Oct 09 15:40:36 crc kubenswrapper[4762]: I1009 15:40:36.211453 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c8be5ce1-ba13-42bf-8294-b877d295c395-catalog-content\") pod \"redhat-marketplace-m67xs\" (UID: \"c8be5ce1-ba13-42bf-8294-b877d295c395\") " pod="openshift-marketplace/redhat-marketplace-m67xs" Oct 09 15:40:36 crc kubenswrapper[4762]: I1009 15:40:36.211746 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c8be5ce1-ba13-42bf-8294-b877d295c395-utilities\") pod \"redhat-marketplace-m67xs\" (UID: \"c8be5ce1-ba13-42bf-8294-b877d295c395\") " pod="openshift-marketplace/redhat-marketplace-m67xs" Oct 09 15:40:36 crc kubenswrapper[4762]: I1009 15:40:36.236210 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-pdqbd"] Oct 09 15:40:36 crc kubenswrapper[4762]: I1009 15:40:36.239159 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-pdqbd" Oct 09 15:40:36 crc kubenswrapper[4762]: I1009 15:40:36.249367 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hs72p\" (UniqueName: \"kubernetes.io/projected/c8be5ce1-ba13-42bf-8294-b877d295c395-kube-api-access-hs72p\") pod \"redhat-marketplace-m67xs\" (UID: \"c8be5ce1-ba13-42bf-8294-b877d295c395\") " pod="openshift-marketplace/redhat-marketplace-m67xs" Oct 09 15:40:36 crc kubenswrapper[4762]: I1009 15:40:36.259415 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-pdqbd"] Oct 09 15:40:36 crc kubenswrapper[4762]: I1009 15:40:36.313148 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0d3e2095-05e4-4253-ade6-ec7119ea97af-utilities\") pod \"certified-operators-pdqbd\" (UID: \"0d3e2095-05e4-4253-ade6-ec7119ea97af\") " pod="openshift-marketplace/certified-operators-pdqbd" Oct 09 15:40:36 crc kubenswrapper[4762]: I1009 15:40:36.313240 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-42pvk\" (UniqueName: \"kubernetes.io/projected/0d3e2095-05e4-4253-ade6-ec7119ea97af-kube-api-access-42pvk\") pod \"certified-operators-pdqbd\" (UID: \"0d3e2095-05e4-4253-ade6-ec7119ea97af\") " pod="openshift-marketplace/certified-operators-pdqbd" Oct 09 15:40:36 crc kubenswrapper[4762]: I1009 15:40:36.313696 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0d3e2095-05e4-4253-ade6-ec7119ea97af-catalog-content\") pod \"certified-operators-pdqbd\" (UID: \"0d3e2095-05e4-4253-ade6-ec7119ea97af\") " pod="openshift-marketplace/certified-operators-pdqbd" Oct 09 15:40:36 crc kubenswrapper[4762]: I1009 15:40:36.393450 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-m67xs" Oct 09 15:40:36 crc kubenswrapper[4762]: I1009 15:40:36.418110 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-42pvk\" (UniqueName: \"kubernetes.io/projected/0d3e2095-05e4-4253-ade6-ec7119ea97af-kube-api-access-42pvk\") pod \"certified-operators-pdqbd\" (UID: \"0d3e2095-05e4-4253-ade6-ec7119ea97af\") " pod="openshift-marketplace/certified-operators-pdqbd" Oct 09 15:40:36 crc kubenswrapper[4762]: I1009 15:40:36.418306 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0d3e2095-05e4-4253-ade6-ec7119ea97af-catalog-content\") pod \"certified-operators-pdqbd\" (UID: \"0d3e2095-05e4-4253-ade6-ec7119ea97af\") " pod="openshift-marketplace/certified-operators-pdqbd" Oct 09 15:40:36 crc kubenswrapper[4762]: I1009 15:40:36.418362 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0d3e2095-05e4-4253-ade6-ec7119ea97af-utilities\") pod \"certified-operators-pdqbd\" (UID: \"0d3e2095-05e4-4253-ade6-ec7119ea97af\") " pod="openshift-marketplace/certified-operators-pdqbd" Oct 09 15:40:36 crc kubenswrapper[4762]: I1009 15:40:36.418812 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0d3e2095-05e4-4253-ade6-ec7119ea97af-utilities\") pod \"certified-operators-pdqbd\" (UID: \"0d3e2095-05e4-4253-ade6-ec7119ea97af\") " pod="openshift-marketplace/certified-operators-pdqbd" Oct 09 15:40:36 crc kubenswrapper[4762]: I1009 15:40:36.419261 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0d3e2095-05e4-4253-ade6-ec7119ea97af-catalog-content\") pod \"certified-operators-pdqbd\" (UID: \"0d3e2095-05e4-4253-ade6-ec7119ea97af\") " pod="openshift-marketplace/certified-operators-pdqbd" Oct 09 15:40:36 crc kubenswrapper[4762]: I1009 15:40:36.438149 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-42pvk\" (UniqueName: \"kubernetes.io/projected/0d3e2095-05e4-4253-ade6-ec7119ea97af-kube-api-access-42pvk\") pod \"certified-operators-pdqbd\" (UID: \"0d3e2095-05e4-4253-ade6-ec7119ea97af\") " pod="openshift-marketplace/certified-operators-pdqbd" Oct 09 15:40:36 crc kubenswrapper[4762]: I1009 15:40:36.621484 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-pdqbd" Oct 09 15:40:37 crc kubenswrapper[4762]: I1009 15:40:37.011342 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-m67xs"] Oct 09 15:40:37 crc kubenswrapper[4762]: I1009 15:40:37.125622 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-pdqbd"] Oct 09 15:40:37 crc kubenswrapper[4762]: I1009 15:40:37.141103 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-m67xs" event={"ID":"c8be5ce1-ba13-42bf-8294-b877d295c395","Type":"ContainerStarted","Data":"419bbf9759fe1a45c0b024e3a077ba830f1396340f795de2799fd7cd9c1306fd"} Oct 09 15:40:38 crc kubenswrapper[4762]: I1009 15:40:38.158396 4762 generic.go:334] "Generic (PLEG): container finished" podID="0d3e2095-05e4-4253-ade6-ec7119ea97af" containerID="dac267ba5c4d79b3b8112359fd5727cd756c9cd6a5d50d3205a62e779ad87197" exitCode=0 Oct 09 15:40:38 crc kubenswrapper[4762]: I1009 15:40:38.158522 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pdqbd" event={"ID":"0d3e2095-05e4-4253-ade6-ec7119ea97af","Type":"ContainerDied","Data":"dac267ba5c4d79b3b8112359fd5727cd756c9cd6a5d50d3205a62e779ad87197"} Oct 09 15:40:38 crc kubenswrapper[4762]: I1009 15:40:38.158969 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pdqbd" event={"ID":"0d3e2095-05e4-4253-ade6-ec7119ea97af","Type":"ContainerStarted","Data":"5e8f85ee0bf965379199086459f870d1c3aab787f346823bd82a2c4c0ef77e0a"} Oct 09 15:40:38 crc kubenswrapper[4762]: I1009 15:40:38.163152 4762 generic.go:334] "Generic (PLEG): container finished" podID="c8be5ce1-ba13-42bf-8294-b877d295c395" containerID="56509dcc4ba0e1df60201bf204d33625b577fc6c2365d48c3faee46070e67034" exitCode=0 Oct 09 15:40:38 crc kubenswrapper[4762]: I1009 15:40:38.163209 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-m67xs" event={"ID":"c8be5ce1-ba13-42bf-8294-b877d295c395","Type":"ContainerDied","Data":"56509dcc4ba0e1df60201bf204d33625b577fc6c2365d48c3faee46070e67034"} Oct 09 15:40:39 crc kubenswrapper[4762]: I1009 15:40:39.183208 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pdqbd" event={"ID":"0d3e2095-05e4-4253-ade6-ec7119ea97af","Type":"ContainerStarted","Data":"151b9d40748eb916687dde87deddf1d23edabde2e18c2fba127e1cec851b29a0"} Oct 09 15:40:39 crc kubenswrapper[4762]: I1009 15:40:39.466134 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-7zwb6" Oct 09 15:40:39 crc kubenswrapper[4762]: I1009 15:40:39.521916 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-7zwb6" Oct 09 15:40:40 crc kubenswrapper[4762]: I1009 15:40:40.198734 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-m67xs" event={"ID":"c8be5ce1-ba13-42bf-8294-b877d295c395","Type":"ContainerStarted","Data":"2e8c94a05efee85653a0da36f2f0971d06a2251a879137b72d40527726ecdf70"} Oct 09 15:40:41 crc kubenswrapper[4762]: I1009 15:40:41.211313 4762 generic.go:334] "Generic (PLEG): container finished" podID="c8be5ce1-ba13-42bf-8294-b877d295c395" containerID="2e8c94a05efee85653a0da36f2f0971d06a2251a879137b72d40527726ecdf70" exitCode=0 Oct 09 15:40:41 crc kubenswrapper[4762]: I1009 
15:40:41.211399 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-m67xs" event={"ID":"c8be5ce1-ba13-42bf-8294-b877d295c395","Type":"ContainerDied","Data":"2e8c94a05efee85653a0da36f2f0971d06a2251a879137b72d40527726ecdf70"} Oct 09 15:40:41 crc kubenswrapper[4762]: I1009 15:40:41.215749 4762 generic.go:334] "Generic (PLEG): container finished" podID="0d3e2095-05e4-4253-ade6-ec7119ea97af" containerID="151b9d40748eb916687dde87deddf1d23edabde2e18c2fba127e1cec851b29a0" exitCode=0 Oct 09 15:40:41 crc kubenswrapper[4762]: I1009 15:40:41.215778 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pdqbd" event={"ID":"0d3e2095-05e4-4253-ade6-ec7119ea97af","Type":"ContainerDied","Data":"151b9d40748eb916687dde87deddf1d23edabde2e18c2fba127e1cec851b29a0"} Oct 09 15:40:41 crc kubenswrapper[4762]: I1009 15:40:41.970211 4762 patch_prober.go:28] interesting pod/machine-config-daemon-5v6hv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 15:40:41 crc kubenswrapper[4762]: I1009 15:40:41.970673 4762 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 15:40:42 crc kubenswrapper[4762]: I1009 15:40:42.226946 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-m67xs" event={"ID":"c8be5ce1-ba13-42bf-8294-b877d295c395","Type":"ContainerStarted","Data":"1a2cad583173ed52aabe08201bb16b426131f9bc27687a4f92fd405caa0d4370"} Oct 09 15:40:42 crc kubenswrapper[4762]: I1009 15:40:42.230598 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pdqbd" event={"ID":"0d3e2095-05e4-4253-ade6-ec7119ea97af","Type":"ContainerStarted","Data":"3d94bba23711645901ce882f02a40a9c9811d1b9aab97461694a0206f30c0f1c"} Oct 09 15:40:42 crc kubenswrapper[4762]: I1009 15:40:42.253260 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-m67xs" podStartSLOduration=2.591742496 podStartE2EDuration="6.253236381s" podCreationTimestamp="2025-10-09 15:40:36 +0000 UTC" firstStartedPulling="2025-10-09 15:40:38.165296569 +0000 UTC m=+8113.939087608" lastFinishedPulling="2025-10-09 15:40:41.826790454 +0000 UTC m=+8117.600581493" observedRunningTime="2025-10-09 15:40:42.24291796 +0000 UTC m=+8118.016708989" watchObservedRunningTime="2025-10-09 15:40:42.253236381 +0000 UTC m=+8118.027027440" Oct 09 15:40:42 crc kubenswrapper[4762]: I1009 15:40:42.276749 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-pdqbd" podStartSLOduration=2.774354467 podStartE2EDuration="6.276728537s" podCreationTimestamp="2025-10-09 15:40:36 +0000 UTC" firstStartedPulling="2025-10-09 15:40:38.162168437 +0000 UTC m=+8113.935959476" lastFinishedPulling="2025-10-09 15:40:41.664542507 +0000 UTC m=+8117.438333546" observedRunningTime="2025-10-09 15:40:42.267783923 +0000 UTC m=+8118.041574962" watchObservedRunningTime="2025-10-09 15:40:42.276728537 +0000 UTC m=+8118.050519566" Oct 09 15:40:42 crc kubenswrapper[4762]: 
I1009 15:40:42.628329 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-7zwb6"] Oct 09 15:40:42 crc kubenswrapper[4762]: I1009 15:40:42.628605 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-7zwb6" podUID="5723b80a-e9a3-458c-a735-2768820e466d" containerName="registry-server" containerID="cri-o://d531c44133ff9dabe88487986fcec9affb053a5e95f3f241f77d04e42f12ef66" gracePeriod=2 Oct 09 15:40:43 crc kubenswrapper[4762]: I1009 15:40:43.148150 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-7zwb6" Oct 09 15:40:43 crc kubenswrapper[4762]: I1009 15:40:43.174866 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sbpzf\" (UniqueName: \"kubernetes.io/projected/5723b80a-e9a3-458c-a735-2768820e466d-kube-api-access-sbpzf\") pod \"5723b80a-e9a3-458c-a735-2768820e466d\" (UID: \"5723b80a-e9a3-458c-a735-2768820e466d\") " Oct 09 15:40:43 crc kubenswrapper[4762]: I1009 15:40:43.174931 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5723b80a-e9a3-458c-a735-2768820e466d-catalog-content\") pod \"5723b80a-e9a3-458c-a735-2768820e466d\" (UID: \"5723b80a-e9a3-458c-a735-2768820e466d\") " Oct 09 15:40:43 crc kubenswrapper[4762]: I1009 15:40:43.175094 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5723b80a-e9a3-458c-a735-2768820e466d-utilities\") pod \"5723b80a-e9a3-458c-a735-2768820e466d\" (UID: \"5723b80a-e9a3-458c-a735-2768820e466d\") " Oct 09 15:40:43 crc kubenswrapper[4762]: I1009 15:40:43.175737 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5723b80a-e9a3-458c-a735-2768820e466d-utilities" (OuterVolumeSpecName: "utilities") pod "5723b80a-e9a3-458c-a735-2768820e466d" (UID: "5723b80a-e9a3-458c-a735-2768820e466d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 15:40:43 crc kubenswrapper[4762]: I1009 15:40:43.181623 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5723b80a-e9a3-458c-a735-2768820e466d-kube-api-access-sbpzf" (OuterVolumeSpecName: "kube-api-access-sbpzf") pod "5723b80a-e9a3-458c-a735-2768820e466d" (UID: "5723b80a-e9a3-458c-a735-2768820e466d"). InnerVolumeSpecName "kube-api-access-sbpzf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 15:40:43 crc kubenswrapper[4762]: I1009 15:40:43.246597 4762 generic.go:334] "Generic (PLEG): container finished" podID="5723b80a-e9a3-458c-a735-2768820e466d" containerID="d531c44133ff9dabe88487986fcec9affb053a5e95f3f241f77d04e42f12ef66" exitCode=0 Oct 09 15:40:43 crc kubenswrapper[4762]: I1009 15:40:43.248044 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-7zwb6" Oct 09 15:40:43 crc kubenswrapper[4762]: I1009 15:40:43.248026 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7zwb6" event={"ID":"5723b80a-e9a3-458c-a735-2768820e466d","Type":"ContainerDied","Data":"d531c44133ff9dabe88487986fcec9affb053a5e95f3f241f77d04e42f12ef66"} Oct 09 15:40:43 crc kubenswrapper[4762]: I1009 15:40:43.248120 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7zwb6" event={"ID":"5723b80a-e9a3-458c-a735-2768820e466d","Type":"ContainerDied","Data":"6e822e239fdb764890056e8a6717d5790846aefb44386ea14a5e71cd86f963dc"} Oct 09 15:40:43 crc kubenswrapper[4762]: I1009 15:40:43.248143 4762 scope.go:117] "RemoveContainer" containerID="d531c44133ff9dabe88487986fcec9affb053a5e95f3f241f77d04e42f12ef66" Oct 09 15:40:43 crc kubenswrapper[4762]: I1009 15:40:43.278366 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sbpzf\" (UniqueName: \"kubernetes.io/projected/5723b80a-e9a3-458c-a735-2768820e466d-kube-api-access-sbpzf\") on node \"crc\" DevicePath \"\"" Oct 09 15:40:43 crc kubenswrapper[4762]: I1009 15:40:43.278681 4762 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5723b80a-e9a3-458c-a735-2768820e466d-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 15:40:43 crc kubenswrapper[4762]: I1009 15:40:43.282204 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5723b80a-e9a3-458c-a735-2768820e466d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5723b80a-e9a3-458c-a735-2768820e466d" (UID: "5723b80a-e9a3-458c-a735-2768820e466d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 15:40:43 crc kubenswrapper[4762]: I1009 15:40:43.284687 4762 scope.go:117] "RemoveContainer" containerID="65c7d4ee1049233bec6f26daa3b64fb294dc6d4cd8dcaaaca17310c87b3f136d" Oct 09 15:40:43 crc kubenswrapper[4762]: I1009 15:40:43.305521 4762 scope.go:117] "RemoveContainer" containerID="7fd1f1995b72bb9d2a96bae4feaa6a7669623bc864dfae4af0651224afc525f0" Oct 09 15:40:43 crc kubenswrapper[4762]: I1009 15:40:43.351224 4762 scope.go:117] "RemoveContainer" containerID="d531c44133ff9dabe88487986fcec9affb053a5e95f3f241f77d04e42f12ef66" Oct 09 15:40:43 crc kubenswrapper[4762]: E1009 15:40:43.351620 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d531c44133ff9dabe88487986fcec9affb053a5e95f3f241f77d04e42f12ef66\": container with ID starting with d531c44133ff9dabe88487986fcec9affb053a5e95f3f241f77d04e42f12ef66 not found: ID does not exist" containerID="d531c44133ff9dabe88487986fcec9affb053a5e95f3f241f77d04e42f12ef66" Oct 09 15:40:43 crc kubenswrapper[4762]: I1009 15:40:43.351756 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d531c44133ff9dabe88487986fcec9affb053a5e95f3f241f77d04e42f12ef66"} err="failed to get container status \"d531c44133ff9dabe88487986fcec9affb053a5e95f3f241f77d04e42f12ef66\": rpc error: code = NotFound desc = could not find container \"d531c44133ff9dabe88487986fcec9affb053a5e95f3f241f77d04e42f12ef66\": container with ID starting with d531c44133ff9dabe88487986fcec9affb053a5e95f3f241f77d04e42f12ef66 not found: ID does not exist" Oct 09 15:40:43 crc kubenswrapper[4762]: I1009 15:40:43.351834 4762 scope.go:117] "RemoveContainer" containerID="65c7d4ee1049233bec6f26daa3b64fb294dc6d4cd8dcaaaca17310c87b3f136d" Oct 09 15:40:43 crc kubenswrapper[4762]: E1009 15:40:43.352143 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"65c7d4ee1049233bec6f26daa3b64fb294dc6d4cd8dcaaaca17310c87b3f136d\": container with ID starting with 65c7d4ee1049233bec6f26daa3b64fb294dc6d4cd8dcaaaca17310c87b3f136d not found: ID does not exist" containerID="65c7d4ee1049233bec6f26daa3b64fb294dc6d4cd8dcaaaca17310c87b3f136d" Oct 09 15:40:43 crc kubenswrapper[4762]: I1009 15:40:43.352242 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"65c7d4ee1049233bec6f26daa3b64fb294dc6d4cd8dcaaaca17310c87b3f136d"} err="failed to get container status \"65c7d4ee1049233bec6f26daa3b64fb294dc6d4cd8dcaaaca17310c87b3f136d\": rpc error: code = NotFound desc = could not find container \"65c7d4ee1049233bec6f26daa3b64fb294dc6d4cd8dcaaaca17310c87b3f136d\": container with ID starting with 65c7d4ee1049233bec6f26daa3b64fb294dc6d4cd8dcaaaca17310c87b3f136d not found: ID does not exist" Oct 09 15:40:43 crc kubenswrapper[4762]: I1009 15:40:43.352310 4762 scope.go:117] "RemoveContainer" containerID="7fd1f1995b72bb9d2a96bae4feaa6a7669623bc864dfae4af0651224afc525f0" Oct 09 15:40:43 crc kubenswrapper[4762]: E1009 15:40:43.352557 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7fd1f1995b72bb9d2a96bae4feaa6a7669623bc864dfae4af0651224afc525f0\": container with ID starting with 7fd1f1995b72bb9d2a96bae4feaa6a7669623bc864dfae4af0651224afc525f0 not found: ID does not exist" containerID="7fd1f1995b72bb9d2a96bae4feaa6a7669623bc864dfae4af0651224afc525f0" Oct 09 15:40:43 crc 
Oct 09 15:40:43 crc kubenswrapper[4762]: I1009 15:40:43.352658 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7fd1f1995b72bb9d2a96bae4feaa6a7669623bc864dfae4af0651224afc525f0"} err="failed to get container status \"7fd1f1995b72bb9d2a96bae4feaa6a7669623bc864dfae4af0651224afc525f0\": rpc error: code = NotFound desc = could not find container \"7fd1f1995b72bb9d2a96bae4feaa6a7669623bc864dfae4af0651224afc525f0\": container with ID starting with 7fd1f1995b72bb9d2a96bae4feaa6a7669623bc864dfae4af0651224afc525f0 not found: ID does not exist"
Oct 09 15:40:43 crc kubenswrapper[4762]: I1009 15:40:43.380114 4762 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5723b80a-e9a3-458c-a735-2768820e466d-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 09 15:40:43 crc kubenswrapper[4762]: I1009 15:40:43.584195 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-7zwb6"]
Oct 09 15:40:43 crc kubenswrapper[4762]: I1009 15:40:43.593428 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-7zwb6"]
Oct 09 15:40:44 crc kubenswrapper[4762]: I1009 15:40:44.980481 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5723b80a-e9a3-458c-a735-2768820e466d" path="/var/lib/kubelet/pods/5723b80a-e9a3-458c-a735-2768820e466d/volumes"
Oct 09 15:40:46 crc kubenswrapper[4762]: I1009 15:40:46.394522 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-m67xs"
Oct 09 15:40:46 crc kubenswrapper[4762]: I1009 15:40:46.394938 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-m67xs"
Oct 09 15:40:46 crc kubenswrapper[4762]: I1009 15:40:46.454524 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-m67xs"
Oct 09 15:40:46 crc kubenswrapper[4762]: I1009 15:40:46.624119 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-pdqbd"
Oct 09 15:40:46 crc kubenswrapper[4762]: I1009 15:40:46.624240 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-pdqbd"
Oct 09 15:40:46 crc kubenswrapper[4762]: I1009 15:40:46.680083 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-pdqbd"
Oct 09 15:40:47 crc kubenswrapper[4762]: I1009 15:40:47.357497 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-m67xs"
Oct 09 15:40:47 crc kubenswrapper[4762]: I1009 15:40:47.374550 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-pdqbd"
Oct 09 15:40:49 crc kubenswrapper[4762]: I1009 15:40:49.032680 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-m67xs"]
Oct 09 15:40:49 crc kubenswrapper[4762]: I1009 15:40:49.228747 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-pdqbd"]
Oct 09 15:40:49 crc kubenswrapper[4762]: I1009 15:40:49.327936 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-m67xs" podUID="c8be5ce1-ba13-42bf-8294-b877d295c395" containerName="registry-server" containerID="cri-o://1a2cad583173ed52aabe08201bb16b426131f9bc27687a4f92fd405caa0d4370" gracePeriod=2
Oct 09 15:40:49 crc kubenswrapper[4762]: I1009 15:40:49.798999 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-m67xs"
Oct 09 15:40:49 crc kubenswrapper[4762]: I1009 15:40:49.910120 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c8be5ce1-ba13-42bf-8294-b877d295c395-utilities\") pod \"c8be5ce1-ba13-42bf-8294-b877d295c395\" (UID: \"c8be5ce1-ba13-42bf-8294-b877d295c395\") "
Oct 09 15:40:49 crc kubenswrapper[4762]: I1009 15:40:49.910258 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hs72p\" (UniqueName: \"kubernetes.io/projected/c8be5ce1-ba13-42bf-8294-b877d295c395-kube-api-access-hs72p\") pod \"c8be5ce1-ba13-42bf-8294-b877d295c395\" (UID: \"c8be5ce1-ba13-42bf-8294-b877d295c395\") "
Oct 09 15:40:49 crc kubenswrapper[4762]: I1009 15:40:49.910362 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c8be5ce1-ba13-42bf-8294-b877d295c395-catalog-content\") pod \"c8be5ce1-ba13-42bf-8294-b877d295c395\" (UID: \"c8be5ce1-ba13-42bf-8294-b877d295c395\") "
Oct 09 15:40:49 crc kubenswrapper[4762]: I1009 15:40:49.910997 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c8be5ce1-ba13-42bf-8294-b877d295c395-utilities" (OuterVolumeSpecName: "utilities") pod "c8be5ce1-ba13-42bf-8294-b877d295c395" (UID: "c8be5ce1-ba13-42bf-8294-b877d295c395"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 09 15:40:49 crc kubenswrapper[4762]: I1009 15:40:49.919729 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c8be5ce1-ba13-42bf-8294-b877d295c395-kube-api-access-hs72p" (OuterVolumeSpecName: "kube-api-access-hs72p") pod "c8be5ce1-ba13-42bf-8294-b877d295c395" (UID: "c8be5ce1-ba13-42bf-8294-b877d295c395"). InnerVolumeSpecName "kube-api-access-hs72p". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 09 15:40:49 crc kubenswrapper[4762]: I1009 15:40:49.925739 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c8be5ce1-ba13-42bf-8294-b877d295c395-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c8be5ce1-ba13-42bf-8294-b877d295c395" (UID: "c8be5ce1-ba13-42bf-8294-b877d295c395"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 09 15:40:50 crc kubenswrapper[4762]: I1009 15:40:50.013330 4762 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c8be5ce1-ba13-42bf-8294-b877d295c395-utilities\") on node \"crc\" DevicePath \"\""
Oct 09 15:40:50 crc kubenswrapper[4762]: I1009 15:40:50.013376 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hs72p\" (UniqueName: \"kubernetes.io/projected/c8be5ce1-ba13-42bf-8294-b877d295c395-kube-api-access-hs72p\") on node \"crc\" DevicePath \"\""
Oct 09 15:40:50 crc kubenswrapper[4762]: I1009 15:40:50.013389 4762 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c8be5ce1-ba13-42bf-8294-b877d295c395-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 09 15:40:50 crc kubenswrapper[4762]: I1009 15:40:50.341706 4762 generic.go:334] "Generic (PLEG): container finished" podID="c8be5ce1-ba13-42bf-8294-b877d295c395" containerID="1a2cad583173ed52aabe08201bb16b426131f9bc27687a4f92fd405caa0d4370" exitCode=0
Oct 09 15:40:50 crc kubenswrapper[4762]: I1009 15:40:50.341743 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-m67xs" event={"ID":"c8be5ce1-ba13-42bf-8294-b877d295c395","Type":"ContainerDied","Data":"1a2cad583173ed52aabe08201bb16b426131f9bc27687a4f92fd405caa0d4370"}
Oct 09 15:40:50 crc kubenswrapper[4762]: I1009 15:40:50.342174 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-m67xs" event={"ID":"c8be5ce1-ba13-42bf-8294-b877d295c395","Type":"ContainerDied","Data":"419bbf9759fe1a45c0b024e3a077ba830f1396340f795de2799fd7cd9c1306fd"}
Oct 09 15:40:50 crc kubenswrapper[4762]: I1009 15:40:50.342205 4762 scope.go:117] "RemoveContainer" containerID="1a2cad583173ed52aabe08201bb16b426131f9bc27687a4f92fd405caa0d4370"
Oct 09 15:40:50 crc kubenswrapper[4762]: I1009 15:40:50.341765 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-m67xs"
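The "Killing container with a grace period ... gracePeriod=2" entry above is the standard two-step termination: signal the container, give it the grace period to exit on its own (it did here, exitCode=0), and only then force-kill. A minimal Go sketch of that pattern, using a local child process as a stand-in for the container; the helper name and the use of os/exec are illustrative assumptions, not kubelet source:

    package main

    import (
        "fmt"
        "os/exec"
        "syscall"
        "time"
    )

    // killWithGrace asks the process to stop with SIGTERM, waits up to
    // grace for it to exit, then escalates to SIGKILL (cmd.Process.Kill).
    func killWithGrace(cmd *exec.Cmd, grace time.Duration) error {
        if err := cmd.Process.Signal(syscall.SIGTERM); err != nil {
            return err
        }
        done := make(chan error, 1)
        go func() { done <- cmd.Wait() }()
        select {
        case err := <-done:
            return err // exited within the grace period, as in the log
        case <-time.After(grace):
            return cmd.Process.Kill() // grace expired; force termination
        }
    }

    func main() {
        cmd := exec.Command("sleep", "60")
        if err := cmd.Start(); err != nil {
            panic(err)
        }
        fmt.Println(killWithGrace(cmd, 2*time.Second)) // gracePeriod=2, as logged
    }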
Oct 09 15:40:50 crc kubenswrapper[4762]: I1009 15:40:50.342305 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-pdqbd" podUID="0d3e2095-05e4-4253-ade6-ec7119ea97af" containerName="registry-server" containerID="cri-o://3d94bba23711645901ce882f02a40a9c9811d1b9aab97461694a0206f30c0f1c" gracePeriod=2
Oct 09 15:40:50 crc kubenswrapper[4762]: I1009 15:40:50.372930 4762 scope.go:117] "RemoveContainer" containerID="2e8c94a05efee85653a0da36f2f0971d06a2251a879137b72d40527726ecdf70"
Oct 09 15:40:50 crc kubenswrapper[4762]: I1009 15:40:50.376254 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-m67xs"]
Oct 09 15:40:50 crc kubenswrapper[4762]: I1009 15:40:50.385897 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-m67xs"]
Oct 09 15:40:50 crc kubenswrapper[4762]: I1009 15:40:50.392864 4762 scope.go:117] "RemoveContainer" containerID="56509dcc4ba0e1df60201bf204d33625b577fc6c2365d48c3faee46070e67034"
Oct 09 15:40:50 crc kubenswrapper[4762]: I1009 15:40:50.581392 4762 scope.go:117] "RemoveContainer" containerID="1a2cad583173ed52aabe08201bb16b426131f9bc27687a4f92fd405caa0d4370"
Oct 09 15:40:50 crc kubenswrapper[4762]: E1009 15:40:50.581941 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1a2cad583173ed52aabe08201bb16b426131f9bc27687a4f92fd405caa0d4370\": container with ID starting with 1a2cad583173ed52aabe08201bb16b426131f9bc27687a4f92fd405caa0d4370 not found: ID does not exist" containerID="1a2cad583173ed52aabe08201bb16b426131f9bc27687a4f92fd405caa0d4370"
Oct 09 15:40:50 crc kubenswrapper[4762]: I1009 15:40:50.581976 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1a2cad583173ed52aabe08201bb16b426131f9bc27687a4f92fd405caa0d4370"} err="failed to get container status \"1a2cad583173ed52aabe08201bb16b426131f9bc27687a4f92fd405caa0d4370\": rpc error: code = NotFound desc = could not find container \"1a2cad583173ed52aabe08201bb16b426131f9bc27687a4f92fd405caa0d4370\": container with ID starting with 1a2cad583173ed52aabe08201bb16b426131f9bc27687a4f92fd405caa0d4370 not found: ID does not exist"
Oct 09 15:40:50 crc kubenswrapper[4762]: I1009 15:40:50.581999 4762 scope.go:117] "RemoveContainer" containerID="2e8c94a05efee85653a0da36f2f0971d06a2251a879137b72d40527726ecdf70"
Oct 09 15:40:50 crc kubenswrapper[4762]: E1009 15:40:50.582376 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2e8c94a05efee85653a0da36f2f0971d06a2251a879137b72d40527726ecdf70\": container with ID starting with 2e8c94a05efee85653a0da36f2f0971d06a2251a879137b72d40527726ecdf70 not found: ID does not exist" containerID="2e8c94a05efee85653a0da36f2f0971d06a2251a879137b72d40527726ecdf70"
Oct 09 15:40:50 crc kubenswrapper[4762]: I1009 15:40:50.582400 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2e8c94a05efee85653a0da36f2f0971d06a2251a879137b72d40527726ecdf70"} err="failed to get container status \"2e8c94a05efee85653a0da36f2f0971d06a2251a879137b72d40527726ecdf70\": rpc error: code = NotFound desc = could not find container \"2e8c94a05efee85653a0da36f2f0971d06a2251a879137b72d40527726ecdf70\": container with ID starting with 2e8c94a05efee85653a0da36f2f0971d06a2251a879137b72d40527726ecdf70 not found: ID does not exist"
Oct 09 15:40:50 crc kubenswrapper[4762]: I1009 15:40:50.582430 4762 scope.go:117] "RemoveContainer" containerID="56509dcc4ba0e1df60201bf204d33625b577fc6c2365d48c3faee46070e67034"
Oct 09 15:40:50 crc kubenswrapper[4762]: E1009 15:40:50.582702 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"56509dcc4ba0e1df60201bf204d33625b577fc6c2365d48c3faee46070e67034\": container with ID starting with 56509dcc4ba0e1df60201bf204d33625b577fc6c2365d48c3faee46070e67034 not found: ID does not exist" containerID="56509dcc4ba0e1df60201bf204d33625b577fc6c2365d48c3faee46070e67034"
Oct 09 15:40:50 crc kubenswrapper[4762]: I1009 15:40:50.582721 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"56509dcc4ba0e1df60201bf204d33625b577fc6c2365d48c3faee46070e67034"} err="failed to get container status \"56509dcc4ba0e1df60201bf204d33625b577fc6c2365d48c3faee46070e67034\": rpc error: code = NotFound desc = could not find container \"56509dcc4ba0e1df60201bf204d33625b577fc6c2365d48c3faee46070e67034\": container with ID starting with 56509dcc4ba0e1df60201bf204d33625b577fc6c2365d48c3faee46070e67034 not found: ID does not exist"
Oct 09 15:40:50 crc kubenswrapper[4762]: I1009 15:40:50.860746 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-pdqbd"
Oct 09 15:40:50 crc kubenswrapper[4762]: I1009 15:40:50.977488 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c8be5ce1-ba13-42bf-8294-b877d295c395" path="/var/lib/kubelet/pods/c8be5ce1-ba13-42bf-8294-b877d295c395/volumes"
Oct 09 15:40:51 crc kubenswrapper[4762]: I1009 15:40:51.035071 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-42pvk\" (UniqueName: \"kubernetes.io/projected/0d3e2095-05e4-4253-ade6-ec7119ea97af-kube-api-access-42pvk\") pod \"0d3e2095-05e4-4253-ade6-ec7119ea97af\" (UID: \"0d3e2095-05e4-4253-ade6-ec7119ea97af\") "
Oct 09 15:40:51 crc kubenswrapper[4762]: I1009 15:40:51.035243 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0d3e2095-05e4-4253-ade6-ec7119ea97af-utilities\") pod \"0d3e2095-05e4-4253-ade6-ec7119ea97af\" (UID: \"0d3e2095-05e4-4253-ade6-ec7119ea97af\") "
Oct 09 15:40:51 crc kubenswrapper[4762]: I1009 15:40:51.035321 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0d3e2095-05e4-4253-ade6-ec7119ea97af-catalog-content\") pod \"0d3e2095-05e4-4253-ade6-ec7119ea97af\" (UID: \"0d3e2095-05e4-4253-ade6-ec7119ea97af\") "
Oct 09 15:40:51 crc kubenswrapper[4762]: I1009 15:40:51.036521 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0d3e2095-05e4-4253-ade6-ec7119ea97af-utilities" (OuterVolumeSpecName: "utilities") pod "0d3e2095-05e4-4253-ade6-ec7119ea97af" (UID: "0d3e2095-05e4-4253-ade6-ec7119ea97af"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
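The RemoveContainer / "ContainerStatus from runtime service failed ... NotFound" pairs above are benign: the kubelet retries cleanup for containers the runtime has already removed, and a NotFound status means the work is done. A small Go sketch of that idempotent-deletion convention; the statusFn callback is an invented stand-in for the CRI client, only the grpc status/codes usage is the real library API:

    package main

    import (
        "fmt"

        "google.golang.org/grpc/codes"
        "google.golang.org/grpc/status"
    )

    // removeIfPresent treats a gRPC NotFound from the runtime as success:
    // "container ... not found: ID does not exist" means already removed.
    func removeIfPresent(id string, statusFn func(id string) error) error {
        err := statusFn(id)
        if status.Code(err) == codes.NotFound {
            return nil // already gone; log and move on, as the kubelet does
        }
        if err != nil {
            return err
        }
        // ... a real implementation would ask the runtime to remove it here ...
        return nil
    }

    func main() {
        gone := func(id string) error {
            return status.Error(codes.NotFound, "could not find container "+id)
        }
        fmt.Println(removeIfPresent("1a2cad58", gone)) // <nil>: treated as success
    }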
Oct 09 15:40:51 crc kubenswrapper[4762]: I1009 15:40:51.036897 4762 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0d3e2095-05e4-4253-ade6-ec7119ea97af-utilities\") on node \"crc\" DevicePath \"\""
Oct 09 15:40:51 crc kubenswrapper[4762]: I1009 15:40:51.040822 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0d3e2095-05e4-4253-ade6-ec7119ea97af-kube-api-access-42pvk" (OuterVolumeSpecName: "kube-api-access-42pvk") pod "0d3e2095-05e4-4253-ade6-ec7119ea97af" (UID: "0d3e2095-05e4-4253-ade6-ec7119ea97af"). InnerVolumeSpecName "kube-api-access-42pvk". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 09 15:40:51 crc kubenswrapper[4762]: I1009 15:40:51.084553 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0d3e2095-05e4-4253-ade6-ec7119ea97af-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0d3e2095-05e4-4253-ade6-ec7119ea97af" (UID: "0d3e2095-05e4-4253-ade6-ec7119ea97af"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 09 15:40:51 crc kubenswrapper[4762]: I1009 15:40:51.139118 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-42pvk\" (UniqueName: \"kubernetes.io/projected/0d3e2095-05e4-4253-ade6-ec7119ea97af-kube-api-access-42pvk\") on node \"crc\" DevicePath \"\""
Oct 09 15:40:51 crc kubenswrapper[4762]: I1009 15:40:51.139170 4762 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0d3e2095-05e4-4253-ade6-ec7119ea97af-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 09 15:40:51 crc kubenswrapper[4762]: I1009 15:40:51.354316 4762 generic.go:334] "Generic (PLEG): container finished" podID="0d3e2095-05e4-4253-ade6-ec7119ea97af" containerID="3d94bba23711645901ce882f02a40a9c9811d1b9aab97461694a0206f30c0f1c" exitCode=0
Oct 09 15:40:51 crc kubenswrapper[4762]: I1009 15:40:51.354362 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pdqbd" event={"ID":"0d3e2095-05e4-4253-ade6-ec7119ea97af","Type":"ContainerDied","Data":"3d94bba23711645901ce882f02a40a9c9811d1b9aab97461694a0206f30c0f1c"}
Oct 09 15:40:51 crc kubenswrapper[4762]: I1009 15:40:51.354398 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pdqbd" event={"ID":"0d3e2095-05e4-4253-ade6-ec7119ea97af","Type":"ContainerDied","Data":"5e8f85ee0bf965379199086459f870d1c3aab787f346823bd82a2c4c0ef77e0a"}
Oct 09 15:40:51 crc kubenswrapper[4762]: I1009 15:40:51.354403 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-pdqbd"
Oct 09 15:40:51 crc kubenswrapper[4762]: I1009 15:40:51.354420 4762 scope.go:117] "RemoveContainer" containerID="3d94bba23711645901ce882f02a40a9c9811d1b9aab97461694a0206f30c0f1c"
Oct 09 15:40:51 crc kubenswrapper[4762]: I1009 15:40:51.385669 4762 scope.go:117] "RemoveContainer" containerID="151b9d40748eb916687dde87deddf1d23edabde2e18c2fba127e1cec851b29a0"
Oct 09 15:40:51 crc kubenswrapper[4762]: I1009 15:40:51.391365 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-pdqbd"]
Oct 09 15:40:51 crc kubenswrapper[4762]: I1009 15:40:51.400579 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-pdqbd"]
Oct 09 15:40:51 crc kubenswrapper[4762]: I1009 15:40:51.406073 4762 scope.go:117] "RemoveContainer" containerID="dac267ba5c4d79b3b8112359fd5727cd756c9cd6a5d50d3205a62e779ad87197"
Oct 09 15:40:51 crc kubenswrapper[4762]: I1009 15:40:51.426043 4762 scope.go:117] "RemoveContainer" containerID="3d94bba23711645901ce882f02a40a9c9811d1b9aab97461694a0206f30c0f1c"
Oct 09 15:40:51 crc kubenswrapper[4762]: E1009 15:40:51.426592 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3d94bba23711645901ce882f02a40a9c9811d1b9aab97461694a0206f30c0f1c\": container with ID starting with 3d94bba23711645901ce882f02a40a9c9811d1b9aab97461694a0206f30c0f1c not found: ID does not exist" containerID="3d94bba23711645901ce882f02a40a9c9811d1b9aab97461694a0206f30c0f1c"
Oct 09 15:40:51 crc kubenswrapper[4762]: I1009 15:40:51.426664 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3d94bba23711645901ce882f02a40a9c9811d1b9aab97461694a0206f30c0f1c"} err="failed to get container status \"3d94bba23711645901ce882f02a40a9c9811d1b9aab97461694a0206f30c0f1c\": rpc error: code = NotFound desc = could not find container \"3d94bba23711645901ce882f02a40a9c9811d1b9aab97461694a0206f30c0f1c\": container with ID starting with 3d94bba23711645901ce882f02a40a9c9811d1b9aab97461694a0206f30c0f1c not found: ID does not exist"
Oct 09 15:40:51 crc kubenswrapper[4762]: I1009 15:40:51.426697 4762 scope.go:117] "RemoveContainer" containerID="151b9d40748eb916687dde87deddf1d23edabde2e18c2fba127e1cec851b29a0"
Oct 09 15:40:51 crc kubenswrapper[4762]: E1009 15:40:51.427003 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"151b9d40748eb916687dde87deddf1d23edabde2e18c2fba127e1cec851b29a0\": container with ID starting with 151b9d40748eb916687dde87deddf1d23edabde2e18c2fba127e1cec851b29a0 not found: ID does not exist" containerID="151b9d40748eb916687dde87deddf1d23edabde2e18c2fba127e1cec851b29a0"
Oct 09 15:40:51 crc kubenswrapper[4762]: I1009 15:40:51.427038 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"151b9d40748eb916687dde87deddf1d23edabde2e18c2fba127e1cec851b29a0"} err="failed to get container status \"151b9d40748eb916687dde87deddf1d23edabde2e18c2fba127e1cec851b29a0\": rpc error: code = NotFound desc = could not find container \"151b9d40748eb916687dde87deddf1d23edabde2e18c2fba127e1cec851b29a0\": container with ID starting with 151b9d40748eb916687dde87deddf1d23edabde2e18c2fba127e1cec851b29a0 not found: ID does not exist"
Oct 09 15:40:51 crc kubenswrapper[4762]: I1009 15:40:51.427061 4762 scope.go:117] "RemoveContainer" containerID="dac267ba5c4d79b3b8112359fd5727cd756c9cd6a5d50d3205a62e779ad87197"
Oct 09 15:40:51 crc kubenswrapper[4762]: E1009 15:40:51.427327 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dac267ba5c4d79b3b8112359fd5727cd756c9cd6a5d50d3205a62e779ad87197\": container with ID starting with dac267ba5c4d79b3b8112359fd5727cd756c9cd6a5d50d3205a62e779ad87197 not found: ID does not exist" containerID="dac267ba5c4d79b3b8112359fd5727cd756c9cd6a5d50d3205a62e779ad87197"
Oct 09 15:40:51 crc kubenswrapper[4762]: I1009 15:40:51.427368 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dac267ba5c4d79b3b8112359fd5727cd756c9cd6a5d50d3205a62e779ad87197"} err="failed to get container status \"dac267ba5c4d79b3b8112359fd5727cd756c9cd6a5d50d3205a62e779ad87197\": rpc error: code = NotFound desc = could not find container \"dac267ba5c4d79b3b8112359fd5727cd756c9cd6a5d50d3205a62e779ad87197\": container with ID starting with dac267ba5c4d79b3b8112359fd5727cd756c9cd6a5d50d3205a62e779ad87197 not found: ID does not exist"
Oct 09 15:40:52 crc kubenswrapper[4762]: I1009 15:40:52.979395 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0d3e2095-05e4-4253-ade6-ec7119ea97af" path="/var/lib/kubelet/pods/0d3e2095-05e4-4253-ade6-ec7119ea97af/volumes"
Oct 09 15:41:11 crc kubenswrapper[4762]: I1009 15:41:11.969410 4762 patch_prober.go:28] interesting pod/machine-config-daemon-5v6hv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 09 15:41:11 crc kubenswrapper[4762]: I1009 15:41:11.970046 4762 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 09 15:41:29 crc kubenswrapper[4762]: I1009 15:41:29.043417 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-xscvc"]
Oct 09 15:41:29 crc kubenswrapper[4762]: E1009 15:41:29.044935 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5723b80a-e9a3-458c-a735-2768820e466d" containerName="extract-utilities"
Oct 09 15:41:29 crc kubenswrapper[4762]: I1009 15:41:29.044973 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="5723b80a-e9a3-458c-a735-2768820e466d" containerName="extract-utilities"
Oct 09 15:41:29 crc kubenswrapper[4762]: E1009 15:41:29.044992 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c8be5ce1-ba13-42bf-8294-b877d295c395" containerName="extract-content"
Oct 09 15:41:29 crc kubenswrapper[4762]: I1009 15:41:29.044999 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="c8be5ce1-ba13-42bf-8294-b877d295c395" containerName="extract-content"
Oct 09 15:41:29 crc kubenswrapper[4762]: E1009 15:41:29.045026 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d3e2095-05e4-4253-ade6-ec7119ea97af" containerName="registry-server"
Oct 09 15:41:29 crc kubenswrapper[4762]: I1009 15:41:29.045161 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d3e2095-05e4-4253-ade6-ec7119ea97af" containerName="registry-server"
Oct 09 15:41:29 crc kubenswrapper[4762]: E1009 15:41:29.045175 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c8be5ce1-ba13-42bf-8294-b877d295c395" containerName="extract-utilities"
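The liveness failures above show exactly what the kubelet's HTTP probe saw: a GET to http://127.0.0.1:8798/health refused at the TCP level. A minimal Go sketch of such a check, with the port and path taken from the log and the one-second timeout an assumed value (HTTP probes conventionally treat any 2xx/3xx as success):

    package main

    import (
        "fmt"
        "net/http"
        "time"
    )

    // probe returns false on transport errors (e.g. "connect: connection
    // refused", as logged above) or on a status outside 200-399.
    func probe(url string) (healthy bool, detail string) {
        client := &http.Client{Timeout: 1 * time.Second}
        resp, err := client.Get(url)
        if err != nil {
            return false, err.Error()
        }
        defer resp.Body.Close()
        if resp.StatusCode < 200 || resp.StatusCode >= 400 {
            return false, resp.Status
        }
        return true, resp.Status
    }

    func main() {
        ok, detail := probe("http://127.0.0.1:8798/health")
        fmt.Println(ok, detail)
    }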
Oct 09 15:41:29 crc kubenswrapper[4762]: I1009 15:41:29.045181 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="c8be5ce1-ba13-42bf-8294-b877d295c395" containerName="extract-utilities"
Oct 09 15:41:29 crc kubenswrapper[4762]: E1009 15:41:29.045193 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c8be5ce1-ba13-42bf-8294-b877d295c395" containerName="registry-server"
Oct 09 15:41:29 crc kubenswrapper[4762]: I1009 15:41:29.045219 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="c8be5ce1-ba13-42bf-8294-b877d295c395" containerName="registry-server"
Oct 09 15:41:29 crc kubenswrapper[4762]: E1009 15:41:29.045232 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d3e2095-05e4-4253-ade6-ec7119ea97af" containerName="extract-utilities"
Oct 09 15:41:29 crc kubenswrapper[4762]: I1009 15:41:29.045238 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d3e2095-05e4-4253-ade6-ec7119ea97af" containerName="extract-utilities"
Oct 09 15:41:29 crc kubenswrapper[4762]: E1009 15:41:29.045251 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d3e2095-05e4-4253-ade6-ec7119ea97af" containerName="extract-content"
Oct 09 15:41:29 crc kubenswrapper[4762]: I1009 15:41:29.045257 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d3e2095-05e4-4253-ade6-ec7119ea97af" containerName="extract-content"
Oct 09 15:41:29 crc kubenswrapper[4762]: E1009 15:41:29.045323 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5723b80a-e9a3-458c-a735-2768820e466d" containerName="extract-content"
Oct 09 15:41:29 crc kubenswrapper[4762]: I1009 15:41:29.045331 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="5723b80a-e9a3-458c-a735-2768820e466d" containerName="extract-content"
Oct 09 15:41:29 crc kubenswrapper[4762]: E1009 15:41:29.045346 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5723b80a-e9a3-458c-a735-2768820e466d" containerName="registry-server"
Oct 09 15:41:29 crc kubenswrapper[4762]: I1009 15:41:29.045352 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="5723b80a-e9a3-458c-a735-2768820e466d" containerName="registry-server"
Oct 09 15:41:29 crc kubenswrapper[4762]: I1009 15:41:29.045578 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="c8be5ce1-ba13-42bf-8294-b877d295c395" containerName="registry-server"
Oct 09 15:41:29 crc kubenswrapper[4762]: I1009 15:41:29.045592 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="0d3e2095-05e4-4253-ade6-ec7119ea97af" containerName="registry-server"
Oct 09 15:41:29 crc kubenswrapper[4762]: I1009 15:41:29.045606 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="5723b80a-e9a3-458c-a735-2768820e466d" containerName="registry-server"
Oct 09 15:41:29 crc kubenswrapper[4762]: I1009 15:41:29.048749 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-xscvc"
Oct 09 15:41:29 crc kubenswrapper[4762]: I1009 15:41:29.079771 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-xscvc"]
Oct 09 15:41:29 crc kubenswrapper[4762]: I1009 15:41:29.166074 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2b5fade3-13e6-465d-a781-3ac0fe84e262-utilities\") pod \"community-operators-xscvc\" (UID: \"2b5fade3-13e6-465d-a781-3ac0fe84e262\") " pod="openshift-marketplace/community-operators-xscvc"
Oct 09 15:41:29 crc kubenswrapper[4762]: I1009 15:41:29.166148 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2b5fade3-13e6-465d-a781-3ac0fe84e262-catalog-content\") pod \"community-operators-xscvc\" (UID: \"2b5fade3-13e6-465d-a781-3ac0fe84e262\") " pod="openshift-marketplace/community-operators-xscvc"
Oct 09 15:41:29 crc kubenswrapper[4762]: I1009 15:41:29.166754 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dbmhh\" (UniqueName: \"kubernetes.io/projected/2b5fade3-13e6-465d-a781-3ac0fe84e262-kube-api-access-dbmhh\") pod \"community-operators-xscvc\" (UID: \"2b5fade3-13e6-465d-a781-3ac0fe84e262\") " pod="openshift-marketplace/community-operators-xscvc"
Oct 09 15:41:29 crc kubenswrapper[4762]: I1009 15:41:29.269683 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dbmhh\" (UniqueName: \"kubernetes.io/projected/2b5fade3-13e6-465d-a781-3ac0fe84e262-kube-api-access-dbmhh\") pod \"community-operators-xscvc\" (UID: \"2b5fade3-13e6-465d-a781-3ac0fe84e262\") " pod="openshift-marketplace/community-operators-xscvc"
Oct 09 15:41:29 crc kubenswrapper[4762]: I1009 15:41:29.269903 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2b5fade3-13e6-465d-a781-3ac0fe84e262-utilities\") pod \"community-operators-xscvc\" (UID: \"2b5fade3-13e6-465d-a781-3ac0fe84e262\") " pod="openshift-marketplace/community-operators-xscvc"
Oct 09 15:41:29 crc kubenswrapper[4762]: I1009 15:41:29.269960 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2b5fade3-13e6-465d-a781-3ac0fe84e262-catalog-content\") pod \"community-operators-xscvc\" (UID: \"2b5fade3-13e6-465d-a781-3ac0fe84e262\") " pod="openshift-marketplace/community-operators-xscvc"
Oct 09 15:41:29 crc kubenswrapper[4762]: I1009 15:41:29.270533 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2b5fade3-13e6-465d-a781-3ac0fe84e262-utilities\") pod \"community-operators-xscvc\" (UID: \"2b5fade3-13e6-465d-a781-3ac0fe84e262\") " pod="openshift-marketplace/community-operators-xscvc"
Oct 09 15:41:29 crc kubenswrapper[4762]: I1009 15:41:29.270696 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2b5fade3-13e6-465d-a781-3ac0fe84e262-catalog-content\") pod \"community-operators-xscvc\" (UID: \"2b5fade3-13e6-465d-a781-3ac0fe84e262\") " pod="openshift-marketplace/community-operators-xscvc"
Oct 09 15:41:29 crc kubenswrapper[4762]: I1009 15:41:29.296552 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dbmhh\" (UniqueName: \"kubernetes.io/projected/2b5fade3-13e6-465d-a781-3ac0fe84e262-kube-api-access-dbmhh\") pod \"community-operators-xscvc\" (UID: \"2b5fade3-13e6-465d-a781-3ac0fe84e262\") " pod="openshift-marketplace/community-operators-xscvc"
Oct 09 15:41:29 crc kubenswrapper[4762]: I1009 15:41:29.372730 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-xscvc"
Oct 09 15:41:29 crc kubenswrapper[4762]: I1009 15:41:29.925740 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-xscvc"]
Oct 09 15:41:30 crc kubenswrapper[4762]: I1009 15:41:30.798856 4762 generic.go:334] "Generic (PLEG): container finished" podID="2b5fade3-13e6-465d-a781-3ac0fe84e262" containerID="00dc885ebf40cc355b1af8b9629e219a26c2ac0f7a20b274bdf73361022d9311" exitCode=0
Oct 09 15:41:30 crc kubenswrapper[4762]: I1009 15:41:30.799386 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xscvc" event={"ID":"2b5fade3-13e6-465d-a781-3ac0fe84e262","Type":"ContainerDied","Data":"00dc885ebf40cc355b1af8b9629e219a26c2ac0f7a20b274bdf73361022d9311"}
Oct 09 15:41:30 crc kubenswrapper[4762]: I1009 15:41:30.799432 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xscvc" event={"ID":"2b5fade3-13e6-465d-a781-3ac0fe84e262","Type":"ContainerStarted","Data":"9477f34aa3fdd489ca43a7668d6948fbde4fc89a21badd3d0e9414b7ae7387ca"}
Oct 09 15:41:31 crc kubenswrapper[4762]: I1009 15:41:31.810589 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xscvc" event={"ID":"2b5fade3-13e6-465d-a781-3ac0fe84e262","Type":"ContainerStarted","Data":"31cbbbdf019cc6345294b36405b490f45e28f6dfa835274565c6d79c8652d9d8"}
Oct 09 15:41:33 crc kubenswrapper[4762]: I1009 15:41:33.833794 4762 generic.go:334] "Generic (PLEG): container finished" podID="2b5fade3-13e6-465d-a781-3ac0fe84e262" containerID="31cbbbdf019cc6345294b36405b490f45e28f6dfa835274565c6d79c8652d9d8" exitCode=0
Oct 09 15:41:33 crc kubenswrapper[4762]: I1009 15:41:33.833918 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xscvc" event={"ID":"2b5fade3-13e6-465d-a781-3ac0fe84e262","Type":"ContainerDied","Data":"31cbbbdf019cc6345294b36405b490f45e28f6dfa835274565c6d79c8652d9d8"}
Oct 09 15:41:34 crc kubenswrapper[4762]: I1009 15:41:34.854082 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xscvc" event={"ID":"2b5fade3-13e6-465d-a781-3ac0fe84e262","Type":"ContainerStarted","Data":"cb3842f9437ffa0d4ed8a7ab355d852e48927eab4ca07e342b011481ef25b183"}
Oct 09 15:41:34 crc kubenswrapper[4762]: I1009 15:41:34.882520 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-xscvc" podStartSLOduration=2.364181975 podStartE2EDuration="5.882491004s" podCreationTimestamp="2025-10-09 15:41:29 +0000 UTC" firstStartedPulling="2025-10-09 15:41:30.802197302 +0000 UTC m=+8166.575988351" lastFinishedPulling="2025-10-09 15:41:34.320506341 +0000 UTC m=+8170.094297380" observedRunningTime="2025-10-09 15:41:34.870580561 +0000 UTC m=+8170.644371620" watchObservedRunningTime="2025-10-09 15:41:34.882491004 +0000 UTC m=+8170.656282043"
Oct 09 15:41:39 crc kubenswrapper[4762]: I1009 15:41:39.373043 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-xscvc"
Oct 09 15:41:39 crc kubenswrapper[4762]: I1009 15:41:39.373672 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-xscvc"
Oct 09 15:41:39 crc kubenswrapper[4762]: I1009 15:41:39.423330 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-xscvc"
Oct 09 15:41:39 crc kubenswrapper[4762]: I1009 15:41:39.957389 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-xscvc"
Oct 09 15:41:40 crc kubenswrapper[4762]: I1009 15:41:40.016278 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-xscvc"]
Oct 09 15:41:41 crc kubenswrapper[4762]: I1009 15:41:41.919076 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-xscvc" podUID="2b5fade3-13e6-465d-a781-3ac0fe84e262" containerName="registry-server" containerID="cri-o://cb3842f9437ffa0d4ed8a7ab355d852e48927eab4ca07e342b011481ef25b183" gracePeriod=2
Oct 09 15:41:41 crc kubenswrapper[4762]: I1009 15:41:41.969402 4762 patch_prober.go:28] interesting pod/machine-config-daemon-5v6hv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 09 15:41:41 crc kubenswrapper[4762]: I1009 15:41:41.969482 4762 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 09 15:41:41 crc kubenswrapper[4762]: I1009 15:41:41.969563 4762 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv"
Oct 09 15:41:41 crc kubenswrapper[4762]: I1009 15:41:41.970171 4762 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"207079ce18565c7a249f5d4caa9c694e48f23ecdfd20db7075fdc36cd1ceb1b7"} pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Oct 09 15:41:41 crc kubenswrapper[4762]: I1009 15:41:41.970254 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" containerID="cri-o://207079ce18565c7a249f5d4caa9c694e48f23ecdfd20db7075fdc36cd1ceb1b7" gracePeriod=600
Oct 09 15:41:42 crc kubenswrapper[4762]: E1009 15:41:42.594641 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14"
Oct 09 15:41:42 crc kubenswrapper[4762]: I1009 15:41:42.697152 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-xscvc"
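The pod_startup_latency_tracker entry above encodes a small calculation worth making explicit: podStartE2EDuration is watchObservedRunningTime minus podCreationTimestamp, and podStartSLOduration is that E2E figure minus the image-pull window (lastFinishedPulling minus firstStartedPulling, taken here from the monotonic m=+... offsets). A check in Go with the constants copied from the log line; this reproduces the logged numbers but is an illustration, not the tracker's source:

    package main

    import "fmt"

    func main() {
        // watchObservedRunningTime (15:41:34.882491004) - podCreationTimestamp (15:41:29), in seconds
        const e2e = 5.882491004
        // lastFinishedPulling - firstStartedPulling, from the monotonic offsets
        const pull = 8170.094297380 - 8166.575988351 // = 3.518309029s
        fmt.Printf("podStartSLOduration = %.9f\n", e2e-pull) // prints 2.364181975, as logged
    }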
Oct 09 15:41:42 crc kubenswrapper[4762]: I1009 15:41:42.807032 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbmhh\" (UniqueName: \"kubernetes.io/projected/2b5fade3-13e6-465d-a781-3ac0fe84e262-kube-api-access-dbmhh\") pod \"2b5fade3-13e6-465d-a781-3ac0fe84e262\" (UID: \"2b5fade3-13e6-465d-a781-3ac0fe84e262\") "
Oct 09 15:41:42 crc kubenswrapper[4762]: I1009 15:41:42.807173 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2b5fade3-13e6-465d-a781-3ac0fe84e262-utilities\") pod \"2b5fade3-13e6-465d-a781-3ac0fe84e262\" (UID: \"2b5fade3-13e6-465d-a781-3ac0fe84e262\") "
Oct 09 15:41:42 crc kubenswrapper[4762]: I1009 15:41:42.807201 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2b5fade3-13e6-465d-a781-3ac0fe84e262-catalog-content\") pod \"2b5fade3-13e6-465d-a781-3ac0fe84e262\" (UID: \"2b5fade3-13e6-465d-a781-3ac0fe84e262\") "
Oct 09 15:41:42 crc kubenswrapper[4762]: I1009 15:41:42.846508 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2b5fade3-13e6-465d-a781-3ac0fe84e262-utilities" (OuterVolumeSpecName: "utilities") pod "2b5fade3-13e6-465d-a781-3ac0fe84e262" (UID: "2b5fade3-13e6-465d-a781-3ac0fe84e262"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 09 15:41:42 crc kubenswrapper[4762]: I1009 15:41:42.847327 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2b5fade3-13e6-465d-a781-3ac0fe84e262-kube-api-access-dbmhh" (OuterVolumeSpecName: "kube-api-access-dbmhh") pod "2b5fade3-13e6-465d-a781-3ac0fe84e262" (UID: "2b5fade3-13e6-465d-a781-3ac0fe84e262"). InnerVolumeSpecName "kube-api-access-dbmhh". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 09 15:41:42 crc kubenswrapper[4762]: I1009 15:41:42.859354 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2b5fade3-13e6-465d-a781-3ac0fe84e262-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2b5fade3-13e6-465d-a781-3ac0fe84e262" (UID: "2b5fade3-13e6-465d-a781-3ac0fe84e262"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 09 15:41:42 crc kubenswrapper[4762]: I1009 15:41:42.909381 4762 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2b5fade3-13e6-465d-a781-3ac0fe84e262-utilities\") on node \"crc\" DevicePath \"\""
Oct 09 15:41:42 crc kubenswrapper[4762]: I1009 15:41:42.909424 4762 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2b5fade3-13e6-465d-a781-3ac0fe84e262-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 09 15:41:42 crc kubenswrapper[4762]: I1009 15:41:42.909439 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbmhh\" (UniqueName: \"kubernetes.io/projected/2b5fade3-13e6-465d-a781-3ac0fe84e262-kube-api-access-dbmhh\") on node \"crc\" DevicePath \"\""
Oct 09 15:41:42 crc kubenswrapper[4762]: I1009 15:41:42.930001 4762 generic.go:334] "Generic (PLEG): container finished" podID="366049a3-acf6-488c-9f93-4557528d6d14" containerID="207079ce18565c7a249f5d4caa9c694e48f23ecdfd20db7075fdc36cd1ceb1b7" exitCode=0
Oct 09 15:41:42 crc kubenswrapper[4762]: I1009 15:41:42.930072 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" event={"ID":"366049a3-acf6-488c-9f93-4557528d6d14","Type":"ContainerDied","Data":"207079ce18565c7a249f5d4caa9c694e48f23ecdfd20db7075fdc36cd1ceb1b7"}
Oct 09 15:41:42 crc kubenswrapper[4762]: I1009 15:41:42.930107 4762 scope.go:117] "RemoveContainer" containerID="140ab4dce1cda567b18618f3538dba93b49b9c190259abd3ff24f25be678f05d"
Oct 09 15:41:42 crc kubenswrapper[4762]: I1009 15:41:42.930818 4762 scope.go:117] "RemoveContainer" containerID="207079ce18565c7a249f5d4caa9c694e48f23ecdfd20db7075fdc36cd1ceb1b7"
Oct 09 15:41:42 crc kubenswrapper[4762]: E1009 15:41:42.931062 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14"
Oct 09 15:41:42 crc kubenswrapper[4762]: I1009 15:41:42.932930 4762 generic.go:334] "Generic (PLEG): container finished" podID="2b5fade3-13e6-465d-a781-3ac0fe84e262" containerID="cb3842f9437ffa0d4ed8a7ab355d852e48927eab4ca07e342b011481ef25b183" exitCode=0
Oct 09 15:41:42 crc kubenswrapper[4762]: I1009 15:41:42.932951 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xscvc" event={"ID":"2b5fade3-13e6-465d-a781-3ac0fe84e262","Type":"ContainerDied","Data":"cb3842f9437ffa0d4ed8a7ab355d852e48927eab4ca07e342b011481ef25b183"}
Oct 09 15:41:42 crc kubenswrapper[4762]: I1009 15:41:42.932967 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xscvc" event={"ID":"2b5fade3-13e6-465d-a781-3ac0fe84e262","Type":"ContainerDied","Data":"9477f34aa3fdd489ca43a7668d6948fbde4fc89a21badd3d0e9414b7ae7387ca"}
Oct 09 15:41:42 crc kubenswrapper[4762]: I1009 15:41:42.933013 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-xscvc"
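The repeating "back-off 5m0s restarting failed container" errors above and below are the kubelet's crash-loop backoff at its ceiling: each failed restart doubles the delay until it saturates at a cap. The 10-second initial delay and 5-minute cap used in this sketch are kubelet's commonly cited defaults, assumed here rather than read from this log; the doubling loop is an illustration of the policy's shape, not kubelet source:

    package main

    import (
        "fmt"
        "time"
    )

    func main() {
        const initial = 10 * time.Second
        const maxDelay = 5 * time.Minute // the "5m0s" in the messages
        delay := initial
        for restart := 1; restart <= 7; restart++ {
            fmt.Printf("restart %d: wait %v before StartContainer\n", restart, delay)
            delay *= 2
            if delay > maxDelay {
                delay = maxDelay // later restarts keep retrying at the cap
            }
        }
    }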
Oct 09 15:41:42 crc kubenswrapper[4762]: I1009 15:41:42.974485 4762 scope.go:117] "RemoveContainer" containerID="cb3842f9437ffa0d4ed8a7ab355d852e48927eab4ca07e342b011481ef25b183"
Oct 09 15:41:43 crc kubenswrapper[4762]: I1009 15:41:43.005594 4762 scope.go:117] "RemoveContainer" containerID="31cbbbdf019cc6345294b36405b490f45e28f6dfa835274565c6d79c8652d9d8"
Oct 09 15:41:43 crc kubenswrapper[4762]: I1009 15:41:43.005771 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-xscvc"]
Oct 09 15:41:43 crc kubenswrapper[4762]: I1009 15:41:43.028067 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-xscvc"]
Oct 09 15:41:43 crc kubenswrapper[4762]: I1009 15:41:43.044755 4762 scope.go:117] "RemoveContainer" containerID="00dc885ebf40cc355b1af8b9629e219a26c2ac0f7a20b274bdf73361022d9311"
Oct 09 15:41:43 crc kubenswrapper[4762]: I1009 15:41:43.113557 4762 scope.go:117] "RemoveContainer" containerID="cb3842f9437ffa0d4ed8a7ab355d852e48927eab4ca07e342b011481ef25b183"
Oct 09 15:41:43 crc kubenswrapper[4762]: E1009 15:41:43.114171 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cb3842f9437ffa0d4ed8a7ab355d852e48927eab4ca07e342b011481ef25b183\": container with ID starting with cb3842f9437ffa0d4ed8a7ab355d852e48927eab4ca07e342b011481ef25b183 not found: ID does not exist" containerID="cb3842f9437ffa0d4ed8a7ab355d852e48927eab4ca07e342b011481ef25b183"
Oct 09 15:41:43 crc kubenswrapper[4762]: I1009 15:41:43.114233 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cb3842f9437ffa0d4ed8a7ab355d852e48927eab4ca07e342b011481ef25b183"} err="failed to get container status \"cb3842f9437ffa0d4ed8a7ab355d852e48927eab4ca07e342b011481ef25b183\": rpc error: code = NotFound desc = could not find container \"cb3842f9437ffa0d4ed8a7ab355d852e48927eab4ca07e342b011481ef25b183\": container with ID starting with cb3842f9437ffa0d4ed8a7ab355d852e48927eab4ca07e342b011481ef25b183 not found: ID does not exist"
Oct 09 15:41:43 crc kubenswrapper[4762]: I1009 15:41:43.114266 4762 scope.go:117] "RemoveContainer" containerID="31cbbbdf019cc6345294b36405b490f45e28f6dfa835274565c6d79c8652d9d8"
Oct 09 15:41:43 crc kubenswrapper[4762]: E1009 15:41:43.114689 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"31cbbbdf019cc6345294b36405b490f45e28f6dfa835274565c6d79c8652d9d8\": container with ID starting with 31cbbbdf019cc6345294b36405b490f45e28f6dfa835274565c6d79c8652d9d8 not found: ID does not exist" containerID="31cbbbdf019cc6345294b36405b490f45e28f6dfa835274565c6d79c8652d9d8"
Oct 09 15:41:43 crc kubenswrapper[4762]: I1009 15:41:43.114724 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"31cbbbdf019cc6345294b36405b490f45e28f6dfa835274565c6d79c8652d9d8"} err="failed to get container status \"31cbbbdf019cc6345294b36405b490f45e28f6dfa835274565c6d79c8652d9d8\": rpc error: code = NotFound desc = could not find container \"31cbbbdf019cc6345294b36405b490f45e28f6dfa835274565c6d79c8652d9d8\": container with ID starting with 31cbbbdf019cc6345294b36405b490f45e28f6dfa835274565c6d79c8652d9d8 not found: ID does not exist"
Oct 09 15:41:43 crc kubenswrapper[4762]: I1009 15:41:43.114742 4762 scope.go:117] "RemoveContainer" containerID="00dc885ebf40cc355b1af8b9629e219a26c2ac0f7a20b274bdf73361022d9311"
Oct 09 15:41:43 crc kubenswrapper[4762]: E1009 15:41:43.115014 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"00dc885ebf40cc355b1af8b9629e219a26c2ac0f7a20b274bdf73361022d9311\": container with ID starting with 00dc885ebf40cc355b1af8b9629e219a26c2ac0f7a20b274bdf73361022d9311 not found: ID does not exist" containerID="00dc885ebf40cc355b1af8b9629e219a26c2ac0f7a20b274bdf73361022d9311"
Oct 09 15:41:43 crc kubenswrapper[4762]: I1009 15:41:43.115036 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"00dc885ebf40cc355b1af8b9629e219a26c2ac0f7a20b274bdf73361022d9311"} err="failed to get container status \"00dc885ebf40cc355b1af8b9629e219a26c2ac0f7a20b274bdf73361022d9311\": rpc error: code = NotFound desc = could not find container \"00dc885ebf40cc355b1af8b9629e219a26c2ac0f7a20b274bdf73361022d9311\": container with ID starting with 00dc885ebf40cc355b1af8b9629e219a26c2ac0f7a20b274bdf73361022d9311 not found: ID does not exist"
Oct 09 15:41:44 crc kubenswrapper[4762]: I1009 15:41:44.977993 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2b5fade3-13e6-465d-a781-3ac0fe84e262" path="/var/lib/kubelet/pods/2b5fade3-13e6-465d-a781-3ac0fe84e262/volumes"
Oct 09 15:41:57 crc kubenswrapper[4762]: I1009 15:41:57.965349 4762 scope.go:117] "RemoveContainer" containerID="207079ce18565c7a249f5d4caa9c694e48f23ecdfd20db7075fdc36cd1ceb1b7"
Oct 09 15:41:57 crc kubenswrapper[4762]: E1009 15:41:57.966067 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14"
Oct 09 15:42:12 crc kubenswrapper[4762]: I1009 15:42:12.965797 4762 scope.go:117] "RemoveContainer" containerID="207079ce18565c7a249f5d4caa9c694e48f23ecdfd20db7075fdc36cd1ceb1b7"
Oct 09 15:42:12 crc kubenswrapper[4762]: E1009 15:42:12.966726 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14"
Oct 09 15:42:25 crc kubenswrapper[4762]: I1009 15:42:25.973736 4762 scope.go:117] "RemoveContainer" containerID="207079ce18565c7a249f5d4caa9c694e48f23ecdfd20db7075fdc36cd1ceb1b7"
Oct 09 15:42:25 crc kubenswrapper[4762]: E1009 15:42:25.974921 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14"
Oct 09 15:42:28 crc kubenswrapper[4762]: I1009 15:42:28.450093 4762 generic.go:334] "Generic (PLEG): container finished" podID="6d4bed23-7613-448e-937a-c4d0467c3e57" containerID="4a9695cbf764e0e577eb8b32178f0c10201d3ea74ad1eaefccb86231a2088403" exitCode=0
Oct 09 15:42:28 crc kubenswrapper[4762]: I1009 15:42:28.450206 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-openstack-openstack-cell1-cczp4" event={"ID":"6d4bed23-7613-448e-937a-c4d0467c3e57","Type":"ContainerDied","Data":"4a9695cbf764e0e577eb8b32178f0c10201d3ea74ad1eaefccb86231a2088403"}
Oct 09 15:42:30 crc kubenswrapper[4762]: I1009 15:42:30.020933 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-openstack-openstack-cell1-cczp4"
Oct 09 15:42:30 crc kubenswrapper[4762]: I1009 15:42:30.094932 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6d4bed23-7613-448e-937a-c4d0467c3e57-inventory\") pod \"6d4bed23-7613-448e-937a-c4d0467c3e57\" (UID: \"6d4bed23-7613-448e-937a-c4d0467c3e57\") "
Oct 09 15:42:30 crc kubenswrapper[4762]: I1009 15:42:30.094983 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/6d4bed23-7613-448e-937a-c4d0467c3e57-ceph\") pod \"6d4bed23-7613-448e-937a-c4d0467c3e57\" (UID: \"6d4bed23-7613-448e-937a-c4d0467c3e57\") "
Oct 09 15:42:30 crc kubenswrapper[4762]: I1009 15:42:30.095165 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d4bed23-7613-448e-937a-c4d0467c3e57-libvirt-combined-ca-bundle\") pod \"6d4bed23-7613-448e-937a-c4d0467c3e57\" (UID: \"6d4bed23-7613-448e-937a-c4d0467c3e57\") "
Oct 09 15:42:30 crc kubenswrapper[4762]: I1009 15:42:30.095219 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5h2pq\" (UniqueName: \"kubernetes.io/projected/6d4bed23-7613-448e-937a-c4d0467c3e57-kube-api-access-5h2pq\") pod \"6d4bed23-7613-448e-937a-c4d0467c3e57\" (UID: \"6d4bed23-7613-448e-937a-c4d0467c3e57\") "
Oct 09 15:42:30 crc kubenswrapper[4762]: I1009 15:42:30.095247 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/6d4bed23-7613-448e-937a-c4d0467c3e57-libvirt-secret-0\") pod \"6d4bed23-7613-448e-937a-c4d0467c3e57\" (UID: \"6d4bed23-7613-448e-937a-c4d0467c3e57\") "
Oct 09 15:42:30 crc kubenswrapper[4762]: I1009 15:42:30.095368 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6d4bed23-7613-448e-937a-c4d0467c3e57-ssh-key\") pod \"6d4bed23-7613-448e-937a-c4d0467c3e57\" (UID: \"6d4bed23-7613-448e-937a-c4d0467c3e57\") "
Oct 09 15:42:30 crc kubenswrapper[4762]: I1009 15:42:30.100610 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6d4bed23-7613-448e-937a-c4d0467c3e57-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "6d4bed23-7613-448e-937a-c4d0467c3e57" (UID: "6d4bed23-7613-448e-937a-c4d0467c3e57"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 09 15:42:30 crc kubenswrapper[4762]: I1009 15:42:30.100623 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6d4bed23-7613-448e-937a-c4d0467c3e57-kube-api-access-5h2pq" (OuterVolumeSpecName: "kube-api-access-5h2pq") pod "6d4bed23-7613-448e-937a-c4d0467c3e57" (UID: "6d4bed23-7613-448e-937a-c4d0467c3e57"). InnerVolumeSpecName "kube-api-access-5h2pq". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 09 15:42:30 crc kubenswrapper[4762]: I1009 15:42:30.100949 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6d4bed23-7613-448e-937a-c4d0467c3e57-ceph" (OuterVolumeSpecName: "ceph") pod "6d4bed23-7613-448e-937a-c4d0467c3e57" (UID: "6d4bed23-7613-448e-937a-c4d0467c3e57"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 09 15:42:30 crc kubenswrapper[4762]: I1009 15:42:30.126462 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6d4bed23-7613-448e-937a-c4d0467c3e57-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "6d4bed23-7613-448e-937a-c4d0467c3e57" (UID: "6d4bed23-7613-448e-937a-c4d0467c3e57"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 09 15:42:30 crc kubenswrapper[4762]: I1009 15:42:30.131888 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6d4bed23-7613-448e-937a-c4d0467c3e57-inventory" (OuterVolumeSpecName: "inventory") pod "6d4bed23-7613-448e-937a-c4d0467c3e57" (UID: "6d4bed23-7613-448e-937a-c4d0467c3e57"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 09 15:42:30 crc kubenswrapper[4762]: I1009 15:42:30.134868 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6d4bed23-7613-448e-937a-c4d0467c3e57-libvirt-secret-0" (OuterVolumeSpecName: "libvirt-secret-0") pod "6d4bed23-7613-448e-937a-c4d0467c3e57" (UID: "6d4bed23-7613-448e-937a-c4d0467c3e57"). InnerVolumeSpecName "libvirt-secret-0". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 09 15:42:30 crc kubenswrapper[4762]: I1009 15:42:30.197334 4762 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d4bed23-7613-448e-937a-c4d0467c3e57-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 09 15:42:30 crc kubenswrapper[4762]: I1009 15:42:30.197373 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5h2pq\" (UniqueName: \"kubernetes.io/projected/6d4bed23-7613-448e-937a-c4d0467c3e57-kube-api-access-5h2pq\") on node \"crc\" DevicePath \"\""
Oct 09 15:42:30 crc kubenswrapper[4762]: I1009 15:42:30.197385 4762 reconciler_common.go:293] "Volume detached for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/6d4bed23-7613-448e-937a-c4d0467c3e57-libvirt-secret-0\") on node \"crc\" DevicePath \"\""
Oct 09 15:42:30 crc kubenswrapper[4762]: I1009 15:42:30.197393 4762 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6d4bed23-7613-448e-937a-c4d0467c3e57-ssh-key\") on node \"crc\" DevicePath \"\""
Oct 09 15:42:30 crc kubenswrapper[4762]: I1009 15:42:30.197404 4762 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6d4bed23-7613-448e-937a-c4d0467c3e57-inventory\") on node \"crc\" DevicePath \"\""
Oct 09 15:42:30 crc kubenswrapper[4762]: I1009 15:42:30.197414 4762 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/6d4bed23-7613-448e-937a-c4d0467c3e57-ceph\") on node \"crc\" DevicePath \"\""
Oct 09 15:42:30 crc kubenswrapper[4762]: I1009 15:42:30.468871 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-openstack-openstack-cell1-cczp4"
Oct 09 15:42:30 crc kubenswrapper[4762]: I1009 15:42:30.468864 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-openstack-openstack-cell1-cczp4" event={"ID":"6d4bed23-7613-448e-937a-c4d0467c3e57","Type":"ContainerDied","Data":"6f72fb4c10feea83077f64ec8afb9e7aac4ceb4a497fe526a855d69173344810"}
Oct 09 15:42:30 crc kubenswrapper[4762]: I1009 15:42:30.469012 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6f72fb4c10feea83077f64ec8afb9e7aac4ceb4a497fe526a855d69173344810"
Oct 09 15:42:30 crc kubenswrapper[4762]: I1009 15:42:30.577732 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-openstack-openstack-cell1-hxbfw"]
Oct 09 15:42:30 crc kubenswrapper[4762]: E1009 15:42:30.578336 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6d4bed23-7613-448e-937a-c4d0467c3e57" containerName="libvirt-openstack-openstack-cell1"
Oct 09 15:42:30 crc kubenswrapper[4762]: I1009 15:42:30.578360 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="6d4bed23-7613-448e-937a-c4d0467c3e57" containerName="libvirt-openstack-openstack-cell1"
Oct 09 15:42:30 crc kubenswrapper[4762]: E1009 15:42:30.578376 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2b5fade3-13e6-465d-a781-3ac0fe84e262" containerName="registry-server"
Oct 09 15:42:30 crc kubenswrapper[4762]: I1009 15:42:30.578385 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="2b5fade3-13e6-465d-a781-3ac0fe84e262" containerName="registry-server"
Oct 09 15:42:30 crc kubenswrapper[4762]: E1009 15:42:30.578410 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2b5fade3-13e6-465d-a781-3ac0fe84e262" containerName="extract-utilities"
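The unmount sequence above follows the reconciler pattern: compare the desired set of volumes for the node's pods against what is actually mounted, tear down anything no longer desired, then record it detached. A minimal Go sketch of that loop; the names and map types are invented for illustration, while the quoted messages echo the real ones from reconciler_common.go and operation_generator.go:

    package main

    import "fmt"

    // reconcile unmounts every mounted volume that is no longer desired,
    // mirroring the UnmountVolume -> TearDown -> "Volume detached" sequence.
    func reconcile(desired, mounted map[string]bool, unmount func(string) error) {
        for vol := range mounted {
            if desired[vol] {
                continue // still wanted; leave it mounted
            }
            // "operationExecutor.UnmountVolume started for volume ..."
            if err := unmount(vol); err != nil {
                fmt.Printf("UnmountVolume failed for %q: %v\n", vol, err)
                continue // retried on the next reconcile pass
            }
            delete(mounted, vol)
            // "Volume detached for volume ... DevicePath \"\""
            fmt.Printf("Volume detached for volume %q\n", vol)
        }
    }

    func main() {
        mounted := map[string]bool{"inventory": true, "ceph": true, "ssh-key": true}
        // pod deleted: nothing is desired any more, so everything is torn down
        reconcile(map[string]bool{}, mounted, func(string) error { return nil })
    }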
Oct 09 15:42:30 crc kubenswrapper[4762]: I1009 15:42:30.578419 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="2b5fade3-13e6-465d-a781-3ac0fe84e262" containerName="extract-utilities"
Oct 09 15:42:30 crc kubenswrapper[4762]: E1009 15:42:30.578453 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2b5fade3-13e6-465d-a781-3ac0fe84e262" containerName="extract-content"
Oct 09 15:42:30 crc kubenswrapper[4762]: I1009 15:42:30.578461 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="2b5fade3-13e6-465d-a781-3ac0fe84e262" containerName="extract-content"
Oct 09 15:42:30 crc kubenswrapper[4762]: I1009 15:42:30.578771 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="2b5fade3-13e6-465d-a781-3ac0fe84e262" containerName="registry-server"
Oct 09 15:42:30 crc kubenswrapper[4762]: I1009 15:42:30.578810 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="6d4bed23-7613-448e-937a-c4d0467c3e57" containerName="libvirt-openstack-openstack-cell1"
Oct 09 15:42:30 crc kubenswrapper[4762]: I1009 15:42:30.579821 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-openstack-openstack-cell1-hxbfw"
Oct 09 15:42:30 crc kubenswrapper[4762]: I1009 15:42:30.585413 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Oct 09 15:42:30 crc kubenswrapper[4762]: I1009 15:42:30.585699 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-migration-ssh-key"
Oct 09 15:42:30 crc kubenswrapper[4762]: I1009 15:42:30.585858 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"nova-cells-global-config"
Oct 09 15:42:30 crc kubenswrapper[4762]: I1009 15:42:30.586000 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-compute-config"
Oct 09 15:42:30 crc kubenswrapper[4762]: I1009 15:42:30.586152 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-whcgt"
Oct 09 15:42:30 crc kubenswrapper[4762]: I1009 15:42:30.586347 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1"
Oct 09 15:42:30 crc kubenswrapper[4762]: I1009 15:42:30.586549 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret"
Oct 09 15:42:30 crc kubenswrapper[4762]: I1009 15:42:30.591467 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-openstack-openstack-cell1-hxbfw"]
Oct 09 15:42:30 crc kubenswrapper[4762]: I1009 15:42:30.708161 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/a2527d19-46b0-464c-9eab-69e73c1aceea-nova-cells-global-config-1\") pod \"nova-cell1-openstack-openstack-cell1-hxbfw\" (UID: \"a2527d19-46b0-464c-9eab-69e73c1aceea\") " pod="openstack/nova-cell1-openstack-openstack-cell1-hxbfw"
Oct 09 15:42:30 crc kubenswrapper[4762]: I1009 15:42:30.708230 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a2527d19-46b0-464c-9eab-69e73c1aceea-inventory\") pod \"nova-cell1-openstack-openstack-cell1-hxbfw\" (UID: \"a2527d19-46b0-464c-9eab-69e73c1aceea\") " pod="openstack/nova-cell1-openstack-openstack-cell1-hxbfw"
Oct 09 15:42:30 crc kubenswrapper[4762]: I1009 15:42:30.708261 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a2527d19-46b0-464c-9eab-69e73c1aceea-ssh-key\") pod \"nova-cell1-openstack-openstack-cell1-hxbfw\" (UID: \"a2527d19-46b0-464c-9eab-69e73c1aceea\") " pod="openstack/nova-cell1-openstack-openstack-cell1-hxbfw"
Oct 09 15:42:30 crc kubenswrapper[4762]: I1009 15:42:30.708329 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/a2527d19-46b0-464c-9eab-69e73c1aceea-nova-migration-ssh-key-0\") pod \"nova-cell1-openstack-openstack-cell1-hxbfw\" (UID: \"a2527d19-46b0-464c-9eab-69e73c1aceea\") " pod="openstack/nova-cell1-openstack-openstack-cell1-hxbfw"
Oct 09 15:42:30 crc kubenswrapper[4762]: I1009 15:42:30.708609 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/a2527d19-46b0-464c-9eab-69e73c1aceea-ceph\") pod \"nova-cell1-openstack-openstack-cell1-hxbfw\" (UID: \"a2527d19-46b0-464c-9eab-69e73c1aceea\") " pod="openstack/nova-cell1-openstack-openstack-cell1-hxbfw"
Oct 09 15:42:30 crc kubenswrapper[4762]: I1009 15:42:30.708762 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/a2527d19-46b0-464c-9eab-69e73c1aceea-nova-migration-ssh-key-1\") pod \"nova-cell1-openstack-openstack-cell1-hxbfw\" (UID: \"a2527d19-46b0-464c-9eab-69e73c1aceea\") " pod="openstack/nova-cell1-openstack-openstack-cell1-hxbfw"
Oct 09 15:42:30 crc kubenswrapper[4762]: I1009 15:42:30.708820 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/a2527d19-46b0-464c-9eab-69e73c1aceea-nova-cell1-compute-config-1\") pod \"nova-cell1-openstack-openstack-cell1-hxbfw\" (UID: \"a2527d19-46b0-464c-9eab-69e73c1aceea\") " pod="openstack/nova-cell1-openstack-openstack-cell1-hxbfw"
Oct 09 15:42:30 crc kubenswrapper[4762]: I1009 15:42:30.708891 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/a2527d19-46b0-464c-9eab-69e73c1aceea-nova-cell1-compute-config-0\") pod \"nova-cell1-openstack-openstack-cell1-hxbfw\" (UID: \"a2527d19-46b0-464c-9eab-69e73c1aceea\") " pod="openstack/nova-cell1-openstack-openstack-cell1-hxbfw"
Oct 09 15:42:30 crc kubenswrapper[4762]: I1009 15:42:30.708953 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/a2527d19-46b0-464c-9eab-69e73c1aceea-nova-cells-global-config-0\") pod \"nova-cell1-openstack-openstack-cell1-hxbfw\" (UID: \"a2527d19-46b0-464c-9eab-69e73c1aceea\") " pod="openstack/nova-cell1-openstack-openstack-cell1-hxbfw"
Oct 09 15:42:30 crc kubenswrapper[4762]: I1009 15:42:30.709087 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-529c5\" (UniqueName: \"kubernetes.io/projected/a2527d19-46b0-464c-9eab-69e73c1aceea-kube-api-access-529c5\") pod \"nova-cell1-openstack-openstack-cell1-hxbfw\" (UID: \"a2527d19-46b0-464c-9eab-69e73c1aceea\") " pod="openstack/nova-cell1-openstack-openstack-cell1-hxbfw"
Oct 09 15:42:30 crc kubenswrapper[4762]: I1009 15:42:30.709156 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a2527d19-46b0-464c-9eab-69e73c1aceea-nova-cell1-combined-ca-bundle\") pod \"nova-cell1-openstack-openstack-cell1-hxbfw\" (UID: \"a2527d19-46b0-464c-9eab-69e73c1aceea\") " pod="openstack/nova-cell1-openstack-openstack-cell1-hxbfw"
Oct 09 15:42:30 crc kubenswrapper[4762]: I1009 15:42:30.810887 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/a2527d19-46b0-464c-9eab-69e73c1aceea-nova-cells-global-config-1\") pod \"nova-cell1-openstack-openstack-cell1-hxbfw\" (UID: \"a2527d19-46b0-464c-9eab-69e73c1aceea\") " pod="openstack/nova-cell1-openstack-openstack-cell1-hxbfw"
Oct 09 15:42:30 crc kubenswrapper[4762]: I1009 15:42:30.810961 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a2527d19-46b0-464c-9eab-69e73c1aceea-inventory\") pod \"nova-cell1-openstack-openstack-cell1-hxbfw\" (UID: \"a2527d19-46b0-464c-9eab-69e73c1aceea\") " pod="openstack/nova-cell1-openstack-openstack-cell1-hxbfw"
Oct 09 15:42:30 crc kubenswrapper[4762]: I1009 15:42:30.810986 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a2527d19-46b0-464c-9eab-69e73c1aceea-ssh-key\") pod \"nova-cell1-openstack-openstack-cell1-hxbfw\" (UID: \"a2527d19-46b0-464c-9eab-69e73c1aceea\") " pod="openstack/nova-cell1-openstack-openstack-cell1-hxbfw"
Oct 09 15:42:30 crc kubenswrapper[4762]: I1009 15:42:30.811020 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/a2527d19-46b0-464c-9eab-69e73c1aceea-nova-migration-ssh-key-0\") pod \"nova-cell1-openstack-openstack-cell1-hxbfw\" (UID: \"a2527d19-46b0-464c-9eab-69e73c1aceea\") " pod="openstack/nova-cell1-openstack-openstack-cell1-hxbfw"
Oct 09 15:42:30 crc kubenswrapper[4762]: I1009 15:42:30.811066 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/a2527d19-46b0-464c-9eab-69e73c1aceea-ceph\") pod \"nova-cell1-openstack-openstack-cell1-hxbfw\" (UID: \"a2527d19-46b0-464c-9eab-69e73c1aceea\") " pod="openstack/nova-cell1-openstack-openstack-cell1-hxbfw"
Oct 09 15:42:30 crc kubenswrapper[4762]: I1009 15:42:30.811099 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/a2527d19-46b0-464c-9eab-69e73c1aceea-nova-migration-ssh-key-1\") pod \"nova-cell1-openstack-openstack-cell1-hxbfw\" (UID: \"a2527d19-46b0-464c-9eab-69e73c1aceea\") " pod="openstack/nova-cell1-openstack-openstack-cell1-hxbfw"
Oct 09 15:42:30 crc kubenswrapper[4762]: I1009 15:42:30.811124 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/a2527d19-46b0-464c-9eab-69e73c1aceea-nova-cell1-compute-config-1\") pod \"nova-cell1-openstack-openstack-cell1-hxbfw\" (UID: \"a2527d19-46b0-464c-9eab-69e73c1aceea\") " pod="openstack/nova-cell1-openstack-openstack-cell1-hxbfw"
Oct 09 15:42:30 crc kubenswrapper[4762]:
I1009 15:42:30.811146 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/a2527d19-46b0-464c-9eab-69e73c1aceea-nova-cell1-compute-config-0\") pod \"nova-cell1-openstack-openstack-cell1-hxbfw\" (UID: \"a2527d19-46b0-464c-9eab-69e73c1aceea\") " pod="openstack/nova-cell1-openstack-openstack-cell1-hxbfw" Oct 09 15:42:30 crc kubenswrapper[4762]: I1009 15:42:30.811166 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/a2527d19-46b0-464c-9eab-69e73c1aceea-nova-cells-global-config-0\") pod \"nova-cell1-openstack-openstack-cell1-hxbfw\" (UID: \"a2527d19-46b0-464c-9eab-69e73c1aceea\") " pod="openstack/nova-cell1-openstack-openstack-cell1-hxbfw" Oct 09 15:42:30 crc kubenswrapper[4762]: I1009 15:42:30.811187 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-529c5\" (UniqueName: \"kubernetes.io/projected/a2527d19-46b0-464c-9eab-69e73c1aceea-kube-api-access-529c5\") pod \"nova-cell1-openstack-openstack-cell1-hxbfw\" (UID: \"a2527d19-46b0-464c-9eab-69e73c1aceea\") " pod="openstack/nova-cell1-openstack-openstack-cell1-hxbfw" Oct 09 15:42:30 crc kubenswrapper[4762]: I1009 15:42:30.811204 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a2527d19-46b0-464c-9eab-69e73c1aceea-nova-cell1-combined-ca-bundle\") pod \"nova-cell1-openstack-openstack-cell1-hxbfw\" (UID: \"a2527d19-46b0-464c-9eab-69e73c1aceea\") " pod="openstack/nova-cell1-openstack-openstack-cell1-hxbfw" Oct 09 15:42:30 crc kubenswrapper[4762]: I1009 15:42:30.812468 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/a2527d19-46b0-464c-9eab-69e73c1aceea-nova-cells-global-config-0\") pod \"nova-cell1-openstack-openstack-cell1-hxbfw\" (UID: \"a2527d19-46b0-464c-9eab-69e73c1aceea\") " pod="openstack/nova-cell1-openstack-openstack-cell1-hxbfw" Oct 09 15:42:30 crc kubenswrapper[4762]: I1009 15:42:30.814623 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/a2527d19-46b0-464c-9eab-69e73c1aceea-nova-cells-global-config-1\") pod \"nova-cell1-openstack-openstack-cell1-hxbfw\" (UID: \"a2527d19-46b0-464c-9eab-69e73c1aceea\") " pod="openstack/nova-cell1-openstack-openstack-cell1-hxbfw" Oct 09 15:42:30 crc kubenswrapper[4762]: I1009 15:42:30.815075 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/a2527d19-46b0-464c-9eab-69e73c1aceea-nova-migration-ssh-key-0\") pod \"nova-cell1-openstack-openstack-cell1-hxbfw\" (UID: \"a2527d19-46b0-464c-9eab-69e73c1aceea\") " pod="openstack/nova-cell1-openstack-openstack-cell1-hxbfw" Oct 09 15:42:30 crc kubenswrapper[4762]: I1009 15:42:30.815688 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a2527d19-46b0-464c-9eab-69e73c1aceea-inventory\") pod \"nova-cell1-openstack-openstack-cell1-hxbfw\" (UID: \"a2527d19-46b0-464c-9eab-69e73c1aceea\") " pod="openstack/nova-cell1-openstack-openstack-cell1-hxbfw" Oct 09 15:42:30 crc kubenswrapper[4762]: I1009 15:42:30.815783 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a2527d19-46b0-464c-9eab-69e73c1aceea-nova-cell1-combined-ca-bundle\") pod \"nova-cell1-openstack-openstack-cell1-hxbfw\" (UID: \"a2527d19-46b0-464c-9eab-69e73c1aceea\") " pod="openstack/nova-cell1-openstack-openstack-cell1-hxbfw" Oct 09 15:42:30 crc kubenswrapper[4762]: I1009 15:42:30.816160 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/a2527d19-46b0-464c-9eab-69e73c1aceea-ceph\") pod \"nova-cell1-openstack-openstack-cell1-hxbfw\" (UID: \"a2527d19-46b0-464c-9eab-69e73c1aceea\") " pod="openstack/nova-cell1-openstack-openstack-cell1-hxbfw" Oct 09 15:42:30 crc kubenswrapper[4762]: I1009 15:42:30.816260 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/a2527d19-46b0-464c-9eab-69e73c1aceea-nova-migration-ssh-key-1\") pod \"nova-cell1-openstack-openstack-cell1-hxbfw\" (UID: \"a2527d19-46b0-464c-9eab-69e73c1aceea\") " pod="openstack/nova-cell1-openstack-openstack-cell1-hxbfw" Oct 09 15:42:30 crc kubenswrapper[4762]: I1009 15:42:30.816179 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/a2527d19-46b0-464c-9eab-69e73c1aceea-nova-cell1-compute-config-0\") pod \"nova-cell1-openstack-openstack-cell1-hxbfw\" (UID: \"a2527d19-46b0-464c-9eab-69e73c1aceea\") " pod="openstack/nova-cell1-openstack-openstack-cell1-hxbfw" Oct 09 15:42:30 crc kubenswrapper[4762]: I1009 15:42:30.816829 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/a2527d19-46b0-464c-9eab-69e73c1aceea-nova-cell1-compute-config-1\") pod \"nova-cell1-openstack-openstack-cell1-hxbfw\" (UID: \"a2527d19-46b0-464c-9eab-69e73c1aceea\") " pod="openstack/nova-cell1-openstack-openstack-cell1-hxbfw" Oct 09 15:42:30 crc kubenswrapper[4762]: I1009 15:42:30.826357 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a2527d19-46b0-464c-9eab-69e73c1aceea-ssh-key\") pod \"nova-cell1-openstack-openstack-cell1-hxbfw\" (UID: \"a2527d19-46b0-464c-9eab-69e73c1aceea\") " pod="openstack/nova-cell1-openstack-openstack-cell1-hxbfw" Oct 09 15:42:30 crc kubenswrapper[4762]: I1009 15:42:30.831274 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-529c5\" (UniqueName: \"kubernetes.io/projected/a2527d19-46b0-464c-9eab-69e73c1aceea-kube-api-access-529c5\") pod \"nova-cell1-openstack-openstack-cell1-hxbfw\" (UID: \"a2527d19-46b0-464c-9eab-69e73c1aceea\") " pod="openstack/nova-cell1-openstack-openstack-cell1-hxbfw" Oct 09 15:42:30 crc kubenswrapper[4762]: I1009 15:42:30.930898 4762 util.go:30] "No sandbox for pod can be found. 
Oct 09 15:42:31 crc kubenswrapper[4762]: I1009 15:42:31.489213 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-openstack-openstack-cell1-hxbfw"]
Oct 09 15:42:32 crc kubenswrapper[4762]: I1009 15:42:32.488691 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-openstack-openstack-cell1-hxbfw" event={"ID":"a2527d19-46b0-464c-9eab-69e73c1aceea","Type":"ContainerStarted","Data":"d34fe9fdfbc99817fe50777ae6962886a92e77b9051f8b53100850f0e2e1dc97"}
Oct 09 15:42:32 crc kubenswrapper[4762]: I1009 15:42:32.489068 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-openstack-openstack-cell1-hxbfw" event={"ID":"a2527d19-46b0-464c-9eab-69e73c1aceea","Type":"ContainerStarted","Data":"a2800dcd739eb3325e4aacc35acdeb7201078c36c19664fd0a4011b790fae31b"}
Oct 09 15:42:32 crc kubenswrapper[4762]: I1009 15:42:32.511720 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-openstack-openstack-cell1-hxbfw" podStartSLOduration=2.052114118 podStartE2EDuration="2.511693524s" podCreationTimestamp="2025-10-09 15:42:30 +0000 UTC" firstStartedPulling="2025-10-09 15:42:31.481788457 +0000 UTC m=+8227.255579496" lastFinishedPulling="2025-10-09 15:42:31.941367863 +0000 UTC m=+8227.715158902" observedRunningTime="2025-10-09 15:42:32.503384428 +0000 UTC m=+8228.277175487" watchObservedRunningTime="2025-10-09 15:42:32.511693524 +0000 UTC m=+8228.285484583"
Oct 09 15:42:37 crc kubenswrapper[4762]: I1009 15:42:37.966675 4762 scope.go:117] "RemoveContainer" containerID="207079ce18565c7a249f5d4caa9c694e48f23ecdfd20db7075fdc36cd1ceb1b7"
Oct 09 15:42:37 crc kubenswrapper[4762]: E1009 15:42:37.968917 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14"
Oct 09 15:42:49 crc kubenswrapper[4762]: I1009 15:42:49.966605 4762 scope.go:117] "RemoveContainer" containerID="207079ce18565c7a249f5d4caa9c694e48f23ecdfd20db7075fdc36cd1ceb1b7"
Oct 09 15:42:49 crc kubenswrapper[4762]: E1009 15:42:49.967372 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14"
Oct 09 15:43:00 crc kubenswrapper[4762]: I1009 15:43:00.965342 4762 scope.go:117] "RemoveContainer" containerID="207079ce18565c7a249f5d4caa9c694e48f23ecdfd20db7075fdc36cd1ceb1b7"
Oct 09 15:43:00 crc kubenswrapper[4762]: E1009 15:43:00.966260 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14"
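The pod_startup_latency_tracker entry above is internally consistent: podStartE2EDuration is watchObservedRunningTime minus podCreationTimestamp, and podStartSLOduration appears to be that same span minus the image-pull window (lastFinishedPulling minus firstStartedPulling). A sketch checking the arithmetic against the logged values:

```go
package main

import (
	"fmt"
	"time"
)

// mustParse parses the timestamp format used in the log above.
func mustParse(v string) time.Time {
	t, err := time.Parse("2006-01-02 15:04:05.999999999 -0700 MST", v)
	if err != nil {
		panic(err)
	}
	return t
}

func main() {
	created := mustParse("2025-10-09 15:42:30 +0000 UTC")
	firstPull := mustParse("2025-10-09 15:42:31.481788457 +0000 UTC")
	lastPull := mustParse("2025-10-09 15:42:31.941367863 +0000 UTC")
	observed := mustParse("2025-10-09 15:42:32.511693524 +0000 UTC")

	e2e := observed.Sub(created)          // podStartE2EDuration
	slo := e2e - lastPull.Sub(firstPull)  // end-to-end minus image-pull time

	fmt.Println(e2e) // 2.511693524s, matching podStartE2EDuration
	fmt.Println(slo) // 2.052114118s, matching podStartSLOduration
}
```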
podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:43:13 crc kubenswrapper[4762]: I1009 15:43:13.965581 4762 scope.go:117] "RemoveContainer" containerID="207079ce18565c7a249f5d4caa9c694e48f23ecdfd20db7075fdc36cd1ceb1b7" Oct 09 15:43:13 crc kubenswrapper[4762]: E1009 15:43:13.966623 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:43:25 crc kubenswrapper[4762]: I1009 15:43:25.966057 4762 scope.go:117] "RemoveContainer" containerID="207079ce18565c7a249f5d4caa9c694e48f23ecdfd20db7075fdc36cd1ceb1b7" Oct 09 15:43:25 crc kubenswrapper[4762]: E1009 15:43:25.966997 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:43:37 crc kubenswrapper[4762]: I1009 15:43:37.966820 4762 scope.go:117] "RemoveContainer" containerID="207079ce18565c7a249f5d4caa9c694e48f23ecdfd20db7075fdc36cd1ceb1b7" Oct 09 15:43:37 crc kubenswrapper[4762]: E1009 15:43:37.968498 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:43:49 crc kubenswrapper[4762]: I1009 15:43:49.966090 4762 scope.go:117] "RemoveContainer" containerID="207079ce18565c7a249f5d4caa9c694e48f23ecdfd20db7075fdc36cd1ceb1b7" Oct 09 15:43:49 crc kubenswrapper[4762]: E1009 15:43:49.966942 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:44:01 crc kubenswrapper[4762]: I1009 15:44:01.965078 4762 scope.go:117] "RemoveContainer" containerID="207079ce18565c7a249f5d4caa9c694e48f23ecdfd20db7075fdc36cd1ceb1b7" Oct 09 15:44:01 crc kubenswrapper[4762]: E1009 15:44:01.966068 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:44:16 crc kubenswrapper[4762]: I1009 15:44:16.966262 4762 scope.go:117] "RemoveContainer" 
containerID="207079ce18565c7a249f5d4caa9c694e48f23ecdfd20db7075fdc36cd1ceb1b7" Oct 09 15:44:16 crc kubenswrapper[4762]: E1009 15:44:16.967143 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:44:29 crc kubenswrapper[4762]: I1009 15:44:29.965077 4762 scope.go:117] "RemoveContainer" containerID="207079ce18565c7a249f5d4caa9c694e48f23ecdfd20db7075fdc36cd1ceb1b7" Oct 09 15:44:29 crc kubenswrapper[4762]: E1009 15:44:29.965806 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:44:42 crc kubenswrapper[4762]: I1009 15:44:42.965353 4762 scope.go:117] "RemoveContainer" containerID="207079ce18565c7a249f5d4caa9c694e48f23ecdfd20db7075fdc36cd1ceb1b7" Oct 09 15:44:42 crc kubenswrapper[4762]: E1009 15:44:42.966089 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:44:54 crc kubenswrapper[4762]: I1009 15:44:54.970532 4762 scope.go:117] "RemoveContainer" containerID="207079ce18565c7a249f5d4caa9c694e48f23ecdfd20db7075fdc36cd1ceb1b7" Oct 09 15:44:54 crc kubenswrapper[4762]: E1009 15:44:54.971500 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:45:00 crc kubenswrapper[4762]: I1009 15:45:00.155010 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29333745-m64nt"] Oct 09 15:45:00 crc kubenswrapper[4762]: I1009 15:45:00.157424 4762 util.go:30] "No sandbox for pod can be found. 
Oct 09 15:45:00 crc kubenswrapper[4762]: I1009 15:45:00.165390 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Oct 09 15:45:00 crc kubenswrapper[4762]: I1009 15:45:00.166345 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Oct 09 15:45:00 crc kubenswrapper[4762]: I1009 15:45:00.196089 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29333745-m64nt"]
Oct 09 15:45:00 crc kubenswrapper[4762]: I1009 15:45:00.301438 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a2db795f-b782-4b6a-b46f-0fb757434cac-secret-volume\") pod \"collect-profiles-29333745-m64nt\" (UID: \"a2db795f-b782-4b6a-b46f-0fb757434cac\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333745-m64nt"
Oct 09 15:45:00 crc kubenswrapper[4762]: I1009 15:45:00.301946 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a2db795f-b782-4b6a-b46f-0fb757434cac-config-volume\") pod \"collect-profiles-29333745-m64nt\" (UID: \"a2db795f-b782-4b6a-b46f-0fb757434cac\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333745-m64nt"
Oct 09 15:45:00 crc kubenswrapper[4762]: I1009 15:45:00.302025 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-89x7s\" (UniqueName: \"kubernetes.io/projected/a2db795f-b782-4b6a-b46f-0fb757434cac-kube-api-access-89x7s\") pod \"collect-profiles-29333745-m64nt\" (UID: \"a2db795f-b782-4b6a-b46f-0fb757434cac\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333745-m64nt"
Oct 09 15:45:00 crc kubenswrapper[4762]: I1009 15:45:00.404521 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a2db795f-b782-4b6a-b46f-0fb757434cac-secret-volume\") pod \"collect-profiles-29333745-m64nt\" (UID: \"a2db795f-b782-4b6a-b46f-0fb757434cac\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333745-m64nt"
Oct 09 15:45:00 crc kubenswrapper[4762]: I1009 15:45:00.404620 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a2db795f-b782-4b6a-b46f-0fb757434cac-config-volume\") pod \"collect-profiles-29333745-m64nt\" (UID: \"a2db795f-b782-4b6a-b46f-0fb757434cac\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333745-m64nt"
Oct 09 15:45:00 crc kubenswrapper[4762]: I1009 15:45:00.404684 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-89x7s\" (UniqueName: \"kubernetes.io/projected/a2db795f-b782-4b6a-b46f-0fb757434cac-kube-api-access-89x7s\") pod \"collect-profiles-29333745-m64nt\" (UID: \"a2db795f-b782-4b6a-b46f-0fb757434cac\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333745-m64nt"
Oct 09 15:45:00 crc kubenswrapper[4762]: I1009 15:45:00.405586 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a2db795f-b782-4b6a-b46f-0fb757434cac-config-volume\") pod
\"collect-profiles-29333745-m64nt\" (UID: \"a2db795f-b782-4b6a-b46f-0fb757434cac\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333745-m64nt" Oct 09 15:45:00 crc kubenswrapper[4762]: I1009 15:45:00.411021 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a2db795f-b782-4b6a-b46f-0fb757434cac-secret-volume\") pod \"collect-profiles-29333745-m64nt\" (UID: \"a2db795f-b782-4b6a-b46f-0fb757434cac\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333745-m64nt" Oct 09 15:45:00 crc kubenswrapper[4762]: I1009 15:45:00.421698 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-89x7s\" (UniqueName: \"kubernetes.io/projected/a2db795f-b782-4b6a-b46f-0fb757434cac-kube-api-access-89x7s\") pod \"collect-profiles-29333745-m64nt\" (UID: \"a2db795f-b782-4b6a-b46f-0fb757434cac\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333745-m64nt" Oct 09 15:45:00 crc kubenswrapper[4762]: I1009 15:45:00.512896 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29333745-m64nt" Oct 09 15:45:01 crc kubenswrapper[4762]: I1009 15:45:01.012547 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29333745-m64nt"] Oct 09 15:45:01 crc kubenswrapper[4762]: I1009 15:45:01.942988 4762 generic.go:334] "Generic (PLEG): container finished" podID="a2db795f-b782-4b6a-b46f-0fb757434cac" containerID="699682f1f7223f81a3591db141127a07d2a38e5736f992cbc2c99b3751c81277" exitCode=0 Oct 09 15:45:01 crc kubenswrapper[4762]: I1009 15:45:01.943059 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29333745-m64nt" event={"ID":"a2db795f-b782-4b6a-b46f-0fb757434cac","Type":"ContainerDied","Data":"699682f1f7223f81a3591db141127a07d2a38e5736f992cbc2c99b3751c81277"} Oct 09 15:45:01 crc kubenswrapper[4762]: I1009 15:45:01.943535 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29333745-m64nt" event={"ID":"a2db795f-b782-4b6a-b46f-0fb757434cac","Type":"ContainerStarted","Data":"6704014acac0b2c86ce0db5e306b43c1ce4ad9e96b8efbadb4f536cb5250503d"} Oct 09 15:45:03 crc kubenswrapper[4762]: I1009 15:45:03.351702 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29333745-m64nt" Oct 09 15:45:03 crc kubenswrapper[4762]: I1009 15:45:03.477332 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a2db795f-b782-4b6a-b46f-0fb757434cac-config-volume\") pod \"a2db795f-b782-4b6a-b46f-0fb757434cac\" (UID: \"a2db795f-b782-4b6a-b46f-0fb757434cac\") " Oct 09 15:45:03 crc kubenswrapper[4762]: I1009 15:45:03.477745 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a2db795f-b782-4b6a-b46f-0fb757434cac-secret-volume\") pod \"a2db795f-b782-4b6a-b46f-0fb757434cac\" (UID: \"a2db795f-b782-4b6a-b46f-0fb757434cac\") " Oct 09 15:45:03 crc kubenswrapper[4762]: I1009 15:45:03.477857 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-89x7s\" (UniqueName: \"kubernetes.io/projected/a2db795f-b782-4b6a-b46f-0fb757434cac-kube-api-access-89x7s\") pod \"a2db795f-b782-4b6a-b46f-0fb757434cac\" (UID: \"a2db795f-b782-4b6a-b46f-0fb757434cac\") " Oct 09 15:45:03 crc kubenswrapper[4762]: I1009 15:45:03.478667 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a2db795f-b782-4b6a-b46f-0fb757434cac-config-volume" (OuterVolumeSpecName: "config-volume") pod "a2db795f-b782-4b6a-b46f-0fb757434cac" (UID: "a2db795f-b782-4b6a-b46f-0fb757434cac"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 15:45:03 crc kubenswrapper[4762]: I1009 15:45:03.479154 4762 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a2db795f-b782-4b6a-b46f-0fb757434cac-config-volume\") on node \"crc\" DevicePath \"\"" Oct 09 15:45:03 crc kubenswrapper[4762]: I1009 15:45:03.484278 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a2db795f-b782-4b6a-b46f-0fb757434cac-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "a2db795f-b782-4b6a-b46f-0fb757434cac" (UID: "a2db795f-b782-4b6a-b46f-0fb757434cac"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:45:03 crc kubenswrapper[4762]: I1009 15:45:03.484939 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a2db795f-b782-4b6a-b46f-0fb757434cac-kube-api-access-89x7s" (OuterVolumeSpecName: "kube-api-access-89x7s") pod "a2db795f-b782-4b6a-b46f-0fb757434cac" (UID: "a2db795f-b782-4b6a-b46f-0fb757434cac"). InnerVolumeSpecName "kube-api-access-89x7s". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 15:45:03 crc kubenswrapper[4762]: I1009 15:45:03.582774 4762 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a2db795f-b782-4b6a-b46f-0fb757434cac-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 09 15:45:03 crc kubenswrapper[4762]: I1009 15:45:03.582849 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-89x7s\" (UniqueName: \"kubernetes.io/projected/a2db795f-b782-4b6a-b46f-0fb757434cac-kube-api-access-89x7s\") on node \"crc\" DevicePath \"\"" Oct 09 15:45:03 crc kubenswrapper[4762]: I1009 15:45:03.962927 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29333745-m64nt" event={"ID":"a2db795f-b782-4b6a-b46f-0fb757434cac","Type":"ContainerDied","Data":"6704014acac0b2c86ce0db5e306b43c1ce4ad9e96b8efbadb4f536cb5250503d"} Oct 09 15:45:03 crc kubenswrapper[4762]: I1009 15:45:03.963248 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6704014acac0b2c86ce0db5e306b43c1ce4ad9e96b8efbadb4f536cb5250503d" Oct 09 15:45:03 crc kubenswrapper[4762]: I1009 15:45:03.962963 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29333745-m64nt" Oct 09 15:45:04 crc kubenswrapper[4762]: I1009 15:45:04.429415 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29333700-jl2hr"] Oct 09 15:45:04 crc kubenswrapper[4762]: I1009 15:45:04.437518 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29333700-jl2hr"] Oct 09 15:45:04 crc kubenswrapper[4762]: I1009 15:45:04.980731 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ac9d3991-fdff-41b8-8f2c-7ee68463518b" path="/var/lib/kubelet/pods/ac9d3991-fdff-41b8-8f2c-7ee68463518b/volumes" Oct 09 15:45:08 crc kubenswrapper[4762]: I1009 15:45:08.965670 4762 scope.go:117] "RemoveContainer" containerID="207079ce18565c7a249f5d4caa9c694e48f23ecdfd20db7075fdc36cd1ceb1b7" Oct 09 15:45:08 crc kubenswrapper[4762]: E1009 15:45:08.966191 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:45:22 crc kubenswrapper[4762]: I1009 15:45:22.966420 4762 scope.go:117] "RemoveContainer" containerID="207079ce18565c7a249f5d4caa9c694e48f23ecdfd20db7075fdc36cd1ceb1b7" Oct 09 15:45:22 crc kubenswrapper[4762]: E1009 15:45:22.968275 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:45:36 crc kubenswrapper[4762]: I1009 15:45:36.966153 4762 scope.go:117] "RemoveContainer" containerID="207079ce18565c7a249f5d4caa9c694e48f23ecdfd20db7075fdc36cd1ceb1b7" Oct 09 
Oct 09 15:45:36 crc kubenswrapper[4762]: E1009 15:45:36.967119 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14"
Oct 09 15:45:38 crc kubenswrapper[4762]: I1009 15:45:38.929920 4762 scope.go:117] "RemoveContainer" containerID="9e20acdabed285a9eeb1dc1611543c123515b746fbfedfef4d2fa0f7c216f8dd"
Oct 09 15:45:48 crc kubenswrapper[4762]: I1009 15:45:48.965332 4762 scope.go:117] "RemoveContainer" containerID="207079ce18565c7a249f5d4caa9c694e48f23ecdfd20db7075fdc36cd1ceb1b7"
Oct 09 15:45:48 crc kubenswrapper[4762]: E1009 15:45:48.966040 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14"
Oct 09 15:45:59 crc kubenswrapper[4762]: I1009 15:45:59.966021 4762 scope.go:117] "RemoveContainer" containerID="207079ce18565c7a249f5d4caa9c694e48f23ecdfd20db7075fdc36cd1ceb1b7"
Oct 09 15:45:59 crc kubenswrapper[4762]: E1009 15:45:59.966873 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14"
Oct 09 15:46:03 crc kubenswrapper[4762]: I1009 15:46:03.597148 4762 generic.go:334] "Generic (PLEG): container finished" podID="a2527d19-46b0-464c-9eab-69e73c1aceea" containerID="d34fe9fdfbc99817fe50777ae6962886a92e77b9051f8b53100850f0e2e1dc97" exitCode=0
Oct 09 15:46:03 crc kubenswrapper[4762]: I1009 15:46:03.597257 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-openstack-openstack-cell1-hxbfw" event={"ID":"a2527d19-46b0-464c-9eab-69e73c1aceea","Type":"ContainerDied","Data":"d34fe9fdfbc99817fe50777ae6962886a92e77b9051f8b53100850f0e2e1dc97"}
Oct 09 15:46:05 crc kubenswrapper[4762]: I1009 15:46:05.108495 4762 util.go:48] "No ready sandbox for pod can be found.
Need to start a new one" pod="openstack/nova-cell1-openstack-openstack-cell1-hxbfw" Oct 09 15:46:05 crc kubenswrapper[4762]: I1009 15:46:05.164686 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/a2527d19-46b0-464c-9eab-69e73c1aceea-nova-cells-global-config-0\") pod \"a2527d19-46b0-464c-9eab-69e73c1aceea\" (UID: \"a2527d19-46b0-464c-9eab-69e73c1aceea\") " Oct 09 15:46:05 crc kubenswrapper[4762]: I1009 15:46:05.165609 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/a2527d19-46b0-464c-9eab-69e73c1aceea-nova-cells-global-config-1\") pod \"a2527d19-46b0-464c-9eab-69e73c1aceea\" (UID: \"a2527d19-46b0-464c-9eab-69e73c1aceea\") " Oct 09 15:46:05 crc kubenswrapper[4762]: I1009 15:46:05.165678 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/a2527d19-46b0-464c-9eab-69e73c1aceea-nova-cell1-compute-config-1\") pod \"a2527d19-46b0-464c-9eab-69e73c1aceea\" (UID: \"a2527d19-46b0-464c-9eab-69e73c1aceea\") " Oct 09 15:46:05 crc kubenswrapper[4762]: I1009 15:46:05.165748 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/a2527d19-46b0-464c-9eab-69e73c1aceea-nova-migration-ssh-key-0\") pod \"a2527d19-46b0-464c-9eab-69e73c1aceea\" (UID: \"a2527d19-46b0-464c-9eab-69e73c1aceea\") " Oct 09 15:46:05 crc kubenswrapper[4762]: I1009 15:46:05.165811 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/a2527d19-46b0-464c-9eab-69e73c1aceea-nova-migration-ssh-key-1\") pod \"a2527d19-46b0-464c-9eab-69e73c1aceea\" (UID: \"a2527d19-46b0-464c-9eab-69e73c1aceea\") " Oct 09 15:46:05 crc kubenswrapper[4762]: I1009 15:46:05.165845 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/a2527d19-46b0-464c-9eab-69e73c1aceea-ceph\") pod \"a2527d19-46b0-464c-9eab-69e73c1aceea\" (UID: \"a2527d19-46b0-464c-9eab-69e73c1aceea\") " Oct 09 15:46:05 crc kubenswrapper[4762]: I1009 15:46:05.165892 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/a2527d19-46b0-464c-9eab-69e73c1aceea-nova-cell1-compute-config-0\") pod \"a2527d19-46b0-464c-9eab-69e73c1aceea\" (UID: \"a2527d19-46b0-464c-9eab-69e73c1aceea\") " Oct 09 15:46:05 crc kubenswrapper[4762]: I1009 15:46:05.165942 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a2527d19-46b0-464c-9eab-69e73c1aceea-nova-cell1-combined-ca-bundle\") pod \"a2527d19-46b0-464c-9eab-69e73c1aceea\" (UID: \"a2527d19-46b0-464c-9eab-69e73c1aceea\") " Oct 09 15:46:05 crc kubenswrapper[4762]: I1009 15:46:05.166036 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a2527d19-46b0-464c-9eab-69e73c1aceea-ssh-key\") pod \"a2527d19-46b0-464c-9eab-69e73c1aceea\" (UID: \"a2527d19-46b0-464c-9eab-69e73c1aceea\") " Oct 09 15:46:05 crc kubenswrapper[4762]: I1009 15:46:05.166083 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"kube-api-access-529c5\" (UniqueName: \"kubernetes.io/projected/a2527d19-46b0-464c-9eab-69e73c1aceea-kube-api-access-529c5\") pod \"a2527d19-46b0-464c-9eab-69e73c1aceea\" (UID: \"a2527d19-46b0-464c-9eab-69e73c1aceea\") " Oct 09 15:46:05 crc kubenswrapper[4762]: I1009 15:46:05.166157 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a2527d19-46b0-464c-9eab-69e73c1aceea-inventory\") pod \"a2527d19-46b0-464c-9eab-69e73c1aceea\" (UID: \"a2527d19-46b0-464c-9eab-69e73c1aceea\") " Oct 09 15:46:05 crc kubenswrapper[4762]: I1009 15:46:05.173109 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a2527d19-46b0-464c-9eab-69e73c1aceea-kube-api-access-529c5" (OuterVolumeSpecName: "kube-api-access-529c5") pod "a2527d19-46b0-464c-9eab-69e73c1aceea" (UID: "a2527d19-46b0-464c-9eab-69e73c1aceea"). InnerVolumeSpecName "kube-api-access-529c5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 15:46:05 crc kubenswrapper[4762]: I1009 15:46:05.174137 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a2527d19-46b0-464c-9eab-69e73c1aceea-ceph" (OuterVolumeSpecName: "ceph") pod "a2527d19-46b0-464c-9eab-69e73c1aceea" (UID: "a2527d19-46b0-464c-9eab-69e73c1aceea"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:46:05 crc kubenswrapper[4762]: I1009 15:46:05.181799 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a2527d19-46b0-464c-9eab-69e73c1aceea-nova-cell1-combined-ca-bundle" (OuterVolumeSpecName: "nova-cell1-combined-ca-bundle") pod "a2527d19-46b0-464c-9eab-69e73c1aceea" (UID: "a2527d19-46b0-464c-9eab-69e73c1aceea"). InnerVolumeSpecName "nova-cell1-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:46:05 crc kubenswrapper[4762]: I1009 15:46:05.199970 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a2527d19-46b0-464c-9eab-69e73c1aceea-nova-cells-global-config-1" (OuterVolumeSpecName: "nova-cells-global-config-1") pod "a2527d19-46b0-464c-9eab-69e73c1aceea" (UID: "a2527d19-46b0-464c-9eab-69e73c1aceea"). InnerVolumeSpecName "nova-cells-global-config-1". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 15:46:05 crc kubenswrapper[4762]: I1009 15:46:05.202122 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a2527d19-46b0-464c-9eab-69e73c1aceea-nova-cells-global-config-0" (OuterVolumeSpecName: "nova-cells-global-config-0") pod "a2527d19-46b0-464c-9eab-69e73c1aceea" (UID: "a2527d19-46b0-464c-9eab-69e73c1aceea"). InnerVolumeSpecName "nova-cells-global-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 15:46:05 crc kubenswrapper[4762]: I1009 15:46:05.207580 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a2527d19-46b0-464c-9eab-69e73c1aceea-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "a2527d19-46b0-464c-9eab-69e73c1aceea" (UID: "a2527d19-46b0-464c-9eab-69e73c1aceea"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:46:05 crc kubenswrapper[4762]: I1009 15:46:05.207690 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a2527d19-46b0-464c-9eab-69e73c1aceea-nova-migration-ssh-key-0" (OuterVolumeSpecName: "nova-migration-ssh-key-0") pod "a2527d19-46b0-464c-9eab-69e73c1aceea" (UID: "a2527d19-46b0-464c-9eab-69e73c1aceea"). InnerVolumeSpecName "nova-migration-ssh-key-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:46:05 crc kubenswrapper[4762]: I1009 15:46:05.208426 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a2527d19-46b0-464c-9eab-69e73c1aceea-nova-migration-ssh-key-1" (OuterVolumeSpecName: "nova-migration-ssh-key-1") pod "a2527d19-46b0-464c-9eab-69e73c1aceea" (UID: "a2527d19-46b0-464c-9eab-69e73c1aceea"). InnerVolumeSpecName "nova-migration-ssh-key-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:46:05 crc kubenswrapper[4762]: I1009 15:46:05.215253 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a2527d19-46b0-464c-9eab-69e73c1aceea-inventory" (OuterVolumeSpecName: "inventory") pod "a2527d19-46b0-464c-9eab-69e73c1aceea" (UID: "a2527d19-46b0-464c-9eab-69e73c1aceea"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:46:05 crc kubenswrapper[4762]: I1009 15:46:05.225875 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a2527d19-46b0-464c-9eab-69e73c1aceea-nova-cell1-compute-config-1" (OuterVolumeSpecName: "nova-cell1-compute-config-1") pod "a2527d19-46b0-464c-9eab-69e73c1aceea" (UID: "a2527d19-46b0-464c-9eab-69e73c1aceea"). InnerVolumeSpecName "nova-cell1-compute-config-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:46:05 crc kubenswrapper[4762]: I1009 15:46:05.231354 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a2527d19-46b0-464c-9eab-69e73c1aceea-nova-cell1-compute-config-0" (OuterVolumeSpecName: "nova-cell1-compute-config-0") pod "a2527d19-46b0-464c-9eab-69e73c1aceea" (UID: "a2527d19-46b0-464c-9eab-69e73c1aceea"). InnerVolumeSpecName "nova-cell1-compute-config-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:46:05 crc kubenswrapper[4762]: I1009 15:46:05.268808 4762 reconciler_common.go:293] "Volume detached for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/a2527d19-46b0-464c-9eab-69e73c1aceea-nova-cells-global-config-1\") on node \"crc\" DevicePath \"\"" Oct 09 15:46:05 crc kubenswrapper[4762]: I1009 15:46:05.268842 4762 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/a2527d19-46b0-464c-9eab-69e73c1aceea-nova-cell1-compute-config-1\") on node \"crc\" DevicePath \"\"" Oct 09 15:46:05 crc kubenswrapper[4762]: I1009 15:46:05.268852 4762 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/a2527d19-46b0-464c-9eab-69e73c1aceea-nova-migration-ssh-key-0\") on node \"crc\" DevicePath \"\"" Oct 09 15:46:05 crc kubenswrapper[4762]: I1009 15:46:05.268862 4762 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/a2527d19-46b0-464c-9eab-69e73c1aceea-nova-migration-ssh-key-1\") on node \"crc\" DevicePath \"\"" Oct 09 15:46:05 crc kubenswrapper[4762]: I1009 15:46:05.268871 4762 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/a2527d19-46b0-464c-9eab-69e73c1aceea-ceph\") on node \"crc\" DevicePath \"\"" Oct 09 15:46:05 crc kubenswrapper[4762]: I1009 15:46:05.268881 4762 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/a2527d19-46b0-464c-9eab-69e73c1aceea-nova-cell1-compute-config-0\") on node \"crc\" DevicePath \"\"" Oct 09 15:46:05 crc kubenswrapper[4762]: I1009 15:46:05.268890 4762 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a2527d19-46b0-464c-9eab-69e73c1aceea-nova-cell1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 15:46:05 crc kubenswrapper[4762]: I1009 15:46:05.268903 4762 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a2527d19-46b0-464c-9eab-69e73c1aceea-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 09 15:46:05 crc kubenswrapper[4762]: I1009 15:46:05.268913 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-529c5\" (UniqueName: \"kubernetes.io/projected/a2527d19-46b0-464c-9eab-69e73c1aceea-kube-api-access-529c5\") on node \"crc\" DevicePath \"\"" Oct 09 15:46:05 crc kubenswrapper[4762]: I1009 15:46:05.268921 4762 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a2527d19-46b0-464c-9eab-69e73c1aceea-inventory\") on node \"crc\" DevicePath \"\"" Oct 09 15:46:05 crc kubenswrapper[4762]: I1009 15:46:05.268929 4762 reconciler_common.go:293] "Volume detached for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/a2527d19-46b0-464c-9eab-69e73c1aceea-nova-cells-global-config-0\") on node \"crc\" DevicePath \"\"" Oct 09 15:46:05 crc kubenswrapper[4762]: I1009 15:46:05.620417 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-openstack-openstack-cell1-hxbfw" event={"ID":"a2527d19-46b0-464c-9eab-69e73c1aceea","Type":"ContainerDied","Data":"a2800dcd739eb3325e4aacc35acdeb7201078c36c19664fd0a4011b790fae31b"} Oct 09 15:46:05 crc kubenswrapper[4762]: I1009 15:46:05.620477 4762 pod_container_deletor.go:80] "Container not 
found in pod's containers" containerID="a2800dcd739eb3325e4aacc35acdeb7201078c36c19664fd0a4011b790fae31b"
Oct 09 15:46:05 crc kubenswrapper[4762]: I1009 15:46:05.620513 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-openstack-openstack-cell1-hxbfw"
Oct 09 15:46:05 crc kubenswrapper[4762]: I1009 15:46:05.718951 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/telemetry-openstack-openstack-cell1-rl6rc"]
Oct 09 15:46:05 crc kubenswrapper[4762]: E1009 15:46:05.719515 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a2db795f-b782-4b6a-b46f-0fb757434cac" containerName="collect-profiles"
Oct 09 15:46:05 crc kubenswrapper[4762]: I1009 15:46:05.719532 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="a2db795f-b782-4b6a-b46f-0fb757434cac" containerName="collect-profiles"
Oct 09 15:46:05 crc kubenswrapper[4762]: E1009 15:46:05.719599 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a2527d19-46b0-464c-9eab-69e73c1aceea" containerName="nova-cell1-openstack-openstack-cell1"
Oct 09 15:46:05 crc kubenswrapper[4762]: I1009 15:46:05.719610 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="a2527d19-46b0-464c-9eab-69e73c1aceea" containerName="nova-cell1-openstack-openstack-cell1"
Oct 09 15:46:05 crc kubenswrapper[4762]: I1009 15:46:05.719889 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="a2db795f-b782-4b6a-b46f-0fb757434cac" containerName="collect-profiles"
Oct 09 15:46:05 crc kubenswrapper[4762]: I1009 15:46:05.719925 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="a2527d19-46b0-464c-9eab-69e73c1aceea" containerName="nova-cell1-openstack-openstack-cell1"
Oct 09 15:46:05 crc kubenswrapper[4762]: I1009 15:46:05.720895 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-openstack-openstack-cell1-rl6rc"
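The cpu_manager.go:410 / state_mem.go:107 / memory_manager.go:354 entries above fire as the new pod is admitted: the resource managers sweep their state and drop CPU and memory assignments still recorded for containers whose pods no longer exist. A toy version of that sweep, with illustrative types rather than kubelet's real state API:

```go
package main

import "fmt"

// Assignments are keyed by (podUID, containerName); any entry whose pod
// is no longer active gets dropped, mirroring the log messages above.
type key struct{ podUID, container string }

func removeStaleState(assignments map[key]string, activePods map[string]bool) {
	for k := range assignments {
		if !activePods[k.podUID] {
			fmt.Printf("RemoveStaleState: removing container %q of pod %s\n", k.container, k.podUID)
			delete(assignments, k) // "Deleted CPUSet assignment"
		}
	}
}

func main() {
	assignments := map[key]string{
		{"a2db795f", "collect-profiles"}:                      "2-3",
		{"a2527d19", "nova-cell1-openstack-openstack-cell1"}:  "4-5",
	}
	removeStaleState(assignments, map[string]bool{}) // neither pod is active any more
	fmt.Println(len(assignments))                    // 0
}
```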
Oct 09 15:46:05 crc kubenswrapper[4762]: I1009 15:46:05.722865 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1"
Oct 09 15:46:05 crc kubenswrapper[4762]: I1009 15:46:05.723025 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-whcgt"
Oct 09 15:46:05 crc kubenswrapper[4762]: I1009 15:46:05.723038 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-compute-config-data"
Oct 09 15:46:05 crc kubenswrapper[4762]: I1009 15:46:05.723373 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret"
Oct 09 15:46:05 crc kubenswrapper[4762]: I1009 15:46:05.723378 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Oct 09 15:46:05 crc kubenswrapper[4762]: I1009 15:46:05.732505 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-openstack-openstack-cell1-rl6rc"]
Oct 09 15:46:05 crc kubenswrapper[4762]: I1009 15:46:05.781785 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb-inventory\") pod \"telemetry-openstack-openstack-cell1-rl6rc\" (UID: \"846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb\") " pod="openstack/telemetry-openstack-openstack-cell1-rl6rc"
Oct 09 15:46:05 crc kubenswrapper[4762]: I1009 15:46:05.781871 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb-telemetry-combined-ca-bundle\") pod \"telemetry-openstack-openstack-cell1-rl6rc\" (UID: \"846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb\") " pod="openstack/telemetry-openstack-openstack-cell1-rl6rc"
Oct 09 15:46:05 crc kubenswrapper[4762]: I1009 15:46:05.781931 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb-ceph\") pod \"telemetry-openstack-openstack-cell1-rl6rc\" (UID: \"846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb\") " pod="openstack/telemetry-openstack-openstack-cell1-rl6rc"
Oct 09 15:46:05 crc kubenswrapper[4762]: I1009 15:46:05.781965 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb-ceilometer-compute-config-data-2\") pod \"telemetry-openstack-openstack-cell1-rl6rc\" (UID: \"846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb\") " pod="openstack/telemetry-openstack-openstack-cell1-rl6rc"
Oct 09 15:46:05 crc kubenswrapper[4762]: I1009 15:46:05.782068 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb-ceilometer-compute-config-data-0\") pod \"telemetry-openstack-openstack-cell1-rl6rc\" (UID: \"846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb\") " pod="openstack/telemetry-openstack-openstack-cell1-rl6rc"
Oct 09 15:46:05 crc kubenswrapper[4762]: I1009 15:46:05.782105 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName:
\"kubernetes.io/secret/846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb-ssh-key\") pod \"telemetry-openstack-openstack-cell1-rl6rc\" (UID: \"846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb\") " pod="openstack/telemetry-openstack-openstack-cell1-rl6rc" Oct 09 15:46:05 crc kubenswrapper[4762]: I1009 15:46:05.782165 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb-ceilometer-compute-config-data-1\") pod \"telemetry-openstack-openstack-cell1-rl6rc\" (UID: \"846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb\") " pod="openstack/telemetry-openstack-openstack-cell1-rl6rc" Oct 09 15:46:05 crc kubenswrapper[4762]: I1009 15:46:05.782306 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g7kdk\" (UniqueName: \"kubernetes.io/projected/846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb-kube-api-access-g7kdk\") pod \"telemetry-openstack-openstack-cell1-rl6rc\" (UID: \"846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb\") " pod="openstack/telemetry-openstack-openstack-cell1-rl6rc" Oct 09 15:46:05 crc kubenswrapper[4762]: I1009 15:46:05.884191 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb-ceilometer-compute-config-data-0\") pod \"telemetry-openstack-openstack-cell1-rl6rc\" (UID: \"846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb\") " pod="openstack/telemetry-openstack-openstack-cell1-rl6rc" Oct 09 15:46:05 crc kubenswrapper[4762]: I1009 15:46:05.884273 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb-ssh-key\") pod \"telemetry-openstack-openstack-cell1-rl6rc\" (UID: \"846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb\") " pod="openstack/telemetry-openstack-openstack-cell1-rl6rc" Oct 09 15:46:05 crc kubenswrapper[4762]: I1009 15:46:05.884335 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb-ceilometer-compute-config-data-1\") pod \"telemetry-openstack-openstack-cell1-rl6rc\" (UID: \"846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb\") " pod="openstack/telemetry-openstack-openstack-cell1-rl6rc" Oct 09 15:46:05 crc kubenswrapper[4762]: I1009 15:46:05.884374 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g7kdk\" (UniqueName: \"kubernetes.io/projected/846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb-kube-api-access-g7kdk\") pod \"telemetry-openstack-openstack-cell1-rl6rc\" (UID: \"846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb\") " pod="openstack/telemetry-openstack-openstack-cell1-rl6rc" Oct 09 15:46:05 crc kubenswrapper[4762]: I1009 15:46:05.884411 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb-inventory\") pod \"telemetry-openstack-openstack-cell1-rl6rc\" (UID: \"846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb\") " pod="openstack/telemetry-openstack-openstack-cell1-rl6rc" Oct 09 15:46:05 crc kubenswrapper[4762]: I1009 15:46:05.884468 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb-telemetry-combined-ca-bundle\") pod 
\"telemetry-openstack-openstack-cell1-rl6rc\" (UID: \"846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb\") " pod="openstack/telemetry-openstack-openstack-cell1-rl6rc" Oct 09 15:46:05 crc kubenswrapper[4762]: I1009 15:46:05.884492 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb-ceph\") pod \"telemetry-openstack-openstack-cell1-rl6rc\" (UID: \"846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb\") " pod="openstack/telemetry-openstack-openstack-cell1-rl6rc" Oct 09 15:46:05 crc kubenswrapper[4762]: I1009 15:46:05.884523 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb-ceilometer-compute-config-data-2\") pod \"telemetry-openstack-openstack-cell1-rl6rc\" (UID: \"846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb\") " pod="openstack/telemetry-openstack-openstack-cell1-rl6rc" Oct 09 15:46:05 crc kubenswrapper[4762]: I1009 15:46:05.887943 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb-ssh-key\") pod \"telemetry-openstack-openstack-cell1-rl6rc\" (UID: \"846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb\") " pod="openstack/telemetry-openstack-openstack-cell1-rl6rc" Oct 09 15:46:05 crc kubenswrapper[4762]: I1009 15:46:05.888040 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb-ceilometer-compute-config-data-2\") pod \"telemetry-openstack-openstack-cell1-rl6rc\" (UID: \"846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb\") " pod="openstack/telemetry-openstack-openstack-cell1-rl6rc" Oct 09 15:46:05 crc kubenswrapper[4762]: I1009 15:46:05.888572 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb-inventory\") pod \"telemetry-openstack-openstack-cell1-rl6rc\" (UID: \"846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb\") " pod="openstack/telemetry-openstack-openstack-cell1-rl6rc" Oct 09 15:46:05 crc kubenswrapper[4762]: I1009 15:46:05.888719 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb-telemetry-combined-ca-bundle\") pod \"telemetry-openstack-openstack-cell1-rl6rc\" (UID: \"846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb\") " pod="openstack/telemetry-openstack-openstack-cell1-rl6rc" Oct 09 15:46:05 crc kubenswrapper[4762]: I1009 15:46:05.890344 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb-ceph\") pod \"telemetry-openstack-openstack-cell1-rl6rc\" (UID: \"846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb\") " pod="openstack/telemetry-openstack-openstack-cell1-rl6rc" Oct 09 15:46:05 crc kubenswrapper[4762]: I1009 15:46:05.890584 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb-ceilometer-compute-config-data-1\") pod \"telemetry-openstack-openstack-cell1-rl6rc\" (UID: \"846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb\") " pod="openstack/telemetry-openstack-openstack-cell1-rl6rc" Oct 09 15:46:05 crc kubenswrapper[4762]: I1009 15:46:05.893072 4762 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb-ceilometer-compute-config-data-0\") pod \"telemetry-openstack-openstack-cell1-rl6rc\" (UID: \"846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb\") " pod="openstack/telemetry-openstack-openstack-cell1-rl6rc" Oct 09 15:46:05 crc kubenswrapper[4762]: I1009 15:46:05.901467 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g7kdk\" (UniqueName: \"kubernetes.io/projected/846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb-kube-api-access-g7kdk\") pod \"telemetry-openstack-openstack-cell1-rl6rc\" (UID: \"846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb\") " pod="openstack/telemetry-openstack-openstack-cell1-rl6rc" Oct 09 15:46:06 crc kubenswrapper[4762]: I1009 15:46:06.047095 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-openstack-openstack-cell1-rl6rc" Oct 09 15:46:06 crc kubenswrapper[4762]: I1009 15:46:06.609213 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-openstack-openstack-cell1-rl6rc"] Oct 09 15:46:06 crc kubenswrapper[4762]: I1009 15:46:06.617958 4762 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 09 15:46:06 crc kubenswrapper[4762]: I1009 15:46:06.637321 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-openstack-openstack-cell1-rl6rc" event={"ID":"846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb","Type":"ContainerStarted","Data":"c9866b520e13925b14c450caca936d894694ec16438d76d8e53672ed1274e221"} Oct 09 15:46:07 crc kubenswrapper[4762]: I1009 15:46:07.659503 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-openstack-openstack-cell1-rl6rc" event={"ID":"846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb","Type":"ContainerStarted","Data":"423530b5bd83a4d7bab958e96c8b0d6747ebdf8a7f0fb85a9aaddc856320ec10"} Oct 09 15:46:07 crc kubenswrapper[4762]: I1009 15:46:07.680623 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/telemetry-openstack-openstack-cell1-rl6rc" podStartSLOduration=2.057968998 podStartE2EDuration="2.680603802s" podCreationTimestamp="2025-10-09 15:46:05 +0000 UTC" firstStartedPulling="2025-10-09 15:46:06.617711588 +0000 UTC m=+8442.391502627" lastFinishedPulling="2025-10-09 15:46:07.240346392 +0000 UTC m=+8443.014137431" observedRunningTime="2025-10-09 15:46:07.678571818 +0000 UTC m=+8443.452362877" watchObservedRunningTime="2025-10-09 15:46:07.680603802 +0000 UTC m=+8443.454394841" Oct 09 15:46:10 crc kubenswrapper[4762]: I1009 15:46:10.965880 4762 scope.go:117] "RemoveContainer" containerID="207079ce18565c7a249f5d4caa9c694e48f23ecdfd20db7075fdc36cd1ceb1b7" Oct 09 15:46:10 crc kubenswrapper[4762]: E1009 15:46:10.966739 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:46:23 crc kubenswrapper[4762]: I1009 15:46:23.965736 4762 scope.go:117] "RemoveContainer" containerID="207079ce18565c7a249f5d4caa9c694e48f23ecdfd20db7075fdc36cd1ceb1b7" Oct 09 15:46:23 crc kubenswrapper[4762]: E1009 15:46:23.966389 4762 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:46:38 crc kubenswrapper[4762]: I1009 15:46:38.965183 4762 scope.go:117] "RemoveContainer" containerID="207079ce18565c7a249f5d4caa9c694e48f23ecdfd20db7075fdc36cd1ceb1b7" Oct 09 15:46:38 crc kubenswrapper[4762]: E1009 15:46:38.966086 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:46:52 crc kubenswrapper[4762]: I1009 15:46:52.966425 4762 scope.go:117] "RemoveContainer" containerID="207079ce18565c7a249f5d4caa9c694e48f23ecdfd20db7075fdc36cd1ceb1b7" Oct 09 15:46:54 crc kubenswrapper[4762]: I1009 15:46:54.110683 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" event={"ID":"366049a3-acf6-488c-9f93-4557528d6d14","Type":"ContainerStarted","Data":"608b8e79f6b83e7f1922401fcc1b3497cb9d1b136c003d543816f40098ca2be2"} Oct 09 15:49:11 crc kubenswrapper[4762]: I1009 15:49:11.970001 4762 patch_prober.go:28] interesting pod/machine-config-daemon-5v6hv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 15:49:11 crc kubenswrapper[4762]: I1009 15:49:11.970613 4762 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 15:49:41 crc kubenswrapper[4762]: I1009 15:49:41.969872 4762 patch_prober.go:28] interesting pod/machine-config-daemon-5v6hv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 15:49:41 crc kubenswrapper[4762]: I1009 15:49:41.970686 4762 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 15:50:11 crc kubenswrapper[4762]: I1009 15:50:11.969503 4762 patch_prober.go:28] interesting pod/machine-config-daemon-5v6hv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 15:50:11 crc kubenswrapper[4762]: I1009 15:50:11.970165 4762 prober.go:107] "Probe 
failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 15:50:11 crc kubenswrapper[4762]: I1009 15:50:11.970215 4762 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" Oct 09 15:50:11 crc kubenswrapper[4762]: I1009 15:50:11.970940 4762 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"608b8e79f6b83e7f1922401fcc1b3497cb9d1b136c003d543816f40098ca2be2"} pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 09 15:50:11 crc kubenswrapper[4762]: I1009 15:50:11.971006 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" containerID="cri-o://608b8e79f6b83e7f1922401fcc1b3497cb9d1b136c003d543816f40098ca2be2" gracePeriod=600 Oct 09 15:50:13 crc kubenswrapper[4762]: I1009 15:50:13.057015 4762 generic.go:334] "Generic (PLEG): container finished" podID="366049a3-acf6-488c-9f93-4557528d6d14" containerID="608b8e79f6b83e7f1922401fcc1b3497cb9d1b136c003d543816f40098ca2be2" exitCode=0 Oct 09 15:50:13 crc kubenswrapper[4762]: I1009 15:50:13.057076 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" event={"ID":"366049a3-acf6-488c-9f93-4557528d6d14","Type":"ContainerDied","Data":"608b8e79f6b83e7f1922401fcc1b3497cb9d1b136c003d543816f40098ca2be2"} Oct 09 15:50:13 crc kubenswrapper[4762]: I1009 15:50:13.057577 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" event={"ID":"366049a3-acf6-488c-9f93-4557528d6d14","Type":"ContainerStarted","Data":"915bf3aac40e436d24352cca144821f66a07852e961cfcafeb700aef3cb1b274"} Oct 09 15:50:13 crc kubenswrapper[4762]: I1009 15:50:13.057599 4762 scope.go:117] "RemoveContainer" containerID="207079ce18565c7a249f5d4caa9c694e48f23ecdfd20db7075fdc36cd1ceb1b7" Oct 09 15:50:22 crc kubenswrapper[4762]: I1009 15:50:22.139000 4762 generic.go:334] "Generic (PLEG): container finished" podID="846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb" containerID="423530b5bd83a4d7bab958e96c8b0d6747ebdf8a7f0fb85a9aaddc856320ec10" exitCode=0 Oct 09 15:50:22 crc kubenswrapper[4762]: I1009 15:50:22.139089 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-openstack-openstack-cell1-rl6rc" event={"ID":"846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb","Type":"ContainerDied","Data":"423530b5bd83a4d7bab958e96c8b0d6747ebdf8a7f0fb85a9aaddc856320ec10"} Oct 09 15:50:23 crc kubenswrapper[4762]: I1009 15:50:23.584474 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-openstack-openstack-cell1-rl6rc" Oct 09 15:50:23 crc kubenswrapper[4762]: I1009 15:50:23.609829 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb-ssh-key\") pod \"846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb\" (UID: \"846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb\") " Oct 09 15:50:23 crc kubenswrapper[4762]: I1009 15:50:23.609882 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb-ceilometer-compute-config-data-1\") pod \"846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb\" (UID: \"846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb\") " Oct 09 15:50:23 crc kubenswrapper[4762]: I1009 15:50:23.609974 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb-telemetry-combined-ca-bundle\") pod \"846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb\" (UID: \"846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb\") " Oct 09 15:50:23 crc kubenswrapper[4762]: I1009 15:50:23.610085 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb-inventory\") pod \"846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb\" (UID: \"846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb\") " Oct 09 15:50:23 crc kubenswrapper[4762]: I1009 15:50:23.611832 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb-ceilometer-compute-config-data-2\") pod \"846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb\" (UID: \"846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb\") " Oct 09 15:50:23 crc kubenswrapper[4762]: I1009 15:50:23.612262 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb-ceph\") pod \"846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb\" (UID: \"846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb\") " Oct 09 15:50:23 crc kubenswrapper[4762]: I1009 15:50:23.612324 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g7kdk\" (UniqueName: \"kubernetes.io/projected/846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb-kube-api-access-g7kdk\") pod \"846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb\" (UID: \"846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb\") " Oct 09 15:50:23 crc kubenswrapper[4762]: I1009 15:50:23.612352 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb-ceilometer-compute-config-data-0\") pod \"846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb\" (UID: \"846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb\") " Oct 09 15:50:23 crc kubenswrapper[4762]: I1009 15:50:23.616895 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb" (UID: "846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb"). InnerVolumeSpecName "telemetry-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:50:23 crc kubenswrapper[4762]: I1009 15:50:23.617905 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb-kube-api-access-g7kdk" (OuterVolumeSpecName: "kube-api-access-g7kdk") pod "846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb" (UID: "846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb"). InnerVolumeSpecName "kube-api-access-g7kdk". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 15:50:23 crc kubenswrapper[4762]: I1009 15:50:23.618782 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb-ceph" (OuterVolumeSpecName: "ceph") pod "846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb" (UID: "846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:50:23 crc kubenswrapper[4762]: I1009 15:50:23.647559 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb-ceilometer-compute-config-data-1" (OuterVolumeSpecName: "ceilometer-compute-config-data-1") pod "846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb" (UID: "846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb"). InnerVolumeSpecName "ceilometer-compute-config-data-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:50:23 crc kubenswrapper[4762]: I1009 15:50:23.647916 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb-inventory" (OuterVolumeSpecName: "inventory") pod "846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb" (UID: "846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:50:23 crc kubenswrapper[4762]: I1009 15:50:23.650992 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb-ceilometer-compute-config-data-0" (OuterVolumeSpecName: "ceilometer-compute-config-data-0") pod "846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb" (UID: "846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb"). InnerVolumeSpecName "ceilometer-compute-config-data-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:50:23 crc kubenswrapper[4762]: I1009 15:50:23.651178 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb-ceilometer-compute-config-data-2" (OuterVolumeSpecName: "ceilometer-compute-config-data-2") pod "846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb" (UID: "846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb"). InnerVolumeSpecName "ceilometer-compute-config-data-2". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:50:23 crc kubenswrapper[4762]: I1009 15:50:23.661669 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb" (UID: "846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:50:23 crc kubenswrapper[4762]: I1009 15:50:23.715372 4762 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb-inventory\") on node \"crc\" DevicePath \"\"" Oct 09 15:50:23 crc kubenswrapper[4762]: I1009 15:50:23.715423 4762 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb-ceilometer-compute-config-data-2\") on node \"crc\" DevicePath \"\"" Oct 09 15:50:23 crc kubenswrapper[4762]: I1009 15:50:23.715438 4762 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb-ceph\") on node \"crc\" DevicePath \"\"" Oct 09 15:50:23 crc kubenswrapper[4762]: I1009 15:50:23.715452 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g7kdk\" (UniqueName: \"kubernetes.io/projected/846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb-kube-api-access-g7kdk\") on node \"crc\" DevicePath \"\"" Oct 09 15:50:23 crc kubenswrapper[4762]: I1009 15:50:23.715481 4762 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb-ceilometer-compute-config-data-0\") on node \"crc\" DevicePath \"\"" Oct 09 15:50:23 crc kubenswrapper[4762]: I1009 15:50:23.715493 4762 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 09 15:50:23 crc kubenswrapper[4762]: I1009 15:50:23.715504 4762 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb-ceilometer-compute-config-data-1\") on node \"crc\" DevicePath \"\"" Oct 09 15:50:23 crc kubenswrapper[4762]: I1009 15:50:23.715515 4762 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 15:50:24 crc kubenswrapper[4762]: I1009 15:50:24.171069 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-openstack-openstack-cell1-rl6rc" event={"ID":"846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb","Type":"ContainerDied","Data":"c9866b520e13925b14c450caca936d894694ec16438d76d8e53672ed1274e221"} Oct 09 15:50:24 crc kubenswrapper[4762]: I1009 15:50:24.171126 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-openstack-openstack-cell1-rl6rc" Oct 09 15:50:24 crc kubenswrapper[4762]: I1009 15:50:24.174611 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c9866b520e13925b14c450caca936d894694ec16438d76d8e53672ed1274e221" Oct 09 15:50:24 crc kubenswrapper[4762]: I1009 15:50:24.269393 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-sriov-openstack-openstack-cell1-7knjf"] Oct 09 15:50:24 crc kubenswrapper[4762]: E1009 15:50:24.269920 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb" containerName="telemetry-openstack-openstack-cell1" Oct 09 15:50:24 crc kubenswrapper[4762]: I1009 15:50:24.269941 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb" containerName="telemetry-openstack-openstack-cell1" Oct 09 15:50:24 crc kubenswrapper[4762]: I1009 15:50:24.270172 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb" containerName="telemetry-openstack-openstack-cell1" Oct 09 15:50:24 crc kubenswrapper[4762]: I1009 15:50:24.271038 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-sriov-openstack-openstack-cell1-7knjf" Oct 09 15:50:24 crc kubenswrapper[4762]: I1009 15:50:24.273251 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-sriov-agent-neutron-config" Oct 09 15:50:24 crc kubenswrapper[4762]: I1009 15:50:24.280003 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Oct 09 15:50:24 crc kubenswrapper[4762]: I1009 15:50:24.280299 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-whcgt" Oct 09 15:50:24 crc kubenswrapper[4762]: I1009 15:50:24.280477 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Oct 09 15:50:24 crc kubenswrapper[4762]: I1009 15:50:24.280602 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 09 15:50:24 crc kubenswrapper[4762]: I1009 15:50:24.303703 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-sriov-openstack-openstack-cell1-7knjf"] Oct 09 15:50:24 crc kubenswrapper[4762]: I1009 15:50:24.327109 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sgs5m\" (UniqueName: \"kubernetes.io/projected/55195197-ee16-4c06-aaf3-992e9fbba8c6-kube-api-access-sgs5m\") pod \"neutron-sriov-openstack-openstack-cell1-7knjf\" (UID: \"55195197-ee16-4c06-aaf3-992e9fbba8c6\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-7knjf" Oct 09 15:50:24 crc kubenswrapper[4762]: I1009 15:50:24.327177 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-sriov-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/55195197-ee16-4c06-aaf3-992e9fbba8c6-neutron-sriov-agent-neutron-config-0\") pod \"neutron-sriov-openstack-openstack-cell1-7knjf\" (UID: \"55195197-ee16-4c06-aaf3-992e9fbba8c6\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-7knjf" Oct 09 15:50:24 crc kubenswrapper[4762]: I1009 15:50:24.327227 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: 
\"kubernetes.io/secret/55195197-ee16-4c06-aaf3-992e9fbba8c6-ssh-key\") pod \"neutron-sriov-openstack-openstack-cell1-7knjf\" (UID: \"55195197-ee16-4c06-aaf3-992e9fbba8c6\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-7knjf" Oct 09 15:50:24 crc kubenswrapper[4762]: I1009 15:50:24.327270 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55195197-ee16-4c06-aaf3-992e9fbba8c6-neutron-sriov-combined-ca-bundle\") pod \"neutron-sriov-openstack-openstack-cell1-7knjf\" (UID: \"55195197-ee16-4c06-aaf3-992e9fbba8c6\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-7knjf" Oct 09 15:50:24 crc kubenswrapper[4762]: I1009 15:50:24.327337 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/55195197-ee16-4c06-aaf3-992e9fbba8c6-ceph\") pod \"neutron-sriov-openstack-openstack-cell1-7knjf\" (UID: \"55195197-ee16-4c06-aaf3-992e9fbba8c6\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-7knjf" Oct 09 15:50:24 crc kubenswrapper[4762]: I1009 15:50:24.327407 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/55195197-ee16-4c06-aaf3-992e9fbba8c6-inventory\") pod \"neutron-sriov-openstack-openstack-cell1-7knjf\" (UID: \"55195197-ee16-4c06-aaf3-992e9fbba8c6\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-7knjf" Oct 09 15:50:24 crc kubenswrapper[4762]: I1009 15:50:24.429345 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/55195197-ee16-4c06-aaf3-992e9fbba8c6-inventory\") pod \"neutron-sriov-openstack-openstack-cell1-7knjf\" (UID: \"55195197-ee16-4c06-aaf3-992e9fbba8c6\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-7knjf" Oct 09 15:50:24 crc kubenswrapper[4762]: I1009 15:50:24.429765 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sgs5m\" (UniqueName: \"kubernetes.io/projected/55195197-ee16-4c06-aaf3-992e9fbba8c6-kube-api-access-sgs5m\") pod \"neutron-sriov-openstack-openstack-cell1-7knjf\" (UID: \"55195197-ee16-4c06-aaf3-992e9fbba8c6\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-7knjf" Oct 09 15:50:24 crc kubenswrapper[4762]: I1009 15:50:24.429806 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-sriov-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/55195197-ee16-4c06-aaf3-992e9fbba8c6-neutron-sriov-agent-neutron-config-0\") pod \"neutron-sriov-openstack-openstack-cell1-7knjf\" (UID: \"55195197-ee16-4c06-aaf3-992e9fbba8c6\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-7knjf" Oct 09 15:50:24 crc kubenswrapper[4762]: I1009 15:50:24.430628 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/55195197-ee16-4c06-aaf3-992e9fbba8c6-ssh-key\") pod \"neutron-sriov-openstack-openstack-cell1-7knjf\" (UID: \"55195197-ee16-4c06-aaf3-992e9fbba8c6\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-7knjf" Oct 09 15:50:24 crc kubenswrapper[4762]: I1009 15:50:24.430729 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/55195197-ee16-4c06-aaf3-992e9fbba8c6-neutron-sriov-combined-ca-bundle\") pod \"neutron-sriov-openstack-openstack-cell1-7knjf\" (UID: \"55195197-ee16-4c06-aaf3-992e9fbba8c6\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-7knjf" Oct 09 15:50:24 crc kubenswrapper[4762]: I1009 15:50:24.430858 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/55195197-ee16-4c06-aaf3-992e9fbba8c6-ceph\") pod \"neutron-sriov-openstack-openstack-cell1-7knjf\" (UID: \"55195197-ee16-4c06-aaf3-992e9fbba8c6\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-7knjf" Oct 09 15:50:24 crc kubenswrapper[4762]: I1009 15:50:24.435238 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-sriov-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/55195197-ee16-4c06-aaf3-992e9fbba8c6-neutron-sriov-agent-neutron-config-0\") pod \"neutron-sriov-openstack-openstack-cell1-7knjf\" (UID: \"55195197-ee16-4c06-aaf3-992e9fbba8c6\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-7knjf" Oct 09 15:50:24 crc kubenswrapper[4762]: I1009 15:50:24.436855 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/55195197-ee16-4c06-aaf3-992e9fbba8c6-ceph\") pod \"neutron-sriov-openstack-openstack-cell1-7knjf\" (UID: \"55195197-ee16-4c06-aaf3-992e9fbba8c6\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-7knjf" Oct 09 15:50:24 crc kubenswrapper[4762]: I1009 15:50:24.439936 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/55195197-ee16-4c06-aaf3-992e9fbba8c6-inventory\") pod \"neutron-sriov-openstack-openstack-cell1-7knjf\" (UID: \"55195197-ee16-4c06-aaf3-992e9fbba8c6\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-7knjf" Oct 09 15:50:24 crc kubenswrapper[4762]: I1009 15:50:24.444042 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/55195197-ee16-4c06-aaf3-992e9fbba8c6-ssh-key\") pod \"neutron-sriov-openstack-openstack-cell1-7knjf\" (UID: \"55195197-ee16-4c06-aaf3-992e9fbba8c6\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-7knjf" Oct 09 15:50:24 crc kubenswrapper[4762]: I1009 15:50:24.445693 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55195197-ee16-4c06-aaf3-992e9fbba8c6-neutron-sriov-combined-ca-bundle\") pod \"neutron-sriov-openstack-openstack-cell1-7knjf\" (UID: \"55195197-ee16-4c06-aaf3-992e9fbba8c6\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-7knjf" Oct 09 15:50:24 crc kubenswrapper[4762]: I1009 15:50:24.446253 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sgs5m\" (UniqueName: \"kubernetes.io/projected/55195197-ee16-4c06-aaf3-992e9fbba8c6-kube-api-access-sgs5m\") pod \"neutron-sriov-openstack-openstack-cell1-7knjf\" (UID: \"55195197-ee16-4c06-aaf3-992e9fbba8c6\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-7knjf" Oct 09 15:50:24 crc kubenswrapper[4762]: I1009 15:50:24.594682 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-sriov-openstack-openstack-cell1-7knjf" Oct 09 15:50:25 crc kubenswrapper[4762]: I1009 15:50:25.163779 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-sriov-openstack-openstack-cell1-7knjf"] Oct 09 15:50:26 crc kubenswrapper[4762]: I1009 15:50:26.155148 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 09 15:50:26 crc kubenswrapper[4762]: I1009 15:50:26.193050 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-sriov-openstack-openstack-cell1-7knjf" event={"ID":"55195197-ee16-4c06-aaf3-992e9fbba8c6","Type":"ContainerStarted","Data":"480335e9bd0e3d55ca70126dc372bfbc4ee3dd50d45c6de86a4dfd4673c23b81"} Oct 09 15:50:27 crc kubenswrapper[4762]: I1009 15:50:27.204611 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-sriov-openstack-openstack-cell1-7knjf" event={"ID":"55195197-ee16-4c06-aaf3-992e9fbba8c6","Type":"ContainerStarted","Data":"307f934f4cb7bd1aab5f5bb5ee5ae2ede1fc404aa64b5db2c5eafb69d9e7380d"} Oct 09 15:50:27 crc kubenswrapper[4762]: I1009 15:50:27.229342 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-sriov-openstack-openstack-cell1-7knjf" podStartSLOduration=2.688767067 podStartE2EDuration="3.229317337s" podCreationTimestamp="2025-10-09 15:50:24 +0000 UTC" firstStartedPulling="2025-10-09 15:50:25.609596736 +0000 UTC m=+8701.383387775" lastFinishedPulling="2025-10-09 15:50:26.150147016 +0000 UTC m=+8701.923938045" observedRunningTime="2025-10-09 15:50:27.221372509 +0000 UTC m=+8702.995163548" watchObservedRunningTime="2025-10-09 15:50:27.229317337 +0000 UTC m=+8703.003108376" Oct 09 15:51:48 crc kubenswrapper[4762]: I1009 15:51:48.473362 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-m6stv"] Oct 09 15:51:48 crc kubenswrapper[4762]: I1009 15:51:48.476378 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-m6stv" Oct 09 15:51:48 crc kubenswrapper[4762]: I1009 15:51:48.486836 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-m6stv"] Oct 09 15:51:48 crc kubenswrapper[4762]: I1009 15:51:48.501913 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mggrk\" (UniqueName: \"kubernetes.io/projected/b731399a-4214-48a6-ac48-9d9da54e0281-kube-api-access-mggrk\") pod \"community-operators-m6stv\" (UID: \"b731399a-4214-48a6-ac48-9d9da54e0281\") " pod="openshift-marketplace/community-operators-m6stv" Oct 09 15:51:48 crc kubenswrapper[4762]: I1009 15:51:48.502008 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b731399a-4214-48a6-ac48-9d9da54e0281-catalog-content\") pod \"community-operators-m6stv\" (UID: \"b731399a-4214-48a6-ac48-9d9da54e0281\") " pod="openshift-marketplace/community-operators-m6stv" Oct 09 15:51:48 crc kubenswrapper[4762]: I1009 15:51:48.502122 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b731399a-4214-48a6-ac48-9d9da54e0281-utilities\") pod \"community-operators-m6stv\" (UID: \"b731399a-4214-48a6-ac48-9d9da54e0281\") " pod="openshift-marketplace/community-operators-m6stv" Oct 09 15:51:48 crc kubenswrapper[4762]: I1009 15:51:48.604043 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mggrk\" (UniqueName: \"kubernetes.io/projected/b731399a-4214-48a6-ac48-9d9da54e0281-kube-api-access-mggrk\") pod \"community-operators-m6stv\" (UID: \"b731399a-4214-48a6-ac48-9d9da54e0281\") " pod="openshift-marketplace/community-operators-m6stv" Oct 09 15:51:48 crc kubenswrapper[4762]: I1009 15:51:48.604105 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b731399a-4214-48a6-ac48-9d9da54e0281-catalog-content\") pod \"community-operators-m6stv\" (UID: \"b731399a-4214-48a6-ac48-9d9da54e0281\") " pod="openshift-marketplace/community-operators-m6stv" Oct 09 15:51:48 crc kubenswrapper[4762]: I1009 15:51:48.604178 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b731399a-4214-48a6-ac48-9d9da54e0281-utilities\") pod \"community-operators-m6stv\" (UID: \"b731399a-4214-48a6-ac48-9d9da54e0281\") " pod="openshift-marketplace/community-operators-m6stv" Oct 09 15:51:48 crc kubenswrapper[4762]: I1009 15:51:48.604879 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b731399a-4214-48a6-ac48-9d9da54e0281-utilities\") pod \"community-operators-m6stv\" (UID: \"b731399a-4214-48a6-ac48-9d9da54e0281\") " pod="openshift-marketplace/community-operators-m6stv" Oct 09 15:51:48 crc kubenswrapper[4762]: I1009 15:51:48.604877 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b731399a-4214-48a6-ac48-9d9da54e0281-catalog-content\") pod \"community-operators-m6stv\" (UID: \"b731399a-4214-48a6-ac48-9d9da54e0281\") " pod="openshift-marketplace/community-operators-m6stv" Oct 09 15:51:48 crc kubenswrapper[4762]: I1009 15:51:48.628967 4762 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-mggrk\" (UniqueName: \"kubernetes.io/projected/b731399a-4214-48a6-ac48-9d9da54e0281-kube-api-access-mggrk\") pod \"community-operators-m6stv\" (UID: \"b731399a-4214-48a6-ac48-9d9da54e0281\") " pod="openshift-marketplace/community-operators-m6stv" Oct 09 15:51:48 crc kubenswrapper[4762]: I1009 15:51:48.818337 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-m6stv" Oct 09 15:51:49 crc kubenswrapper[4762]: I1009 15:51:49.389893 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-m6stv"] Oct 09 15:51:50 crc kubenswrapper[4762]: I1009 15:51:50.057895 4762 generic.go:334] "Generic (PLEG): container finished" podID="b731399a-4214-48a6-ac48-9d9da54e0281" containerID="b473dfd5fb6a5a2c061d3713fff46fde18fe2a239e713b74096e7568f043f489" exitCode=0 Oct 09 15:51:50 crc kubenswrapper[4762]: I1009 15:51:50.057976 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-m6stv" event={"ID":"b731399a-4214-48a6-ac48-9d9da54e0281","Type":"ContainerDied","Data":"b473dfd5fb6a5a2c061d3713fff46fde18fe2a239e713b74096e7568f043f489"} Oct 09 15:51:50 crc kubenswrapper[4762]: I1009 15:51:50.058643 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-m6stv" event={"ID":"b731399a-4214-48a6-ac48-9d9da54e0281","Type":"ContainerStarted","Data":"bdbcb6b4cbe728fabd6ad6dbdc498dc684bb9550e3e3ef65e0171616c362ed4b"} Oct 09 15:51:50 crc kubenswrapper[4762]: I1009 15:51:50.060953 4762 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 09 15:51:52 crc kubenswrapper[4762]: I1009 15:51:52.100085 4762 generic.go:334] "Generic (PLEG): container finished" podID="b731399a-4214-48a6-ac48-9d9da54e0281" containerID="ae274842a685a1a1690bb914a026fd8395fccde8b09681e629f3aaa8648334c5" exitCode=0 Oct 09 15:51:52 crc kubenswrapper[4762]: I1009 15:51:52.100250 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-m6stv" event={"ID":"b731399a-4214-48a6-ac48-9d9da54e0281","Type":"ContainerDied","Data":"ae274842a685a1a1690bb914a026fd8395fccde8b09681e629f3aaa8648334c5"} Oct 09 15:51:53 crc kubenswrapper[4762]: I1009 15:51:53.111743 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-m6stv" event={"ID":"b731399a-4214-48a6-ac48-9d9da54e0281","Type":"ContainerStarted","Data":"955e5647cf8fb5792ca879f89c0ffa3a4cde3588c07beeecadc980c1b7388941"} Oct 09 15:51:53 crc kubenswrapper[4762]: I1009 15:51:53.137861 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-m6stv" podStartSLOduration=2.559645061 podStartE2EDuration="5.137840987s" podCreationTimestamp="2025-10-09 15:51:48 +0000 UTC" firstStartedPulling="2025-10-09 15:51:50.060696841 +0000 UTC m=+8785.834487880" lastFinishedPulling="2025-10-09 15:51:52.638892767 +0000 UTC m=+8788.412683806" observedRunningTime="2025-10-09 15:51:53.126785296 +0000 UTC m=+8788.900576355" watchObservedRunningTime="2025-10-09 15:51:53.137840987 +0000 UTC m=+8788.911632026" Oct 09 15:51:58 crc kubenswrapper[4762]: I1009 15:51:58.818559 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-m6stv" Oct 09 15:51:58 crc kubenswrapper[4762]: I1009 15:51:58.818993 4762 kubelet.go:2542] "SyncLoop 
(probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-m6stv" Oct 09 15:51:58 crc kubenswrapper[4762]: I1009 15:51:58.867119 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-m6stv" Oct 09 15:51:59 crc kubenswrapper[4762]: I1009 15:51:59.216243 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-m6stv" Oct 09 15:51:59 crc kubenswrapper[4762]: I1009 15:51:59.263099 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-m6stv"] Oct 09 15:52:01 crc kubenswrapper[4762]: I1009 15:52:01.185854 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-m6stv" podUID="b731399a-4214-48a6-ac48-9d9da54e0281" containerName="registry-server" containerID="cri-o://955e5647cf8fb5792ca879f89c0ffa3a4cde3588c07beeecadc980c1b7388941" gracePeriod=2 Oct 09 15:52:01 crc kubenswrapper[4762]: I1009 15:52:01.668392 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-m6stv" Oct 09 15:52:01 crc kubenswrapper[4762]: I1009 15:52:01.727459 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b731399a-4214-48a6-ac48-9d9da54e0281-catalog-content\") pod \"b731399a-4214-48a6-ac48-9d9da54e0281\" (UID: \"b731399a-4214-48a6-ac48-9d9da54e0281\") " Oct 09 15:52:01 crc kubenswrapper[4762]: I1009 15:52:01.727676 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mggrk\" (UniqueName: \"kubernetes.io/projected/b731399a-4214-48a6-ac48-9d9da54e0281-kube-api-access-mggrk\") pod \"b731399a-4214-48a6-ac48-9d9da54e0281\" (UID: \"b731399a-4214-48a6-ac48-9d9da54e0281\") " Oct 09 15:52:01 crc kubenswrapper[4762]: I1009 15:52:01.727781 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b731399a-4214-48a6-ac48-9d9da54e0281-utilities\") pod \"b731399a-4214-48a6-ac48-9d9da54e0281\" (UID: \"b731399a-4214-48a6-ac48-9d9da54e0281\") " Oct 09 15:52:01 crc kubenswrapper[4762]: I1009 15:52:01.729136 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b731399a-4214-48a6-ac48-9d9da54e0281-utilities" (OuterVolumeSpecName: "utilities") pod "b731399a-4214-48a6-ac48-9d9da54e0281" (UID: "b731399a-4214-48a6-ac48-9d9da54e0281"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 15:52:01 crc kubenswrapper[4762]: I1009 15:52:01.733883 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b731399a-4214-48a6-ac48-9d9da54e0281-kube-api-access-mggrk" (OuterVolumeSpecName: "kube-api-access-mggrk") pod "b731399a-4214-48a6-ac48-9d9da54e0281" (UID: "b731399a-4214-48a6-ac48-9d9da54e0281"). InnerVolumeSpecName "kube-api-access-mggrk". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 15:52:01 crc kubenswrapper[4762]: I1009 15:52:01.788280 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b731399a-4214-48a6-ac48-9d9da54e0281-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b731399a-4214-48a6-ac48-9d9da54e0281" (UID: "b731399a-4214-48a6-ac48-9d9da54e0281"). 
InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 15:52:01 crc kubenswrapper[4762]: I1009 15:52:01.830289 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mggrk\" (UniqueName: \"kubernetes.io/projected/b731399a-4214-48a6-ac48-9d9da54e0281-kube-api-access-mggrk\") on node \"crc\" DevicePath \"\"" Oct 09 15:52:01 crc kubenswrapper[4762]: I1009 15:52:01.830487 4762 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b731399a-4214-48a6-ac48-9d9da54e0281-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 15:52:01 crc kubenswrapper[4762]: I1009 15:52:01.830502 4762 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b731399a-4214-48a6-ac48-9d9da54e0281-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 15:52:02 crc kubenswrapper[4762]: I1009 15:52:02.200270 4762 generic.go:334] "Generic (PLEG): container finished" podID="b731399a-4214-48a6-ac48-9d9da54e0281" containerID="955e5647cf8fb5792ca879f89c0ffa3a4cde3588c07beeecadc980c1b7388941" exitCode=0 Oct 09 15:52:02 crc kubenswrapper[4762]: I1009 15:52:02.200533 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-m6stv" Oct 09 15:52:02 crc kubenswrapper[4762]: I1009 15:52:02.200566 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-m6stv" event={"ID":"b731399a-4214-48a6-ac48-9d9da54e0281","Type":"ContainerDied","Data":"955e5647cf8fb5792ca879f89c0ffa3a4cde3588c07beeecadc980c1b7388941"} Oct 09 15:52:02 crc kubenswrapper[4762]: I1009 15:52:02.203710 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-m6stv" event={"ID":"b731399a-4214-48a6-ac48-9d9da54e0281","Type":"ContainerDied","Data":"bdbcb6b4cbe728fabd6ad6dbdc498dc684bb9550e3e3ef65e0171616c362ed4b"} Oct 09 15:52:02 crc kubenswrapper[4762]: I1009 15:52:02.203748 4762 scope.go:117] "RemoveContainer" containerID="955e5647cf8fb5792ca879f89c0ffa3a4cde3588c07beeecadc980c1b7388941" Oct 09 15:52:02 crc kubenswrapper[4762]: I1009 15:52:02.238618 4762 scope.go:117] "RemoveContainer" containerID="ae274842a685a1a1690bb914a026fd8395fccde8b09681e629f3aaa8648334c5" Oct 09 15:52:02 crc kubenswrapper[4762]: I1009 15:52:02.247350 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-m6stv"] Oct 09 15:52:02 crc kubenswrapper[4762]: I1009 15:52:02.257739 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-m6stv"] Oct 09 15:52:02 crc kubenswrapper[4762]: I1009 15:52:02.274510 4762 scope.go:117] "RemoveContainer" containerID="b473dfd5fb6a5a2c061d3713fff46fde18fe2a239e713b74096e7568f043f489" Oct 09 15:52:02 crc kubenswrapper[4762]: I1009 15:52:02.324370 4762 scope.go:117] "RemoveContainer" containerID="955e5647cf8fb5792ca879f89c0ffa3a4cde3588c07beeecadc980c1b7388941" Oct 09 15:52:02 crc kubenswrapper[4762]: E1009 15:52:02.324939 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"955e5647cf8fb5792ca879f89c0ffa3a4cde3588c07beeecadc980c1b7388941\": container with ID starting with 955e5647cf8fb5792ca879f89c0ffa3a4cde3588c07beeecadc980c1b7388941 not found: ID does not exist" containerID="955e5647cf8fb5792ca879f89c0ffa3a4cde3588c07beeecadc980c1b7388941" Oct 09 15:52:02 crc 
kubenswrapper[4762]: I1009 15:52:02.324970 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"955e5647cf8fb5792ca879f89c0ffa3a4cde3588c07beeecadc980c1b7388941"} err="failed to get container status \"955e5647cf8fb5792ca879f89c0ffa3a4cde3588c07beeecadc980c1b7388941\": rpc error: code = NotFound desc = could not find container \"955e5647cf8fb5792ca879f89c0ffa3a4cde3588c07beeecadc980c1b7388941\": container with ID starting with 955e5647cf8fb5792ca879f89c0ffa3a4cde3588c07beeecadc980c1b7388941 not found: ID does not exist" Oct 09 15:52:02 crc kubenswrapper[4762]: I1009 15:52:02.324992 4762 scope.go:117] "RemoveContainer" containerID="ae274842a685a1a1690bb914a026fd8395fccde8b09681e629f3aaa8648334c5" Oct 09 15:52:02 crc kubenswrapper[4762]: E1009 15:52:02.325630 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ae274842a685a1a1690bb914a026fd8395fccde8b09681e629f3aaa8648334c5\": container with ID starting with ae274842a685a1a1690bb914a026fd8395fccde8b09681e629f3aaa8648334c5 not found: ID does not exist" containerID="ae274842a685a1a1690bb914a026fd8395fccde8b09681e629f3aaa8648334c5" Oct 09 15:52:02 crc kubenswrapper[4762]: I1009 15:52:02.325670 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ae274842a685a1a1690bb914a026fd8395fccde8b09681e629f3aaa8648334c5"} err="failed to get container status \"ae274842a685a1a1690bb914a026fd8395fccde8b09681e629f3aaa8648334c5\": rpc error: code = NotFound desc = could not find container \"ae274842a685a1a1690bb914a026fd8395fccde8b09681e629f3aaa8648334c5\": container with ID starting with ae274842a685a1a1690bb914a026fd8395fccde8b09681e629f3aaa8648334c5 not found: ID does not exist" Oct 09 15:52:02 crc kubenswrapper[4762]: I1009 15:52:02.325687 4762 scope.go:117] "RemoveContainer" containerID="b473dfd5fb6a5a2c061d3713fff46fde18fe2a239e713b74096e7568f043f489" Oct 09 15:52:02 crc kubenswrapper[4762]: E1009 15:52:02.326103 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b473dfd5fb6a5a2c061d3713fff46fde18fe2a239e713b74096e7568f043f489\": container with ID starting with b473dfd5fb6a5a2c061d3713fff46fde18fe2a239e713b74096e7568f043f489 not found: ID does not exist" containerID="b473dfd5fb6a5a2c061d3713fff46fde18fe2a239e713b74096e7568f043f489" Oct 09 15:52:02 crc kubenswrapper[4762]: I1009 15:52:02.326158 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b473dfd5fb6a5a2c061d3713fff46fde18fe2a239e713b74096e7568f043f489"} err="failed to get container status \"b473dfd5fb6a5a2c061d3713fff46fde18fe2a239e713b74096e7568f043f489\": rpc error: code = NotFound desc = could not find container \"b473dfd5fb6a5a2c061d3713fff46fde18fe2a239e713b74096e7568f043f489\": container with ID starting with b473dfd5fb6a5a2c061d3713fff46fde18fe2a239e713b74096e7568f043f489 not found: ID does not exist" Oct 09 15:52:02 crc kubenswrapper[4762]: I1009 15:52:02.978283 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b731399a-4214-48a6-ac48-9d9da54e0281" path="/var/lib/kubelet/pods/b731399a-4214-48a6-ac48-9d9da54e0281/volumes" Oct 09 15:52:20 crc kubenswrapper[4762]: I1009 15:52:20.398684 4762 generic.go:334] "Generic (PLEG): container finished" podID="55195197-ee16-4c06-aaf3-992e9fbba8c6" containerID="307f934f4cb7bd1aab5f5bb5ee5ae2ede1fc404aa64b5db2c5eafb69d9e7380d" exitCode=0 
Oct 09 15:52:20 crc kubenswrapper[4762]: I1009 15:52:20.398765 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-sriov-openstack-openstack-cell1-7knjf" event={"ID":"55195197-ee16-4c06-aaf3-992e9fbba8c6","Type":"ContainerDied","Data":"307f934f4cb7bd1aab5f5bb5ee5ae2ede1fc404aa64b5db2c5eafb69d9e7380d"} Oct 09 15:52:21 crc kubenswrapper[4762]: I1009 15:52:21.889182 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-sriov-openstack-openstack-cell1-7knjf" Oct 09 15:52:22 crc kubenswrapper[4762]: I1009 15:52:22.013593 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55195197-ee16-4c06-aaf3-992e9fbba8c6-neutron-sriov-combined-ca-bundle\") pod \"55195197-ee16-4c06-aaf3-992e9fbba8c6\" (UID: \"55195197-ee16-4c06-aaf3-992e9fbba8c6\") " Oct 09 15:52:22 crc kubenswrapper[4762]: I1009 15:52:22.013949 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-sriov-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/55195197-ee16-4c06-aaf3-992e9fbba8c6-neutron-sriov-agent-neutron-config-0\") pod \"55195197-ee16-4c06-aaf3-992e9fbba8c6\" (UID: \"55195197-ee16-4c06-aaf3-992e9fbba8c6\") " Oct 09 15:52:22 crc kubenswrapper[4762]: I1009 15:52:22.013999 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/55195197-ee16-4c06-aaf3-992e9fbba8c6-ceph\") pod \"55195197-ee16-4c06-aaf3-992e9fbba8c6\" (UID: \"55195197-ee16-4c06-aaf3-992e9fbba8c6\") " Oct 09 15:52:22 crc kubenswrapper[4762]: I1009 15:52:22.014084 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sgs5m\" (UniqueName: \"kubernetes.io/projected/55195197-ee16-4c06-aaf3-992e9fbba8c6-kube-api-access-sgs5m\") pod \"55195197-ee16-4c06-aaf3-992e9fbba8c6\" (UID: \"55195197-ee16-4c06-aaf3-992e9fbba8c6\") " Oct 09 15:52:22 crc kubenswrapper[4762]: I1009 15:52:22.014136 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/55195197-ee16-4c06-aaf3-992e9fbba8c6-ssh-key\") pod \"55195197-ee16-4c06-aaf3-992e9fbba8c6\" (UID: \"55195197-ee16-4c06-aaf3-992e9fbba8c6\") " Oct 09 15:52:22 crc kubenswrapper[4762]: I1009 15:52:22.014334 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/55195197-ee16-4c06-aaf3-992e9fbba8c6-inventory\") pod \"55195197-ee16-4c06-aaf3-992e9fbba8c6\" (UID: \"55195197-ee16-4c06-aaf3-992e9fbba8c6\") " Oct 09 15:52:22 crc kubenswrapper[4762]: I1009 15:52:22.021123 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/55195197-ee16-4c06-aaf3-992e9fbba8c6-ceph" (OuterVolumeSpecName: "ceph") pod "55195197-ee16-4c06-aaf3-992e9fbba8c6" (UID: "55195197-ee16-4c06-aaf3-992e9fbba8c6"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:52:22 crc kubenswrapper[4762]: I1009 15:52:22.021392 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/55195197-ee16-4c06-aaf3-992e9fbba8c6-kube-api-access-sgs5m" (OuterVolumeSpecName: "kube-api-access-sgs5m") pod "55195197-ee16-4c06-aaf3-992e9fbba8c6" (UID: "55195197-ee16-4c06-aaf3-992e9fbba8c6"). InnerVolumeSpecName "kube-api-access-sgs5m". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 15:52:22 crc kubenswrapper[4762]: I1009 15:52:22.025917 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/55195197-ee16-4c06-aaf3-992e9fbba8c6-neutron-sriov-combined-ca-bundle" (OuterVolumeSpecName: "neutron-sriov-combined-ca-bundle") pod "55195197-ee16-4c06-aaf3-992e9fbba8c6" (UID: "55195197-ee16-4c06-aaf3-992e9fbba8c6"). InnerVolumeSpecName "neutron-sriov-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:52:22 crc kubenswrapper[4762]: I1009 15:52:22.052748 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/55195197-ee16-4c06-aaf3-992e9fbba8c6-inventory" (OuterVolumeSpecName: "inventory") pod "55195197-ee16-4c06-aaf3-992e9fbba8c6" (UID: "55195197-ee16-4c06-aaf3-992e9fbba8c6"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:52:22 crc kubenswrapper[4762]: I1009 15:52:22.053809 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/55195197-ee16-4c06-aaf3-992e9fbba8c6-neutron-sriov-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-sriov-agent-neutron-config-0") pod "55195197-ee16-4c06-aaf3-992e9fbba8c6" (UID: "55195197-ee16-4c06-aaf3-992e9fbba8c6"). InnerVolumeSpecName "neutron-sriov-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:52:22 crc kubenswrapper[4762]: I1009 15:52:22.055090 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/55195197-ee16-4c06-aaf3-992e9fbba8c6-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "55195197-ee16-4c06-aaf3-992e9fbba8c6" (UID: "55195197-ee16-4c06-aaf3-992e9fbba8c6"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:52:22 crc kubenswrapper[4762]: I1009 15:52:22.117053 4762 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/55195197-ee16-4c06-aaf3-992e9fbba8c6-inventory\") on node \"crc\" DevicePath \"\"" Oct 09 15:52:22 crc kubenswrapper[4762]: I1009 15:52:22.117090 4762 reconciler_common.go:293] "Volume detached for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55195197-ee16-4c06-aaf3-992e9fbba8c6-neutron-sriov-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 15:52:22 crc kubenswrapper[4762]: I1009 15:52:22.117105 4762 reconciler_common.go:293] "Volume detached for volume \"neutron-sriov-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/55195197-ee16-4c06-aaf3-992e9fbba8c6-neutron-sriov-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Oct 09 15:52:22 crc kubenswrapper[4762]: I1009 15:52:22.117120 4762 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/55195197-ee16-4c06-aaf3-992e9fbba8c6-ceph\") on node \"crc\" DevicePath \"\"" Oct 09 15:52:22 crc kubenswrapper[4762]: I1009 15:52:22.117133 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sgs5m\" (UniqueName: \"kubernetes.io/projected/55195197-ee16-4c06-aaf3-992e9fbba8c6-kube-api-access-sgs5m\") on node \"crc\" DevicePath \"\"" Oct 09 15:52:22 crc kubenswrapper[4762]: I1009 15:52:22.117145 4762 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/55195197-ee16-4c06-aaf3-992e9fbba8c6-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 09 15:52:22 crc kubenswrapper[4762]: I1009 15:52:22.418969 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-sriov-openstack-openstack-cell1-7knjf" event={"ID":"55195197-ee16-4c06-aaf3-992e9fbba8c6","Type":"ContainerDied","Data":"480335e9bd0e3d55ca70126dc372bfbc4ee3dd50d45c6de86a4dfd4673c23b81"} Oct 09 15:52:22 crc kubenswrapper[4762]: I1009 15:52:22.419018 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="480335e9bd0e3d55ca70126dc372bfbc4ee3dd50d45c6de86a4dfd4673c23b81" Oct 09 15:52:22 crc kubenswrapper[4762]: I1009 15:52:22.419062 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-sriov-openstack-openstack-cell1-7knjf" Oct 09 15:52:22 crc kubenswrapper[4762]: I1009 15:52:22.512088 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-dhcp-openstack-openstack-cell1-v7kr8"] Oct 09 15:52:22 crc kubenswrapper[4762]: E1009 15:52:22.512546 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b731399a-4214-48a6-ac48-9d9da54e0281" containerName="extract-utilities" Oct 09 15:52:22 crc kubenswrapper[4762]: I1009 15:52:22.512566 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="b731399a-4214-48a6-ac48-9d9da54e0281" containerName="extract-utilities" Oct 09 15:52:22 crc kubenswrapper[4762]: E1009 15:52:22.512595 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="55195197-ee16-4c06-aaf3-992e9fbba8c6" containerName="neutron-sriov-openstack-openstack-cell1" Oct 09 15:52:22 crc kubenswrapper[4762]: I1009 15:52:22.512602 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="55195197-ee16-4c06-aaf3-992e9fbba8c6" containerName="neutron-sriov-openstack-openstack-cell1" Oct 09 15:52:22 crc kubenswrapper[4762]: E1009 15:52:22.512613 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b731399a-4214-48a6-ac48-9d9da54e0281" containerName="extract-content" Oct 09 15:52:22 crc kubenswrapper[4762]: I1009 15:52:22.512619 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="b731399a-4214-48a6-ac48-9d9da54e0281" containerName="extract-content" Oct 09 15:52:22 crc kubenswrapper[4762]: E1009 15:52:22.512628 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b731399a-4214-48a6-ac48-9d9da54e0281" containerName="registry-server" Oct 09 15:52:22 crc kubenswrapper[4762]: I1009 15:52:22.512652 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="b731399a-4214-48a6-ac48-9d9da54e0281" containerName="registry-server" Oct 09 15:52:22 crc kubenswrapper[4762]: I1009 15:52:22.512900 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="55195197-ee16-4c06-aaf3-992e9fbba8c6" containerName="neutron-sriov-openstack-openstack-cell1" Oct 09 15:52:22 crc kubenswrapper[4762]: I1009 15:52:22.512917 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="b731399a-4214-48a6-ac48-9d9da54e0281" containerName="registry-server" Oct 09 15:52:22 crc kubenswrapper[4762]: I1009 15:52:22.513847 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-dhcp-openstack-openstack-cell1-v7kr8" Oct 09 15:52:22 crc kubenswrapper[4762]: I1009 15:52:22.515824 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-dhcp-agent-neutron-config" Oct 09 15:52:22 crc kubenswrapper[4762]: I1009 15:52:22.517074 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 09 15:52:22 crc kubenswrapper[4762]: I1009 15:52:22.517267 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-whcgt" Oct 09 15:52:22 crc kubenswrapper[4762]: I1009 15:52:22.517490 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Oct 09 15:52:22 crc kubenswrapper[4762]: I1009 15:52:22.517714 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Oct 09 15:52:22 crc kubenswrapper[4762]: I1009 15:52:22.525469 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c1c60f49-ddad-480e-b318-fbff83ab32a5-inventory\") pod \"neutron-dhcp-openstack-openstack-cell1-v7kr8\" (UID: \"c1c60f49-ddad-480e-b318-fbff83ab32a5\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-v7kr8" Oct 09 15:52:22 crc kubenswrapper[4762]: I1009 15:52:22.525559 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-dhcp-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/c1c60f49-ddad-480e-b318-fbff83ab32a5-neutron-dhcp-agent-neutron-config-0\") pod \"neutron-dhcp-openstack-openstack-cell1-v7kr8\" (UID: \"c1c60f49-ddad-480e-b318-fbff83ab32a5\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-v7kr8" Oct 09 15:52:22 crc kubenswrapper[4762]: I1009 15:52:22.525854 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c1c60f49-ddad-480e-b318-fbff83ab32a5-neutron-dhcp-combined-ca-bundle\") pod \"neutron-dhcp-openstack-openstack-cell1-v7kr8\" (UID: \"c1c60f49-ddad-480e-b318-fbff83ab32a5\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-v7kr8" Oct 09 15:52:22 crc kubenswrapper[4762]: I1009 15:52:22.526106 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c1c60f49-ddad-480e-b318-fbff83ab32a5-ceph\") pod \"neutron-dhcp-openstack-openstack-cell1-v7kr8\" (UID: \"c1c60f49-ddad-480e-b318-fbff83ab32a5\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-v7kr8" Oct 09 15:52:22 crc kubenswrapper[4762]: I1009 15:52:22.526159 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c1c60f49-ddad-480e-b318-fbff83ab32a5-ssh-key\") pod \"neutron-dhcp-openstack-openstack-cell1-v7kr8\" (UID: \"c1c60f49-ddad-480e-b318-fbff83ab32a5\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-v7kr8" Oct 09 15:52:22 crc kubenswrapper[4762]: I1009 15:52:22.526229 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s7nd8\" (UniqueName: \"kubernetes.io/projected/c1c60f49-ddad-480e-b318-fbff83ab32a5-kube-api-access-s7nd8\") pod \"neutron-dhcp-openstack-openstack-cell1-v7kr8\" (UID: 
\"c1c60f49-ddad-480e-b318-fbff83ab32a5\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-v7kr8" Oct 09 15:52:22 crc kubenswrapper[4762]: I1009 15:52:22.535792 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-dhcp-openstack-openstack-cell1-v7kr8"] Oct 09 15:52:22 crc kubenswrapper[4762]: I1009 15:52:22.628187 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c1c60f49-ddad-480e-b318-fbff83ab32a5-ssh-key\") pod \"neutron-dhcp-openstack-openstack-cell1-v7kr8\" (UID: \"c1c60f49-ddad-480e-b318-fbff83ab32a5\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-v7kr8" Oct 09 15:52:22 crc kubenswrapper[4762]: I1009 15:52:22.628623 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s7nd8\" (UniqueName: \"kubernetes.io/projected/c1c60f49-ddad-480e-b318-fbff83ab32a5-kube-api-access-s7nd8\") pod \"neutron-dhcp-openstack-openstack-cell1-v7kr8\" (UID: \"c1c60f49-ddad-480e-b318-fbff83ab32a5\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-v7kr8" Oct 09 15:52:22 crc kubenswrapper[4762]: I1009 15:52:22.628696 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c1c60f49-ddad-480e-b318-fbff83ab32a5-inventory\") pod \"neutron-dhcp-openstack-openstack-cell1-v7kr8\" (UID: \"c1c60f49-ddad-480e-b318-fbff83ab32a5\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-v7kr8" Oct 09 15:52:22 crc kubenswrapper[4762]: I1009 15:52:22.628778 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-dhcp-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/c1c60f49-ddad-480e-b318-fbff83ab32a5-neutron-dhcp-agent-neutron-config-0\") pod \"neutron-dhcp-openstack-openstack-cell1-v7kr8\" (UID: \"c1c60f49-ddad-480e-b318-fbff83ab32a5\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-v7kr8" Oct 09 15:52:22 crc kubenswrapper[4762]: I1009 15:52:22.628889 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c1c60f49-ddad-480e-b318-fbff83ab32a5-neutron-dhcp-combined-ca-bundle\") pod \"neutron-dhcp-openstack-openstack-cell1-v7kr8\" (UID: \"c1c60f49-ddad-480e-b318-fbff83ab32a5\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-v7kr8" Oct 09 15:52:22 crc kubenswrapper[4762]: I1009 15:52:22.628979 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c1c60f49-ddad-480e-b318-fbff83ab32a5-ceph\") pod \"neutron-dhcp-openstack-openstack-cell1-v7kr8\" (UID: \"c1c60f49-ddad-480e-b318-fbff83ab32a5\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-v7kr8" Oct 09 15:52:22 crc kubenswrapper[4762]: I1009 15:52:22.633765 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-dhcp-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/c1c60f49-ddad-480e-b318-fbff83ab32a5-neutron-dhcp-agent-neutron-config-0\") pod \"neutron-dhcp-openstack-openstack-cell1-v7kr8\" (UID: \"c1c60f49-ddad-480e-b318-fbff83ab32a5\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-v7kr8" Oct 09 15:52:22 crc kubenswrapper[4762]: I1009 15:52:22.633877 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c1c60f49-ddad-480e-b318-fbff83ab32a5-inventory\") pod 
\"neutron-dhcp-openstack-openstack-cell1-v7kr8\" (UID: \"c1c60f49-ddad-480e-b318-fbff83ab32a5\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-v7kr8" Oct 09 15:52:22 crc kubenswrapper[4762]: I1009 15:52:22.634572 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c1c60f49-ddad-480e-b318-fbff83ab32a5-ceph\") pod \"neutron-dhcp-openstack-openstack-cell1-v7kr8\" (UID: \"c1c60f49-ddad-480e-b318-fbff83ab32a5\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-v7kr8" Oct 09 15:52:22 crc kubenswrapper[4762]: I1009 15:52:22.638280 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c1c60f49-ddad-480e-b318-fbff83ab32a5-neutron-dhcp-combined-ca-bundle\") pod \"neutron-dhcp-openstack-openstack-cell1-v7kr8\" (UID: \"c1c60f49-ddad-480e-b318-fbff83ab32a5\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-v7kr8" Oct 09 15:52:22 crc kubenswrapper[4762]: I1009 15:52:22.639292 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c1c60f49-ddad-480e-b318-fbff83ab32a5-ssh-key\") pod \"neutron-dhcp-openstack-openstack-cell1-v7kr8\" (UID: \"c1c60f49-ddad-480e-b318-fbff83ab32a5\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-v7kr8" Oct 09 15:52:22 crc kubenswrapper[4762]: I1009 15:52:22.648769 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s7nd8\" (UniqueName: \"kubernetes.io/projected/c1c60f49-ddad-480e-b318-fbff83ab32a5-kube-api-access-s7nd8\") pod \"neutron-dhcp-openstack-openstack-cell1-v7kr8\" (UID: \"c1c60f49-ddad-480e-b318-fbff83ab32a5\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-v7kr8" Oct 09 15:52:22 crc kubenswrapper[4762]: I1009 15:52:22.836438 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-dhcp-openstack-openstack-cell1-v7kr8" Oct 09 15:52:23 crc kubenswrapper[4762]: I1009 15:52:23.373395 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-dhcp-openstack-openstack-cell1-v7kr8"] Oct 09 15:52:23 crc kubenswrapper[4762]: I1009 15:52:23.432303 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-dhcp-openstack-openstack-cell1-v7kr8" event={"ID":"c1c60f49-ddad-480e-b318-fbff83ab32a5","Type":"ContainerStarted","Data":"9c3ba880b21cbb5bea57355145cd45da4163ca5387d57b6dcf5c7c92b94eac80"} Oct 09 15:52:24 crc kubenswrapper[4762]: I1009 15:52:24.442819 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-dhcp-openstack-openstack-cell1-v7kr8" event={"ID":"c1c60f49-ddad-480e-b318-fbff83ab32a5","Type":"ContainerStarted","Data":"5f293f7f0c1d08a588614d2f1dac6d538b02efda4aaf398bd038aff9ecde8b62"} Oct 09 15:52:41 crc kubenswrapper[4762]: I1009 15:52:41.969315 4762 patch_prober.go:28] interesting pod/machine-config-daemon-5v6hv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 15:52:41 crc kubenswrapper[4762]: I1009 15:52:41.970747 4762 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 15:53:11 crc kubenswrapper[4762]: I1009 15:53:11.969392 4762 patch_prober.go:28] interesting pod/machine-config-daemon-5v6hv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 15:53:11 crc kubenswrapper[4762]: I1009 15:53:11.970929 4762 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 15:53:41 crc kubenswrapper[4762]: I1009 15:53:41.970020 4762 patch_prober.go:28] interesting pod/machine-config-daemon-5v6hv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 15:53:41 crc kubenswrapper[4762]: I1009 15:53:41.970696 4762 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 15:53:41 crc kubenswrapper[4762]: I1009 15:53:41.970758 4762 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" Oct 09 15:53:41 crc kubenswrapper[4762]: I1009 15:53:41.971535 4762 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" 
containerStatusID={"Type":"cri-o","ID":"915bf3aac40e436d24352cca144821f66a07852e961cfcafeb700aef3cb1b274"} pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 09 15:53:41 crc kubenswrapper[4762]: I1009 15:53:41.971607 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" containerID="cri-o://915bf3aac40e436d24352cca144821f66a07852e961cfcafeb700aef3cb1b274" gracePeriod=600 Oct 09 15:53:42 crc kubenswrapper[4762]: E1009 15:53:42.091805 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:53:42 crc kubenswrapper[4762]: I1009 15:53:42.275279 4762 generic.go:334] "Generic (PLEG): container finished" podID="366049a3-acf6-488c-9f93-4557528d6d14" containerID="915bf3aac40e436d24352cca144821f66a07852e961cfcafeb700aef3cb1b274" exitCode=0 Oct 09 15:53:42 crc kubenswrapper[4762]: I1009 15:53:42.275377 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" event={"ID":"366049a3-acf6-488c-9f93-4557528d6d14","Type":"ContainerDied","Data":"915bf3aac40e436d24352cca144821f66a07852e961cfcafeb700aef3cb1b274"} Oct 09 15:53:42 crc kubenswrapper[4762]: I1009 15:53:42.276045 4762 scope.go:117] "RemoveContainer" containerID="608b8e79f6b83e7f1922401fcc1b3497cb9d1b136c003d543816f40098ca2be2" Oct 09 15:53:42 crc kubenswrapper[4762]: I1009 15:53:42.276979 4762 scope.go:117] "RemoveContainer" containerID="915bf3aac40e436d24352cca144821f66a07852e961cfcafeb700aef3cb1b274" Oct 09 15:53:42 crc kubenswrapper[4762]: E1009 15:53:42.277436 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:53:42 crc kubenswrapper[4762]: I1009 15:53:42.308864 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-dhcp-openstack-openstack-cell1-v7kr8" podStartSLOduration=79.72862813 podStartE2EDuration="1m20.308844075s" podCreationTimestamp="2025-10-09 15:52:22 +0000 UTC" firstStartedPulling="2025-10-09 15:52:23.37900498 +0000 UTC m=+8819.152796009" lastFinishedPulling="2025-10-09 15:52:23.959220915 +0000 UTC m=+8819.733011954" observedRunningTime="2025-10-09 15:52:24.467125479 +0000 UTC m=+8820.240916518" watchObservedRunningTime="2025-10-09 15:53:42.308844075 +0000 UTC m=+8898.082635114" Oct 09 15:53:52 crc kubenswrapper[4762]: I1009 15:53:52.347674 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-wsprv"] Oct 09 15:53:52 crc kubenswrapper[4762]: I1009 15:53:52.351398 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-wsprv" Oct 09 15:53:52 crc kubenswrapper[4762]: I1009 15:53:52.366100 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-wsprv"] Oct 09 15:53:52 crc kubenswrapper[4762]: I1009 15:53:52.452215 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4dce696d-31b4-4482-b0b9-a067a1d77877-utilities\") pod \"redhat-operators-wsprv\" (UID: \"4dce696d-31b4-4482-b0b9-a067a1d77877\") " pod="openshift-marketplace/redhat-operators-wsprv" Oct 09 15:53:52 crc kubenswrapper[4762]: I1009 15:53:52.452334 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8zsrh\" (UniqueName: \"kubernetes.io/projected/4dce696d-31b4-4482-b0b9-a067a1d77877-kube-api-access-8zsrh\") pod \"redhat-operators-wsprv\" (UID: \"4dce696d-31b4-4482-b0b9-a067a1d77877\") " pod="openshift-marketplace/redhat-operators-wsprv" Oct 09 15:53:52 crc kubenswrapper[4762]: I1009 15:53:52.452815 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4dce696d-31b4-4482-b0b9-a067a1d77877-catalog-content\") pod \"redhat-operators-wsprv\" (UID: \"4dce696d-31b4-4482-b0b9-a067a1d77877\") " pod="openshift-marketplace/redhat-operators-wsprv" Oct 09 15:53:52 crc kubenswrapper[4762]: I1009 15:53:52.556419 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4dce696d-31b4-4482-b0b9-a067a1d77877-catalog-content\") pod \"redhat-operators-wsprv\" (UID: \"4dce696d-31b4-4482-b0b9-a067a1d77877\") " pod="openshift-marketplace/redhat-operators-wsprv" Oct 09 15:53:52 crc kubenswrapper[4762]: I1009 15:53:52.556557 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4dce696d-31b4-4482-b0b9-a067a1d77877-utilities\") pod \"redhat-operators-wsprv\" (UID: \"4dce696d-31b4-4482-b0b9-a067a1d77877\") " pod="openshift-marketplace/redhat-operators-wsprv" Oct 09 15:53:52 crc kubenswrapper[4762]: I1009 15:53:52.556649 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8zsrh\" (UniqueName: \"kubernetes.io/projected/4dce696d-31b4-4482-b0b9-a067a1d77877-kube-api-access-8zsrh\") pod \"redhat-operators-wsprv\" (UID: \"4dce696d-31b4-4482-b0b9-a067a1d77877\") " pod="openshift-marketplace/redhat-operators-wsprv" Oct 09 15:53:52 crc kubenswrapper[4762]: I1009 15:53:52.557457 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4dce696d-31b4-4482-b0b9-a067a1d77877-catalog-content\") pod \"redhat-operators-wsprv\" (UID: \"4dce696d-31b4-4482-b0b9-a067a1d77877\") " pod="openshift-marketplace/redhat-operators-wsprv" Oct 09 15:53:52 crc kubenswrapper[4762]: I1009 15:53:52.557475 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4dce696d-31b4-4482-b0b9-a067a1d77877-utilities\") pod \"redhat-operators-wsprv\" (UID: \"4dce696d-31b4-4482-b0b9-a067a1d77877\") " pod="openshift-marketplace/redhat-operators-wsprv" Oct 09 15:53:52 crc kubenswrapper[4762]: I1009 15:53:52.584433 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-8zsrh\" (UniqueName: \"kubernetes.io/projected/4dce696d-31b4-4482-b0b9-a067a1d77877-kube-api-access-8zsrh\") pod \"redhat-operators-wsprv\" (UID: \"4dce696d-31b4-4482-b0b9-a067a1d77877\") " pod="openshift-marketplace/redhat-operators-wsprv" Oct 09 15:53:52 crc kubenswrapper[4762]: I1009 15:53:52.685346 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-wsprv" Oct 09 15:53:53 crc kubenswrapper[4762]: I1009 15:53:53.190492 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-wsprv"] Oct 09 15:53:53 crc kubenswrapper[4762]: I1009 15:53:53.383333 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wsprv" event={"ID":"4dce696d-31b4-4482-b0b9-a067a1d77877","Type":"ContainerStarted","Data":"4d0fe39063389bad17dbcbb3061d2148bcde9f6ce9835607fa650b9e2eebe294"} Oct 09 15:53:53 crc kubenswrapper[4762]: I1009 15:53:53.796708 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-lglxn"] Oct 09 15:53:53 crc kubenswrapper[4762]: I1009 15:53:53.800558 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-lglxn" Oct 09 15:53:53 crc kubenswrapper[4762]: I1009 15:53:53.809681 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-lglxn"] Oct 09 15:53:53 crc kubenswrapper[4762]: I1009 15:53:53.991408 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9v65j\" (UniqueName: \"kubernetes.io/projected/c2dae392-a070-4104-b4dd-6d00c248a6c9-kube-api-access-9v65j\") pod \"certified-operators-lglxn\" (UID: \"c2dae392-a070-4104-b4dd-6d00c248a6c9\") " pod="openshift-marketplace/certified-operators-lglxn" Oct 09 15:53:53 crc kubenswrapper[4762]: I1009 15:53:53.991615 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c2dae392-a070-4104-b4dd-6d00c248a6c9-utilities\") pod \"certified-operators-lglxn\" (UID: \"c2dae392-a070-4104-b4dd-6d00c248a6c9\") " pod="openshift-marketplace/certified-operators-lglxn" Oct 09 15:53:53 crc kubenswrapper[4762]: I1009 15:53:53.991765 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c2dae392-a070-4104-b4dd-6d00c248a6c9-catalog-content\") pod \"certified-operators-lglxn\" (UID: \"c2dae392-a070-4104-b4dd-6d00c248a6c9\") " pod="openshift-marketplace/certified-operators-lglxn" Oct 09 15:53:54 crc kubenswrapper[4762]: I1009 15:53:54.094590 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9v65j\" (UniqueName: \"kubernetes.io/projected/c2dae392-a070-4104-b4dd-6d00c248a6c9-kube-api-access-9v65j\") pod \"certified-operators-lglxn\" (UID: \"c2dae392-a070-4104-b4dd-6d00c248a6c9\") " pod="openshift-marketplace/certified-operators-lglxn" Oct 09 15:53:54 crc kubenswrapper[4762]: I1009 15:53:54.094655 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c2dae392-a070-4104-b4dd-6d00c248a6c9-utilities\") pod \"certified-operators-lglxn\" (UID: \"c2dae392-a070-4104-b4dd-6d00c248a6c9\") " pod="openshift-marketplace/certified-operators-lglxn" Oct 09 15:53:54 crc 
Oct 09 15:53:54 crc kubenswrapper[4762]: I1009 15:53:54.095168 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c2dae392-a070-4104-b4dd-6d00c248a6c9-catalog-content\") pod \"certified-operators-lglxn\" (UID: \"c2dae392-a070-4104-b4dd-6d00c248a6c9\") " pod="openshift-marketplace/certified-operators-lglxn"
Oct 09 15:53:54 crc kubenswrapper[4762]: I1009 15:53:54.095267 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c2dae392-a070-4104-b4dd-6d00c248a6c9-utilities\") pod \"certified-operators-lglxn\" (UID: \"c2dae392-a070-4104-b4dd-6d00c248a6c9\") " pod="openshift-marketplace/certified-operators-lglxn"
Oct 09 15:53:54 crc kubenswrapper[4762]: I1009 15:53:54.122263 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9v65j\" (UniqueName: \"kubernetes.io/projected/c2dae392-a070-4104-b4dd-6d00c248a6c9-kube-api-access-9v65j\") pod \"certified-operators-lglxn\" (UID: \"c2dae392-a070-4104-b4dd-6d00c248a6c9\") " pod="openshift-marketplace/certified-operators-lglxn"
Oct 09 15:53:54 crc kubenswrapper[4762]: I1009 15:53:54.144390 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-lglxn"
Oct 09 15:53:54 crc kubenswrapper[4762]: I1009 15:53:54.410704 4762 generic.go:334] "Generic (PLEG): container finished" podID="4dce696d-31b4-4482-b0b9-a067a1d77877" containerID="2300c2cad2b47dd622a29e9060088f0efd3da687531793d4d4bda617bbcd76da" exitCode=0
Oct 09 15:53:54 crc kubenswrapper[4762]: I1009 15:53:54.410788 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wsprv" event={"ID":"4dce696d-31b4-4482-b0b9-a067a1d77877","Type":"ContainerDied","Data":"2300c2cad2b47dd622a29e9060088f0efd3da687531793d4d4bda617bbcd76da"}
Oct 09 15:53:54 crc kubenswrapper[4762]: I1009 15:53:54.695317 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-lglxn"]
Oct 09 15:53:54 crc kubenswrapper[4762]: W1009 15:53:54.700352 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc2dae392_a070_4104_b4dd_6d00c248a6c9.slice/crio-bd41a79ef3f0fc7e0c767e2caba6cb7057b423b0b4f37dc452cfcac1ff96690f WatchSource:0}: Error finding container bd41a79ef3f0fc7e0c767e2caba6cb7057b423b0b4f37dc452cfcac1ff96690f: Status 404 returned error can't find the container with id bd41a79ef3f0fc7e0c767e2caba6cb7057b423b0b4f37dc452cfcac1ff96690f
Oct 09 15:53:54 crc kubenswrapper[4762]: I1009 15:53:54.752129 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-kd55b"]
Oct 09 15:53:54 crc kubenswrapper[4762]: I1009 15:53:54.755136 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kd55b"
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kd55b" Oct 09 15:53:54 crc kubenswrapper[4762]: I1009 15:53:54.775936 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-kd55b"] Oct 09 15:53:54 crc kubenswrapper[4762]: I1009 15:53:54.813767 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2pbdr\" (UniqueName: \"kubernetes.io/projected/82d15d3d-5080-48db-b9bd-65710b3f98ce-kube-api-access-2pbdr\") pod \"redhat-marketplace-kd55b\" (UID: \"82d15d3d-5080-48db-b9bd-65710b3f98ce\") " pod="openshift-marketplace/redhat-marketplace-kd55b" Oct 09 15:53:54 crc kubenswrapper[4762]: I1009 15:53:54.814124 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/82d15d3d-5080-48db-b9bd-65710b3f98ce-catalog-content\") pod \"redhat-marketplace-kd55b\" (UID: \"82d15d3d-5080-48db-b9bd-65710b3f98ce\") " pod="openshift-marketplace/redhat-marketplace-kd55b" Oct 09 15:53:54 crc kubenswrapper[4762]: I1009 15:53:54.814194 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/82d15d3d-5080-48db-b9bd-65710b3f98ce-utilities\") pod \"redhat-marketplace-kd55b\" (UID: \"82d15d3d-5080-48db-b9bd-65710b3f98ce\") " pod="openshift-marketplace/redhat-marketplace-kd55b" Oct 09 15:53:54 crc kubenswrapper[4762]: I1009 15:53:54.917099 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/82d15d3d-5080-48db-b9bd-65710b3f98ce-catalog-content\") pod \"redhat-marketplace-kd55b\" (UID: \"82d15d3d-5080-48db-b9bd-65710b3f98ce\") " pod="openshift-marketplace/redhat-marketplace-kd55b" Oct 09 15:53:54 crc kubenswrapper[4762]: I1009 15:53:54.917177 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/82d15d3d-5080-48db-b9bd-65710b3f98ce-utilities\") pod \"redhat-marketplace-kd55b\" (UID: \"82d15d3d-5080-48db-b9bd-65710b3f98ce\") " pod="openshift-marketplace/redhat-marketplace-kd55b" Oct 09 15:53:54 crc kubenswrapper[4762]: I1009 15:53:54.917306 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2pbdr\" (UniqueName: \"kubernetes.io/projected/82d15d3d-5080-48db-b9bd-65710b3f98ce-kube-api-access-2pbdr\") pod \"redhat-marketplace-kd55b\" (UID: \"82d15d3d-5080-48db-b9bd-65710b3f98ce\") " pod="openshift-marketplace/redhat-marketplace-kd55b" Oct 09 15:53:54 crc kubenswrapper[4762]: I1009 15:53:54.917729 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/82d15d3d-5080-48db-b9bd-65710b3f98ce-catalog-content\") pod \"redhat-marketplace-kd55b\" (UID: \"82d15d3d-5080-48db-b9bd-65710b3f98ce\") " pod="openshift-marketplace/redhat-marketplace-kd55b" Oct 09 15:53:54 crc kubenswrapper[4762]: I1009 15:53:54.917833 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/82d15d3d-5080-48db-b9bd-65710b3f98ce-utilities\") pod \"redhat-marketplace-kd55b\" (UID: \"82d15d3d-5080-48db-b9bd-65710b3f98ce\") " pod="openshift-marketplace/redhat-marketplace-kd55b" Oct 09 15:53:54 crc kubenswrapper[4762]: I1009 15:53:54.940868 4762 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-2pbdr\" (UniqueName: \"kubernetes.io/projected/82d15d3d-5080-48db-b9bd-65710b3f98ce-kube-api-access-2pbdr\") pod \"redhat-marketplace-kd55b\" (UID: \"82d15d3d-5080-48db-b9bd-65710b3f98ce\") " pod="openshift-marketplace/redhat-marketplace-kd55b" Oct 09 15:53:54 crc kubenswrapper[4762]: I1009 15:53:54.977751 4762 scope.go:117] "RemoveContainer" containerID="915bf3aac40e436d24352cca144821f66a07852e961cfcafeb700aef3cb1b274" Oct 09 15:53:54 crc kubenswrapper[4762]: E1009 15:53:54.978397 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:53:55 crc kubenswrapper[4762]: I1009 15:53:55.202957 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kd55b" Oct 09 15:53:55 crc kubenswrapper[4762]: I1009 15:53:55.425155 4762 generic.go:334] "Generic (PLEG): container finished" podID="c2dae392-a070-4104-b4dd-6d00c248a6c9" containerID="783b79f06e83592b7baeb0581342bf4775e937ec335afb246c9bd0c46738a337" exitCode=0 Oct 09 15:53:55 crc kubenswrapper[4762]: I1009 15:53:55.425270 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lglxn" event={"ID":"c2dae392-a070-4104-b4dd-6d00c248a6c9","Type":"ContainerDied","Data":"783b79f06e83592b7baeb0581342bf4775e937ec335afb246c9bd0c46738a337"} Oct 09 15:53:55 crc kubenswrapper[4762]: I1009 15:53:55.425460 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lglxn" event={"ID":"c2dae392-a070-4104-b4dd-6d00c248a6c9","Type":"ContainerStarted","Data":"bd41a79ef3f0fc7e0c767e2caba6cb7057b423b0b4f37dc452cfcac1ff96690f"} Oct 09 15:53:55 crc kubenswrapper[4762]: W1009 15:53:55.717290 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod82d15d3d_5080_48db_b9bd_65710b3f98ce.slice/crio-4b6194cd4cc0c786690f66b3e8c6fbe5ee8155b6cb9fe48e6b39743a3a5ce9c4 WatchSource:0}: Error finding container 4b6194cd4cc0c786690f66b3e8c6fbe5ee8155b6cb9fe48e6b39743a3a5ce9c4: Status 404 returned error can't find the container with id 4b6194cd4cc0c786690f66b3e8c6fbe5ee8155b6cb9fe48e6b39743a3a5ce9c4 Oct 09 15:53:55 crc kubenswrapper[4762]: I1009 15:53:55.734956 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-kd55b"] Oct 09 15:53:56 crc kubenswrapper[4762]: I1009 15:53:56.439709 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wsprv" event={"ID":"4dce696d-31b4-4482-b0b9-a067a1d77877","Type":"ContainerStarted","Data":"d1ed606c0142ec7584aedfc789268f4428666014fcc9f1e7dfca62df7bc515e4"} Oct 09 15:53:56 crc kubenswrapper[4762]: I1009 15:53:56.441679 4762 generic.go:334] "Generic (PLEG): container finished" podID="82d15d3d-5080-48db-b9bd-65710b3f98ce" containerID="272e2014e5a2b56955bc43681fe15f1ba053a23888dede01ea7da9b39747f3ec" exitCode=0 Oct 09 15:53:56 crc kubenswrapper[4762]: I1009 15:53:56.441782 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kd55b" 
event={"ID":"82d15d3d-5080-48db-b9bd-65710b3f98ce","Type":"ContainerDied","Data":"272e2014e5a2b56955bc43681fe15f1ba053a23888dede01ea7da9b39747f3ec"} Oct 09 15:53:56 crc kubenswrapper[4762]: I1009 15:53:56.441834 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kd55b" event={"ID":"82d15d3d-5080-48db-b9bd-65710b3f98ce","Type":"ContainerStarted","Data":"4b6194cd4cc0c786690f66b3e8c6fbe5ee8155b6cb9fe48e6b39743a3a5ce9c4"} Oct 09 15:53:56 crc kubenswrapper[4762]: I1009 15:53:56.444774 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lglxn" event={"ID":"c2dae392-a070-4104-b4dd-6d00c248a6c9","Type":"ContainerStarted","Data":"e745ee8107de6d0062d7893ed237dd8d754df2fa59462d26f65fcfe5c36e3938"} Oct 09 15:53:58 crc kubenswrapper[4762]: I1009 15:53:58.470895 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kd55b" event={"ID":"82d15d3d-5080-48db-b9bd-65710b3f98ce","Type":"ContainerStarted","Data":"8ecfb53f0d738cf0514ccd89e4d15f4f99d43c9f95b5cf36eebc84ac4a92970a"} Oct 09 15:53:58 crc kubenswrapper[4762]: I1009 15:53:58.473681 4762 generic.go:334] "Generic (PLEG): container finished" podID="c2dae392-a070-4104-b4dd-6d00c248a6c9" containerID="e745ee8107de6d0062d7893ed237dd8d754df2fa59462d26f65fcfe5c36e3938" exitCode=0 Oct 09 15:53:58 crc kubenswrapper[4762]: I1009 15:53:58.473726 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lglxn" event={"ID":"c2dae392-a070-4104-b4dd-6d00c248a6c9","Type":"ContainerDied","Data":"e745ee8107de6d0062d7893ed237dd8d754df2fa59462d26f65fcfe5c36e3938"} Oct 09 15:54:00 crc kubenswrapper[4762]: I1009 15:54:00.528496 4762 generic.go:334] "Generic (PLEG): container finished" podID="82d15d3d-5080-48db-b9bd-65710b3f98ce" containerID="8ecfb53f0d738cf0514ccd89e4d15f4f99d43c9f95b5cf36eebc84ac4a92970a" exitCode=0 Oct 09 15:54:00 crc kubenswrapper[4762]: I1009 15:54:00.528588 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kd55b" event={"ID":"82d15d3d-5080-48db-b9bd-65710b3f98ce","Type":"ContainerDied","Data":"8ecfb53f0d738cf0514ccd89e4d15f4f99d43c9f95b5cf36eebc84ac4a92970a"} Oct 09 15:54:00 crc kubenswrapper[4762]: I1009 15:54:00.532616 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lglxn" event={"ID":"c2dae392-a070-4104-b4dd-6d00c248a6c9","Type":"ContainerStarted","Data":"7dc791b4aa19d52d67ef1baba8cf220c33e5645f89c0d83a707f8b159c1b343a"} Oct 09 15:54:00 crc kubenswrapper[4762]: I1009 15:54:00.535490 4762 generic.go:334] "Generic (PLEG): container finished" podID="4dce696d-31b4-4482-b0b9-a067a1d77877" containerID="d1ed606c0142ec7584aedfc789268f4428666014fcc9f1e7dfca62df7bc515e4" exitCode=0 Oct 09 15:54:00 crc kubenswrapper[4762]: I1009 15:54:00.535521 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wsprv" event={"ID":"4dce696d-31b4-4482-b0b9-a067a1d77877","Type":"ContainerDied","Data":"d1ed606c0142ec7584aedfc789268f4428666014fcc9f1e7dfca62df7bc515e4"} Oct 09 15:54:00 crc kubenswrapper[4762]: I1009 15:54:00.605976 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-lglxn" podStartSLOduration=3.755492752 podStartE2EDuration="7.605940201s" podCreationTimestamp="2025-10-09 15:53:53 +0000 UTC" firstStartedPulling="2025-10-09 15:53:55.428803276 +0000 UTC 
m=+8911.202594315" lastFinishedPulling="2025-10-09 15:53:59.279250725 +0000 UTC m=+8915.053041764" observedRunningTime="2025-10-09 15:54:00.599856612 +0000 UTC m=+8916.373647651" watchObservedRunningTime="2025-10-09 15:54:00.605940201 +0000 UTC m=+8916.379731240" Oct 09 15:54:01 crc kubenswrapper[4762]: I1009 15:54:01.545900 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wsprv" event={"ID":"4dce696d-31b4-4482-b0b9-a067a1d77877","Type":"ContainerStarted","Data":"a070060c9d73d67f9e3ed834cf65681a6e6f794cfd4aa28d0ce531cb2f335710"} Oct 09 15:54:01 crc kubenswrapper[4762]: I1009 15:54:01.549393 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kd55b" event={"ID":"82d15d3d-5080-48db-b9bd-65710b3f98ce","Type":"ContainerStarted","Data":"ba51cdb7b430c59e4b617b1c5f72924863a7db43f9e5be5261c642478870aa65"} Oct 09 15:54:01 crc kubenswrapper[4762]: I1009 15:54:01.598493 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-wsprv" podStartSLOduration=3.033913606 podStartE2EDuration="9.598466642s" podCreationTimestamp="2025-10-09 15:53:52 +0000 UTC" firstStartedPulling="2025-10-09 15:53:54.414022074 +0000 UTC m=+8910.187813113" lastFinishedPulling="2025-10-09 15:54:00.97857511 +0000 UTC m=+8916.752366149" observedRunningTime="2025-10-09 15:54:01.569152916 +0000 UTC m=+8917.342943955" watchObservedRunningTime="2025-10-09 15:54:01.598466642 +0000 UTC m=+8917.372257681" Oct 09 15:54:01 crc kubenswrapper[4762]: I1009 15:54:01.612707 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-kd55b" podStartSLOduration=2.851940303 podStartE2EDuration="7.612687664s" podCreationTimestamp="2025-10-09 15:53:54 +0000 UTC" firstStartedPulling="2025-10-09 15:53:56.4447579 +0000 UTC m=+8912.218548939" lastFinishedPulling="2025-10-09 15:54:01.205505261 +0000 UTC m=+8916.979296300" observedRunningTime="2025-10-09 15:54:01.602719333 +0000 UTC m=+8917.376510392" watchObservedRunningTime="2025-10-09 15:54:01.612687664 +0000 UTC m=+8917.386478703" Oct 09 15:54:02 crc kubenswrapper[4762]: I1009 15:54:02.686473 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-wsprv" Oct 09 15:54:02 crc kubenswrapper[4762]: I1009 15:54:02.687106 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-wsprv" Oct 09 15:54:03 crc kubenswrapper[4762]: I1009 15:54:03.755998 4762 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-wsprv" podUID="4dce696d-31b4-4482-b0b9-a067a1d77877" containerName="registry-server" probeResult="failure" output=< Oct 09 15:54:03 crc kubenswrapper[4762]: timeout: failed to connect service ":50051" within 1s Oct 09 15:54:03 crc kubenswrapper[4762]: > Oct 09 15:54:04 crc kubenswrapper[4762]: I1009 15:54:04.144792 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-lglxn" Oct 09 15:54:04 crc kubenswrapper[4762]: I1009 15:54:04.145103 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-lglxn" Oct 09 15:54:05 crc kubenswrapper[4762]: I1009 15:54:05.191759 4762 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-lglxn" 
podUID="c2dae392-a070-4104-b4dd-6d00c248a6c9" containerName="registry-server" probeResult="failure" output=< Oct 09 15:54:05 crc kubenswrapper[4762]: timeout: failed to connect service ":50051" within 1s Oct 09 15:54:05 crc kubenswrapper[4762]: > Oct 09 15:54:05 crc kubenswrapper[4762]: I1009 15:54:05.204206 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-kd55b" Oct 09 15:54:05 crc kubenswrapper[4762]: I1009 15:54:05.204286 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-kd55b" Oct 09 15:54:05 crc kubenswrapper[4762]: I1009 15:54:05.250747 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-kd55b" Oct 09 15:54:09 crc kubenswrapper[4762]: I1009 15:54:09.967698 4762 scope.go:117] "RemoveContainer" containerID="915bf3aac40e436d24352cca144821f66a07852e961cfcafeb700aef3cb1b274" Oct 09 15:54:09 crc kubenswrapper[4762]: E1009 15:54:09.968921 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:54:12 crc kubenswrapper[4762]: I1009 15:54:12.737564 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-wsprv" Oct 09 15:54:12 crc kubenswrapper[4762]: I1009 15:54:12.790180 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-wsprv" Oct 09 15:54:12 crc kubenswrapper[4762]: I1009 15:54:12.984115 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-wsprv"] Oct 09 15:54:14 crc kubenswrapper[4762]: I1009 15:54:14.210229 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-lglxn" Oct 09 15:54:14 crc kubenswrapper[4762]: I1009 15:54:14.258527 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-lglxn" Oct 09 15:54:14 crc kubenswrapper[4762]: I1009 15:54:14.696088 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-wsprv" podUID="4dce696d-31b4-4482-b0b9-a067a1d77877" containerName="registry-server" containerID="cri-o://a070060c9d73d67f9e3ed834cf65681a6e6f794cfd4aa28d0ce531cb2f335710" gracePeriod=2 Oct 09 15:54:15 crc kubenswrapper[4762]: I1009 15:54:15.252973 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-kd55b" Oct 09 15:54:15 crc kubenswrapper[4762]: I1009 15:54:15.382288 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-lglxn"] Oct 09 15:54:15 crc kubenswrapper[4762]: I1009 15:54:15.706478 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-lglxn" podUID="c2dae392-a070-4104-b4dd-6d00c248a6c9" containerName="registry-server" containerID="cri-o://7dc791b4aa19d52d67ef1baba8cf220c33e5645f89c0d83a707f8b159c1b343a" gracePeriod=2 Oct 09 15:54:16 crc 
Oct 09 15:54:16 crc kubenswrapper[4762]: I1009 15:54:16.721049 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wsprv" event={"ID":"4dce696d-31b4-4482-b0b9-a067a1d77877","Type":"ContainerDied","Data":"a070060c9d73d67f9e3ed834cf65681a6e6f794cfd4aa28d0ce531cb2f335710"}
Oct 09 15:54:16 crc kubenswrapper[4762]: I1009 15:54:16.725044 4762 generic.go:334] "Generic (PLEG): container finished" podID="c2dae392-a070-4104-b4dd-6d00c248a6c9" containerID="7dc791b4aa19d52d67ef1baba8cf220c33e5645f89c0d83a707f8b159c1b343a" exitCode=0
Oct 09 15:54:16 crc kubenswrapper[4762]: I1009 15:54:16.725086 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lglxn" event={"ID":"c2dae392-a070-4104-b4dd-6d00c248a6c9","Type":"ContainerDied","Data":"7dc791b4aa19d52d67ef1baba8cf220c33e5645f89c0d83a707f8b159c1b343a"}
Oct 09 15:54:17 crc kubenswrapper[4762]: I1009 15:54:17.586659 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-kd55b"]
Oct 09 15:54:17 crc kubenswrapper[4762]: I1009 15:54:17.587552 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-kd55b" podUID="82d15d3d-5080-48db-b9bd-65710b3f98ce" containerName="registry-server" containerID="cri-o://ba51cdb7b430c59e4b617b1c5f72924863a7db43f9e5be5261c642478870aa65" gracePeriod=2
Oct 09 15:54:17 crc kubenswrapper[4762]: I1009 15:54:17.723521 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-lglxn"
Oct 09 15:54:17 crc kubenswrapper[4762]: I1009 15:54:17.730484 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-wsprv"
Oct 09 15:54:17 crc kubenswrapper[4762]: I1009 15:54:17.739097 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wsprv" event={"ID":"4dce696d-31b4-4482-b0b9-a067a1d77877","Type":"ContainerDied","Data":"4d0fe39063389bad17dbcbb3061d2148bcde9f6ce9835607fa650b9e2eebe294"}
Oct 09 15:54:17 crc kubenswrapper[4762]: I1009 15:54:17.739456 4762 scope.go:117] "RemoveContainer" containerID="a070060c9d73d67f9e3ed834cf65681a6e6f794cfd4aa28d0ce531cb2f335710"
Oct 09 15:54:17 crc kubenswrapper[4762]: I1009 15:54:17.739126 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-wsprv"
Oct 09 15:54:17 crc kubenswrapper[4762]: I1009 15:54:17.741800 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lglxn" event={"ID":"c2dae392-a070-4104-b4dd-6d00c248a6c9","Type":"ContainerDied","Data":"bd41a79ef3f0fc7e0c767e2caba6cb7057b423b0b4f37dc452cfcac1ff96690f"}
Oct 09 15:54:17 crc kubenswrapper[4762]: I1009 15:54:17.741892 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-lglxn"
Need to start a new one" pod="openshift-marketplace/certified-operators-lglxn" Oct 09 15:54:17 crc kubenswrapper[4762]: I1009 15:54:17.775147 4762 scope.go:117] "RemoveContainer" containerID="d1ed606c0142ec7584aedfc789268f4428666014fcc9f1e7dfca62df7bc515e4" Oct 09 15:54:17 crc kubenswrapper[4762]: I1009 15:54:17.807032 4762 scope.go:117] "RemoveContainer" containerID="2300c2cad2b47dd622a29e9060088f0efd3da687531793d4d4bda617bbcd76da" Oct 09 15:54:17 crc kubenswrapper[4762]: I1009 15:54:17.862545 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4dce696d-31b4-4482-b0b9-a067a1d77877-catalog-content\") pod \"4dce696d-31b4-4482-b0b9-a067a1d77877\" (UID: \"4dce696d-31b4-4482-b0b9-a067a1d77877\") " Oct 09 15:54:17 crc kubenswrapper[4762]: I1009 15:54:17.862745 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c2dae392-a070-4104-b4dd-6d00c248a6c9-utilities\") pod \"c2dae392-a070-4104-b4dd-6d00c248a6c9\" (UID: \"c2dae392-a070-4104-b4dd-6d00c248a6c9\") " Oct 09 15:54:17 crc kubenswrapper[4762]: I1009 15:54:17.862768 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4dce696d-31b4-4482-b0b9-a067a1d77877-utilities\") pod \"4dce696d-31b4-4482-b0b9-a067a1d77877\" (UID: \"4dce696d-31b4-4482-b0b9-a067a1d77877\") " Oct 09 15:54:17 crc kubenswrapper[4762]: I1009 15:54:17.862870 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8zsrh\" (UniqueName: \"kubernetes.io/projected/4dce696d-31b4-4482-b0b9-a067a1d77877-kube-api-access-8zsrh\") pod \"4dce696d-31b4-4482-b0b9-a067a1d77877\" (UID: \"4dce696d-31b4-4482-b0b9-a067a1d77877\") " Oct 09 15:54:17 crc kubenswrapper[4762]: I1009 15:54:17.862904 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9v65j\" (UniqueName: \"kubernetes.io/projected/c2dae392-a070-4104-b4dd-6d00c248a6c9-kube-api-access-9v65j\") pod \"c2dae392-a070-4104-b4dd-6d00c248a6c9\" (UID: \"c2dae392-a070-4104-b4dd-6d00c248a6c9\") " Oct 09 15:54:17 crc kubenswrapper[4762]: I1009 15:54:17.863016 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c2dae392-a070-4104-b4dd-6d00c248a6c9-catalog-content\") pod \"c2dae392-a070-4104-b4dd-6d00c248a6c9\" (UID: \"c2dae392-a070-4104-b4dd-6d00c248a6c9\") " Oct 09 15:54:17 crc kubenswrapper[4762]: I1009 15:54:17.863621 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c2dae392-a070-4104-b4dd-6d00c248a6c9-utilities" (OuterVolumeSpecName: "utilities") pod "c2dae392-a070-4104-b4dd-6d00c248a6c9" (UID: "c2dae392-a070-4104-b4dd-6d00c248a6c9"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 15:54:17 crc kubenswrapper[4762]: I1009 15:54:17.864339 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4dce696d-31b4-4482-b0b9-a067a1d77877-utilities" (OuterVolumeSpecName: "utilities") pod "4dce696d-31b4-4482-b0b9-a067a1d77877" (UID: "4dce696d-31b4-4482-b0b9-a067a1d77877"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 15:54:17 crc kubenswrapper[4762]: I1009 15:54:17.869218 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4dce696d-31b4-4482-b0b9-a067a1d77877-kube-api-access-8zsrh" (OuterVolumeSpecName: "kube-api-access-8zsrh") pod "4dce696d-31b4-4482-b0b9-a067a1d77877" (UID: "4dce696d-31b4-4482-b0b9-a067a1d77877"). InnerVolumeSpecName "kube-api-access-8zsrh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 15:54:17 crc kubenswrapper[4762]: I1009 15:54:17.869360 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c2dae392-a070-4104-b4dd-6d00c248a6c9-kube-api-access-9v65j" (OuterVolumeSpecName: "kube-api-access-9v65j") pod "c2dae392-a070-4104-b4dd-6d00c248a6c9" (UID: "c2dae392-a070-4104-b4dd-6d00c248a6c9"). InnerVolumeSpecName "kube-api-access-9v65j". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 15:54:17 crc kubenswrapper[4762]: I1009 15:54:17.890844 4762 scope.go:117] "RemoveContainer" containerID="7dc791b4aa19d52d67ef1baba8cf220c33e5645f89c0d83a707f8b159c1b343a" Oct 09 15:54:17 crc kubenswrapper[4762]: I1009 15:54:17.910339 4762 scope.go:117] "RemoveContainer" containerID="e745ee8107de6d0062d7893ed237dd8d754df2fa59462d26f65fcfe5c36e3938" Oct 09 15:54:17 crc kubenswrapper[4762]: I1009 15:54:17.914976 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c2dae392-a070-4104-b4dd-6d00c248a6c9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c2dae392-a070-4104-b4dd-6d00c248a6c9" (UID: "c2dae392-a070-4104-b4dd-6d00c248a6c9"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 15:54:17 crc kubenswrapper[4762]: I1009 15:54:17.929205 4762 scope.go:117] "RemoveContainer" containerID="783b79f06e83592b7baeb0581342bf4775e937ec335afb246c9bd0c46738a337" Oct 09 15:54:17 crc kubenswrapper[4762]: I1009 15:54:17.948535 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4dce696d-31b4-4482-b0b9-a067a1d77877-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4dce696d-31b4-4482-b0b9-a067a1d77877" (UID: "4dce696d-31b4-4482-b0b9-a067a1d77877"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 15:54:17 crc kubenswrapper[4762]: I1009 15:54:17.965919 4762 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c2dae392-a070-4104-b4dd-6d00c248a6c9-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 15:54:17 crc kubenswrapper[4762]: I1009 15:54:17.965961 4762 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4dce696d-31b4-4482-b0b9-a067a1d77877-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 15:54:17 crc kubenswrapper[4762]: I1009 15:54:17.965973 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8zsrh\" (UniqueName: \"kubernetes.io/projected/4dce696d-31b4-4482-b0b9-a067a1d77877-kube-api-access-8zsrh\") on node \"crc\" DevicePath \"\"" Oct 09 15:54:17 crc kubenswrapper[4762]: I1009 15:54:17.965994 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9v65j\" (UniqueName: \"kubernetes.io/projected/c2dae392-a070-4104-b4dd-6d00c248a6c9-kube-api-access-9v65j\") on node \"crc\" DevicePath \"\"" Oct 09 15:54:17 crc kubenswrapper[4762]: I1009 15:54:17.966007 4762 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c2dae392-a070-4104-b4dd-6d00c248a6c9-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 15:54:17 crc kubenswrapper[4762]: I1009 15:54:17.966020 4762 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4dce696d-31b4-4482-b0b9-a067a1d77877-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 15:54:18 crc kubenswrapper[4762]: I1009 15:54:18.085965 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-wsprv"] Oct 09 15:54:18 crc kubenswrapper[4762]: I1009 15:54:18.102936 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-wsprv"] Oct 09 15:54:18 crc kubenswrapper[4762]: I1009 15:54:18.112534 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-lglxn"] Oct 09 15:54:18 crc kubenswrapper[4762]: I1009 15:54:18.122820 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-lglxn"] Oct 09 15:54:18 crc kubenswrapper[4762]: I1009 15:54:18.978317 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4dce696d-31b4-4482-b0b9-a067a1d77877" path="/var/lib/kubelet/pods/4dce696d-31b4-4482-b0b9-a067a1d77877/volumes" Oct 09 15:54:18 crc kubenswrapper[4762]: I1009 15:54:18.979725 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c2dae392-a070-4104-b4dd-6d00c248a6c9" path="/var/lib/kubelet/pods/c2dae392-a070-4104-b4dd-6d00c248a6c9/volumes" Oct 09 15:54:19 crc kubenswrapper[4762]: I1009 15:54:19.473889 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kd55b" Oct 09 15:54:19 crc kubenswrapper[4762]: I1009 15:54:19.611782 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2pbdr\" (UniqueName: \"kubernetes.io/projected/82d15d3d-5080-48db-b9bd-65710b3f98ce-kube-api-access-2pbdr\") pod \"82d15d3d-5080-48db-b9bd-65710b3f98ce\" (UID: \"82d15d3d-5080-48db-b9bd-65710b3f98ce\") " Oct 09 15:54:19 crc kubenswrapper[4762]: I1009 15:54:19.612141 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/82d15d3d-5080-48db-b9bd-65710b3f98ce-utilities\") pod \"82d15d3d-5080-48db-b9bd-65710b3f98ce\" (UID: \"82d15d3d-5080-48db-b9bd-65710b3f98ce\") " Oct 09 15:54:19 crc kubenswrapper[4762]: I1009 15:54:19.612292 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/82d15d3d-5080-48db-b9bd-65710b3f98ce-catalog-content\") pod \"82d15d3d-5080-48db-b9bd-65710b3f98ce\" (UID: \"82d15d3d-5080-48db-b9bd-65710b3f98ce\") " Oct 09 15:54:19 crc kubenswrapper[4762]: I1009 15:54:19.613394 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/82d15d3d-5080-48db-b9bd-65710b3f98ce-utilities" (OuterVolumeSpecName: "utilities") pod "82d15d3d-5080-48db-b9bd-65710b3f98ce" (UID: "82d15d3d-5080-48db-b9bd-65710b3f98ce"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 15:54:19 crc kubenswrapper[4762]: I1009 15:54:19.618693 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/82d15d3d-5080-48db-b9bd-65710b3f98ce-kube-api-access-2pbdr" (OuterVolumeSpecName: "kube-api-access-2pbdr") pod "82d15d3d-5080-48db-b9bd-65710b3f98ce" (UID: "82d15d3d-5080-48db-b9bd-65710b3f98ce"). InnerVolumeSpecName "kube-api-access-2pbdr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 15:54:19 crc kubenswrapper[4762]: I1009 15:54:19.627405 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/82d15d3d-5080-48db-b9bd-65710b3f98ce-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "82d15d3d-5080-48db-b9bd-65710b3f98ce" (UID: "82d15d3d-5080-48db-b9bd-65710b3f98ce"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 15:54:19 crc kubenswrapper[4762]: I1009 15:54:19.715257 4762 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/82d15d3d-5080-48db-b9bd-65710b3f98ce-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 15:54:19 crc kubenswrapper[4762]: I1009 15:54:19.715294 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2pbdr\" (UniqueName: \"kubernetes.io/projected/82d15d3d-5080-48db-b9bd-65710b3f98ce-kube-api-access-2pbdr\") on node \"crc\" DevicePath \"\"" Oct 09 15:54:19 crc kubenswrapper[4762]: I1009 15:54:19.715308 4762 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/82d15d3d-5080-48db-b9bd-65710b3f98ce-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 15:54:19 crc kubenswrapper[4762]: I1009 15:54:19.766481 4762 generic.go:334] "Generic (PLEG): container finished" podID="82d15d3d-5080-48db-b9bd-65710b3f98ce" containerID="ba51cdb7b430c59e4b617b1c5f72924863a7db43f9e5be5261c642478870aa65" exitCode=0 Oct 09 15:54:19 crc kubenswrapper[4762]: I1009 15:54:19.766525 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kd55b" event={"ID":"82d15d3d-5080-48db-b9bd-65710b3f98ce","Type":"ContainerDied","Data":"ba51cdb7b430c59e4b617b1c5f72924863a7db43f9e5be5261c642478870aa65"} Oct 09 15:54:19 crc kubenswrapper[4762]: I1009 15:54:19.766556 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kd55b" event={"ID":"82d15d3d-5080-48db-b9bd-65710b3f98ce","Type":"ContainerDied","Data":"4b6194cd4cc0c786690f66b3e8c6fbe5ee8155b6cb9fe48e6b39743a3a5ce9c4"} Oct 09 15:54:19 crc kubenswrapper[4762]: I1009 15:54:19.766558 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kd55b" Oct 09 15:54:19 crc kubenswrapper[4762]: I1009 15:54:19.766576 4762 scope.go:117] "RemoveContainer" containerID="ba51cdb7b430c59e4b617b1c5f72924863a7db43f9e5be5261c642478870aa65" Oct 09 15:54:19 crc kubenswrapper[4762]: I1009 15:54:19.789903 4762 scope.go:117] "RemoveContainer" containerID="8ecfb53f0d738cf0514ccd89e4d15f4f99d43c9f95b5cf36eebc84ac4a92970a" Oct 09 15:54:19 crc kubenswrapper[4762]: I1009 15:54:19.806539 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-kd55b"] Oct 09 15:54:19 crc kubenswrapper[4762]: I1009 15:54:19.816654 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-kd55b"] Oct 09 15:54:19 crc kubenswrapper[4762]: I1009 15:54:19.851439 4762 scope.go:117] "RemoveContainer" containerID="272e2014e5a2b56955bc43681fe15f1ba053a23888dede01ea7da9b39747f3ec" Oct 09 15:54:19 crc kubenswrapper[4762]: I1009 15:54:19.879106 4762 scope.go:117] "RemoveContainer" containerID="ba51cdb7b430c59e4b617b1c5f72924863a7db43f9e5be5261c642478870aa65" Oct 09 15:54:19 crc kubenswrapper[4762]: E1009 15:54:19.879527 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ba51cdb7b430c59e4b617b1c5f72924863a7db43f9e5be5261c642478870aa65\": container with ID starting with ba51cdb7b430c59e4b617b1c5f72924863a7db43f9e5be5261c642478870aa65 not found: ID does not exist" containerID="ba51cdb7b430c59e4b617b1c5f72924863a7db43f9e5be5261c642478870aa65" Oct 09 15:54:19 crc kubenswrapper[4762]: I1009 15:54:19.879583 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ba51cdb7b430c59e4b617b1c5f72924863a7db43f9e5be5261c642478870aa65"} err="failed to get container status \"ba51cdb7b430c59e4b617b1c5f72924863a7db43f9e5be5261c642478870aa65\": rpc error: code = NotFound desc = could not find container \"ba51cdb7b430c59e4b617b1c5f72924863a7db43f9e5be5261c642478870aa65\": container with ID starting with ba51cdb7b430c59e4b617b1c5f72924863a7db43f9e5be5261c642478870aa65 not found: ID does not exist" Oct 09 15:54:19 crc kubenswrapper[4762]: I1009 15:54:19.879623 4762 scope.go:117] "RemoveContainer" containerID="8ecfb53f0d738cf0514ccd89e4d15f4f99d43c9f95b5cf36eebc84ac4a92970a" Oct 09 15:54:19 crc kubenswrapper[4762]: E1009 15:54:19.880421 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8ecfb53f0d738cf0514ccd89e4d15f4f99d43c9f95b5cf36eebc84ac4a92970a\": container with ID starting with 8ecfb53f0d738cf0514ccd89e4d15f4f99d43c9f95b5cf36eebc84ac4a92970a not found: ID does not exist" containerID="8ecfb53f0d738cf0514ccd89e4d15f4f99d43c9f95b5cf36eebc84ac4a92970a" Oct 09 15:54:19 crc kubenswrapper[4762]: I1009 15:54:19.880455 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8ecfb53f0d738cf0514ccd89e4d15f4f99d43c9f95b5cf36eebc84ac4a92970a"} err="failed to get container status \"8ecfb53f0d738cf0514ccd89e4d15f4f99d43c9f95b5cf36eebc84ac4a92970a\": rpc error: code = NotFound desc = could not find container \"8ecfb53f0d738cf0514ccd89e4d15f4f99d43c9f95b5cf36eebc84ac4a92970a\": container with ID starting with 8ecfb53f0d738cf0514ccd89e4d15f4f99d43c9f95b5cf36eebc84ac4a92970a not found: ID does not exist" Oct 09 15:54:19 crc kubenswrapper[4762]: I1009 15:54:19.880477 4762 scope.go:117] "RemoveContainer" 
containerID="272e2014e5a2b56955bc43681fe15f1ba053a23888dede01ea7da9b39747f3ec" Oct 09 15:54:19 crc kubenswrapper[4762]: E1009 15:54:19.880809 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"272e2014e5a2b56955bc43681fe15f1ba053a23888dede01ea7da9b39747f3ec\": container with ID starting with 272e2014e5a2b56955bc43681fe15f1ba053a23888dede01ea7da9b39747f3ec not found: ID does not exist" containerID="272e2014e5a2b56955bc43681fe15f1ba053a23888dede01ea7da9b39747f3ec" Oct 09 15:54:19 crc kubenswrapper[4762]: I1009 15:54:19.880833 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"272e2014e5a2b56955bc43681fe15f1ba053a23888dede01ea7da9b39747f3ec"} err="failed to get container status \"272e2014e5a2b56955bc43681fe15f1ba053a23888dede01ea7da9b39747f3ec\": rpc error: code = NotFound desc = could not find container \"272e2014e5a2b56955bc43681fe15f1ba053a23888dede01ea7da9b39747f3ec\": container with ID starting with 272e2014e5a2b56955bc43681fe15f1ba053a23888dede01ea7da9b39747f3ec not found: ID does not exist" Oct 09 15:54:20 crc kubenswrapper[4762]: I1009 15:54:20.979333 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="82d15d3d-5080-48db-b9bd-65710b3f98ce" path="/var/lib/kubelet/pods/82d15d3d-5080-48db-b9bd-65710b3f98ce/volumes" Oct 09 15:54:24 crc kubenswrapper[4762]: I1009 15:54:24.974483 4762 scope.go:117] "RemoveContainer" containerID="915bf3aac40e436d24352cca144821f66a07852e961cfcafeb700aef3cb1b274" Oct 09 15:54:24 crc kubenswrapper[4762]: E1009 15:54:24.975315 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:54:37 crc kubenswrapper[4762]: I1009 15:54:37.965987 4762 scope.go:117] "RemoveContainer" containerID="915bf3aac40e436d24352cca144821f66a07852e961cfcafeb700aef3cb1b274" Oct 09 15:54:37 crc kubenswrapper[4762]: E1009 15:54:37.967490 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:54:50 crc kubenswrapper[4762]: I1009 15:54:50.965686 4762 scope.go:117] "RemoveContainer" containerID="915bf3aac40e436d24352cca144821f66a07852e961cfcafeb700aef3cb1b274" Oct 09 15:54:50 crc kubenswrapper[4762]: E1009 15:54:50.966419 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:55:02 crc kubenswrapper[4762]: I1009 15:55:02.965538 4762 scope.go:117] "RemoveContainer" 
containerID="915bf3aac40e436d24352cca144821f66a07852e961cfcafeb700aef3cb1b274" Oct 09 15:55:02 crc kubenswrapper[4762]: E1009 15:55:02.966949 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:55:16 crc kubenswrapper[4762]: I1009 15:55:16.966111 4762 scope.go:117] "RemoveContainer" containerID="915bf3aac40e436d24352cca144821f66a07852e961cfcafeb700aef3cb1b274" Oct 09 15:55:16 crc kubenswrapper[4762]: E1009 15:55:16.966983 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:55:30 crc kubenswrapper[4762]: I1009 15:55:30.966998 4762 scope.go:117] "RemoveContainer" containerID="915bf3aac40e436d24352cca144821f66a07852e961cfcafeb700aef3cb1b274" Oct 09 15:55:30 crc kubenswrapper[4762]: E1009 15:55:30.967738 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:55:45 crc kubenswrapper[4762]: I1009 15:55:45.965109 4762 scope.go:117] "RemoveContainer" containerID="915bf3aac40e436d24352cca144821f66a07852e961cfcafeb700aef3cb1b274" Oct 09 15:55:45 crc kubenswrapper[4762]: E1009 15:55:45.966033 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:55:58 crc kubenswrapper[4762]: I1009 15:55:58.966198 4762 scope.go:117] "RemoveContainer" containerID="915bf3aac40e436d24352cca144821f66a07852e961cfcafeb700aef3cb1b274" Oct 09 15:55:58 crc kubenswrapper[4762]: E1009 15:55:58.969137 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:56:09 crc kubenswrapper[4762]: I1009 15:56:09.896521 4762 generic.go:334] "Generic (PLEG): container finished" podID="c1c60f49-ddad-480e-b318-fbff83ab32a5" containerID="5f293f7f0c1d08a588614d2f1dac6d538b02efda4aaf398bd038aff9ecde8b62" exitCode=0 Oct 09 15:56:09 crc 
kubenswrapper[4762]: I1009 15:56:09.896612 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-dhcp-openstack-openstack-cell1-v7kr8" event={"ID":"c1c60f49-ddad-480e-b318-fbff83ab32a5","Type":"ContainerDied","Data":"5f293f7f0c1d08a588614d2f1dac6d538b02efda4aaf398bd038aff9ecde8b62"} Oct 09 15:56:11 crc kubenswrapper[4762]: I1009 15:56:11.454427 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-dhcp-openstack-openstack-cell1-v7kr8" Oct 09 15:56:11 crc kubenswrapper[4762]: I1009 15:56:11.641892 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c1c60f49-ddad-480e-b318-fbff83ab32a5-inventory\") pod \"c1c60f49-ddad-480e-b318-fbff83ab32a5\" (UID: \"c1c60f49-ddad-480e-b318-fbff83ab32a5\") " Oct 09 15:56:11 crc kubenswrapper[4762]: I1009 15:56:11.642003 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c1c60f49-ddad-480e-b318-fbff83ab32a5-ssh-key\") pod \"c1c60f49-ddad-480e-b318-fbff83ab32a5\" (UID: \"c1c60f49-ddad-480e-b318-fbff83ab32a5\") " Oct 09 15:56:11 crc kubenswrapper[4762]: I1009 15:56:11.642034 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c1c60f49-ddad-480e-b318-fbff83ab32a5-neutron-dhcp-combined-ca-bundle\") pod \"c1c60f49-ddad-480e-b318-fbff83ab32a5\" (UID: \"c1c60f49-ddad-480e-b318-fbff83ab32a5\") " Oct 09 15:56:11 crc kubenswrapper[4762]: I1009 15:56:11.642169 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s7nd8\" (UniqueName: \"kubernetes.io/projected/c1c60f49-ddad-480e-b318-fbff83ab32a5-kube-api-access-s7nd8\") pod \"c1c60f49-ddad-480e-b318-fbff83ab32a5\" (UID: \"c1c60f49-ddad-480e-b318-fbff83ab32a5\") " Oct 09 15:56:11 crc kubenswrapper[4762]: I1009 15:56:11.642207 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c1c60f49-ddad-480e-b318-fbff83ab32a5-ceph\") pod \"c1c60f49-ddad-480e-b318-fbff83ab32a5\" (UID: \"c1c60f49-ddad-480e-b318-fbff83ab32a5\") " Oct 09 15:56:11 crc kubenswrapper[4762]: I1009 15:56:11.642239 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-dhcp-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/c1c60f49-ddad-480e-b318-fbff83ab32a5-neutron-dhcp-agent-neutron-config-0\") pod \"c1c60f49-ddad-480e-b318-fbff83ab32a5\" (UID: \"c1c60f49-ddad-480e-b318-fbff83ab32a5\") " Oct 09 15:56:11 crc kubenswrapper[4762]: I1009 15:56:11.647503 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c1c60f49-ddad-480e-b318-fbff83ab32a5-kube-api-access-s7nd8" (OuterVolumeSpecName: "kube-api-access-s7nd8") pod "c1c60f49-ddad-480e-b318-fbff83ab32a5" (UID: "c1c60f49-ddad-480e-b318-fbff83ab32a5"). InnerVolumeSpecName "kube-api-access-s7nd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 15:56:11 crc kubenswrapper[4762]: I1009 15:56:11.647673 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c1c60f49-ddad-480e-b318-fbff83ab32a5-ceph" (OuterVolumeSpecName: "ceph") pod "c1c60f49-ddad-480e-b318-fbff83ab32a5" (UID: "c1c60f49-ddad-480e-b318-fbff83ab32a5"). InnerVolumeSpecName "ceph". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:56:11 crc kubenswrapper[4762]: I1009 15:56:11.648625 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c1c60f49-ddad-480e-b318-fbff83ab32a5-neutron-dhcp-combined-ca-bundle" (OuterVolumeSpecName: "neutron-dhcp-combined-ca-bundle") pod "c1c60f49-ddad-480e-b318-fbff83ab32a5" (UID: "c1c60f49-ddad-480e-b318-fbff83ab32a5"). InnerVolumeSpecName "neutron-dhcp-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:56:11 crc kubenswrapper[4762]: I1009 15:56:11.671592 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c1c60f49-ddad-480e-b318-fbff83ab32a5-neutron-dhcp-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-dhcp-agent-neutron-config-0") pod "c1c60f49-ddad-480e-b318-fbff83ab32a5" (UID: "c1c60f49-ddad-480e-b318-fbff83ab32a5"). InnerVolumeSpecName "neutron-dhcp-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:56:11 crc kubenswrapper[4762]: I1009 15:56:11.672172 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c1c60f49-ddad-480e-b318-fbff83ab32a5-inventory" (OuterVolumeSpecName: "inventory") pod "c1c60f49-ddad-480e-b318-fbff83ab32a5" (UID: "c1c60f49-ddad-480e-b318-fbff83ab32a5"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:56:11 crc kubenswrapper[4762]: I1009 15:56:11.674217 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c1c60f49-ddad-480e-b318-fbff83ab32a5-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "c1c60f49-ddad-480e-b318-fbff83ab32a5" (UID: "c1c60f49-ddad-480e-b318-fbff83ab32a5"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:56:11 crc kubenswrapper[4762]: I1009 15:56:11.744364 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s7nd8\" (UniqueName: \"kubernetes.io/projected/c1c60f49-ddad-480e-b318-fbff83ab32a5-kube-api-access-s7nd8\") on node \"crc\" DevicePath \"\"" Oct 09 15:56:11 crc kubenswrapper[4762]: I1009 15:56:11.744396 4762 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c1c60f49-ddad-480e-b318-fbff83ab32a5-ceph\") on node \"crc\" DevicePath \"\"" Oct 09 15:56:11 crc kubenswrapper[4762]: I1009 15:56:11.744406 4762 reconciler_common.go:293] "Volume detached for volume \"neutron-dhcp-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/c1c60f49-ddad-480e-b318-fbff83ab32a5-neutron-dhcp-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Oct 09 15:56:11 crc kubenswrapper[4762]: I1009 15:56:11.744416 4762 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c1c60f49-ddad-480e-b318-fbff83ab32a5-inventory\") on node \"crc\" DevicePath \"\"" Oct 09 15:56:11 crc kubenswrapper[4762]: I1009 15:56:11.744424 4762 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c1c60f49-ddad-480e-b318-fbff83ab32a5-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 09 15:56:11 crc kubenswrapper[4762]: I1009 15:56:11.744432 4762 reconciler_common.go:293] "Volume detached for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c1c60f49-ddad-480e-b318-fbff83ab32a5-neutron-dhcp-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 15:56:11 crc kubenswrapper[4762]: I1009 15:56:11.918429 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-dhcp-openstack-openstack-cell1-v7kr8" event={"ID":"c1c60f49-ddad-480e-b318-fbff83ab32a5","Type":"ContainerDied","Data":"9c3ba880b21cbb5bea57355145cd45da4163ca5387d57b6dcf5c7c92b94eac80"} Oct 09 15:56:11 crc kubenswrapper[4762]: I1009 15:56:11.918474 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9c3ba880b21cbb5bea57355145cd45da4163ca5387d57b6dcf5c7c92b94eac80" Oct 09 15:56:11 crc kubenswrapper[4762]: I1009 15:56:11.918481 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-dhcp-openstack-openstack-cell1-v7kr8" Oct 09 15:56:11 crc kubenswrapper[4762]: I1009 15:56:11.965939 4762 scope.go:117] "RemoveContainer" containerID="915bf3aac40e436d24352cca144821f66a07852e961cfcafeb700aef3cb1b274" Oct 09 15:56:11 crc kubenswrapper[4762]: E1009 15:56:11.966232 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:56:19 crc kubenswrapper[4762]: I1009 15:56:19.041376 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Oct 09 15:56:19 crc kubenswrapper[4762]: I1009 15:56:19.043185 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell0-conductor-0" podUID="cb5ae0d7-b04d-4fef-bbb1-66cb10260905" containerName="nova-cell0-conductor-conductor" containerID="cri-o://7e564df4f511d5a6d19fd4a48e7ba297a305b918430d4b0ee25d9d5fe84570f0" gracePeriod=30 Oct 09 15:56:19 crc kubenswrapper[4762]: I1009 15:56:19.066758 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"] Oct 09 15:56:19 crc kubenswrapper[4762]: I1009 15:56:19.067030 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-conductor-0" podUID="3fdff501-cb37-47c4-92c7-f7db3c9c4c5a" containerName="nova-cell1-conductor-conductor" containerID="cri-o://1312067d749b99aa2f44938ef957c3164f311ce78be283ecede708f2bdfa44ca" gracePeriod=30 Oct 09 15:56:20 crc kubenswrapper[4762]: I1009 15:56:20.003030 4762 generic.go:334] "Generic (PLEG): container finished" podID="3fdff501-cb37-47c4-92c7-f7db3c9c4c5a" containerID="1312067d749b99aa2f44938ef957c3164f311ce78be283ecede708f2bdfa44ca" exitCode=0 Oct 09 15:56:20 crc kubenswrapper[4762]: I1009 15:56:20.003574 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"3fdff501-cb37-47c4-92c7-f7db3c9c4c5a","Type":"ContainerDied","Data":"1312067d749b99aa2f44938ef957c3164f311ce78be283ecede708f2bdfa44ca"} Oct 09 15:56:20 crc kubenswrapper[4762]: I1009 15:56:20.260793 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Oct 09 15:56:20 crc kubenswrapper[4762]: I1009 15:56:20.426608 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3fdff501-cb37-47c4-92c7-f7db3c9c4c5a-combined-ca-bundle\") pod \"3fdff501-cb37-47c4-92c7-f7db3c9c4c5a\" (UID: \"3fdff501-cb37-47c4-92c7-f7db3c9c4c5a\") " Oct 09 15:56:20 crc kubenswrapper[4762]: I1009 15:56:20.426893 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3fdff501-cb37-47c4-92c7-f7db3c9c4c5a-config-data\") pod \"3fdff501-cb37-47c4-92c7-f7db3c9c4c5a\" (UID: \"3fdff501-cb37-47c4-92c7-f7db3c9c4c5a\") " Oct 09 15:56:20 crc kubenswrapper[4762]: I1009 15:56:20.427022 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-87s9w\" (UniqueName: \"kubernetes.io/projected/3fdff501-cb37-47c4-92c7-f7db3c9c4c5a-kube-api-access-87s9w\") pod \"3fdff501-cb37-47c4-92c7-f7db3c9c4c5a\" (UID: \"3fdff501-cb37-47c4-92c7-f7db3c9c4c5a\") " Oct 09 15:56:20 crc kubenswrapper[4762]: I1009 15:56:20.436989 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3fdff501-cb37-47c4-92c7-f7db3c9c4c5a-kube-api-access-87s9w" (OuterVolumeSpecName: "kube-api-access-87s9w") pod "3fdff501-cb37-47c4-92c7-f7db3c9c4c5a" (UID: "3fdff501-cb37-47c4-92c7-f7db3c9c4c5a"). InnerVolumeSpecName "kube-api-access-87s9w". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 15:56:20 crc kubenswrapper[4762]: I1009 15:56:20.465789 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3fdff501-cb37-47c4-92c7-f7db3c9c4c5a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3fdff501-cb37-47c4-92c7-f7db3c9c4c5a" (UID: "3fdff501-cb37-47c4-92c7-f7db3c9c4c5a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:56:20 crc kubenswrapper[4762]: I1009 15:56:20.473480 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3fdff501-cb37-47c4-92c7-f7db3c9c4c5a-config-data" (OuterVolumeSpecName: "config-data") pod "3fdff501-cb37-47c4-92c7-f7db3c9c4c5a" (UID: "3fdff501-cb37-47c4-92c7-f7db3c9c4c5a"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:56:20 crc kubenswrapper[4762]: I1009 15:56:20.529565 4762 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3fdff501-cb37-47c4-92c7-f7db3c9c4c5a-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 15:56:20 crc kubenswrapper[4762]: I1009 15:56:20.529608 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-87s9w\" (UniqueName: \"kubernetes.io/projected/3fdff501-cb37-47c4-92c7-f7db3c9c4c5a-kube-api-access-87s9w\") on node \"crc\" DevicePath \"\"" Oct 09 15:56:20 crc kubenswrapper[4762]: I1009 15:56:20.529625 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3fdff501-cb37-47c4-92c7-f7db3c9c4c5a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 15:56:20 crc kubenswrapper[4762]: I1009 15:56:20.695325 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 09 15:56:20 crc kubenswrapper[4762]: I1009 15:56:20.695718 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="f962d3eb-c1dd-4738-97a9-971f450fef59" containerName="nova-api-log" containerID="cri-o://962de3aef6def46549360f3931b75c5d2559742f3be1a1bb3ad211b71e156e39" gracePeriod=30 Oct 09 15:56:20 crc kubenswrapper[4762]: I1009 15:56:20.696044 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="f962d3eb-c1dd-4738-97a9-971f450fef59" containerName="nova-api-api" containerID="cri-o://cb38c41a804e3cabe8de7033c6b2ddfb8dbc929246868ac398768e832835b202" gracePeriod=30 Oct 09 15:56:20 crc kubenswrapper[4762]: I1009 15:56:20.710164 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Oct 09 15:56:20 crc kubenswrapper[4762]: I1009 15:56:20.710428 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="e38ba711-b6dd-42aa-b526-3c170fea1b48" containerName="nova-scheduler-scheduler" containerID="cri-o://15cb2c91808e21b1ad1a1979319a478cd829ba0b947ab509503e46c9bf29f406" gracePeriod=30 Oct 09 15:56:20 crc kubenswrapper[4762]: I1009 15:56:20.773388 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 09 15:56:20 crc kubenswrapper[4762]: I1009 15:56:20.774004 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="66dac335-a776-429f-a37b-56ec1691e0c8" containerName="nova-metadata-log" containerID="cri-o://87cff1325fecaf8e8cfdd99e4c8c9bd4f23a4940e582f3edfc790b4c7e3e38fb" gracePeriod=30 Oct 09 15:56:20 crc kubenswrapper[4762]: I1009 15:56:20.774186 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="66dac335-a776-429f-a37b-56ec1691e0c8" containerName="nova-metadata-metadata" containerID="cri-o://67600f189fe2409693d2e29080955b9f8156a17d49cb0308f7b361f62a93328b" gracePeriod=30 Oct 09 15:56:21 crc kubenswrapper[4762]: I1009 15:56:21.021961 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"3fdff501-cb37-47c4-92c7-f7db3c9c4c5a","Type":"ContainerDied","Data":"8c5225f1ed1c2dd5f9b8bd051f1b2a746f76b3ad9ee8758ee471811c43a98136"} Oct 09 15:56:21 crc kubenswrapper[4762]: I1009 15:56:21.022016 4762 scope.go:117] "RemoveContainer" containerID="1312067d749b99aa2f44938ef957c3164f311ce78be283ecede708f2bdfa44ca" 
Oct 09 15:56:21 crc kubenswrapper[4762]: I1009 15:56:21.022165 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Oct 09 15:56:21 crc kubenswrapper[4762]: I1009 15:56:21.029740 4762 generic.go:334] "Generic (PLEG): container finished" podID="66dac335-a776-429f-a37b-56ec1691e0c8" containerID="87cff1325fecaf8e8cfdd99e4c8c9bd4f23a4940e582f3edfc790b4c7e3e38fb" exitCode=143 Oct 09 15:56:21 crc kubenswrapper[4762]: I1009 15:56:21.029835 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"66dac335-a776-429f-a37b-56ec1691e0c8","Type":"ContainerDied","Data":"87cff1325fecaf8e8cfdd99e4c8c9bd4f23a4940e582f3edfc790b4c7e3e38fb"} Oct 09 15:56:21 crc kubenswrapper[4762]: I1009 15:56:21.032603 4762 generic.go:334] "Generic (PLEG): container finished" podID="f962d3eb-c1dd-4738-97a9-971f450fef59" containerID="962de3aef6def46549360f3931b75c5d2559742f3be1a1bb3ad211b71e156e39" exitCode=143 Oct 09 15:56:21 crc kubenswrapper[4762]: I1009 15:56:21.032686 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"f962d3eb-c1dd-4738-97a9-971f450fef59","Type":"ContainerDied","Data":"962de3aef6def46549360f3931b75c5d2559742f3be1a1bb3ad211b71e156e39"} Oct 09 15:56:21 crc kubenswrapper[4762]: I1009 15:56:21.138657 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"] Oct 09 15:56:21 crc kubenswrapper[4762]: I1009 15:56:21.159438 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-0"] Oct 09 15:56:21 crc kubenswrapper[4762]: I1009 15:56:21.174351 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Oct 09 15:56:21 crc kubenswrapper[4762]: E1009 15:56:21.174926 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4dce696d-31b4-4482-b0b9-a067a1d77877" containerName="extract-content" Oct 09 15:56:21 crc kubenswrapper[4762]: I1009 15:56:21.174943 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="4dce696d-31b4-4482-b0b9-a067a1d77877" containerName="extract-content" Oct 09 15:56:21 crc kubenswrapper[4762]: E1009 15:56:21.174960 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c2dae392-a070-4104-b4dd-6d00c248a6c9" containerName="extract-content" Oct 09 15:56:21 crc kubenswrapper[4762]: I1009 15:56:21.174968 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="c2dae392-a070-4104-b4dd-6d00c248a6c9" containerName="extract-content" Oct 09 15:56:21 crc kubenswrapper[4762]: E1009 15:56:21.174978 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="82d15d3d-5080-48db-b9bd-65710b3f98ce" containerName="extract-content" Oct 09 15:56:21 crc kubenswrapper[4762]: I1009 15:56:21.174984 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="82d15d3d-5080-48db-b9bd-65710b3f98ce" containerName="extract-content" Oct 09 15:56:21 crc kubenswrapper[4762]: E1009 15:56:21.174999 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4dce696d-31b4-4482-b0b9-a067a1d77877" containerName="extract-utilities" Oct 09 15:56:21 crc kubenswrapper[4762]: I1009 15:56:21.175005 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="4dce696d-31b4-4482-b0b9-a067a1d77877" containerName="extract-utilities" Oct 09 15:56:21 crc kubenswrapper[4762]: E1009 15:56:21.175022 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c1c60f49-ddad-480e-b318-fbff83ab32a5" 
containerName="neutron-dhcp-openstack-openstack-cell1" Oct 09 15:56:21 crc kubenswrapper[4762]: I1009 15:56:21.175029 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="c1c60f49-ddad-480e-b318-fbff83ab32a5" containerName="neutron-dhcp-openstack-openstack-cell1" Oct 09 15:56:21 crc kubenswrapper[4762]: E1009 15:56:21.175045 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="82d15d3d-5080-48db-b9bd-65710b3f98ce" containerName="registry-server" Oct 09 15:56:21 crc kubenswrapper[4762]: I1009 15:56:21.175052 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="82d15d3d-5080-48db-b9bd-65710b3f98ce" containerName="registry-server" Oct 09 15:56:21 crc kubenswrapper[4762]: E1009 15:56:21.175067 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4dce696d-31b4-4482-b0b9-a067a1d77877" containerName="registry-server" Oct 09 15:56:21 crc kubenswrapper[4762]: I1009 15:56:21.175074 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="4dce696d-31b4-4482-b0b9-a067a1d77877" containerName="registry-server" Oct 09 15:56:21 crc kubenswrapper[4762]: E1009 15:56:21.175087 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c2dae392-a070-4104-b4dd-6d00c248a6c9" containerName="extract-utilities" Oct 09 15:56:21 crc kubenswrapper[4762]: I1009 15:56:21.175092 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="c2dae392-a070-4104-b4dd-6d00c248a6c9" containerName="extract-utilities" Oct 09 15:56:21 crc kubenswrapper[4762]: E1009 15:56:21.175106 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c2dae392-a070-4104-b4dd-6d00c248a6c9" containerName="registry-server" Oct 09 15:56:21 crc kubenswrapper[4762]: I1009 15:56:21.175113 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="c2dae392-a070-4104-b4dd-6d00c248a6c9" containerName="registry-server" Oct 09 15:56:21 crc kubenswrapper[4762]: E1009 15:56:21.175124 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="82d15d3d-5080-48db-b9bd-65710b3f98ce" containerName="extract-utilities" Oct 09 15:56:21 crc kubenswrapper[4762]: I1009 15:56:21.175130 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="82d15d3d-5080-48db-b9bd-65710b3f98ce" containerName="extract-utilities" Oct 09 15:56:21 crc kubenswrapper[4762]: E1009 15:56:21.175142 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3fdff501-cb37-47c4-92c7-f7db3c9c4c5a" containerName="nova-cell1-conductor-conductor" Oct 09 15:56:21 crc kubenswrapper[4762]: I1009 15:56:21.175149 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="3fdff501-cb37-47c4-92c7-f7db3c9c4c5a" containerName="nova-cell1-conductor-conductor" Oct 09 15:56:21 crc kubenswrapper[4762]: I1009 15:56:21.175365 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="3fdff501-cb37-47c4-92c7-f7db3c9c4c5a" containerName="nova-cell1-conductor-conductor" Oct 09 15:56:21 crc kubenswrapper[4762]: I1009 15:56:21.175378 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="4dce696d-31b4-4482-b0b9-a067a1d77877" containerName="registry-server" Oct 09 15:56:21 crc kubenswrapper[4762]: I1009 15:56:21.175388 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="c2dae392-a070-4104-b4dd-6d00c248a6c9" containerName="registry-server" Oct 09 15:56:21 crc kubenswrapper[4762]: I1009 15:56:21.175398 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="c1c60f49-ddad-480e-b318-fbff83ab32a5" containerName="neutron-dhcp-openstack-openstack-cell1" Oct 09 
15:56:21 crc kubenswrapper[4762]: I1009 15:56:21.175407 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="82d15d3d-5080-48db-b9bd-65710b3f98ce" containerName="registry-server" Oct 09 15:56:21 crc kubenswrapper[4762]: I1009 15:56:21.176282 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Oct 09 15:56:21 crc kubenswrapper[4762]: I1009 15:56:21.182807 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Oct 09 15:56:21 crc kubenswrapper[4762]: I1009 15:56:21.185683 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Oct 09 15:56:21 crc kubenswrapper[4762]: I1009 15:56:21.348310 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ad631956-f573-4fcc-bfda-bbfece4bae8c-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"ad631956-f573-4fcc-bfda-bbfece4bae8c\") " pod="openstack/nova-cell1-conductor-0" Oct 09 15:56:21 crc kubenswrapper[4762]: I1009 15:56:21.348627 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad631956-f573-4fcc-bfda-bbfece4bae8c-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"ad631956-f573-4fcc-bfda-bbfece4bae8c\") " pod="openstack/nova-cell1-conductor-0" Oct 09 15:56:21 crc kubenswrapper[4762]: I1009 15:56:21.348695 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fcsbp\" (UniqueName: \"kubernetes.io/projected/ad631956-f573-4fcc-bfda-bbfece4bae8c-kube-api-access-fcsbp\") pod \"nova-cell1-conductor-0\" (UID: \"ad631956-f573-4fcc-bfda-bbfece4bae8c\") " pod="openstack/nova-cell1-conductor-0" Oct 09 15:56:21 crc kubenswrapper[4762]: I1009 15:56:21.451299 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad631956-f573-4fcc-bfda-bbfece4bae8c-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"ad631956-f573-4fcc-bfda-bbfece4bae8c\") " pod="openstack/nova-cell1-conductor-0" Oct 09 15:56:21 crc kubenswrapper[4762]: I1009 15:56:21.451390 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fcsbp\" (UniqueName: \"kubernetes.io/projected/ad631956-f573-4fcc-bfda-bbfece4bae8c-kube-api-access-fcsbp\") pod \"nova-cell1-conductor-0\" (UID: \"ad631956-f573-4fcc-bfda-bbfece4bae8c\") " pod="openstack/nova-cell1-conductor-0" Oct 09 15:56:21 crc kubenswrapper[4762]: I1009 15:56:21.451587 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ad631956-f573-4fcc-bfda-bbfece4bae8c-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"ad631956-f573-4fcc-bfda-bbfece4bae8c\") " pod="openstack/nova-cell1-conductor-0" Oct 09 15:56:21 crc kubenswrapper[4762]: I1009 15:56:21.457332 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad631956-f573-4fcc-bfda-bbfece4bae8c-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"ad631956-f573-4fcc-bfda-bbfece4bae8c\") " pod="openstack/nova-cell1-conductor-0" Oct 09 15:56:21 crc kubenswrapper[4762]: I1009 15:56:21.457476 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ad631956-f573-4fcc-bfda-bbfece4bae8c-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"ad631956-f573-4fcc-bfda-bbfece4bae8c\") " pod="openstack/nova-cell1-conductor-0" Oct 09 15:56:21 crc kubenswrapper[4762]: I1009 15:56:21.469560 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fcsbp\" (UniqueName: \"kubernetes.io/projected/ad631956-f573-4fcc-bfda-bbfece4bae8c-kube-api-access-fcsbp\") pod \"nova-cell1-conductor-0\" (UID: \"ad631956-f573-4fcc-bfda-bbfece4bae8c\") " pod="openstack/nova-cell1-conductor-0" Oct 09 15:56:21 crc kubenswrapper[4762]: I1009 15:56:21.512225 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Oct 09 15:56:21 crc kubenswrapper[4762]: I1009 15:56:21.533182 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Oct 09 15:56:21 crc kubenswrapper[4762]: I1009 15:56:21.661597 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f9f4k\" (UniqueName: \"kubernetes.io/projected/cb5ae0d7-b04d-4fef-bbb1-66cb10260905-kube-api-access-f9f4k\") pod \"cb5ae0d7-b04d-4fef-bbb1-66cb10260905\" (UID: \"cb5ae0d7-b04d-4fef-bbb1-66cb10260905\") " Oct 09 15:56:21 crc kubenswrapper[4762]: I1009 15:56:21.661980 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cb5ae0d7-b04d-4fef-bbb1-66cb10260905-config-data\") pod \"cb5ae0d7-b04d-4fef-bbb1-66cb10260905\" (UID: \"cb5ae0d7-b04d-4fef-bbb1-66cb10260905\") " Oct 09 15:56:21 crc kubenswrapper[4762]: I1009 15:56:21.662007 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cb5ae0d7-b04d-4fef-bbb1-66cb10260905-combined-ca-bundle\") pod \"cb5ae0d7-b04d-4fef-bbb1-66cb10260905\" (UID: \"cb5ae0d7-b04d-4fef-bbb1-66cb10260905\") " Oct 09 15:56:21 crc kubenswrapper[4762]: I1009 15:56:21.697469 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cb5ae0d7-b04d-4fef-bbb1-66cb10260905-kube-api-access-f9f4k" (OuterVolumeSpecName: "kube-api-access-f9f4k") pod "cb5ae0d7-b04d-4fef-bbb1-66cb10260905" (UID: "cb5ae0d7-b04d-4fef-bbb1-66cb10260905"). InnerVolumeSpecName "kube-api-access-f9f4k". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 15:56:21 crc kubenswrapper[4762]: I1009 15:56:21.705828 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cb5ae0d7-b04d-4fef-bbb1-66cb10260905-config-data" (OuterVolumeSpecName: "config-data") pod "cb5ae0d7-b04d-4fef-bbb1-66cb10260905" (UID: "cb5ae0d7-b04d-4fef-bbb1-66cb10260905"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:56:21 crc kubenswrapper[4762]: I1009 15:56:21.709429 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cb5ae0d7-b04d-4fef-bbb1-66cb10260905-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cb5ae0d7-b04d-4fef-bbb1-66cb10260905" (UID: "cb5ae0d7-b04d-4fef-bbb1-66cb10260905"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:56:21 crc kubenswrapper[4762]: I1009 15:56:21.764041 4762 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cb5ae0d7-b04d-4fef-bbb1-66cb10260905-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 15:56:21 crc kubenswrapper[4762]: I1009 15:56:21.764088 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cb5ae0d7-b04d-4fef-bbb1-66cb10260905-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 15:56:21 crc kubenswrapper[4762]: I1009 15:56:21.764103 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f9f4k\" (UniqueName: \"kubernetes.io/projected/cb5ae0d7-b04d-4fef-bbb1-66cb10260905-kube-api-access-f9f4k\") on node \"crc\" DevicePath \"\"" Oct 09 15:56:22 crc kubenswrapper[4762]: I1009 15:56:22.037900 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Oct 09 15:56:22 crc kubenswrapper[4762]: I1009 15:56:22.044199 4762 generic.go:334] "Generic (PLEG): container finished" podID="cb5ae0d7-b04d-4fef-bbb1-66cb10260905" containerID="7e564df4f511d5a6d19fd4a48e7ba297a305b918430d4b0ee25d9d5fe84570f0" exitCode=0 Oct 09 15:56:22 crc kubenswrapper[4762]: I1009 15:56:22.044280 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"cb5ae0d7-b04d-4fef-bbb1-66cb10260905","Type":"ContainerDied","Data":"7e564df4f511d5a6d19fd4a48e7ba297a305b918430d4b0ee25d9d5fe84570f0"} Oct 09 15:56:22 crc kubenswrapper[4762]: I1009 15:56:22.044289 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Oct 09 15:56:22 crc kubenswrapper[4762]: I1009 15:56:22.044315 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"cb5ae0d7-b04d-4fef-bbb1-66cb10260905","Type":"ContainerDied","Data":"0c4fd615b33fe42b573c4d373f0a9abfaf58be334a657ac3df499fca11252ba1"} Oct 09 15:56:22 crc kubenswrapper[4762]: I1009 15:56:22.044337 4762 scope.go:117] "RemoveContainer" containerID="7e564df4f511d5a6d19fd4a48e7ba297a305b918430d4b0ee25d9d5fe84570f0" Oct 09 15:56:22 crc kubenswrapper[4762]: I1009 15:56:22.152046 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Oct 09 15:56:22 crc kubenswrapper[4762]: I1009 15:56:22.178377 4762 scope.go:117] "RemoveContainer" containerID="7e564df4f511d5a6d19fd4a48e7ba297a305b918430d4b0ee25d9d5fe84570f0" Oct 09 15:56:22 crc kubenswrapper[4762]: I1009 15:56:22.181542 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-0"] Oct 09 15:56:22 crc kubenswrapper[4762]: E1009 15:56:22.183112 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7e564df4f511d5a6d19fd4a48e7ba297a305b918430d4b0ee25d9d5fe84570f0\": container with ID starting with 7e564df4f511d5a6d19fd4a48e7ba297a305b918430d4b0ee25d9d5fe84570f0 not found: ID does not exist" containerID="7e564df4f511d5a6d19fd4a48e7ba297a305b918430d4b0ee25d9d5fe84570f0" Oct 09 15:56:22 crc kubenswrapper[4762]: I1009 15:56:22.183171 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7e564df4f511d5a6d19fd4a48e7ba297a305b918430d4b0ee25d9d5fe84570f0"} err="failed to get container status \"7e564df4f511d5a6d19fd4a48e7ba297a305b918430d4b0ee25d9d5fe84570f0\": 
rpc error: code = NotFound desc = could not find container \"7e564df4f511d5a6d19fd4a48e7ba297a305b918430d4b0ee25d9d5fe84570f0\": container with ID starting with 7e564df4f511d5a6d19fd4a48e7ba297a305b918430d4b0ee25d9d5fe84570f0 not found: ID does not exist" Oct 09 15:56:22 crc kubenswrapper[4762]: I1009 15:56:22.193988 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Oct 09 15:56:22 crc kubenswrapper[4762]: E1009 15:56:22.194843 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cb5ae0d7-b04d-4fef-bbb1-66cb10260905" containerName="nova-cell0-conductor-conductor" Oct 09 15:56:22 crc kubenswrapper[4762]: I1009 15:56:22.194945 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb5ae0d7-b04d-4fef-bbb1-66cb10260905" containerName="nova-cell0-conductor-conductor" Oct 09 15:56:22 crc kubenswrapper[4762]: I1009 15:56:22.195380 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="cb5ae0d7-b04d-4fef-bbb1-66cb10260905" containerName="nova-cell0-conductor-conductor" Oct 09 15:56:22 crc kubenswrapper[4762]: I1009 15:56:22.197788 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Oct 09 15:56:22 crc kubenswrapper[4762]: I1009 15:56:22.199878 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Oct 09 15:56:22 crc kubenswrapper[4762]: I1009 15:56:22.210796 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Oct 09 15:56:22 crc kubenswrapper[4762]: I1009 15:56:22.383721 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/892e2b35-4076-4ae7-b81c-95beca001408-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"892e2b35-4076-4ae7-b81c-95beca001408\") " pod="openstack/nova-cell0-conductor-0" Oct 09 15:56:22 crc kubenswrapper[4762]: I1009 15:56:22.383795 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pt9w9\" (UniqueName: \"kubernetes.io/projected/892e2b35-4076-4ae7-b81c-95beca001408-kube-api-access-pt9w9\") pod \"nova-cell0-conductor-0\" (UID: \"892e2b35-4076-4ae7-b81c-95beca001408\") " pod="openstack/nova-cell0-conductor-0" Oct 09 15:56:22 crc kubenswrapper[4762]: I1009 15:56:22.383968 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/892e2b35-4076-4ae7-b81c-95beca001408-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"892e2b35-4076-4ae7-b81c-95beca001408\") " pod="openstack/nova-cell0-conductor-0" Oct 09 15:56:22 crc kubenswrapper[4762]: I1009 15:56:22.490452 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/892e2b35-4076-4ae7-b81c-95beca001408-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"892e2b35-4076-4ae7-b81c-95beca001408\") " pod="openstack/nova-cell0-conductor-0" Oct 09 15:56:22 crc kubenswrapper[4762]: I1009 15:56:22.490860 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/892e2b35-4076-4ae7-b81c-95beca001408-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"892e2b35-4076-4ae7-b81c-95beca001408\") " pod="openstack/nova-cell0-conductor-0" Oct 09 15:56:22 crc 
kubenswrapper[4762]: I1009 15:56:22.490920 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pt9w9\" (UniqueName: \"kubernetes.io/projected/892e2b35-4076-4ae7-b81c-95beca001408-kube-api-access-pt9w9\") pod \"nova-cell0-conductor-0\" (UID: \"892e2b35-4076-4ae7-b81c-95beca001408\") " pod="openstack/nova-cell0-conductor-0" Oct 09 15:56:22 crc kubenswrapper[4762]: I1009 15:56:22.499529 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/892e2b35-4076-4ae7-b81c-95beca001408-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"892e2b35-4076-4ae7-b81c-95beca001408\") " pod="openstack/nova-cell0-conductor-0" Oct 09 15:56:22 crc kubenswrapper[4762]: I1009 15:56:22.500684 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/892e2b35-4076-4ae7-b81c-95beca001408-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"892e2b35-4076-4ae7-b81c-95beca001408\") " pod="openstack/nova-cell0-conductor-0" Oct 09 15:56:22 crc kubenswrapper[4762]: I1009 15:56:22.520420 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pt9w9\" (UniqueName: \"kubernetes.io/projected/892e2b35-4076-4ae7-b81c-95beca001408-kube-api-access-pt9w9\") pod \"nova-cell0-conductor-0\" (UID: \"892e2b35-4076-4ae7-b81c-95beca001408\") " pod="openstack/nova-cell0-conductor-0" Oct 09 15:56:22 crc kubenswrapper[4762]: I1009 15:56:22.561860 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Oct 09 15:56:22 crc kubenswrapper[4762]: I1009 15:56:22.981783 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3fdff501-cb37-47c4-92c7-f7db3c9c4c5a" path="/var/lib/kubelet/pods/3fdff501-cb37-47c4-92c7-f7db3c9c4c5a/volumes" Oct 09 15:56:22 crc kubenswrapper[4762]: I1009 15:56:22.983273 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cb5ae0d7-b04d-4fef-bbb1-66cb10260905" path="/var/lib/kubelet/pods/cb5ae0d7-b04d-4fef-bbb1-66cb10260905/volumes" Oct 09 15:56:23 crc kubenswrapper[4762]: I1009 15:56:23.051390 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Oct 09 15:56:23 crc kubenswrapper[4762]: I1009 15:56:23.061732 4762 generic.go:334] "Generic (PLEG): container finished" podID="e38ba711-b6dd-42aa-b526-3c170fea1b48" containerID="15cb2c91808e21b1ad1a1979319a478cd829ba0b947ab509503e46c9bf29f406" exitCode=0 Oct 09 15:56:23 crc kubenswrapper[4762]: I1009 15:56:23.061830 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"e38ba711-b6dd-42aa-b526-3c170fea1b48","Type":"ContainerDied","Data":"15cb2c91808e21b1ad1a1979319a478cd829ba0b947ab509503e46c9bf29f406"} Oct 09 15:56:23 crc kubenswrapper[4762]: I1009 15:56:23.063721 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"ad631956-f573-4fcc-bfda-bbfece4bae8c","Type":"ContainerStarted","Data":"398165f8108f9e7b16b1a7f187838ef68c30d6f6491ae731335468e49734a3d1"} Oct 09 15:56:23 crc kubenswrapper[4762]: I1009 15:56:23.063750 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"ad631956-f573-4fcc-bfda-bbfece4bae8c","Type":"ContainerStarted","Data":"71e8c72e8168982e64a45c5399391c74d5b304d7f68744f35751d5e0fdd6bef6"} Oct 09 15:56:23 crc kubenswrapper[4762]: I1009 
Oct 09 15:56:23 crc kubenswrapper[4762]: I1009 15:56:23.064259 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0"
Oct 09 15:56:23 crc kubenswrapper[4762]: I1009 15:56:23.107781 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.107761013 podStartE2EDuration="2.107761013s" podCreationTimestamp="2025-10-09 15:56:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 15:56:23.089464105 +0000 UTC m=+9058.863255144" watchObservedRunningTime="2025-10-09 15:56:23.107761013 +0000 UTC m=+9058.881552052"
Oct 09 15:56:23 crc kubenswrapper[4762]: I1009 15:56:23.251006 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Oct 09 15:56:23 crc kubenswrapper[4762]: I1009 15:56:23.409224 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e38ba711-b6dd-42aa-b526-3c170fea1b48-config-data\") pod \"e38ba711-b6dd-42aa-b526-3c170fea1b48\" (UID: \"e38ba711-b6dd-42aa-b526-3c170fea1b48\") "
Oct 09 15:56:23 crc kubenswrapper[4762]: I1009 15:56:23.409289 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e38ba711-b6dd-42aa-b526-3c170fea1b48-combined-ca-bundle\") pod \"e38ba711-b6dd-42aa-b526-3c170fea1b48\" (UID: \"e38ba711-b6dd-42aa-b526-3c170fea1b48\") "
Oct 09 15:56:23 crc kubenswrapper[4762]: I1009 15:56:23.409486 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-68k77\" (UniqueName: \"kubernetes.io/projected/e38ba711-b6dd-42aa-b526-3c170fea1b48-kube-api-access-68k77\") pod \"e38ba711-b6dd-42aa-b526-3c170fea1b48\" (UID: \"e38ba711-b6dd-42aa-b526-3c170fea1b48\") "
Oct 09 15:56:23 crc kubenswrapper[4762]: I1009 15:56:23.413965 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e38ba711-b6dd-42aa-b526-3c170fea1b48-kube-api-access-68k77" (OuterVolumeSpecName: "kube-api-access-68k77") pod "e38ba711-b6dd-42aa-b526-3c170fea1b48" (UID: "e38ba711-b6dd-42aa-b526-3c170fea1b48"). InnerVolumeSpecName "kube-api-access-68k77". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 09 15:56:23 crc kubenswrapper[4762]: I1009 15:56:23.439472 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e38ba711-b6dd-42aa-b526-3c170fea1b48-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e38ba711-b6dd-42aa-b526-3c170fea1b48" (UID: "e38ba711-b6dd-42aa-b526-3c170fea1b48"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
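The "Observed pod startup duration" record is simple arithmetic over the timestamps it carries: watchObservedRunningTime 15:56:23.107761013 minus podCreationTimestamp 15:56:21 gives the logged podStartSLOduration of 2.107761013s, and the zero-valued pulling timestamps mean no image pull was needed. The m=+9058.88... suffixes are Go's monotonic-clock readings appended by time.Time's formatter, i.e. seconds since the kubelet process started.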
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:56:23 crc kubenswrapper[4762]: I1009 15:56:23.513260 4762 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e38ba711-b6dd-42aa-b526-3c170fea1b48-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 15:56:23 crc kubenswrapper[4762]: I1009 15:56:23.513307 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e38ba711-b6dd-42aa-b526-3c170fea1b48-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 15:56:23 crc kubenswrapper[4762]: I1009 15:56:23.513320 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-68k77\" (UniqueName: \"kubernetes.io/projected/e38ba711-b6dd-42aa-b526-3c170fea1b48-kube-api-access-68k77\") on node \"crc\" DevicePath \"\"" Oct 09 15:56:23 crc kubenswrapper[4762]: I1009 15:56:23.926553 4762 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="66dac335-a776-429f-a37b-56ec1691e0c8" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"http://10.217.1.88:8775/\": read tcp 10.217.0.2:51752->10.217.1.88:8775: read: connection reset by peer" Oct 09 15:56:23 crc kubenswrapper[4762]: I1009 15:56:23.926875 4762 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="66dac335-a776-429f-a37b-56ec1691e0c8" containerName="nova-metadata-log" probeResult="failure" output="Get \"http://10.217.1.88:8775/\": read tcp 10.217.0.2:51768->10.217.1.88:8775: read: connection reset by peer" Oct 09 15:56:23 crc kubenswrapper[4762]: I1009 15:56:23.965191 4762 scope.go:117] "RemoveContainer" containerID="915bf3aac40e436d24352cca144821f66a07852e961cfcafeb700aef3cb1b274" Oct 09 15:56:23 crc kubenswrapper[4762]: E1009 15:56:23.965668 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:56:24 crc kubenswrapper[4762]: I1009 15:56:24.079510 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"e38ba711-b6dd-42aa-b526-3c170fea1b48","Type":"ContainerDied","Data":"bf5178fe8e51d8f223e9f554ca8535b71a24ade323b47701942c852465c71230"} Oct 09 15:56:24 crc kubenswrapper[4762]: I1009 15:56:24.079565 4762 scope.go:117] "RemoveContainer" containerID="15cb2c91808e21b1ad1a1979319a478cd829ba0b947ab509503e46c9bf29f406" Oct 09 15:56:24 crc kubenswrapper[4762]: I1009 15:56:24.079570 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Oct 09 15:56:24 crc kubenswrapper[4762]: I1009 15:56:24.082841 4762 generic.go:334] "Generic (PLEG): container finished" podID="66dac335-a776-429f-a37b-56ec1691e0c8" containerID="67600f189fe2409693d2e29080955b9f8156a17d49cb0308f7b361f62a93328b" exitCode=0 Oct 09 15:56:24 crc kubenswrapper[4762]: I1009 15:56:24.082902 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"66dac335-a776-429f-a37b-56ec1691e0c8","Type":"ContainerDied","Data":"67600f189fe2409693d2e29080955b9f8156a17d49cb0308f7b361f62a93328b"} Oct 09 15:56:24 crc kubenswrapper[4762]: I1009 15:56:24.085252 4762 generic.go:334] "Generic (PLEG): container finished" podID="f962d3eb-c1dd-4738-97a9-971f450fef59" containerID="cb38c41a804e3cabe8de7033c6b2ddfb8dbc929246868ac398768e832835b202" exitCode=0 Oct 09 15:56:24 crc kubenswrapper[4762]: I1009 15:56:24.085301 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"f962d3eb-c1dd-4738-97a9-971f450fef59","Type":"ContainerDied","Data":"cb38c41a804e3cabe8de7033c6b2ddfb8dbc929246868ac398768e832835b202"} Oct 09 15:56:24 crc kubenswrapper[4762]: I1009 15:56:24.092357 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"892e2b35-4076-4ae7-b81c-95beca001408","Type":"ContainerStarted","Data":"da3bc6bc3cc1d3ff7f6969bc5e9f4421a80ae3f64fabe0ed94801588ce3092c4"} Oct 09 15:56:24 crc kubenswrapper[4762]: I1009 15:56:24.092405 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"892e2b35-4076-4ae7-b81c-95beca001408","Type":"ContainerStarted","Data":"ddcf61a8149f6c50bd7fb1378c210293b6d8ba04cfc92601a29060f10c890f40"} Oct 09 15:56:24 crc kubenswrapper[4762]: I1009 15:56:24.092431 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Oct 09 15:56:24 crc kubenswrapper[4762]: I1009 15:56:24.128784 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.128761799 podStartE2EDuration="2.128761799s" podCreationTimestamp="2025-10-09 15:56:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 15:56:24.123140282 +0000 UTC m=+9059.896931341" watchObservedRunningTime="2025-10-09 15:56:24.128761799 +0000 UTC m=+9059.902552838" Oct 09 15:56:24 crc kubenswrapper[4762]: I1009 15:56:24.153857 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Oct 09 15:56:24 crc kubenswrapper[4762]: I1009 15:56:24.167967 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Oct 09 15:56:24 crc kubenswrapper[4762]: I1009 15:56:24.182193 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Oct 09 15:56:24 crc kubenswrapper[4762]: E1009 15:56:24.182858 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e38ba711-b6dd-42aa-b526-3c170fea1b48" containerName="nova-scheduler-scheduler" Oct 09 15:56:24 crc kubenswrapper[4762]: I1009 15:56:24.182882 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="e38ba711-b6dd-42aa-b526-3c170fea1b48" containerName="nova-scheduler-scheduler" Oct 09 15:56:24 crc kubenswrapper[4762]: I1009 15:56:24.183164 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="e38ba711-b6dd-42aa-b526-3c170fea1b48" 
containerName="nova-scheduler-scheduler" Oct 09 15:56:24 crc kubenswrapper[4762]: I1009 15:56:24.184192 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 09 15:56:24 crc kubenswrapper[4762]: I1009 15:56:24.195591 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Oct 09 15:56:24 crc kubenswrapper[4762]: I1009 15:56:24.221047 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 09 15:56:24 crc kubenswrapper[4762]: I1009 15:56:24.232972 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eafc81b3-49af-41be-9689-964ee18e7fc7-config-data\") pod \"nova-scheduler-0\" (UID: \"eafc81b3-49af-41be-9689-964ee18e7fc7\") " pod="openstack/nova-scheduler-0" Oct 09 15:56:24 crc kubenswrapper[4762]: I1009 15:56:24.233023 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eafc81b3-49af-41be-9689-964ee18e7fc7-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"eafc81b3-49af-41be-9689-964ee18e7fc7\") " pod="openstack/nova-scheduler-0" Oct 09 15:56:24 crc kubenswrapper[4762]: I1009 15:56:24.233177 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-67btk\" (UniqueName: \"kubernetes.io/projected/eafc81b3-49af-41be-9689-964ee18e7fc7-kube-api-access-67btk\") pod \"nova-scheduler-0\" (UID: \"eafc81b3-49af-41be-9689-964ee18e7fc7\") " pod="openstack/nova-scheduler-0" Oct 09 15:56:24 crc kubenswrapper[4762]: I1009 15:56:24.335058 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eafc81b3-49af-41be-9689-964ee18e7fc7-config-data\") pod \"nova-scheduler-0\" (UID: \"eafc81b3-49af-41be-9689-964ee18e7fc7\") " pod="openstack/nova-scheduler-0" Oct 09 15:56:24 crc kubenswrapper[4762]: I1009 15:56:24.335404 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eafc81b3-49af-41be-9689-964ee18e7fc7-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"eafc81b3-49af-41be-9689-964ee18e7fc7\") " pod="openstack/nova-scheduler-0" Oct 09 15:56:24 crc kubenswrapper[4762]: I1009 15:56:24.335537 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-67btk\" (UniqueName: \"kubernetes.io/projected/eafc81b3-49af-41be-9689-964ee18e7fc7-kube-api-access-67btk\") pod \"nova-scheduler-0\" (UID: \"eafc81b3-49af-41be-9689-964ee18e7fc7\") " pod="openstack/nova-scheduler-0" Oct 09 15:56:24 crc kubenswrapper[4762]: I1009 15:56:24.342973 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eafc81b3-49af-41be-9689-964ee18e7fc7-config-data\") pod \"nova-scheduler-0\" (UID: \"eafc81b3-49af-41be-9689-964ee18e7fc7\") " pod="openstack/nova-scheduler-0" Oct 09 15:56:24 crc kubenswrapper[4762]: I1009 15:56:24.352343 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eafc81b3-49af-41be-9689-964ee18e7fc7-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"eafc81b3-49af-41be-9689-964ee18e7fc7\") " pod="openstack/nova-scheduler-0" Oct 09 15:56:24 crc kubenswrapper[4762]: 
Oct 09 15:56:24 crc kubenswrapper[4762]: I1009 15:56:24.360565 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-67btk\" (UniqueName: \"kubernetes.io/projected/eafc81b3-49af-41be-9689-964ee18e7fc7-kube-api-access-67btk\") pod \"nova-scheduler-0\" (UID: \"eafc81b3-49af-41be-9689-964ee18e7fc7\") " pod="openstack/nova-scheduler-0"
Oct 09 15:56:24 crc kubenswrapper[4762]: I1009 15:56:24.423534 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Oct 09 15:56:24 crc kubenswrapper[4762]: I1009 15:56:24.447127 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f962d3eb-c1dd-4738-97a9-971f450fef59-logs\") pod \"f962d3eb-c1dd-4738-97a9-971f450fef59\" (UID: \"f962d3eb-c1dd-4738-97a9-971f450fef59\") "
Oct 09 15:56:24 crc kubenswrapper[4762]: I1009 15:56:24.447916 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f962d3eb-c1dd-4738-97a9-971f450fef59-combined-ca-bundle\") pod \"f962d3eb-c1dd-4738-97a9-971f450fef59\" (UID: \"f962d3eb-c1dd-4738-97a9-971f450fef59\") "
Oct 09 15:56:24 crc kubenswrapper[4762]: I1009 15:56:24.447977 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f962d3eb-c1dd-4738-97a9-971f450fef59-config-data\") pod \"f962d3eb-c1dd-4738-97a9-971f450fef59\" (UID: \"f962d3eb-c1dd-4738-97a9-971f450fef59\") "
Oct 09 15:56:24 crc kubenswrapper[4762]: I1009 15:56:24.448187 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x5j6p\" (UniqueName: \"kubernetes.io/projected/f962d3eb-c1dd-4738-97a9-971f450fef59-kube-api-access-x5j6p\") pod \"f962d3eb-c1dd-4738-97a9-971f450fef59\" (UID: \"f962d3eb-c1dd-4738-97a9-971f450fef59\") "
Oct 09 15:56:24 crc kubenswrapper[4762]: I1009 15:56:24.448244 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f962d3eb-c1dd-4738-97a9-971f450fef59-logs" (OuterVolumeSpecName: "logs") pod "f962d3eb-c1dd-4738-97a9-971f450fef59" (UID: "f962d3eb-c1dd-4738-97a9-971f450fef59"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 09 15:56:24 crc kubenswrapper[4762]: I1009 15:56:24.448762 4762 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f962d3eb-c1dd-4738-97a9-971f450fef59-logs\") on node \"crc\" DevicePath \"\""
Oct 09 15:56:24 crc kubenswrapper[4762]: I1009 15:56:24.453860 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f962d3eb-c1dd-4738-97a9-971f450fef59-kube-api-access-x5j6p" (OuterVolumeSpecName: "kube-api-access-x5j6p") pod "f962d3eb-c1dd-4738-97a9-971f450fef59" (UID: "f962d3eb-c1dd-4738-97a9-971f450fef59"). InnerVolumeSpecName "kube-api-access-x5j6p". PluginName "kubernetes.io/projected", VolumeGidValue ""
Need to start a new one" pod="openstack/nova-metadata-0" Oct 09 15:56:24 crc kubenswrapper[4762]: I1009 15:56:24.510899 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f962d3eb-c1dd-4738-97a9-971f450fef59-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f962d3eb-c1dd-4738-97a9-971f450fef59" (UID: "f962d3eb-c1dd-4738-97a9-971f450fef59"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:56:24 crc kubenswrapper[4762]: I1009 15:56:24.523767 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f962d3eb-c1dd-4738-97a9-971f450fef59-config-data" (OuterVolumeSpecName: "config-data") pod "f962d3eb-c1dd-4738-97a9-971f450fef59" (UID: "f962d3eb-c1dd-4738-97a9-971f450fef59"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:56:24 crc kubenswrapper[4762]: I1009 15:56:24.536019 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 09 15:56:24 crc kubenswrapper[4762]: I1009 15:56:24.550727 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/66dac335-a776-429f-a37b-56ec1691e0c8-logs\") pod \"66dac335-a776-429f-a37b-56ec1691e0c8\" (UID: \"66dac335-a776-429f-a37b-56ec1691e0c8\") " Oct 09 15:56:24 crc kubenswrapper[4762]: I1009 15:56:24.550867 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/66dac335-a776-429f-a37b-56ec1691e0c8-combined-ca-bundle\") pod \"66dac335-a776-429f-a37b-56ec1691e0c8\" (UID: \"66dac335-a776-429f-a37b-56ec1691e0c8\") " Oct 09 15:56:24 crc kubenswrapper[4762]: I1009 15:56:24.551115 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/66dac335-a776-429f-a37b-56ec1691e0c8-logs" (OuterVolumeSpecName: "logs") pod "66dac335-a776-429f-a37b-56ec1691e0c8" (UID: "66dac335-a776-429f-a37b-56ec1691e0c8"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 15:56:24 crc kubenswrapper[4762]: I1009 15:56:24.551131 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/66dac335-a776-429f-a37b-56ec1691e0c8-config-data\") pod \"66dac335-a776-429f-a37b-56ec1691e0c8\" (UID: \"66dac335-a776-429f-a37b-56ec1691e0c8\") " Oct 09 15:56:24 crc kubenswrapper[4762]: I1009 15:56:24.551304 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-469dz\" (UniqueName: \"kubernetes.io/projected/66dac335-a776-429f-a37b-56ec1691e0c8-kube-api-access-469dz\") pod \"66dac335-a776-429f-a37b-56ec1691e0c8\" (UID: \"66dac335-a776-429f-a37b-56ec1691e0c8\") " Oct 09 15:56:24 crc kubenswrapper[4762]: I1009 15:56:24.552340 4762 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/66dac335-a776-429f-a37b-56ec1691e0c8-logs\") on node \"crc\" DevicePath \"\"" Oct 09 15:56:24 crc kubenswrapper[4762]: I1009 15:56:24.552369 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f962d3eb-c1dd-4738-97a9-971f450fef59-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 15:56:24 crc kubenswrapper[4762]: I1009 15:56:24.552387 4762 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f962d3eb-c1dd-4738-97a9-971f450fef59-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 15:56:24 crc kubenswrapper[4762]: I1009 15:56:24.552399 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x5j6p\" (UniqueName: \"kubernetes.io/projected/f962d3eb-c1dd-4738-97a9-971f450fef59-kube-api-access-x5j6p\") on node \"crc\" DevicePath \"\"" Oct 09 15:56:24 crc kubenswrapper[4762]: I1009 15:56:24.555604 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/66dac335-a776-429f-a37b-56ec1691e0c8-kube-api-access-469dz" (OuterVolumeSpecName: "kube-api-access-469dz") pod "66dac335-a776-429f-a37b-56ec1691e0c8" (UID: "66dac335-a776-429f-a37b-56ec1691e0c8"). InnerVolumeSpecName "kube-api-access-469dz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 15:56:24 crc kubenswrapper[4762]: I1009 15:56:24.594001 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/66dac335-a776-429f-a37b-56ec1691e0c8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "66dac335-a776-429f-a37b-56ec1691e0c8" (UID: "66dac335-a776-429f-a37b-56ec1691e0c8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:56:24 crc kubenswrapper[4762]: I1009 15:56:24.602765 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/66dac335-a776-429f-a37b-56ec1691e0c8-config-data" (OuterVolumeSpecName: "config-data") pod "66dac335-a776-429f-a37b-56ec1691e0c8" (UID: "66dac335-a776-429f-a37b-56ec1691e0c8"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 15:56:24 crc kubenswrapper[4762]: I1009 15:56:24.656458 4762 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/66dac335-a776-429f-a37b-56ec1691e0c8-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 15:56:24 crc kubenswrapper[4762]: I1009 15:56:24.656502 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-469dz\" (UniqueName: \"kubernetes.io/projected/66dac335-a776-429f-a37b-56ec1691e0c8-kube-api-access-469dz\") on node \"crc\" DevicePath \"\"" Oct 09 15:56:24 crc kubenswrapper[4762]: I1009 15:56:24.656516 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/66dac335-a776-429f-a37b-56ec1691e0c8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 15:56:25 crc kubenswrapper[4762]: I1009 15:56:25.005488 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e38ba711-b6dd-42aa-b526-3c170fea1b48" path="/var/lib/kubelet/pods/e38ba711-b6dd-42aa-b526-3c170fea1b48/volumes" Oct 09 15:56:25 crc kubenswrapper[4762]: I1009 15:56:25.045261 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 09 15:56:25 crc kubenswrapper[4762]: I1009 15:56:25.102946 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"eafc81b3-49af-41be-9689-964ee18e7fc7","Type":"ContainerStarted","Data":"9bb5b15e8d26b715f805f2ab07d843c543a5863d68366dcac37882b9fa77433b"} Oct 09 15:56:25 crc kubenswrapper[4762]: I1009 15:56:25.107829 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"66dac335-a776-429f-a37b-56ec1691e0c8","Type":"ContainerDied","Data":"12c77e5d07fc1f53d0f9a87d43b6bab39dfd3218906d019e54a39a91ea4cc833"} Oct 09 15:56:25 crc kubenswrapper[4762]: I1009 15:56:25.107988 4762 scope.go:117] "RemoveContainer" containerID="67600f189fe2409693d2e29080955b9f8156a17d49cb0308f7b361f62a93328b" Oct 09 15:56:25 crc kubenswrapper[4762]: I1009 15:56:25.108223 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 09 15:56:25 crc kubenswrapper[4762]: I1009 15:56:25.111330 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 09 15:56:25 crc kubenswrapper[4762]: I1009 15:56:25.111441 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"f962d3eb-c1dd-4738-97a9-971f450fef59","Type":"ContainerDied","Data":"ffb1d622d976be3ef509a4c92d27500ef85a0f5365d3eefa3060a1fe3d4dc463"} Oct 09 15:56:25 crc kubenswrapper[4762]: I1009 15:56:25.204038 4762 scope.go:117] "RemoveContainer" containerID="87cff1325fecaf8e8cfdd99e4c8c9bd4f23a4940e582f3edfc790b4c7e3e38fb" Oct 09 15:56:25 crc kubenswrapper[4762]: I1009 15:56:25.230124 4762 scope.go:117] "RemoveContainer" containerID="cb38c41a804e3cabe8de7033c6b2ddfb8dbc929246868ac398768e832835b202" Oct 09 15:56:25 crc kubenswrapper[4762]: I1009 15:56:25.240081 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 09 15:56:25 crc kubenswrapper[4762]: I1009 15:56:25.272202 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Oct 09 15:56:25 crc kubenswrapper[4762]: I1009 15:56:25.282790 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 09 15:56:25 crc kubenswrapper[4762]: I1009 15:56:25.298768 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Oct 09 15:56:25 crc kubenswrapper[4762]: I1009 15:56:25.307013 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Oct 09 15:56:25 crc kubenswrapper[4762]: E1009 15:56:25.307537 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f962d3eb-c1dd-4738-97a9-971f450fef59" containerName="nova-api-api" Oct 09 15:56:25 crc kubenswrapper[4762]: I1009 15:56:25.307555 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="f962d3eb-c1dd-4738-97a9-971f450fef59" containerName="nova-api-api" Oct 09 15:56:25 crc kubenswrapper[4762]: E1009 15:56:25.307574 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66dac335-a776-429f-a37b-56ec1691e0c8" containerName="nova-metadata-metadata" Oct 09 15:56:25 crc kubenswrapper[4762]: I1009 15:56:25.307580 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="66dac335-a776-429f-a37b-56ec1691e0c8" containerName="nova-metadata-metadata" Oct 09 15:56:25 crc kubenswrapper[4762]: E1009 15:56:25.307617 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66dac335-a776-429f-a37b-56ec1691e0c8" containerName="nova-metadata-log" Oct 09 15:56:25 crc kubenswrapper[4762]: I1009 15:56:25.307624 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="66dac335-a776-429f-a37b-56ec1691e0c8" containerName="nova-metadata-log" Oct 09 15:56:25 crc kubenswrapper[4762]: E1009 15:56:25.307656 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f962d3eb-c1dd-4738-97a9-971f450fef59" containerName="nova-api-log" Oct 09 15:56:25 crc kubenswrapper[4762]: I1009 15:56:25.307663 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="f962d3eb-c1dd-4738-97a9-971f450fef59" containerName="nova-api-log" Oct 09 15:56:25 crc kubenswrapper[4762]: I1009 15:56:25.307857 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="66dac335-a776-429f-a37b-56ec1691e0c8" containerName="nova-metadata-log" Oct 09 15:56:25 crc kubenswrapper[4762]: I1009 15:56:25.307872 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="f962d3eb-c1dd-4738-97a9-971f450fef59" containerName="nova-api-api" Oct 09 15:56:25 crc kubenswrapper[4762]: I1009 15:56:25.307884 4762 memory_manager.go:354] "RemoveStaleState 
removing state" podUID="f962d3eb-c1dd-4738-97a9-971f450fef59" containerName="nova-api-log" Oct 09 15:56:25 crc kubenswrapper[4762]: I1009 15:56:25.307899 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="66dac335-a776-429f-a37b-56ec1691e0c8" containerName="nova-metadata-metadata" Oct 09 15:56:25 crc kubenswrapper[4762]: I1009 15:56:25.309087 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 09 15:56:25 crc kubenswrapper[4762]: I1009 15:56:25.309585 4762 scope.go:117] "RemoveContainer" containerID="962de3aef6def46549360f3931b75c5d2559742f3be1a1bb3ad211b71e156e39" Oct 09 15:56:25 crc kubenswrapper[4762]: I1009 15:56:25.314512 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Oct 09 15:56:25 crc kubenswrapper[4762]: I1009 15:56:25.318732 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Oct 09 15:56:25 crc kubenswrapper[4762]: I1009 15:56:25.320618 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 09 15:56:25 crc kubenswrapper[4762]: I1009 15:56:25.323567 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Oct 09 15:56:25 crc kubenswrapper[4762]: I1009 15:56:25.329495 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 09 15:56:25 crc kubenswrapper[4762]: I1009 15:56:25.349094 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 09 15:56:25 crc kubenswrapper[4762]: I1009 15:56:25.406295 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f29764a4-4682-4b9e-b089-c3e59c2a9489-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"f29764a4-4682-4b9e-b089-c3e59c2a9489\") " pod="openstack/nova-metadata-0" Oct 09 15:56:25 crc kubenswrapper[4762]: I1009 15:56:25.406340 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f29764a4-4682-4b9e-b089-c3e59c2a9489-logs\") pod \"nova-metadata-0\" (UID: \"f29764a4-4682-4b9e-b089-c3e59c2a9489\") " pod="openstack/nova-metadata-0" Oct 09 15:56:25 crc kubenswrapper[4762]: I1009 15:56:25.406445 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fe462185-8765-4389-b162-e73854d7eb61-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"fe462185-8765-4389-b162-e73854d7eb61\") " pod="openstack/nova-api-0" Oct 09 15:56:25 crc kubenswrapper[4762]: I1009 15:56:25.406466 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fe462185-8765-4389-b162-e73854d7eb61-logs\") pod \"nova-api-0\" (UID: \"fe462185-8765-4389-b162-e73854d7eb61\") " pod="openstack/nova-api-0" Oct 09 15:56:25 crc kubenswrapper[4762]: I1009 15:56:25.406489 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xx57n\" (UniqueName: \"kubernetes.io/projected/fe462185-8765-4389-b162-e73854d7eb61-kube-api-access-xx57n\") pod \"nova-api-0\" (UID: \"fe462185-8765-4389-b162-e73854d7eb61\") " pod="openstack/nova-api-0" Oct 09 15:56:25 crc kubenswrapper[4762]: I1009 15:56:25.406963 4762 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fe462185-8765-4389-b162-e73854d7eb61-config-data\") pod \"nova-api-0\" (UID: \"fe462185-8765-4389-b162-e73854d7eb61\") " pod="openstack/nova-api-0" Oct 09 15:56:25 crc kubenswrapper[4762]: I1009 15:56:25.407058 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f29764a4-4682-4b9e-b089-c3e59c2a9489-config-data\") pod \"nova-metadata-0\" (UID: \"f29764a4-4682-4b9e-b089-c3e59c2a9489\") " pod="openstack/nova-metadata-0" Oct 09 15:56:25 crc kubenswrapper[4762]: I1009 15:56:25.407141 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-scxps\" (UniqueName: \"kubernetes.io/projected/f29764a4-4682-4b9e-b089-c3e59c2a9489-kube-api-access-scxps\") pod \"nova-metadata-0\" (UID: \"f29764a4-4682-4b9e-b089-c3e59c2a9489\") " pod="openstack/nova-metadata-0" Oct 09 15:56:25 crc kubenswrapper[4762]: I1009 15:56:25.509158 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fe462185-8765-4389-b162-e73854d7eb61-logs\") pod \"nova-api-0\" (UID: \"fe462185-8765-4389-b162-e73854d7eb61\") " pod="openstack/nova-api-0" Oct 09 15:56:25 crc kubenswrapper[4762]: I1009 15:56:25.509208 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xx57n\" (UniqueName: \"kubernetes.io/projected/fe462185-8765-4389-b162-e73854d7eb61-kube-api-access-xx57n\") pod \"nova-api-0\" (UID: \"fe462185-8765-4389-b162-e73854d7eb61\") " pod="openstack/nova-api-0" Oct 09 15:56:25 crc kubenswrapper[4762]: I1009 15:56:25.509332 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fe462185-8765-4389-b162-e73854d7eb61-config-data\") pod \"nova-api-0\" (UID: \"fe462185-8765-4389-b162-e73854d7eb61\") " pod="openstack/nova-api-0" Oct 09 15:56:25 crc kubenswrapper[4762]: I1009 15:56:25.509354 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f29764a4-4682-4b9e-b089-c3e59c2a9489-config-data\") pod \"nova-metadata-0\" (UID: \"f29764a4-4682-4b9e-b089-c3e59c2a9489\") " pod="openstack/nova-metadata-0" Oct 09 15:56:25 crc kubenswrapper[4762]: I1009 15:56:25.509373 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-scxps\" (UniqueName: \"kubernetes.io/projected/f29764a4-4682-4b9e-b089-c3e59c2a9489-kube-api-access-scxps\") pod \"nova-metadata-0\" (UID: \"f29764a4-4682-4b9e-b089-c3e59c2a9489\") " pod="openstack/nova-metadata-0" Oct 09 15:56:25 crc kubenswrapper[4762]: I1009 15:56:25.509407 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f29764a4-4682-4b9e-b089-c3e59c2a9489-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"f29764a4-4682-4b9e-b089-c3e59c2a9489\") " pod="openstack/nova-metadata-0" Oct 09 15:56:25 crc kubenswrapper[4762]: I1009 15:56:25.509426 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f29764a4-4682-4b9e-b089-c3e59c2a9489-logs\") pod \"nova-metadata-0\" (UID: \"f29764a4-4682-4b9e-b089-c3e59c2a9489\") " pod="openstack/nova-metadata-0" Oct 09 15:56:25 crc 
Oct 09 15:56:25 crc kubenswrapper[4762]: I1009 15:56:25.509531 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fe462185-8765-4389-b162-e73854d7eb61-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"fe462185-8765-4389-b162-e73854d7eb61\") " pod="openstack/nova-api-0"
Oct 09 15:56:25 crc kubenswrapper[4762]: I1009 15:56:25.509779 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fe462185-8765-4389-b162-e73854d7eb61-logs\") pod \"nova-api-0\" (UID: \"fe462185-8765-4389-b162-e73854d7eb61\") " pod="openstack/nova-api-0"
Oct 09 15:56:25 crc kubenswrapper[4762]: I1009 15:56:25.510053 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f29764a4-4682-4b9e-b089-c3e59c2a9489-logs\") pod \"nova-metadata-0\" (UID: \"f29764a4-4682-4b9e-b089-c3e59c2a9489\") " pod="openstack/nova-metadata-0"
Oct 09 15:56:25 crc kubenswrapper[4762]: I1009 15:56:25.514201 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fe462185-8765-4389-b162-e73854d7eb61-config-data\") pod \"nova-api-0\" (UID: \"fe462185-8765-4389-b162-e73854d7eb61\") " pod="openstack/nova-api-0"
Oct 09 15:56:25 crc kubenswrapper[4762]: I1009 15:56:25.514695 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fe462185-8765-4389-b162-e73854d7eb61-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"fe462185-8765-4389-b162-e73854d7eb61\") " pod="openstack/nova-api-0"
Oct 09 15:56:25 crc kubenswrapper[4762]: I1009 15:56:25.524680 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f29764a4-4682-4b9e-b089-c3e59c2a9489-config-data\") pod \"nova-metadata-0\" (UID: \"f29764a4-4682-4b9e-b089-c3e59c2a9489\") " pod="openstack/nova-metadata-0"
Oct 09 15:56:25 crc kubenswrapper[4762]: I1009 15:56:25.526743 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f29764a4-4682-4b9e-b089-c3e59c2a9489-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"f29764a4-4682-4b9e-b089-c3e59c2a9489\") " pod="openstack/nova-metadata-0"
Oct 09 15:56:25 crc kubenswrapper[4762]: I1009 15:56:25.527043 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xx57n\" (UniqueName: \"kubernetes.io/projected/fe462185-8765-4389-b162-e73854d7eb61-kube-api-access-xx57n\") pod \"nova-api-0\" (UID: \"fe462185-8765-4389-b162-e73854d7eb61\") " pod="openstack/nova-api-0"
Oct 09 15:56:25 crc kubenswrapper[4762]: I1009 15:56:25.537141 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-scxps\" (UniqueName: \"kubernetes.io/projected/f29764a4-4682-4b9e-b089-c3e59c2a9489-kube-api-access-scxps\") pod \"nova-metadata-0\" (UID: \"f29764a4-4682-4b9e-b089-c3e59c2a9489\") " pod="openstack/nova-metadata-0"
Oct 09 15:56:25 crc kubenswrapper[4762]: I1009 15:56:25.626608 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Need to start a new one" pod="openstack/nova-metadata-0" Oct 09 15:56:26 crc kubenswrapper[4762]: I1009 15:56:26.133429 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 09 15:56:26 crc kubenswrapper[4762]: I1009 15:56:26.136676 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"eafc81b3-49af-41be-9689-964ee18e7fc7","Type":"ContainerStarted","Data":"28667e216e458850a8282d57edf4b0ec238af83f269c7405d3d1bc38eb2e32ce"} Oct 09 15:56:26 crc kubenswrapper[4762]: I1009 15:56:26.199845 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.199818719 podStartE2EDuration="2.199818719s" podCreationTimestamp="2025-10-09 15:56:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 15:56:26.176106359 +0000 UTC m=+9061.949897398" watchObservedRunningTime="2025-10-09 15:56:26.199818719 +0000 UTC m=+9061.973609758" Oct 09 15:56:26 crc kubenswrapper[4762]: W1009 15:56:26.222302 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfe462185_8765_4389_b162_e73854d7eb61.slice/crio-4dda980395f43fe8c448012e1651f30da82722bdfd01f8dafde8c63ddd863f32 WatchSource:0}: Error finding container 4dda980395f43fe8c448012e1651f30da82722bdfd01f8dafde8c63ddd863f32: Status 404 returned error can't find the container with id 4dda980395f43fe8c448012e1651f30da82722bdfd01f8dafde8c63ddd863f32 Oct 09 15:56:26 crc kubenswrapper[4762]: I1009 15:56:26.232914 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 09 15:56:26 crc kubenswrapper[4762]: E1009 15:56:26.260794 4762 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod66dac335_a776_429f_a37b_56ec1691e0c8.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf962d3eb_c1dd_4738_97a9_971f450fef59.slice\": RecentStats: unable to find data in memory cache]" Oct 09 15:56:26 crc kubenswrapper[4762]: I1009 15:56:26.979798 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="66dac335-a776-429f-a37b-56ec1691e0c8" path="/var/lib/kubelet/pods/66dac335-a776-429f-a37b-56ec1691e0c8/volumes" Oct 09 15:56:26 crc kubenswrapper[4762]: I1009 15:56:26.981669 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f962d3eb-c1dd-4738-97a9-971f450fef59" path="/var/lib/kubelet/pods/f962d3eb-c1dd-4738-97a9-971f450fef59/volumes" Oct 09 15:56:27 crc kubenswrapper[4762]: I1009 15:56:27.158020 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"f29764a4-4682-4b9e-b089-c3e59c2a9489","Type":"ContainerStarted","Data":"0914bed2a374a380d17ab6d58387010117239f5f67071700f40e453d8a44b854"} Oct 09 15:56:27 crc kubenswrapper[4762]: I1009 15:56:27.158072 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"f29764a4-4682-4b9e-b089-c3e59c2a9489","Type":"ContainerStarted","Data":"8f312fb22f0ad02b898877b06027426c3aa344d09adda6ed8a7e75ce6eb54623"} Oct 09 15:56:27 crc kubenswrapper[4762]: I1009 15:56:27.158084 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" 
event={"ID":"f29764a4-4682-4b9e-b089-c3e59c2a9489","Type":"ContainerStarted","Data":"e789df0036dc494a98253117ef49d5a8bcf0a3fabc81fe223c7c827abc7cadb3"} Oct 09 15:56:27 crc kubenswrapper[4762]: I1009 15:56:27.166391 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"fe462185-8765-4389-b162-e73854d7eb61","Type":"ContainerStarted","Data":"2f0bfd27694149778ea26dbf1ccec77a4007a7600bd6493d08e3a519ae67eed2"} Oct 09 15:56:27 crc kubenswrapper[4762]: I1009 15:56:27.166440 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"fe462185-8765-4389-b162-e73854d7eb61","Type":"ContainerStarted","Data":"6d5b99e8a446c4d4c64c581c3b2dd06f823d9901566bc88be1e93f94a5054d2c"} Oct 09 15:56:27 crc kubenswrapper[4762]: I1009 15:56:27.166452 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"fe462185-8765-4389-b162-e73854d7eb61","Type":"ContainerStarted","Data":"4dda980395f43fe8c448012e1651f30da82722bdfd01f8dafde8c63ddd863f32"} Oct 09 15:56:27 crc kubenswrapper[4762]: I1009 15:56:27.188549 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.18852471 podStartE2EDuration="2.18852471s" podCreationTimestamp="2025-10-09 15:56:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 15:56:27.176342351 +0000 UTC m=+9062.950133400" watchObservedRunningTime="2025-10-09 15:56:27.18852471 +0000 UTC m=+9062.962315769" Oct 09 15:56:27 crc kubenswrapper[4762]: I1009 15:56:27.197765 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.197748702 podStartE2EDuration="2.197748702s" podCreationTimestamp="2025-10-09 15:56:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 15:56:27.193846299 +0000 UTC m=+9062.967637338" watchObservedRunningTime="2025-10-09 15:56:27.197748702 +0000 UTC m=+9062.971539741" Oct 09 15:56:29 crc kubenswrapper[4762]: I1009 15:56:29.536837 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Oct 09 15:56:30 crc kubenswrapper[4762]: I1009 15:56:30.650327 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 09 15:56:30 crc kubenswrapper[4762]: I1009 15:56:30.650700 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 09 15:56:31 crc kubenswrapper[4762]: I1009 15:56:31.544306 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Oct 09 15:56:32 crc kubenswrapper[4762]: I1009 15:56:32.600052 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Oct 09 15:56:34 crc kubenswrapper[4762]: I1009 15:56:34.537014 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Oct 09 15:56:34 crc kubenswrapper[4762]: I1009 15:56:34.570756 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Oct 09 15:56:35 crc kubenswrapper[4762]: I1009 15:56:35.302229 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Oct 09 15:56:35 crc kubenswrapper[4762]: I1009 
Oct 09 15:56:35 crc kubenswrapper[4762]: I1009 15:56:35.627032 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Oct 09 15:56:35 crc kubenswrapper[4762]: I1009 15:56:35.627079 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Oct 09 15:56:35 crc kubenswrapper[4762]: I1009 15:56:35.650627 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0"
Oct 09 15:56:35 crc kubenswrapper[4762]: I1009 15:56:35.650700 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0"
Oct 09 15:56:36 crc kubenswrapper[4762]: E1009 15:56:36.562899 4762 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod66dac335_a776_429f_a37b_56ec1691e0c8.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf962d3eb_c1dd_4738_97a9_971f450fef59.slice\": RecentStats: unable to find data in memory cache]"
Oct 09 15:56:36 crc kubenswrapper[4762]: I1009 15:56:36.792850 4762 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="fe462185-8765-4389-b162-e73854d7eb61" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.1.195:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Oct 09 15:56:36 crc kubenswrapper[4762]: I1009 15:56:36.793801 4762 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="f29764a4-4682-4b9e-b089-c3e59c2a9489" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"http://10.217.1.196:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Oct 09 15:56:36 crc kubenswrapper[4762]: I1009 15:56:36.794111 4762 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="fe462185-8765-4389-b162-e73854d7eb61" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.1.195:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Oct 09 15:56:36 crc kubenswrapper[4762]: I1009 15:56:36.794148 4762 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="f29764a4-4682-4b9e-b089-c3e59c2a9489" containerName="nova-metadata-log" probeResult="failure" output="Get \"http://10.217.1.196:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Oct 09 15:56:36 crc kubenswrapper[4762]: I1009 15:56:36.965580 4762 scope.go:117] "RemoveContainer" containerID="915bf3aac40e436d24352cca144821f66a07852e961cfcafeb700aef3cb1b274"
Oct 09 15:56:36 crc kubenswrapper[4762]: E1009 15:56:36.965877 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14"
Oct 09 15:56:45 crc kubenswrapper[4762]: I1009 15:56:45.632443 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0"
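Unlike the nova pods, which only need a few startup-probe cycles before going ready, machine-config-daemon-5v6hv is in a genuine restart loop: "back-off 5m0s restarting failed container" is the kubelet's container restart back-off at its ceiling. A sketch of the policy only — doubling delay with a cap; the 10s initial delay and the 5m cap match the kubelet defaults but are stated here as assumptions, not read from this cluster's configuration:

    package main

    import (
    	"fmt"
    	"time"
    )

    func main() {
    	delay := 10 * time.Second        // assumed initial back-off
    	const maxDelay = 5 * time.Minute // cap, seen in the log as "back-off 5m0s"
    	for restart := 1; restart <= 7; restart++ {
    		fmt.Printf("restart %d: wait %v\n", restart, delay)
    		delay *= 2
    		if delay > maxDelay {
    			delay = maxDelay // from here on every sync logs the 5m0s back-off
    		}
    	}
    }

After a handful of failures the delay pins at the cap, which is why the same 5m0s message recurs (here at 15:56:23 and again at 15:56:36) on every sync attempt.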
probe="startup" status="started" pod="openstack/nova-api-0" Oct 09 15:56:45 crc kubenswrapper[4762]: I1009 15:56:45.634156 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Oct 09 15:56:45 crc kubenswrapper[4762]: I1009 15:56:45.634191 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Oct 09 15:56:45 crc kubenswrapper[4762]: I1009 15:56:45.638721 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Oct 09 15:56:45 crc kubenswrapper[4762]: I1009 15:56:45.640412 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Oct 09 15:56:45 crc kubenswrapper[4762]: I1009 15:56:45.655051 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Oct 09 15:56:45 crc kubenswrapper[4762]: I1009 15:56:45.657186 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Oct 09 15:56:45 crc kubenswrapper[4762]: I1009 15:56:45.658083 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Oct 09 15:56:46 crc kubenswrapper[4762]: I1009 15:56:46.373073 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Oct 09 15:56:46 crc kubenswrapper[4762]: E1009 15:56:46.825205 4762 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf962d3eb_c1dd_4738_97a9_971f450fef59.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod66dac335_a776_429f_a37b_56ec1691e0c8.slice\": RecentStats: unable to find data in memory cache]" Oct 09 15:56:47 crc kubenswrapper[4762]: I1009 15:56:47.281247 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellrrnnv"] Oct 09 15:56:47 crc kubenswrapper[4762]: I1009 15:56:47.283400 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellrrnnv" Oct 09 15:56:47 crc kubenswrapper[4762]: I1009 15:56:47.289196 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 09 15:56:47 crc kubenswrapper[4762]: I1009 15:56:47.289739 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-compute-config" Oct 09 15:56:47 crc kubenswrapper[4762]: I1009 15:56:47.290010 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-migration-ssh-key" Oct 09 15:56:47 crc kubenswrapper[4762]: I1009 15:56:47.290468 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"nova-cells-global-config" Oct 09 15:56:47 crc kubenswrapper[4762]: I1009 15:56:47.291485 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Oct 09 15:56:47 crc kubenswrapper[4762]: I1009 15:56:47.291822 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-whcgt" Oct 09 15:56:47 crc kubenswrapper[4762]: I1009 15:56:47.292182 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Oct 09 15:56:47 crc kubenswrapper[4762]: I1009 15:56:47.297610 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellrrnnv"] Oct 09 15:56:47 crc kubenswrapper[4762]: I1009 15:56:47.301442 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/461c7940-1521-4400-8973-25f23794ccc6-nova-migration-ssh-key-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellrrnnv\" (UID: \"461c7940-1521-4400-8973-25f23794ccc6\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellrrnnv" Oct 09 15:56:47 crc kubenswrapper[4762]: I1009 15:56:47.301516 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/461c7940-1521-4400-8973-25f23794ccc6-nova-cells-global-config-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellrrnnv\" (UID: \"461c7940-1521-4400-8973-25f23794ccc6\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellrrnnv" Oct 09 15:56:47 crc kubenswrapper[4762]: I1009 15:56:47.301586 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/461c7940-1521-4400-8973-25f23794ccc6-nova-cell1-combined-ca-bundle\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellrrnnv\" (UID: \"461c7940-1521-4400-8973-25f23794ccc6\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellrrnnv" Oct 09 15:56:47 crc kubenswrapper[4762]: I1009 15:56:47.301740 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/461c7940-1521-4400-8973-25f23794ccc6-ssh-key\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellrrnnv\" (UID: \"461c7940-1521-4400-8973-25f23794ccc6\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellrrnnv" Oct 09 15:56:47 crc kubenswrapper[4762]: I1009 15:56:47.301770 4762 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/461c7940-1521-4400-8973-25f23794ccc6-inventory\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellrrnnv\" (UID: \"461c7940-1521-4400-8973-25f23794ccc6\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellrrnnv" Oct 09 15:56:47 crc kubenswrapper[4762]: I1009 15:56:47.301851 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/461c7940-1521-4400-8973-25f23794ccc6-ceph\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellrrnnv\" (UID: \"461c7940-1521-4400-8973-25f23794ccc6\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellrrnnv" Oct 09 15:56:47 crc kubenswrapper[4762]: I1009 15:56:47.301883 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-56kcn\" (UniqueName: \"kubernetes.io/projected/461c7940-1521-4400-8973-25f23794ccc6-kube-api-access-56kcn\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellrrnnv\" (UID: \"461c7940-1521-4400-8973-25f23794ccc6\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellrrnnv" Oct 09 15:56:47 crc kubenswrapper[4762]: I1009 15:56:47.301908 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/461c7940-1521-4400-8973-25f23794ccc6-nova-cells-global-config-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellrrnnv\" (UID: \"461c7940-1521-4400-8973-25f23794ccc6\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellrrnnv" Oct 09 15:56:47 crc kubenswrapper[4762]: I1009 15:56:47.301941 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/461c7940-1521-4400-8973-25f23794ccc6-nova-cell1-compute-config-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellrrnnv\" (UID: \"461c7940-1521-4400-8973-25f23794ccc6\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellrrnnv" Oct 09 15:56:47 crc kubenswrapper[4762]: I1009 15:56:47.301969 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/461c7940-1521-4400-8973-25f23794ccc6-nova-cell1-compute-config-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellrrnnv\" (UID: \"461c7940-1521-4400-8973-25f23794ccc6\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellrrnnv" Oct 09 15:56:47 crc kubenswrapper[4762]: I1009 15:56:47.301995 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/461c7940-1521-4400-8973-25f23794ccc6-nova-migration-ssh-key-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellrrnnv\" (UID: \"461c7940-1521-4400-8973-25f23794ccc6\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellrrnnv" Oct 09 15:56:47 crc kubenswrapper[4762]: I1009 15:56:47.403842 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: 
\"kubernetes.io/secret/461c7940-1521-4400-8973-25f23794ccc6-nova-migration-ssh-key-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellrrnnv\" (UID: \"461c7940-1521-4400-8973-25f23794ccc6\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellrrnnv" Oct 09 15:56:47 crc kubenswrapper[4762]: I1009 15:56:47.403936 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/461c7940-1521-4400-8973-25f23794ccc6-nova-cells-global-config-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellrrnnv\" (UID: \"461c7940-1521-4400-8973-25f23794ccc6\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellrrnnv" Oct 09 15:56:47 crc kubenswrapper[4762]: I1009 15:56:47.403993 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/461c7940-1521-4400-8973-25f23794ccc6-nova-cell1-combined-ca-bundle\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellrrnnv\" (UID: \"461c7940-1521-4400-8973-25f23794ccc6\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellrrnnv" Oct 09 15:56:47 crc kubenswrapper[4762]: I1009 15:56:47.404119 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/461c7940-1521-4400-8973-25f23794ccc6-ssh-key\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellrrnnv\" (UID: \"461c7940-1521-4400-8973-25f23794ccc6\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellrrnnv" Oct 09 15:56:47 crc kubenswrapper[4762]: I1009 15:56:47.404154 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/461c7940-1521-4400-8973-25f23794ccc6-inventory\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellrrnnv\" (UID: \"461c7940-1521-4400-8973-25f23794ccc6\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellrrnnv" Oct 09 15:56:47 crc kubenswrapper[4762]: I1009 15:56:47.404208 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/461c7940-1521-4400-8973-25f23794ccc6-ceph\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellrrnnv\" (UID: \"461c7940-1521-4400-8973-25f23794ccc6\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellrrnnv" Oct 09 15:56:47 crc kubenswrapper[4762]: I1009 15:56:47.404228 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-56kcn\" (UniqueName: \"kubernetes.io/projected/461c7940-1521-4400-8973-25f23794ccc6-kube-api-access-56kcn\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellrrnnv\" (UID: \"461c7940-1521-4400-8973-25f23794ccc6\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellrrnnv" Oct 09 15:56:47 crc kubenswrapper[4762]: I1009 15:56:47.404246 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/461c7940-1521-4400-8973-25f23794ccc6-nova-cells-global-config-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellrrnnv\" (UID: \"461c7940-1521-4400-8973-25f23794ccc6\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellrrnnv" Oct 09 15:56:47 
crc kubenswrapper[4762]: I1009 15:56:47.404266 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/461c7940-1521-4400-8973-25f23794ccc6-nova-cell1-compute-config-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellrrnnv\" (UID: \"461c7940-1521-4400-8973-25f23794ccc6\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellrrnnv" Oct 09 15:56:47 crc kubenswrapper[4762]: I1009 15:56:47.404287 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/461c7940-1521-4400-8973-25f23794ccc6-nova-cell1-compute-config-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellrrnnv\" (UID: \"461c7940-1521-4400-8973-25f23794ccc6\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellrrnnv" Oct 09 15:56:47 crc kubenswrapper[4762]: I1009 15:56:47.404306 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/461c7940-1521-4400-8973-25f23794ccc6-nova-migration-ssh-key-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellrrnnv\" (UID: \"461c7940-1521-4400-8973-25f23794ccc6\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellrrnnv" Oct 09 15:56:47 crc kubenswrapper[4762]: I1009 15:56:47.404967 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/461c7940-1521-4400-8973-25f23794ccc6-nova-cells-global-config-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellrrnnv\" (UID: \"461c7940-1521-4400-8973-25f23794ccc6\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellrrnnv" Oct 09 15:56:47 crc kubenswrapper[4762]: I1009 15:56:47.405148 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/461c7940-1521-4400-8973-25f23794ccc6-nova-cells-global-config-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellrrnnv\" (UID: \"461c7940-1521-4400-8973-25f23794ccc6\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellrrnnv" Oct 09 15:56:47 crc kubenswrapper[4762]: I1009 15:56:47.410495 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/461c7940-1521-4400-8973-25f23794ccc6-nova-cell1-compute-config-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellrrnnv\" (UID: \"461c7940-1521-4400-8973-25f23794ccc6\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellrrnnv" Oct 09 15:56:47 crc kubenswrapper[4762]: I1009 15:56:47.411807 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/461c7940-1521-4400-8973-25f23794ccc6-nova-cell1-combined-ca-bundle\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellrrnnv\" (UID: \"461c7940-1521-4400-8973-25f23794ccc6\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellrrnnv" Oct 09 15:56:47 crc kubenswrapper[4762]: I1009 15:56:47.413770 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: 
\"kubernetes.io/secret/461c7940-1521-4400-8973-25f23794ccc6-inventory\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellrrnnv\" (UID: \"461c7940-1521-4400-8973-25f23794ccc6\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellrrnnv" Oct 09 15:56:47 crc kubenswrapper[4762]: I1009 15:56:47.415042 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/461c7940-1521-4400-8973-25f23794ccc6-nova-migration-ssh-key-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellrrnnv\" (UID: \"461c7940-1521-4400-8973-25f23794ccc6\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellrrnnv" Oct 09 15:56:47 crc kubenswrapper[4762]: I1009 15:56:47.419272 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/461c7940-1521-4400-8973-25f23794ccc6-ceph\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellrrnnv\" (UID: \"461c7940-1521-4400-8973-25f23794ccc6\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellrrnnv" Oct 09 15:56:47 crc kubenswrapper[4762]: I1009 15:56:47.421135 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/461c7940-1521-4400-8973-25f23794ccc6-nova-migration-ssh-key-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellrrnnv\" (UID: \"461c7940-1521-4400-8973-25f23794ccc6\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellrrnnv" Oct 09 15:56:47 crc kubenswrapper[4762]: I1009 15:56:47.421882 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/461c7940-1521-4400-8973-25f23794ccc6-ssh-key\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellrrnnv\" (UID: \"461c7940-1521-4400-8973-25f23794ccc6\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellrrnnv" Oct 09 15:56:47 crc kubenswrapper[4762]: I1009 15:56:47.423725 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/461c7940-1521-4400-8973-25f23794ccc6-nova-cell1-compute-config-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellrrnnv\" (UID: \"461c7940-1521-4400-8973-25f23794ccc6\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellrrnnv" Oct 09 15:56:47 crc kubenswrapper[4762]: I1009 15:56:47.429132 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-56kcn\" (UniqueName: \"kubernetes.io/projected/461c7940-1521-4400-8973-25f23794ccc6-kube-api-access-56kcn\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellrrnnv\" (UID: \"461c7940-1521-4400-8973-25f23794ccc6\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellrrnnv" Oct 09 15:56:47 crc kubenswrapper[4762]: I1009 15:56:47.626370 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellrrnnv" Oct 09 15:56:48 crc kubenswrapper[4762]: I1009 15:56:48.140022 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellrrnnv"] Oct 09 15:56:48 crc kubenswrapper[4762]: I1009 15:56:48.393115 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellrrnnv" event={"ID":"461c7940-1521-4400-8973-25f23794ccc6","Type":"ContainerStarted","Data":"14086c79c040293315e5580d3655bcf6d9dc25108c9d5f34bd8fb35b41e676c7"} Oct 09 15:56:48 crc kubenswrapper[4762]: I1009 15:56:48.965800 4762 scope.go:117] "RemoveContainer" containerID="915bf3aac40e436d24352cca144821f66a07852e961cfcafeb700aef3cb1b274" Oct 09 15:56:48 crc kubenswrapper[4762]: E1009 15:56:48.966394 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:56:50 crc kubenswrapper[4762]: I1009 15:56:50.412802 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellrrnnv" event={"ID":"461c7940-1521-4400-8973-25f23794ccc6","Type":"ContainerStarted","Data":"69703d170a7adf8cc4f1024382429941eab298b7420b13a7f16dea9cce064406"} Oct 09 15:56:50 crc kubenswrapper[4762]: I1009 15:56:50.441678 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellrrnnv" podStartSLOduration=3.007390339 podStartE2EDuration="3.441652369s" podCreationTimestamp="2025-10-09 15:56:47 +0000 UTC" firstStartedPulling="2025-10-09 15:56:48.143312678 +0000 UTC m=+9083.917103717" lastFinishedPulling="2025-10-09 15:56:48.577574708 +0000 UTC m=+9084.351365747" observedRunningTime="2025-10-09 15:56:50.429982804 +0000 UTC m=+9086.203773843" watchObservedRunningTime="2025-10-09 15:56:50.441652369 +0000 UTC m=+9086.215443398" Oct 09 15:56:57 crc kubenswrapper[4762]: E1009 15:56:57.100975 4762 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod66dac335_a776_429f_a37b_56ec1691e0c8.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf962d3eb_c1dd_4738_97a9_971f450fef59.slice\": RecentStats: unable to find data in memory cache]" Oct 09 15:57:01 crc kubenswrapper[4762]: I1009 15:57:01.965608 4762 scope.go:117] "RemoveContainer" containerID="915bf3aac40e436d24352cca144821f66a07852e961cfcafeb700aef3cb1b274" Oct 09 15:57:01 crc kubenswrapper[4762]: E1009 15:57:01.966464 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:57:07 crc kubenswrapper[4762]: 
E1009 15:57:07.380314 4762 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod66dac335_a776_429f_a37b_56ec1691e0c8.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf962d3eb_c1dd_4738_97a9_971f450fef59.slice\": RecentStats: unable to find data in memory cache]" Oct 09 15:57:12 crc kubenswrapper[4762]: I1009 15:57:12.965751 4762 scope.go:117] "RemoveContainer" containerID="915bf3aac40e436d24352cca144821f66a07852e961cfcafeb700aef3cb1b274" Oct 09 15:57:12 crc kubenswrapper[4762]: E1009 15:57:12.966426 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:57:17 crc kubenswrapper[4762]: E1009 15:57:17.653029 4762 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod66dac335_a776_429f_a37b_56ec1691e0c8.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf962d3eb_c1dd_4738_97a9_971f450fef59.slice\": RecentStats: unable to find data in memory cache]" Oct 09 15:57:25 crc kubenswrapper[4762]: I1009 15:57:25.965272 4762 scope.go:117] "RemoveContainer" containerID="915bf3aac40e436d24352cca144821f66a07852e961cfcafeb700aef3cb1b274" Oct 09 15:57:25 crc kubenswrapper[4762]: E1009 15:57:25.966178 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:57:38 crc kubenswrapper[4762]: I1009 15:57:38.965541 4762 scope.go:117] "RemoveContainer" containerID="915bf3aac40e436d24352cca144821f66a07852e961cfcafeb700aef3cb1b274" Oct 09 15:57:38 crc kubenswrapper[4762]: E1009 15:57:38.966315 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:57:52 crc kubenswrapper[4762]: I1009 15:57:52.966297 4762 scope.go:117] "RemoveContainer" containerID="915bf3aac40e436d24352cca144821f66a07852e961cfcafeb700aef3cb1b274" Oct 09 15:57:52 crc kubenswrapper[4762]: E1009 15:57:52.967136 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:58:03 crc kubenswrapper[4762]: I1009 15:58:03.965113 4762 scope.go:117] "RemoveContainer" containerID="915bf3aac40e436d24352cca144821f66a07852e961cfcafeb700aef3cb1b274" Oct 09 15:58:03 crc kubenswrapper[4762]: E1009 15:58:03.966045 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:58:15 crc kubenswrapper[4762]: I1009 15:58:15.966069 4762 scope.go:117] "RemoveContainer" containerID="915bf3aac40e436d24352cca144821f66a07852e961cfcafeb700aef3cb1b274" Oct 09 15:58:15 crc kubenswrapper[4762]: E1009 15:58:15.966962 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:58:30 crc kubenswrapper[4762]: I1009 15:58:30.965072 4762 scope.go:117] "RemoveContainer" containerID="915bf3aac40e436d24352cca144821f66a07852e961cfcafeb700aef3cb1b274" Oct 09 15:58:30 crc kubenswrapper[4762]: E1009 15:58:30.966941 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 15:58:45 crc kubenswrapper[4762]: I1009 15:58:45.966016 4762 scope.go:117] "RemoveContainer" containerID="915bf3aac40e436d24352cca144821f66a07852e961cfcafeb700aef3cb1b274" Oct 09 15:58:46 crc kubenswrapper[4762]: I1009 15:58:46.596643 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" event={"ID":"366049a3-acf6-488c-9f93-4557528d6d14","Type":"ContainerStarted","Data":"7b7cddc82fb99f27be5dd88f2999680a25d86f677e20418b9ab6366a429022cf"} Oct 09 16:00:00 crc kubenswrapper[4762]: I1009 16:00:00.149698 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29333760-cbxqx"] Oct 09 16:00:00 crc kubenswrapper[4762]: I1009 16:00:00.152075 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29333760-cbxqx" Oct 09 16:00:00 crc kubenswrapper[4762]: I1009 16:00:00.156274 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 09 16:00:00 crc kubenswrapper[4762]: I1009 16:00:00.156302 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 09 16:00:00 crc kubenswrapper[4762]: I1009 16:00:00.163281 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29333760-cbxqx"] Oct 09 16:00:00 crc kubenswrapper[4762]: I1009 16:00:00.232168 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3ac2c32b-ce32-4584-a002-91e4a510ce0c-config-volume\") pod \"collect-profiles-29333760-cbxqx\" (UID: \"3ac2c32b-ce32-4584-a002-91e4a510ce0c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333760-cbxqx" Oct 09 16:00:00 crc kubenswrapper[4762]: I1009 16:00:00.232289 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d2hlk\" (UniqueName: \"kubernetes.io/projected/3ac2c32b-ce32-4584-a002-91e4a510ce0c-kube-api-access-d2hlk\") pod \"collect-profiles-29333760-cbxqx\" (UID: \"3ac2c32b-ce32-4584-a002-91e4a510ce0c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333760-cbxqx" Oct 09 16:00:00 crc kubenswrapper[4762]: I1009 16:00:00.232341 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3ac2c32b-ce32-4584-a002-91e4a510ce0c-secret-volume\") pod \"collect-profiles-29333760-cbxqx\" (UID: \"3ac2c32b-ce32-4584-a002-91e4a510ce0c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333760-cbxqx" Oct 09 16:00:00 crc kubenswrapper[4762]: I1009 16:00:00.333923 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3ac2c32b-ce32-4584-a002-91e4a510ce0c-config-volume\") pod \"collect-profiles-29333760-cbxqx\" (UID: \"3ac2c32b-ce32-4584-a002-91e4a510ce0c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333760-cbxqx" Oct 09 16:00:00 crc kubenswrapper[4762]: I1009 16:00:00.334041 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d2hlk\" (UniqueName: \"kubernetes.io/projected/3ac2c32b-ce32-4584-a002-91e4a510ce0c-kube-api-access-d2hlk\") pod \"collect-profiles-29333760-cbxqx\" (UID: \"3ac2c32b-ce32-4584-a002-91e4a510ce0c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333760-cbxqx" Oct 09 16:00:00 crc kubenswrapper[4762]: I1009 16:00:00.334080 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3ac2c32b-ce32-4584-a002-91e4a510ce0c-secret-volume\") pod \"collect-profiles-29333760-cbxqx\" (UID: \"3ac2c32b-ce32-4584-a002-91e4a510ce0c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333760-cbxqx" Oct 09 16:00:00 crc kubenswrapper[4762]: I1009 16:00:00.335366 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3ac2c32b-ce32-4584-a002-91e4a510ce0c-config-volume\") pod 
\"collect-profiles-29333760-cbxqx\" (UID: \"3ac2c32b-ce32-4584-a002-91e4a510ce0c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333760-cbxqx" Oct 09 16:00:00 crc kubenswrapper[4762]: I1009 16:00:00.340468 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3ac2c32b-ce32-4584-a002-91e4a510ce0c-secret-volume\") pod \"collect-profiles-29333760-cbxqx\" (UID: \"3ac2c32b-ce32-4584-a002-91e4a510ce0c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333760-cbxqx" Oct 09 16:00:00 crc kubenswrapper[4762]: I1009 16:00:00.351303 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d2hlk\" (UniqueName: \"kubernetes.io/projected/3ac2c32b-ce32-4584-a002-91e4a510ce0c-kube-api-access-d2hlk\") pod \"collect-profiles-29333760-cbxqx\" (UID: \"3ac2c32b-ce32-4584-a002-91e4a510ce0c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333760-cbxqx" Oct 09 16:00:00 crc kubenswrapper[4762]: I1009 16:00:00.482360 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29333760-cbxqx" Oct 09 16:00:00 crc kubenswrapper[4762]: I1009 16:00:00.935257 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29333760-cbxqx"] Oct 09 16:00:00 crc kubenswrapper[4762]: W1009 16:00:00.938984 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3ac2c32b_ce32_4584_a002_91e4a510ce0c.slice/crio-fee8c54ca707c187edf2dcf5b8f6a8ad115875d2105318ee0aea782b1fe516ed WatchSource:0}: Error finding container fee8c54ca707c187edf2dcf5b8f6a8ad115875d2105318ee0aea782b1fe516ed: Status 404 returned error can't find the container with id fee8c54ca707c187edf2dcf5b8f6a8ad115875d2105318ee0aea782b1fe516ed Oct 09 16:00:01 crc kubenswrapper[4762]: I1009 16:00:01.322063 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29333760-cbxqx" event={"ID":"3ac2c32b-ce32-4584-a002-91e4a510ce0c","Type":"ContainerStarted","Data":"3282ba3c5f5c42ab082319068d18a276d889f047cbdcf137fcfe0acb8fb9585b"} Oct 09 16:00:01 crc kubenswrapper[4762]: I1009 16:00:01.322396 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29333760-cbxqx" event={"ID":"3ac2c32b-ce32-4584-a002-91e4a510ce0c","Type":"ContainerStarted","Data":"fee8c54ca707c187edf2dcf5b8f6a8ad115875d2105318ee0aea782b1fe516ed"} Oct 09 16:00:01 crc kubenswrapper[4762]: I1009 16:00:01.342867 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29333760-cbxqx" podStartSLOduration=1.3428466989999999 podStartE2EDuration="1.342846699s" podCreationTimestamp="2025-10-09 16:00:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 16:00:01.340021855 +0000 UTC m=+9277.113812904" watchObservedRunningTime="2025-10-09 16:00:01.342846699 +0000 UTC m=+9277.116637738" Oct 09 16:00:02 crc kubenswrapper[4762]: I1009 16:00:02.338230 4762 generic.go:334] "Generic (PLEG): container finished" podID="3ac2c32b-ce32-4584-a002-91e4a510ce0c" containerID="3282ba3c5f5c42ab082319068d18a276d889f047cbdcf137fcfe0acb8fb9585b" exitCode=0 Oct 09 16:00:02 crc kubenswrapper[4762]: I1009 
16:00:02.338332 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29333760-cbxqx" event={"ID":"3ac2c32b-ce32-4584-a002-91e4a510ce0c","Type":"ContainerDied","Data":"3282ba3c5f5c42ab082319068d18a276d889f047cbdcf137fcfe0acb8fb9585b"} Oct 09 16:00:03 crc kubenswrapper[4762]: I1009 16:00:03.722135 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29333760-cbxqx" Oct 09 16:00:03 crc kubenswrapper[4762]: I1009 16:00:03.806694 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3ac2c32b-ce32-4584-a002-91e4a510ce0c-secret-volume\") pod \"3ac2c32b-ce32-4584-a002-91e4a510ce0c\" (UID: \"3ac2c32b-ce32-4584-a002-91e4a510ce0c\") " Oct 09 16:00:03 crc kubenswrapper[4762]: I1009 16:00:03.806745 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3ac2c32b-ce32-4584-a002-91e4a510ce0c-config-volume\") pod \"3ac2c32b-ce32-4584-a002-91e4a510ce0c\" (UID: \"3ac2c32b-ce32-4584-a002-91e4a510ce0c\") " Oct 09 16:00:03 crc kubenswrapper[4762]: I1009 16:00:03.807014 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d2hlk\" (UniqueName: \"kubernetes.io/projected/3ac2c32b-ce32-4584-a002-91e4a510ce0c-kube-api-access-d2hlk\") pod \"3ac2c32b-ce32-4584-a002-91e4a510ce0c\" (UID: \"3ac2c32b-ce32-4584-a002-91e4a510ce0c\") " Oct 09 16:00:03 crc kubenswrapper[4762]: I1009 16:00:03.807836 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3ac2c32b-ce32-4584-a002-91e4a510ce0c-config-volume" (OuterVolumeSpecName: "config-volume") pod "3ac2c32b-ce32-4584-a002-91e4a510ce0c" (UID: "3ac2c32b-ce32-4584-a002-91e4a510ce0c"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 16:00:03 crc kubenswrapper[4762]: I1009 16:00:03.814468 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ac2c32b-ce32-4584-a002-91e4a510ce0c-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "3ac2c32b-ce32-4584-a002-91e4a510ce0c" (UID: "3ac2c32b-ce32-4584-a002-91e4a510ce0c"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 16:00:03 crc kubenswrapper[4762]: I1009 16:00:03.814870 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ac2c32b-ce32-4584-a002-91e4a510ce0c-kube-api-access-d2hlk" (OuterVolumeSpecName: "kube-api-access-d2hlk") pod "3ac2c32b-ce32-4584-a002-91e4a510ce0c" (UID: "3ac2c32b-ce32-4584-a002-91e4a510ce0c"). InnerVolumeSpecName "kube-api-access-d2hlk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 16:00:03 crc kubenswrapper[4762]: I1009 16:00:03.909123 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d2hlk\" (UniqueName: \"kubernetes.io/projected/3ac2c32b-ce32-4584-a002-91e4a510ce0c-kube-api-access-d2hlk\") on node \"crc\" DevicePath \"\"" Oct 09 16:00:03 crc kubenswrapper[4762]: I1009 16:00:03.909170 4762 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3ac2c32b-ce32-4584-a002-91e4a510ce0c-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 09 16:00:03 crc kubenswrapper[4762]: I1009 16:00:03.909180 4762 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3ac2c32b-ce32-4584-a002-91e4a510ce0c-config-volume\") on node \"crc\" DevicePath \"\"" Oct 09 16:00:04 crc kubenswrapper[4762]: I1009 16:00:04.362550 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29333760-cbxqx" event={"ID":"3ac2c32b-ce32-4584-a002-91e4a510ce0c","Type":"ContainerDied","Data":"fee8c54ca707c187edf2dcf5b8f6a8ad115875d2105318ee0aea782b1fe516ed"} Oct 09 16:00:04 crc kubenswrapper[4762]: I1009 16:00:04.362849 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fee8c54ca707c187edf2dcf5b8f6a8ad115875d2105318ee0aea782b1fe516ed" Oct 09 16:00:04 crc kubenswrapper[4762]: I1009 16:00:04.362674 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29333760-cbxqx" Oct 09 16:00:04 crc kubenswrapper[4762]: I1009 16:00:04.574772 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29333715-tk74t"] Oct 09 16:00:04 crc kubenswrapper[4762]: I1009 16:00:04.588792 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29333715-tk74t"] Oct 09 16:00:04 crc kubenswrapper[4762]: I1009 16:00:04.984964 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1f8ed55b-7571-4665-ba42-64d1e3b6d7a5" path="/var/lib/kubelet/pods/1f8ed55b-7571-4665-ba42-64d1e3b6d7a5/volumes" Oct 09 16:00:39 crc kubenswrapper[4762]: I1009 16:00:39.471617 4762 scope.go:117] "RemoveContainer" containerID="a62d3cd2090d9919489b9ded7ed05f65c087f6d697159e7171d5fb03c20577c6" Oct 09 16:01:00 crc kubenswrapper[4762]: I1009 16:01:00.158942 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cron-29333761-hmxz6"] Oct 09 16:01:00 crc kubenswrapper[4762]: E1009 16:01:00.160131 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3ac2c32b-ce32-4584-a002-91e4a510ce0c" containerName="collect-profiles" Oct 09 16:01:00 crc kubenswrapper[4762]: I1009 16:01:00.160152 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="3ac2c32b-ce32-4584-a002-91e4a510ce0c" containerName="collect-profiles" Oct 09 16:01:00 crc kubenswrapper[4762]: I1009 16:01:00.160442 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="3ac2c32b-ce32-4584-a002-91e4a510ce0c" containerName="collect-profiles" Oct 09 16:01:00 crc kubenswrapper[4762]: I1009 16:01:00.161534 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29333761-hmxz6" Oct 09 16:01:00 crc kubenswrapper[4762]: I1009 16:01:00.173018 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29333761-hmxz6"] Oct 09 16:01:00 crc kubenswrapper[4762]: I1009 16:01:00.228807 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fzmm4\" (UniqueName: \"kubernetes.io/projected/93a0a4a8-5710-4928-865e-ecb19bfdb7b0-kube-api-access-fzmm4\") pod \"keystone-cron-29333761-hmxz6\" (UID: \"93a0a4a8-5710-4928-865e-ecb19bfdb7b0\") " pod="openstack/keystone-cron-29333761-hmxz6" Oct 09 16:01:00 crc kubenswrapper[4762]: I1009 16:01:00.229151 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/93a0a4a8-5710-4928-865e-ecb19bfdb7b0-combined-ca-bundle\") pod \"keystone-cron-29333761-hmxz6\" (UID: \"93a0a4a8-5710-4928-865e-ecb19bfdb7b0\") " pod="openstack/keystone-cron-29333761-hmxz6" Oct 09 16:01:00 crc kubenswrapper[4762]: I1009 16:01:00.229276 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/93a0a4a8-5710-4928-865e-ecb19bfdb7b0-config-data\") pod \"keystone-cron-29333761-hmxz6\" (UID: \"93a0a4a8-5710-4928-865e-ecb19bfdb7b0\") " pod="openstack/keystone-cron-29333761-hmxz6" Oct 09 16:01:00 crc kubenswrapper[4762]: I1009 16:01:00.229308 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/93a0a4a8-5710-4928-865e-ecb19bfdb7b0-fernet-keys\") pod \"keystone-cron-29333761-hmxz6\" (UID: \"93a0a4a8-5710-4928-865e-ecb19bfdb7b0\") " pod="openstack/keystone-cron-29333761-hmxz6" Oct 09 16:01:00 crc kubenswrapper[4762]: I1009 16:01:00.330381 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fzmm4\" (UniqueName: \"kubernetes.io/projected/93a0a4a8-5710-4928-865e-ecb19bfdb7b0-kube-api-access-fzmm4\") pod \"keystone-cron-29333761-hmxz6\" (UID: \"93a0a4a8-5710-4928-865e-ecb19bfdb7b0\") " pod="openstack/keystone-cron-29333761-hmxz6" Oct 09 16:01:00 crc kubenswrapper[4762]: I1009 16:01:00.330430 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/93a0a4a8-5710-4928-865e-ecb19bfdb7b0-combined-ca-bundle\") pod \"keystone-cron-29333761-hmxz6\" (UID: \"93a0a4a8-5710-4928-865e-ecb19bfdb7b0\") " pod="openstack/keystone-cron-29333761-hmxz6" Oct 09 16:01:00 crc kubenswrapper[4762]: I1009 16:01:00.330499 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/93a0a4a8-5710-4928-865e-ecb19bfdb7b0-config-data\") pod \"keystone-cron-29333761-hmxz6\" (UID: \"93a0a4a8-5710-4928-865e-ecb19bfdb7b0\") " pod="openstack/keystone-cron-29333761-hmxz6" Oct 09 16:01:00 crc kubenswrapper[4762]: I1009 16:01:00.330524 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/93a0a4a8-5710-4928-865e-ecb19bfdb7b0-fernet-keys\") pod \"keystone-cron-29333761-hmxz6\" (UID: \"93a0a4a8-5710-4928-865e-ecb19bfdb7b0\") " pod="openstack/keystone-cron-29333761-hmxz6" Oct 09 16:01:00 crc kubenswrapper[4762]: I1009 16:01:00.336540 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/93a0a4a8-5710-4928-865e-ecb19bfdb7b0-fernet-keys\") pod \"keystone-cron-29333761-hmxz6\" (UID: \"93a0a4a8-5710-4928-865e-ecb19bfdb7b0\") " pod="openstack/keystone-cron-29333761-hmxz6" Oct 09 16:01:00 crc kubenswrapper[4762]: I1009 16:01:00.336547 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/93a0a4a8-5710-4928-865e-ecb19bfdb7b0-combined-ca-bundle\") pod \"keystone-cron-29333761-hmxz6\" (UID: \"93a0a4a8-5710-4928-865e-ecb19bfdb7b0\") " pod="openstack/keystone-cron-29333761-hmxz6" Oct 09 16:01:00 crc kubenswrapper[4762]: I1009 16:01:00.338037 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/93a0a4a8-5710-4928-865e-ecb19bfdb7b0-config-data\") pod \"keystone-cron-29333761-hmxz6\" (UID: \"93a0a4a8-5710-4928-865e-ecb19bfdb7b0\") " pod="openstack/keystone-cron-29333761-hmxz6" Oct 09 16:01:00 crc kubenswrapper[4762]: I1009 16:01:00.347503 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fzmm4\" (UniqueName: \"kubernetes.io/projected/93a0a4a8-5710-4928-865e-ecb19bfdb7b0-kube-api-access-fzmm4\") pod \"keystone-cron-29333761-hmxz6\" (UID: \"93a0a4a8-5710-4928-865e-ecb19bfdb7b0\") " pod="openstack/keystone-cron-29333761-hmxz6" Oct 09 16:01:00 crc kubenswrapper[4762]: I1009 16:01:00.486922 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29333761-hmxz6" Oct 09 16:01:00 crc kubenswrapper[4762]: I1009 16:01:00.954424 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29333761-hmxz6"] Oct 09 16:01:01 crc kubenswrapper[4762]: I1009 16:01:01.935981 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29333761-hmxz6" event={"ID":"93a0a4a8-5710-4928-865e-ecb19bfdb7b0","Type":"ContainerStarted","Data":"fed655e7e540f07f0a7fa4b4fd554ba542cdadf3a361e54aa1ed313c13dae928"} Oct 09 16:01:01 crc kubenswrapper[4762]: I1009 16:01:01.936677 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29333761-hmxz6" event={"ID":"93a0a4a8-5710-4928-865e-ecb19bfdb7b0","Type":"ContainerStarted","Data":"50bacd44acd74272300334fa01f2c5ac1ffc6c5bd3466dcd83770c3a71ee17ca"} Oct 09 16:01:01 crc kubenswrapper[4762]: I1009 16:01:01.961458 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-cron-29333761-hmxz6" podStartSLOduration=1.961417853 podStartE2EDuration="1.961417853s" podCreationTimestamp="2025-10-09 16:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 16:01:01.954687788 +0000 UTC m=+9337.728478847" watchObservedRunningTime="2025-10-09 16:01:01.961417853 +0000 UTC m=+9337.735208902" Oct 09 16:01:04 crc kubenswrapper[4762]: I1009 16:01:04.967801 4762 generic.go:334] "Generic (PLEG): container finished" podID="93a0a4a8-5710-4928-865e-ecb19bfdb7b0" containerID="fed655e7e540f07f0a7fa4b4fd554ba542cdadf3a361e54aa1ed313c13dae928" exitCode=0 Oct 09 16:01:04 crc kubenswrapper[4762]: I1009 16:01:04.978920 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29333761-hmxz6" event={"ID":"93a0a4a8-5710-4928-865e-ecb19bfdb7b0","Type":"ContainerDied","Data":"fed655e7e540f07f0a7fa4b4fd554ba542cdadf3a361e54aa1ed313c13dae928"} Oct 09 16:01:06 crc kubenswrapper[4762]: 
I1009 16:01:06.358374 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29333761-hmxz6" Oct 09 16:01:06 crc kubenswrapper[4762]: I1009 16:01:06.369517 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/93a0a4a8-5710-4928-865e-ecb19bfdb7b0-config-data\") pod \"93a0a4a8-5710-4928-865e-ecb19bfdb7b0\" (UID: \"93a0a4a8-5710-4928-865e-ecb19bfdb7b0\") " Oct 09 16:01:06 crc kubenswrapper[4762]: I1009 16:01:06.369599 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/93a0a4a8-5710-4928-865e-ecb19bfdb7b0-fernet-keys\") pod \"93a0a4a8-5710-4928-865e-ecb19bfdb7b0\" (UID: \"93a0a4a8-5710-4928-865e-ecb19bfdb7b0\") " Oct 09 16:01:06 crc kubenswrapper[4762]: I1009 16:01:06.369721 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/93a0a4a8-5710-4928-865e-ecb19bfdb7b0-combined-ca-bundle\") pod \"93a0a4a8-5710-4928-865e-ecb19bfdb7b0\" (UID: \"93a0a4a8-5710-4928-865e-ecb19bfdb7b0\") " Oct 09 16:01:06 crc kubenswrapper[4762]: I1009 16:01:06.369760 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fzmm4\" (UniqueName: \"kubernetes.io/projected/93a0a4a8-5710-4928-865e-ecb19bfdb7b0-kube-api-access-fzmm4\") pod \"93a0a4a8-5710-4928-865e-ecb19bfdb7b0\" (UID: \"93a0a4a8-5710-4928-865e-ecb19bfdb7b0\") " Oct 09 16:01:06 crc kubenswrapper[4762]: I1009 16:01:06.376848 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/93a0a4a8-5710-4928-865e-ecb19bfdb7b0-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "93a0a4a8-5710-4928-865e-ecb19bfdb7b0" (UID: "93a0a4a8-5710-4928-865e-ecb19bfdb7b0"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 16:01:06 crc kubenswrapper[4762]: I1009 16:01:06.380880 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/93a0a4a8-5710-4928-865e-ecb19bfdb7b0-kube-api-access-fzmm4" (OuterVolumeSpecName: "kube-api-access-fzmm4") pod "93a0a4a8-5710-4928-865e-ecb19bfdb7b0" (UID: "93a0a4a8-5710-4928-865e-ecb19bfdb7b0"). InnerVolumeSpecName "kube-api-access-fzmm4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 16:01:06 crc kubenswrapper[4762]: I1009 16:01:06.417788 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/93a0a4a8-5710-4928-865e-ecb19bfdb7b0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "93a0a4a8-5710-4928-865e-ecb19bfdb7b0" (UID: "93a0a4a8-5710-4928-865e-ecb19bfdb7b0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 16:01:06 crc kubenswrapper[4762]: I1009 16:01:06.440537 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/93a0a4a8-5710-4928-865e-ecb19bfdb7b0-config-data" (OuterVolumeSpecName: "config-data") pod "93a0a4a8-5710-4928-865e-ecb19bfdb7b0" (UID: "93a0a4a8-5710-4928-865e-ecb19bfdb7b0"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 16:01:06 crc kubenswrapper[4762]: I1009 16:01:06.472929 4762 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/93a0a4a8-5710-4928-865e-ecb19bfdb7b0-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 16:01:06 crc kubenswrapper[4762]: I1009 16:01:06.472961 4762 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/93a0a4a8-5710-4928-865e-ecb19bfdb7b0-fernet-keys\") on node \"crc\" DevicePath \"\"" Oct 09 16:01:06 crc kubenswrapper[4762]: I1009 16:01:06.472969 4762 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/93a0a4a8-5710-4928-865e-ecb19bfdb7b0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 16:01:06 crc kubenswrapper[4762]: I1009 16:01:06.472980 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fzmm4\" (UniqueName: \"kubernetes.io/projected/93a0a4a8-5710-4928-865e-ecb19bfdb7b0-kube-api-access-fzmm4\") on node \"crc\" DevicePath \"\"" Oct 09 16:01:06 crc kubenswrapper[4762]: I1009 16:01:06.989048 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29333761-hmxz6" event={"ID":"93a0a4a8-5710-4928-865e-ecb19bfdb7b0","Type":"ContainerDied","Data":"50bacd44acd74272300334fa01f2c5ac1ffc6c5bd3466dcd83770c3a71ee17ca"} Oct 09 16:01:06 crc kubenswrapper[4762]: I1009 16:01:06.989443 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="50bacd44acd74272300334fa01f2c5ac1ffc6c5bd3466dcd83770c3a71ee17ca" Oct 09 16:01:06 crc kubenswrapper[4762]: I1009 16:01:06.989103 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29333761-hmxz6" Oct 09 16:01:11 crc kubenswrapper[4762]: I1009 16:01:11.969685 4762 patch_prober.go:28] interesting pod/machine-config-daemon-5v6hv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 16:01:11 crc kubenswrapper[4762]: I1009 16:01:11.970439 4762 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 16:01:41 crc kubenswrapper[4762]: I1009 16:01:41.969588 4762 patch_prober.go:28] interesting pod/machine-config-daemon-5v6hv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 16:01:41 crc kubenswrapper[4762]: I1009 16:01:41.970129 4762 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 16:02:11 crc kubenswrapper[4762]: I1009 16:02:11.969777 4762 patch_prober.go:28] interesting pod/machine-config-daemon-5v6hv container/machine-config-daemon namespace/openshift-machine-config-operator: 
Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 16:02:11 crc kubenswrapper[4762]: I1009 16:02:11.970360 4762 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 16:02:11 crc kubenswrapper[4762]: I1009 16:02:11.970396 4762 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" Oct 09 16:02:11 crc kubenswrapper[4762]: I1009 16:02:11.970807 4762 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"7b7cddc82fb99f27be5dd88f2999680a25d86f677e20418b9ab6366a429022cf"} pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 09 16:02:11 crc kubenswrapper[4762]: I1009 16:02:11.970862 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" containerID="cri-o://7b7cddc82fb99f27be5dd88f2999680a25d86f677e20418b9ab6366a429022cf" gracePeriod=600 Oct 09 16:02:12 crc kubenswrapper[4762]: I1009 16:02:12.655501 4762 generic.go:334] "Generic (PLEG): container finished" podID="366049a3-acf6-488c-9f93-4557528d6d14" containerID="7b7cddc82fb99f27be5dd88f2999680a25d86f677e20418b9ab6366a429022cf" exitCode=0 Oct 09 16:02:12 crc kubenswrapper[4762]: I1009 16:02:12.655560 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" event={"ID":"366049a3-acf6-488c-9f93-4557528d6d14","Type":"ContainerDied","Data":"7b7cddc82fb99f27be5dd88f2999680a25d86f677e20418b9ab6366a429022cf"} Oct 09 16:02:12 crc kubenswrapper[4762]: I1009 16:02:12.656338 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" event={"ID":"366049a3-acf6-488c-9f93-4557528d6d14","Type":"ContainerStarted","Data":"be54fd088343d7e0163b1cf4c29e43ee76092488ba51af7bc31481982c5d4a46"} Oct 09 16:02:12 crc kubenswrapper[4762]: I1009 16:02:12.656366 4762 scope.go:117] "RemoveContainer" containerID="915bf3aac40e436d24352cca144821f66a07852e961cfcafeb700aef3cb1b274" Oct 09 16:04:26 crc kubenswrapper[4762]: I1009 16:04:26.116135 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-2ctkx"] Oct 09 16:04:26 crc kubenswrapper[4762]: E1009 16:04:26.117086 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="93a0a4a8-5710-4928-865e-ecb19bfdb7b0" containerName="keystone-cron" Oct 09 16:04:26 crc kubenswrapper[4762]: I1009 16:04:26.117104 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="93a0a4a8-5710-4928-865e-ecb19bfdb7b0" containerName="keystone-cron" Oct 09 16:04:26 crc kubenswrapper[4762]: I1009 16:04:26.117356 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="93a0a4a8-5710-4928-865e-ecb19bfdb7b0" containerName="keystone-cron" Oct 09 16:04:26 crc kubenswrapper[4762]: I1009 16:04:26.119208 4762 util.go:30] "No sandbox for pod can be 
found. Need to start a new one" pod="openshift-marketplace/certified-operators-2ctkx"
Oct 09 16:04:26 crc kubenswrapper[4762]: I1009 16:04:26.135400 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-2ctkx"]
Oct 09 16:04:26 crc kubenswrapper[4762]: I1009 16:04:26.186928 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8b29f993-3147-44c7-a8ba-5f324f72469f-utilities\") pod \"certified-operators-2ctkx\" (UID: \"8b29f993-3147-44c7-a8ba-5f324f72469f\") " pod="openshift-marketplace/certified-operators-2ctkx"
Oct 09 16:04:26 crc kubenswrapper[4762]: I1009 16:04:26.186993 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8b29f993-3147-44c7-a8ba-5f324f72469f-catalog-content\") pod \"certified-operators-2ctkx\" (UID: \"8b29f993-3147-44c7-a8ba-5f324f72469f\") " pod="openshift-marketplace/certified-operators-2ctkx"
Oct 09 16:04:26 crc kubenswrapper[4762]: I1009 16:04:26.187023 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dlldl\" (UniqueName: \"kubernetes.io/projected/8b29f993-3147-44c7-a8ba-5f324f72469f-kube-api-access-dlldl\") pod \"certified-operators-2ctkx\" (UID: \"8b29f993-3147-44c7-a8ba-5f324f72469f\") " pod="openshift-marketplace/certified-operators-2ctkx"
Oct 09 16:04:26 crc kubenswrapper[4762]: I1009 16:04:26.288650 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8b29f993-3147-44c7-a8ba-5f324f72469f-utilities\") pod \"certified-operators-2ctkx\" (UID: \"8b29f993-3147-44c7-a8ba-5f324f72469f\") " pod="openshift-marketplace/certified-operators-2ctkx"
Oct 09 16:04:26 crc kubenswrapper[4762]: I1009 16:04:26.288706 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8b29f993-3147-44c7-a8ba-5f324f72469f-catalog-content\") pod \"certified-operators-2ctkx\" (UID: \"8b29f993-3147-44c7-a8ba-5f324f72469f\") " pod="openshift-marketplace/certified-operators-2ctkx"
Oct 09 16:04:26 crc kubenswrapper[4762]: I1009 16:04:26.288730 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dlldl\" (UniqueName: \"kubernetes.io/projected/8b29f993-3147-44c7-a8ba-5f324f72469f-kube-api-access-dlldl\") pod \"certified-operators-2ctkx\" (UID: \"8b29f993-3147-44c7-a8ba-5f324f72469f\") " pod="openshift-marketplace/certified-operators-2ctkx"
Oct 09 16:04:26 crc kubenswrapper[4762]: I1009 16:04:26.289378 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8b29f993-3147-44c7-a8ba-5f324f72469f-utilities\") pod \"certified-operators-2ctkx\" (UID: \"8b29f993-3147-44c7-a8ba-5f324f72469f\") " pod="openshift-marketplace/certified-operators-2ctkx"
Oct 09 16:04:26 crc kubenswrapper[4762]: I1009 16:04:26.289405 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8b29f993-3147-44c7-a8ba-5f324f72469f-catalog-content\") pod \"certified-operators-2ctkx\" (UID: \"8b29f993-3147-44c7-a8ba-5f324f72469f\") " pod="openshift-marketplace/certified-operators-2ctkx"
Oct 09 16:04:26 crc kubenswrapper[4762]: I1009 16:04:26.311504 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dlldl\" (UniqueName: \"kubernetes.io/projected/8b29f993-3147-44c7-a8ba-5f324f72469f-kube-api-access-dlldl\") pod \"certified-operators-2ctkx\" (UID: \"8b29f993-3147-44c7-a8ba-5f324f72469f\") " pod="openshift-marketplace/certified-operators-2ctkx"
Oct 09 16:04:26 crc kubenswrapper[4762]: I1009 16:04:26.452006 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2ctkx"
Oct 09 16:04:27 crc kubenswrapper[4762]: I1009 16:04:27.021717 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-2ctkx"]
Oct 09 16:04:27 crc kubenswrapper[4762]: I1009 16:04:27.999448 4762 generic.go:334] "Generic (PLEG): container finished" podID="8b29f993-3147-44c7-a8ba-5f324f72469f" containerID="0854a5130ab4fa04020ea963e79d6865fa74d49fac2402189fe9576114509187" exitCode=0
Oct 09 16:04:28 crc kubenswrapper[4762]: I1009 16:04:27.999496 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2ctkx" event={"ID":"8b29f993-3147-44c7-a8ba-5f324f72469f","Type":"ContainerDied","Data":"0854a5130ab4fa04020ea963e79d6865fa74d49fac2402189fe9576114509187"}
Oct 09 16:04:28 crc kubenswrapper[4762]: I1009 16:04:28.001557 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2ctkx" event={"ID":"8b29f993-3147-44c7-a8ba-5f324f72469f","Type":"ContainerStarted","Data":"b20b0970476c80f578d5585ef008d0832d3db4caa54d999d3a38b38031ad90df"}
Oct 09 16:04:28 crc kubenswrapper[4762]: I1009 16:04:28.001914 4762 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Oct 09 16:04:30 crc kubenswrapper[4762]: I1009 16:04:30.026867 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2ctkx" event={"ID":"8b29f993-3147-44c7-a8ba-5f324f72469f","Type":"ContainerStarted","Data":"5311d2bc9a13cb86a922e9110e3b9343605ae7402e7bab39c68f03ef3d570605"}
Oct 09 16:04:31 crc kubenswrapper[4762]: I1009 16:04:31.037682 4762 generic.go:334] "Generic (PLEG): container finished" podID="8b29f993-3147-44c7-a8ba-5f324f72469f" containerID="5311d2bc9a13cb86a922e9110e3b9343605ae7402e7bab39c68f03ef3d570605" exitCode=0
Oct 09 16:04:31 crc kubenswrapper[4762]: I1009 16:04:31.037773 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2ctkx" event={"ID":"8b29f993-3147-44c7-a8ba-5f324f72469f","Type":"ContainerDied","Data":"5311d2bc9a13cb86a922e9110e3b9343605ae7402e7bab39c68f03ef3d570605"}
Oct 09 16:04:32 crc kubenswrapper[4762]: I1009 16:04:32.051726 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2ctkx" event={"ID":"8b29f993-3147-44c7-a8ba-5f324f72469f","Type":"ContainerStarted","Data":"22fdfcce1ba5fea6562ed538c1565a0854dfd3289baa538f094ab3b2916ab03d"}
Oct 09 16:04:32 crc kubenswrapper[4762]: I1009 16:04:32.080937 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-2ctkx" podStartSLOduration=2.627427089 podStartE2EDuration="6.080917662s" podCreationTimestamp="2025-10-09 16:04:26 +0000 UTC" firstStartedPulling="2025-10-09 16:04:28.001559041 +0000 UTC m=+9543.775350110" lastFinishedPulling="2025-10-09 16:04:31.455049644 +0000 UTC m=+9547.228840683" observedRunningTime="2025-10-09 16:04:32.077072792 +0000 UTC m=+9547.850863831" watchObservedRunningTime="2025-10-09 16:04:32.080917662 +0000 UTC m=+9547.854708701"
Oct 09 16:04:36 crc kubenswrapper[4762]: I1009 16:04:36.452738 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-2ctkx"
Oct 09 16:04:36 crc kubenswrapper[4762]: I1009 16:04:36.453351 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-2ctkx"
Oct 09 16:04:36 crc kubenswrapper[4762]: I1009 16:04:36.500600 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-2ctkx"
Oct 09 16:04:37 crc kubenswrapper[4762]: I1009 16:04:37.149682 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-2ctkx"
Oct 09 16:04:37 crc kubenswrapper[4762]: I1009 16:04:37.201086 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-2ctkx"]
Oct 09 16:04:39 crc kubenswrapper[4762]: I1009 16:04:39.119289 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-2ctkx" podUID="8b29f993-3147-44c7-a8ba-5f324f72469f" containerName="registry-server" containerID="cri-o://22fdfcce1ba5fea6562ed538c1565a0854dfd3289baa538f094ab3b2916ab03d" gracePeriod=2
Oct 09 16:04:39 crc kubenswrapper[4762]: I1009 16:04:39.586558 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2ctkx"
Oct 09 16:04:39 crc kubenswrapper[4762]: I1009 16:04:39.683844 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8b29f993-3147-44c7-a8ba-5f324f72469f-utilities\") pod \"8b29f993-3147-44c7-a8ba-5f324f72469f\" (UID: \"8b29f993-3147-44c7-a8ba-5f324f72469f\") "
Oct 09 16:04:39 crc kubenswrapper[4762]: I1009 16:04:39.684056 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8b29f993-3147-44c7-a8ba-5f324f72469f-catalog-content\") pod \"8b29f993-3147-44c7-a8ba-5f324f72469f\" (UID: \"8b29f993-3147-44c7-a8ba-5f324f72469f\") "
Oct 09 16:04:39 crc kubenswrapper[4762]: I1009 16:04:39.684104 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dlldl\" (UniqueName: \"kubernetes.io/projected/8b29f993-3147-44c7-a8ba-5f324f72469f-kube-api-access-dlldl\") pod \"8b29f993-3147-44c7-a8ba-5f324f72469f\" (UID: \"8b29f993-3147-44c7-a8ba-5f324f72469f\") "
Oct 09 16:04:39 crc kubenswrapper[4762]: I1009 16:04:39.685152 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8b29f993-3147-44c7-a8ba-5f324f72469f-utilities" (OuterVolumeSpecName: "utilities") pod "8b29f993-3147-44c7-a8ba-5f324f72469f" (UID: "8b29f993-3147-44c7-a8ba-5f324f72469f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 09 16:04:39 crc kubenswrapper[4762]: I1009 16:04:39.700073 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8b29f993-3147-44c7-a8ba-5f324f72469f-kube-api-access-dlldl" (OuterVolumeSpecName: "kube-api-access-dlldl") pod "8b29f993-3147-44c7-a8ba-5f324f72469f" (UID: "8b29f993-3147-44c7-a8ba-5f324f72469f"). InnerVolumeSpecName "kube-api-access-dlldl". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 09 16:04:39 crc kubenswrapper[4762]: I1009 16:04:39.740332 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8b29f993-3147-44c7-a8ba-5f324f72469f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8b29f993-3147-44c7-a8ba-5f324f72469f" (UID: "8b29f993-3147-44c7-a8ba-5f324f72469f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 09 16:04:39 crc kubenswrapper[4762]: I1009 16:04:39.788038 4762 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8b29f993-3147-44c7-a8ba-5f324f72469f-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 09 16:04:39 crc kubenswrapper[4762]: I1009 16:04:39.788088 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dlldl\" (UniqueName: \"kubernetes.io/projected/8b29f993-3147-44c7-a8ba-5f324f72469f-kube-api-access-dlldl\") on node \"crc\" DevicePath \"\""
Oct 09 16:04:39 crc kubenswrapper[4762]: I1009 16:04:39.788104 4762 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8b29f993-3147-44c7-a8ba-5f324f72469f-utilities\") on node \"crc\" DevicePath \"\""
Oct 09 16:04:40 crc kubenswrapper[4762]: I1009 16:04:40.141188 4762 generic.go:334] "Generic (PLEG): container finished" podID="8b29f993-3147-44c7-a8ba-5f324f72469f" containerID="22fdfcce1ba5fea6562ed538c1565a0854dfd3289baa538f094ab3b2916ab03d" exitCode=0
Oct 09 16:04:40 crc kubenswrapper[4762]: I1009 16:04:40.141251 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2ctkx"
Oct 09 16:04:40 crc kubenswrapper[4762]: I1009 16:04:40.141288 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2ctkx" event={"ID":"8b29f993-3147-44c7-a8ba-5f324f72469f","Type":"ContainerDied","Data":"22fdfcce1ba5fea6562ed538c1565a0854dfd3289baa538f094ab3b2916ab03d"}
Oct 09 16:04:40 crc kubenswrapper[4762]: I1009 16:04:40.141686 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2ctkx" event={"ID":"8b29f993-3147-44c7-a8ba-5f324f72469f","Type":"ContainerDied","Data":"b20b0970476c80f578d5585ef008d0832d3db4caa54d999d3a38b38031ad90df"}
Oct 09 16:04:40 crc kubenswrapper[4762]: I1009 16:04:40.141710 4762 scope.go:117] "RemoveContainer" containerID="22fdfcce1ba5fea6562ed538c1565a0854dfd3289baa538f094ab3b2916ab03d"
Oct 09 16:04:40 crc kubenswrapper[4762]: I1009 16:04:40.175643 4762 scope.go:117] "RemoveContainer" containerID="5311d2bc9a13cb86a922e9110e3b9343605ae7402e7bab39c68f03ef3d570605"
Oct 09 16:04:40 crc kubenswrapper[4762]: I1009 16:04:40.186090 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-2ctkx"]
Oct 09 16:04:40 crc kubenswrapper[4762]: I1009 16:04:40.196182 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-2ctkx"]
Oct 09 16:04:40 crc kubenswrapper[4762]: I1009 16:04:40.219675 4762 scope.go:117] "RemoveContainer" containerID="0854a5130ab4fa04020ea963e79d6865fa74d49fac2402189fe9576114509187"
Oct 09 16:04:40 crc kubenswrapper[4762]: I1009 16:04:40.250060 4762 scope.go:117] "RemoveContainer" containerID="22fdfcce1ba5fea6562ed538c1565a0854dfd3289baa538f094ab3b2916ab03d"
Oct 09 16:04:40 crc kubenswrapper[4762]: E1009 16:04:40.250507 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"22fdfcce1ba5fea6562ed538c1565a0854dfd3289baa538f094ab3b2916ab03d\": container with ID starting with 22fdfcce1ba5fea6562ed538c1565a0854dfd3289baa538f094ab3b2916ab03d not found: ID does not exist" containerID="22fdfcce1ba5fea6562ed538c1565a0854dfd3289baa538f094ab3b2916ab03d"
Oct 09 16:04:40 crc kubenswrapper[4762]: I1009 16:04:40.250554 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"22fdfcce1ba5fea6562ed538c1565a0854dfd3289baa538f094ab3b2916ab03d"} err="failed to get container status \"22fdfcce1ba5fea6562ed538c1565a0854dfd3289baa538f094ab3b2916ab03d\": rpc error: code = NotFound desc = could not find container \"22fdfcce1ba5fea6562ed538c1565a0854dfd3289baa538f094ab3b2916ab03d\": container with ID starting with 22fdfcce1ba5fea6562ed538c1565a0854dfd3289baa538f094ab3b2916ab03d not found: ID does not exist"
Oct 09 16:04:40 crc kubenswrapper[4762]: I1009 16:04:40.250578 4762 scope.go:117] "RemoveContainer" containerID="5311d2bc9a13cb86a922e9110e3b9343605ae7402e7bab39c68f03ef3d570605"
Oct 09 16:04:40 crc kubenswrapper[4762]: E1009 16:04:40.250989 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5311d2bc9a13cb86a922e9110e3b9343605ae7402e7bab39c68f03ef3d570605\": container with ID starting with 5311d2bc9a13cb86a922e9110e3b9343605ae7402e7bab39c68f03ef3d570605 not found: ID does not exist" containerID="5311d2bc9a13cb86a922e9110e3b9343605ae7402e7bab39c68f03ef3d570605"
Oct 09 16:04:40 crc kubenswrapper[4762]: I1009 16:04:40.251026 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5311d2bc9a13cb86a922e9110e3b9343605ae7402e7bab39c68f03ef3d570605"} err="failed to get container status \"5311d2bc9a13cb86a922e9110e3b9343605ae7402e7bab39c68f03ef3d570605\": rpc error: code = NotFound desc = could not find container \"5311d2bc9a13cb86a922e9110e3b9343605ae7402e7bab39c68f03ef3d570605\": container with ID starting with 5311d2bc9a13cb86a922e9110e3b9343605ae7402e7bab39c68f03ef3d570605 not found: ID does not exist"
Oct 09 16:04:40 crc kubenswrapper[4762]: I1009 16:04:40.251047 4762 scope.go:117] "RemoveContainer" containerID="0854a5130ab4fa04020ea963e79d6865fa74d49fac2402189fe9576114509187"
Oct 09 16:04:40 crc kubenswrapper[4762]: E1009 16:04:40.251328 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0854a5130ab4fa04020ea963e79d6865fa74d49fac2402189fe9576114509187\": container with ID starting with 0854a5130ab4fa04020ea963e79d6865fa74d49fac2402189fe9576114509187 not found: ID does not exist" containerID="0854a5130ab4fa04020ea963e79d6865fa74d49fac2402189fe9576114509187"
Oct 09 16:04:40 crc kubenswrapper[4762]: I1009 16:04:40.251361 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0854a5130ab4fa04020ea963e79d6865fa74d49fac2402189fe9576114509187"} err="failed to get container status \"0854a5130ab4fa04020ea963e79d6865fa74d49fac2402189fe9576114509187\": rpc error: code = NotFound desc = could not find container \"0854a5130ab4fa04020ea963e79d6865fa74d49fac2402189fe9576114509187\": container with ID starting with 0854a5130ab4fa04020ea963e79d6865fa74d49fac2402189fe9576114509187 not found: ID does not exist"
Oct 09 16:04:40 crc kubenswrapper[4762]: I1009 16:04:40.982272 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8b29f993-3147-44c7-a8ba-5f324f72469f" path="/var/lib/kubelet/pods/8b29f993-3147-44c7-a8ba-5f324f72469f/volumes"
Oct 09 16:04:41 crc kubenswrapper[4762]: I1009 16:04:41.969919 4762 patch_prober.go:28] interesting pod/machine-config-daemon-5v6hv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 09 16:04:41 crc kubenswrapper[4762]: I1009 16:04:41.970332 4762 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 09 16:04:53 crc kubenswrapper[4762]: I1009 16:04:53.495546 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-kkzc9"]
Oct 09 16:04:53 crc kubenswrapper[4762]: E1009 16:04:53.500484 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8b29f993-3147-44c7-a8ba-5f324f72469f" containerName="extract-utilities"
Oct 09 16:04:53 crc kubenswrapper[4762]: I1009 16:04:53.500517 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="8b29f993-3147-44c7-a8ba-5f324f72469f" containerName="extract-utilities"
Oct 09 16:04:53 crc kubenswrapper[4762]: E1009 16:04:53.500572 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8b29f993-3147-44c7-a8ba-5f324f72469f" containerName="extract-content"
Oct 09 16:04:53 crc kubenswrapper[4762]: I1009 16:04:53.500582 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="8b29f993-3147-44c7-a8ba-5f324f72469f" containerName="extract-content"
Oct 09 16:04:53 crc kubenswrapper[4762]: E1009 16:04:53.500610 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8b29f993-3147-44c7-a8ba-5f324f72469f" containerName="registry-server"
Oct 09 16:04:53 crc kubenswrapper[4762]: I1009 16:04:53.500618 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="8b29f993-3147-44c7-a8ba-5f324f72469f" containerName="registry-server"
Oct 09 16:04:53 crc kubenswrapper[4762]: I1009 16:04:53.501547 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="8b29f993-3147-44c7-a8ba-5f324f72469f" containerName="registry-server"
Oct 09 16:04:53 crc kubenswrapper[4762]: I1009 16:04:53.507280 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kkzc9"
Oct 09 16:04:53 crc kubenswrapper[4762]: I1009 16:04:53.535359 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-kkzc9"]
Oct 09 16:04:53 crc kubenswrapper[4762]: I1009 16:04:53.584020 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7bed9871-3078-4f7f-8718-779061331030-utilities\") pod \"redhat-marketplace-kkzc9\" (UID: \"7bed9871-3078-4f7f-8718-779061331030\") " pod="openshift-marketplace/redhat-marketplace-kkzc9"
Oct 09 16:04:53 crc kubenswrapper[4762]: I1009 16:04:53.584355 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fzsfg\" (UniqueName: \"kubernetes.io/projected/7bed9871-3078-4f7f-8718-779061331030-kube-api-access-fzsfg\") pod \"redhat-marketplace-kkzc9\" (UID: \"7bed9871-3078-4f7f-8718-779061331030\") " pod="openshift-marketplace/redhat-marketplace-kkzc9"
Oct 09 16:04:53 crc kubenswrapper[4762]: I1009 16:04:53.584492 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7bed9871-3078-4f7f-8718-779061331030-catalog-content\") pod \"redhat-marketplace-kkzc9\" (UID: \"7bed9871-3078-4f7f-8718-779061331030\") " pod="openshift-marketplace/redhat-marketplace-kkzc9"
Oct 09 16:04:53 crc kubenswrapper[4762]: I1009 16:04:53.686618 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7bed9871-3078-4f7f-8718-779061331030-utilities\") pod \"redhat-marketplace-kkzc9\" (UID: \"7bed9871-3078-4f7f-8718-779061331030\") " pod="openshift-marketplace/redhat-marketplace-kkzc9"
Oct 09 16:04:53 crc kubenswrapper[4762]: I1009 16:04:53.686946 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fzsfg\" (UniqueName: \"kubernetes.io/projected/7bed9871-3078-4f7f-8718-779061331030-kube-api-access-fzsfg\") pod \"redhat-marketplace-kkzc9\" (UID: \"7bed9871-3078-4f7f-8718-779061331030\") " pod="openshift-marketplace/redhat-marketplace-kkzc9"
Oct 09 16:04:53 crc kubenswrapper[4762]: I1009 16:04:53.687078 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7bed9871-3078-4f7f-8718-779061331030-catalog-content\") pod \"redhat-marketplace-kkzc9\" (UID: \"7bed9871-3078-4f7f-8718-779061331030\") " pod="openshift-marketplace/redhat-marketplace-kkzc9"
Oct 09 16:04:53 crc kubenswrapper[4762]: I1009 16:04:53.687025 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7bed9871-3078-4f7f-8718-779061331030-utilities\") pod \"redhat-marketplace-kkzc9\" (UID: \"7bed9871-3078-4f7f-8718-779061331030\") " pod="openshift-marketplace/redhat-marketplace-kkzc9"
Oct 09 16:04:53 crc kubenswrapper[4762]: I1009 16:04:53.687316 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7bed9871-3078-4f7f-8718-779061331030-catalog-content\") pod \"redhat-marketplace-kkzc9\" (UID: \"7bed9871-3078-4f7f-8718-779061331030\") " pod="openshift-marketplace/redhat-marketplace-kkzc9"
Oct 09 16:04:53 crc kubenswrapper[4762]: I1009 16:04:53.710898 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fzsfg\" (UniqueName: \"kubernetes.io/projected/7bed9871-3078-4f7f-8718-779061331030-kube-api-access-fzsfg\") pod \"redhat-marketplace-kkzc9\" (UID: \"7bed9871-3078-4f7f-8718-779061331030\") " pod="openshift-marketplace/redhat-marketplace-kkzc9"
Oct 09 16:04:53 crc kubenswrapper[4762]: I1009 16:04:53.836262 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kkzc9"
Oct 09 16:04:54 crc kubenswrapper[4762]: I1009 16:04:54.361382 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-kkzc9"]
Oct 09 16:04:55 crc kubenswrapper[4762]: I1009 16:04:55.284697 4762 generic.go:334] "Generic (PLEG): container finished" podID="7bed9871-3078-4f7f-8718-779061331030" containerID="308df55543e5021ee44d2cf67f784f8e01710268360abfe79fd40525daab2ffb" exitCode=0
Oct 09 16:04:55 crc kubenswrapper[4762]: I1009 16:04:55.284751 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kkzc9" event={"ID":"7bed9871-3078-4f7f-8718-779061331030","Type":"ContainerDied","Data":"308df55543e5021ee44d2cf67f784f8e01710268360abfe79fd40525daab2ffb"}
Oct 09 16:04:55 crc kubenswrapper[4762]: I1009 16:04:55.285407 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kkzc9" event={"ID":"7bed9871-3078-4f7f-8718-779061331030","Type":"ContainerStarted","Data":"4a5e7bd836871bd3cfe9c6dc46bd376510ace21866fb4341fb7163b0c57e5489"}
Oct 09 16:04:57 crc kubenswrapper[4762]: I1009 16:04:57.305802 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kkzc9" event={"ID":"7bed9871-3078-4f7f-8718-779061331030","Type":"ContainerStarted","Data":"342f7c5fe22d0c4929e14ab29fe4279ea559deaac7a96f5a02d40bfbd8e4df6c"}
Oct 09 16:04:58 crc kubenswrapper[4762]: I1009 16:04:58.328947 4762 generic.go:334] "Generic (PLEG): container finished" podID="7bed9871-3078-4f7f-8718-779061331030" containerID="342f7c5fe22d0c4929e14ab29fe4279ea559deaac7a96f5a02d40bfbd8e4df6c" exitCode=0
Oct 09 16:04:58 crc kubenswrapper[4762]: I1009 16:04:58.329300 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kkzc9" event={"ID":"7bed9871-3078-4f7f-8718-779061331030","Type":"ContainerDied","Data":"342f7c5fe22d0c4929e14ab29fe4279ea559deaac7a96f5a02d40bfbd8e4df6c"}
Oct 09 16:05:00 crc kubenswrapper[4762]: I1009 16:05:00.353624 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kkzc9" event={"ID":"7bed9871-3078-4f7f-8718-779061331030","Type":"ContainerStarted","Data":"29cb1824f11fdc1f2c1333eb2d11a37e5ef15cd6dc07871466bc5a21ac829e5a"}
Oct 09 16:05:00 crc kubenswrapper[4762]: I1009 16:05:00.380821 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-kkzc9" podStartSLOduration=3.251427758 podStartE2EDuration="7.380796937s" podCreationTimestamp="2025-10-09 16:04:53 +0000 UTC" firstStartedPulling="2025-10-09 16:04:55.28695088 +0000 UTC m=+9571.060741919" lastFinishedPulling="2025-10-09 16:04:59.416320059 +0000 UTC m=+9575.190111098" observedRunningTime="2025-10-09 16:05:00.372627974 +0000 UTC m=+9576.146419023" watchObservedRunningTime="2025-10-09 16:05:00.380796937 +0000 UTC m=+9576.154587976"
Oct 09 16:05:03 crc kubenswrapper[4762]: I1009 16:05:03.837906 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-kkzc9"
Oct 09 16:05:03 crc kubenswrapper[4762]: I1009 16:05:03.838491 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-kkzc9"
Oct 09 16:05:03 crc kubenswrapper[4762]: I1009 16:05:03.887234 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-kkzc9"
Oct 09 16:05:04 crc kubenswrapper[4762]: I1009 16:05:04.445542 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-kkzc9"
Oct 09 16:05:04 crc kubenswrapper[4762]: I1009 16:05:04.500894 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-kkzc9"]
Oct 09 16:05:06 crc kubenswrapper[4762]: I1009 16:05:06.416336 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-kkzc9" podUID="7bed9871-3078-4f7f-8718-779061331030" containerName="registry-server" containerID="cri-o://29cb1824f11fdc1f2c1333eb2d11a37e5ef15cd6dc07871466bc5a21ac829e5a" gracePeriod=2
Oct 09 16:05:06 crc kubenswrapper[4762]: I1009 16:05:06.905273 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kkzc9"
Oct 09 16:05:07 crc kubenswrapper[4762]: I1009 16:05:07.101198 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7bed9871-3078-4f7f-8718-779061331030-catalog-content\") pod \"7bed9871-3078-4f7f-8718-779061331030\" (UID: \"7bed9871-3078-4f7f-8718-779061331030\") "
Oct 09 16:05:07 crc kubenswrapper[4762]: I1009 16:05:07.101674 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fzsfg\" (UniqueName: \"kubernetes.io/projected/7bed9871-3078-4f7f-8718-779061331030-kube-api-access-fzsfg\") pod \"7bed9871-3078-4f7f-8718-779061331030\" (UID: \"7bed9871-3078-4f7f-8718-779061331030\") "
Oct 09 16:05:07 crc kubenswrapper[4762]: I1009 16:05:07.101826 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7bed9871-3078-4f7f-8718-779061331030-utilities\") pod \"7bed9871-3078-4f7f-8718-779061331030\" (UID: \"7bed9871-3078-4f7f-8718-779061331030\") "
Oct 09 16:05:07 crc kubenswrapper[4762]: I1009 16:05:07.103491 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7bed9871-3078-4f7f-8718-779061331030-utilities" (OuterVolumeSpecName: "utilities") pod "7bed9871-3078-4f7f-8718-779061331030" (UID: "7bed9871-3078-4f7f-8718-779061331030"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 09 16:05:07 crc kubenswrapper[4762]: I1009 16:05:07.108204 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bed9871-3078-4f7f-8718-779061331030-kube-api-access-fzsfg" (OuterVolumeSpecName: "kube-api-access-fzsfg") pod "7bed9871-3078-4f7f-8718-779061331030" (UID: "7bed9871-3078-4f7f-8718-779061331030"). InnerVolumeSpecName "kube-api-access-fzsfg". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 09 16:05:07 crc kubenswrapper[4762]: I1009 16:05:07.113570 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7bed9871-3078-4f7f-8718-779061331030-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7bed9871-3078-4f7f-8718-779061331030" (UID: "7bed9871-3078-4f7f-8718-779061331030"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 09 16:05:07 crc kubenswrapper[4762]: I1009 16:05:07.204868 4762 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7bed9871-3078-4f7f-8718-779061331030-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 09 16:05:07 crc kubenswrapper[4762]: I1009 16:05:07.204906 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fzsfg\" (UniqueName: \"kubernetes.io/projected/7bed9871-3078-4f7f-8718-779061331030-kube-api-access-fzsfg\") on node \"crc\" DevicePath \"\""
Oct 09 16:05:07 crc kubenswrapper[4762]: I1009 16:05:07.204918 4762 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7bed9871-3078-4f7f-8718-779061331030-utilities\") on node \"crc\" DevicePath \"\""
Oct 09 16:05:07 crc kubenswrapper[4762]: I1009 16:05:07.433732 4762 generic.go:334] "Generic (PLEG): container finished" podID="7bed9871-3078-4f7f-8718-779061331030" containerID="29cb1824f11fdc1f2c1333eb2d11a37e5ef15cd6dc07871466bc5a21ac829e5a" exitCode=0
Oct 09 16:05:07 crc kubenswrapper[4762]: I1009 16:05:07.433785 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kkzc9" event={"ID":"7bed9871-3078-4f7f-8718-779061331030","Type":"ContainerDied","Data":"29cb1824f11fdc1f2c1333eb2d11a37e5ef15cd6dc07871466bc5a21ac829e5a"}
Oct 09 16:05:07 crc kubenswrapper[4762]: I1009 16:05:07.433816 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kkzc9" event={"ID":"7bed9871-3078-4f7f-8718-779061331030","Type":"ContainerDied","Data":"4a5e7bd836871bd3cfe9c6dc46bd376510ace21866fb4341fb7163b0c57e5489"}
Oct 09 16:05:07 crc kubenswrapper[4762]: I1009 16:05:07.433853 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kkzc9"
Oct 09 16:05:07 crc kubenswrapper[4762]: I1009 16:05:07.433836 4762 scope.go:117] "RemoveContainer" containerID="29cb1824f11fdc1f2c1333eb2d11a37e5ef15cd6dc07871466bc5a21ac829e5a"
Oct 09 16:05:07 crc kubenswrapper[4762]: I1009 16:05:07.467289 4762 scope.go:117] "RemoveContainer" containerID="342f7c5fe22d0c4929e14ab29fe4279ea559deaac7a96f5a02d40bfbd8e4df6c"
Oct 09 16:05:07 crc kubenswrapper[4762]: I1009 16:05:07.478735 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-kkzc9"]
Oct 09 16:05:07 crc kubenswrapper[4762]: I1009 16:05:07.491553 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-kkzc9"]
Oct 09 16:05:07 crc kubenswrapper[4762]: I1009 16:05:07.503019 4762 scope.go:117] "RemoveContainer" containerID="308df55543e5021ee44d2cf67f784f8e01710268360abfe79fd40525daab2ffb"
Oct 09 16:05:07 crc kubenswrapper[4762]: I1009 16:05:07.546924 4762 scope.go:117] "RemoveContainer" containerID="29cb1824f11fdc1f2c1333eb2d11a37e5ef15cd6dc07871466bc5a21ac829e5a"
Oct 09 16:05:07 crc kubenswrapper[4762]: E1009 16:05:07.550968 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"29cb1824f11fdc1f2c1333eb2d11a37e5ef15cd6dc07871466bc5a21ac829e5a\": container with ID starting with 29cb1824f11fdc1f2c1333eb2d11a37e5ef15cd6dc07871466bc5a21ac829e5a not found: ID does not exist" containerID="29cb1824f11fdc1f2c1333eb2d11a37e5ef15cd6dc07871466bc5a21ac829e5a"
Oct 09 16:05:07 crc kubenswrapper[4762]: I1009 16:05:07.551016 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"29cb1824f11fdc1f2c1333eb2d11a37e5ef15cd6dc07871466bc5a21ac829e5a"} err="failed to get container status \"29cb1824f11fdc1f2c1333eb2d11a37e5ef15cd6dc07871466bc5a21ac829e5a\": rpc error: code = NotFound desc = could not find container \"29cb1824f11fdc1f2c1333eb2d11a37e5ef15cd6dc07871466bc5a21ac829e5a\": container with ID starting with 29cb1824f11fdc1f2c1333eb2d11a37e5ef15cd6dc07871466bc5a21ac829e5a not found: ID does not exist"
Oct 09 16:05:07 crc kubenswrapper[4762]: I1009 16:05:07.551041 4762 scope.go:117] "RemoveContainer" containerID="342f7c5fe22d0c4929e14ab29fe4279ea559deaac7a96f5a02d40bfbd8e4df6c"
Oct 09 16:05:07 crc kubenswrapper[4762]: E1009 16:05:07.551571 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"342f7c5fe22d0c4929e14ab29fe4279ea559deaac7a96f5a02d40bfbd8e4df6c\": container with ID starting with 342f7c5fe22d0c4929e14ab29fe4279ea559deaac7a96f5a02d40bfbd8e4df6c not found: ID does not exist" containerID="342f7c5fe22d0c4929e14ab29fe4279ea559deaac7a96f5a02d40bfbd8e4df6c"
Oct 09 16:05:07 crc kubenswrapper[4762]: I1009 16:05:07.551655 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"342f7c5fe22d0c4929e14ab29fe4279ea559deaac7a96f5a02d40bfbd8e4df6c"} err="failed to get container status \"342f7c5fe22d0c4929e14ab29fe4279ea559deaac7a96f5a02d40bfbd8e4df6c\": rpc error: code = NotFound desc = could not find container \"342f7c5fe22d0c4929e14ab29fe4279ea559deaac7a96f5a02d40bfbd8e4df6c\": container with ID starting with 342f7c5fe22d0c4929e14ab29fe4279ea559deaac7a96f5a02d40bfbd8e4df6c not found: ID does not exist"
Oct 09 16:05:07 crc kubenswrapper[4762]: I1009 16:05:07.551698 4762 scope.go:117] "RemoveContainer" containerID="308df55543e5021ee44d2cf67f784f8e01710268360abfe79fd40525daab2ffb"
Oct 09 16:05:07 crc kubenswrapper[4762]: E1009 16:05:07.552436 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"308df55543e5021ee44d2cf67f784f8e01710268360abfe79fd40525daab2ffb\": container with ID starting with 308df55543e5021ee44d2cf67f784f8e01710268360abfe79fd40525daab2ffb not found: ID does not exist" containerID="308df55543e5021ee44d2cf67f784f8e01710268360abfe79fd40525daab2ffb"
Oct 09 16:05:07 crc kubenswrapper[4762]: I1009 16:05:07.552477 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"308df55543e5021ee44d2cf67f784f8e01710268360abfe79fd40525daab2ffb"} err="failed to get container status \"308df55543e5021ee44d2cf67f784f8e01710268360abfe79fd40525daab2ffb\": rpc error: code = NotFound desc = could not find container \"308df55543e5021ee44d2cf67f784f8e01710268360abfe79fd40525daab2ffb\": container with ID starting with 308df55543e5021ee44d2cf67f784f8e01710268360abfe79fd40525daab2ffb not found: ID does not exist"
Oct 09 16:05:08 crc kubenswrapper[4762]: I1009 16:05:08.977008 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bed9871-3078-4f7f-8718-779061331030" path="/var/lib/kubelet/pods/7bed9871-3078-4f7f-8718-779061331030/volumes"
Oct 09 16:05:11 crc kubenswrapper[4762]: I1009 16:05:11.969449 4762 patch_prober.go:28] interesting pod/machine-config-daemon-5v6hv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 09 16:05:11 crc kubenswrapper[4762]: I1009 16:05:11.970027 4762 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 09 16:05:41 crc kubenswrapper[4762]: I1009 16:05:41.969825 4762 patch_prober.go:28] interesting pod/machine-config-daemon-5v6hv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 09 16:05:41 crc kubenswrapper[4762]: I1009 16:05:41.970134 4762 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 09 16:05:41 crc kubenswrapper[4762]: I1009 16:05:41.970174 4762 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv"
Oct 09 16:05:41 crc kubenswrapper[4762]: I1009 16:05:41.970816 4762 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"be54fd088343d7e0163b1cf4c29e43ee76092488ba51af7bc31481982c5d4a46"} pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Oct 09 16:05:41 crc kubenswrapper[4762]: I1009 16:05:41.970865 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" containerID="cri-o://be54fd088343d7e0163b1cf4c29e43ee76092488ba51af7bc31481982c5d4a46" gracePeriod=600
Oct 09 16:05:42 crc kubenswrapper[4762]: E1009 16:05:42.164353 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14"
Oct 09 16:05:42 crc kubenswrapper[4762]: I1009 16:05:42.820293 4762 generic.go:334] "Generic (PLEG): container finished" podID="366049a3-acf6-488c-9f93-4557528d6d14" containerID="be54fd088343d7e0163b1cf4c29e43ee76092488ba51af7bc31481982c5d4a46" exitCode=0
Oct 09 16:05:42 crc kubenswrapper[4762]: I1009 16:05:42.820723 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" event={"ID":"366049a3-acf6-488c-9f93-4557528d6d14","Type":"ContainerDied","Data":"be54fd088343d7e0163b1cf4c29e43ee76092488ba51af7bc31481982c5d4a46"}
Oct 09 16:05:42 crc kubenswrapper[4762]: I1009 16:05:42.820910 4762 scope.go:117] "RemoveContainer" containerID="7b7cddc82fb99f27be5dd88f2999680a25d86f677e20418b9ab6366a429022cf"
Oct 09 16:05:42 crc kubenswrapper[4762]: I1009 16:05:42.822041 4762 scope.go:117] "RemoveContainer" containerID="be54fd088343d7e0163b1cf4c29e43ee76092488ba51af7bc31481982c5d4a46"
Oct 09 16:05:42 crc kubenswrapper[4762]: E1009 16:05:42.822598 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14"
Oct 09 16:05:55 crc kubenswrapper[4762]: I1009 16:05:55.965435 4762 scope.go:117] "RemoveContainer" containerID="be54fd088343d7e0163b1cf4c29e43ee76092488ba51af7bc31481982c5d4a46"
Oct 09 16:05:55 crc kubenswrapper[4762]: E1009 16:05:55.966306 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14"
Oct 09 16:06:10 crc kubenswrapper[4762]: I1009 16:06:10.965897 4762 scope.go:117] "RemoveContainer" containerID="be54fd088343d7e0163b1cf4c29e43ee76092488ba51af7bc31481982c5d4a46"
Oct 09 16:06:10 crc kubenswrapper[4762]: E1009 16:06:10.966781 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14"
Oct 09 16:06:22 crc kubenswrapper[4762]: I1009 16:06:22.966009 4762 scope.go:117] "RemoveContainer" containerID="be54fd088343d7e0163b1cf4c29e43ee76092488ba51af7bc31481982c5d4a46"
Oct 09 16:06:22 crc kubenswrapper[4762]: E1009 16:06:22.966871 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14"
Oct 09 16:06:36 crc kubenswrapper[4762]: I1009 16:06:36.966288 4762 scope.go:117] "RemoveContainer" containerID="be54fd088343d7e0163b1cf4c29e43ee76092488ba51af7bc31481982c5d4a46"
Oct 09 16:06:36 crc kubenswrapper[4762]: E1009 16:06:36.967943 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14"
Oct 09 16:06:49 crc kubenswrapper[4762]: I1009 16:06:49.966420 4762 scope.go:117] "RemoveContainer" containerID="be54fd088343d7e0163b1cf4c29e43ee76092488ba51af7bc31481982c5d4a46"
Oct 09 16:06:49 crc kubenswrapper[4762]: E1009 16:06:49.967477 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14"
Oct 09 16:07:00 crc kubenswrapper[4762]: I1009 16:07:00.965467 4762 scope.go:117] "RemoveContainer" containerID="be54fd088343d7e0163b1cf4c29e43ee76092488ba51af7bc31481982c5d4a46"
Oct 09 16:07:00 crc kubenswrapper[4762]: E1009 16:07:00.966291 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14"
Oct 09 16:07:07 crc kubenswrapper[4762]: I1009 16:07:07.053942 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-qqpcp"]
Oct 09 16:07:07 crc kubenswrapper[4762]: E1009 16:07:07.054862 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7bed9871-3078-4f7f-8718-779061331030" containerName="extract-content"
Oct 09 16:07:07 crc kubenswrapper[4762]: I1009 16:07:07.054875 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="7bed9871-3078-4f7f-8718-779061331030" containerName="extract-content"
Oct 09 16:07:07 crc kubenswrapper[4762]: E1009 16:07:07.054890 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7bed9871-3078-4f7f-8718-779061331030" containerName="registry-server"
Oct 09 16:07:07 crc kubenswrapper[4762]: I1009 16:07:07.054895 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="7bed9871-3078-4f7f-8718-779061331030" containerName="registry-server"
Oct 09 16:07:07 crc kubenswrapper[4762]: E1009 16:07:07.054914 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7bed9871-3078-4f7f-8718-779061331030" containerName="extract-utilities"
Oct 09 16:07:07 crc kubenswrapper[4762]: I1009 16:07:07.054921 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="7bed9871-3078-4f7f-8718-779061331030" containerName="extract-utilities"
Oct 09 16:07:07 crc kubenswrapper[4762]: I1009 16:07:07.055153 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="7bed9871-3078-4f7f-8718-779061331030" containerName="registry-server"
Oct 09 16:07:07 crc kubenswrapper[4762]: I1009 16:07:07.057252 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-qqpcp"
Oct 09 16:07:07 crc kubenswrapper[4762]: I1009 16:07:07.078030 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-qqpcp"]
Oct 09 16:07:07 crc kubenswrapper[4762]: I1009 16:07:07.253218 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-r6nps"]
Oct 09 16:07:07 crc kubenswrapper[4762]: I1009 16:07:07.257557 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4gtn6\" (UniqueName: \"kubernetes.io/projected/c40376a5-9d4d-4ad5-9e17-d008f4b70e23-kube-api-access-4gtn6\") pod \"redhat-operators-qqpcp\" (UID: \"c40376a5-9d4d-4ad5-9e17-d008f4b70e23\") " pod="openshift-marketplace/redhat-operators-qqpcp"
Oct 09 16:07:07 crc kubenswrapper[4762]: I1009 16:07:07.257716 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c40376a5-9d4d-4ad5-9e17-d008f4b70e23-utilities\") pod \"redhat-operators-qqpcp\" (UID: \"c40376a5-9d4d-4ad5-9e17-d008f4b70e23\") " pod="openshift-marketplace/redhat-operators-qqpcp"
Oct 09 16:07:07 crc kubenswrapper[4762]: I1009 16:07:07.257851 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c40376a5-9d4d-4ad5-9e17-d008f4b70e23-catalog-content\") pod \"redhat-operators-qqpcp\" (UID: \"c40376a5-9d4d-4ad5-9e17-d008f4b70e23\") " pod="openshift-marketplace/redhat-operators-qqpcp"
Oct 09 16:07:07 crc kubenswrapper[4762]: I1009 16:07:07.258443 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-r6nps"
Oct 09 16:07:07 crc kubenswrapper[4762]: I1009 16:07:07.265179 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-r6nps"]
Oct 09 16:07:07 crc kubenswrapper[4762]: I1009 16:07:07.360521 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4gtn6\" (UniqueName: \"kubernetes.io/projected/c40376a5-9d4d-4ad5-9e17-d008f4b70e23-kube-api-access-4gtn6\") pod \"redhat-operators-qqpcp\" (UID: \"c40376a5-9d4d-4ad5-9e17-d008f4b70e23\") " pod="openshift-marketplace/redhat-operators-qqpcp"
Oct 09 16:07:07 crc kubenswrapper[4762]: I1009 16:07:07.360686 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b69b59e3-a47e-4e15-9439-f8a072154016-catalog-content\") pod \"community-operators-r6nps\" (UID: \"b69b59e3-a47e-4e15-9439-f8a072154016\") " pod="openshift-marketplace/community-operators-r6nps"
Oct 09 16:07:07 crc kubenswrapper[4762]: I1009 16:07:07.360739 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b69b59e3-a47e-4e15-9439-f8a072154016-utilities\") pod \"community-operators-r6nps\" (UID: \"b69b59e3-a47e-4e15-9439-f8a072154016\") " pod="openshift-marketplace/community-operators-r6nps"
Oct 09 16:07:07 crc kubenswrapper[4762]: I1009 16:07:07.360777 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c40376a5-9d4d-4ad5-9e17-d008f4b70e23-utilities\") pod \"redhat-operators-qqpcp\" (UID: \"c40376a5-9d4d-4ad5-9e17-d008f4b70e23\") " pod="openshift-marketplace/redhat-operators-qqpcp"
Oct 09 16:07:07 crc kubenswrapper[4762]: I1009 16:07:07.361691 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b7r5w\" (UniqueName: \"kubernetes.io/projected/b69b59e3-a47e-4e15-9439-f8a072154016-kube-api-access-b7r5w\") pod \"community-operators-r6nps\" (UID: \"b69b59e3-a47e-4e15-9439-f8a072154016\") " pod="openshift-marketplace/community-operators-r6nps"
Oct 09 16:07:07 crc kubenswrapper[4762]: I1009 16:07:07.361783 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c40376a5-9d4d-4ad5-9e17-d008f4b70e23-catalog-content\") pod \"redhat-operators-qqpcp\" (UID: \"c40376a5-9d4d-4ad5-9e17-d008f4b70e23\") " pod="openshift-marketplace/redhat-operators-qqpcp"
Oct 09 16:07:07 crc kubenswrapper[4762]: I1009 16:07:07.362112 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c40376a5-9d4d-4ad5-9e17-d008f4b70e23-utilities\") pod \"redhat-operators-qqpcp\" (UID: \"c40376a5-9d4d-4ad5-9e17-d008f4b70e23\") " pod="openshift-marketplace/redhat-operators-qqpcp"
Oct 09 16:07:07 crc kubenswrapper[4762]: I1009 16:07:07.362276 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c40376a5-9d4d-4ad5-9e17-d008f4b70e23-catalog-content\") pod \"redhat-operators-qqpcp\" (UID: \"c40376a5-9d4d-4ad5-9e17-d008f4b70e23\") " pod="openshift-marketplace/redhat-operators-qqpcp"
Oct 09 16:07:07 crc kubenswrapper[4762]: I1009 16:07:07.389425 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4gtn6\" (UniqueName: \"kubernetes.io/projected/c40376a5-9d4d-4ad5-9e17-d008f4b70e23-kube-api-access-4gtn6\") pod \"redhat-operators-qqpcp\" (UID: \"c40376a5-9d4d-4ad5-9e17-d008f4b70e23\") " pod="openshift-marketplace/redhat-operators-qqpcp"
Oct 09 16:07:07 crc kubenswrapper[4762]: I1009 16:07:07.464296 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b69b59e3-a47e-4e15-9439-f8a072154016-catalog-content\") pod \"community-operators-r6nps\" (UID: \"b69b59e3-a47e-4e15-9439-f8a072154016\") " pod="openshift-marketplace/community-operators-r6nps"
Oct 09 16:07:07 crc kubenswrapper[4762]: I1009 16:07:07.464380 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b69b59e3-a47e-4e15-9439-f8a072154016-utilities\") pod \"community-operators-r6nps\" (UID: \"b69b59e3-a47e-4e15-9439-f8a072154016\") " pod="openshift-marketplace/community-operators-r6nps"
Oct 09 16:07:07 crc kubenswrapper[4762]: I1009 16:07:07.464534 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b7r5w\" (UniqueName: \"kubernetes.io/projected/b69b59e3-a47e-4e15-9439-f8a072154016-kube-api-access-b7r5w\") pod \"community-operators-r6nps\" (UID: \"b69b59e3-a47e-4e15-9439-f8a072154016\") " pod="openshift-marketplace/community-operators-r6nps"
Oct 09 16:07:07 crc kubenswrapper[4762]: I1009 16:07:07.464812 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b69b59e3-a47e-4e15-9439-f8a072154016-catalog-content\") pod \"community-operators-r6nps\" (UID: \"b69b59e3-a47e-4e15-9439-f8a072154016\") " pod="openshift-marketplace/community-operators-r6nps"
Oct 09 16:07:07 crc kubenswrapper[4762]: I1009 16:07:07.464925 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b69b59e3-a47e-4e15-9439-f8a072154016-utilities\") pod \"community-operators-r6nps\" (UID: \"b69b59e3-a47e-4e15-9439-f8a072154016\") " pod="openshift-marketplace/community-operators-r6nps"
Oct 09 16:07:07 crc kubenswrapper[4762]: I1009 16:07:07.482756 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b7r5w\" (UniqueName: \"kubernetes.io/projected/b69b59e3-a47e-4e15-9439-f8a072154016-kube-api-access-b7r5w\") pod \"community-operators-r6nps\" (UID: \"b69b59e3-a47e-4e15-9439-f8a072154016\") " pod="openshift-marketplace/community-operators-r6nps"
Oct 09 16:07:07 crc kubenswrapper[4762]: I1009 16:07:07.585197 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-r6nps"
Oct 09 16:07:07 crc kubenswrapper[4762]: I1009 16:07:07.678440 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-qqpcp"
Oct 09 16:07:08 crc kubenswrapper[4762]: I1009 16:07:08.310297 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-r6nps"]
Oct 09 16:07:08 crc kubenswrapper[4762]: W1009 16:07:08.382136 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc40376a5_9d4d_4ad5_9e17_d008f4b70e23.slice/crio-d0a60631c3e63c7f60f3f8f74585d298609c6775cea4a1e9d248f11bba80cac7 WatchSource:0}: Error finding container d0a60631c3e63c7f60f3f8f74585d298609c6775cea4a1e9d248f11bba80cac7: Status 404 returned error can't find the container with id d0a60631c3e63c7f60f3f8f74585d298609c6775cea4a1e9d248f11bba80cac7
Oct 09 16:07:08 crc kubenswrapper[4762]: I1009 16:07:08.386212 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-qqpcp"]
Oct 09 16:07:08 crc kubenswrapper[4762]: I1009 16:07:08.696197 4762 generic.go:334] "Generic (PLEG): container finished" podID="c40376a5-9d4d-4ad5-9e17-d008f4b70e23" containerID="f166352d5ebdcd51be3836213c75a7d5a239535fc54581b3727c4e757a7d8363" exitCode=0
Oct 09 16:07:08 crc kubenswrapper[4762]: I1009 16:07:08.696303 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qqpcp" event={"ID":"c40376a5-9d4d-4ad5-9e17-d008f4b70e23","Type":"ContainerDied","Data":"f166352d5ebdcd51be3836213c75a7d5a239535fc54581b3727c4e757a7d8363"}
Oct 09 16:07:08 crc kubenswrapper[4762]: I1009 16:07:08.696548 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qqpcp" event={"ID":"c40376a5-9d4d-4ad5-9e17-d008f4b70e23","Type":"ContainerStarted","Data":"d0a60631c3e63c7f60f3f8f74585d298609c6775cea4a1e9d248f11bba80cac7"}
Oct 09 16:07:08 crc kubenswrapper[4762]: I1009 16:07:08.698143 4762 generic.go:334] "Generic (PLEG): container finished" podID="b69b59e3-a47e-4e15-9439-f8a072154016" containerID="56d3a84810379104164590e0a16b68f8711fd668c2c5d82cec0555fd42dba684" exitCode=0
Oct 09 16:07:08 crc kubenswrapper[4762]: I1009 16:07:08.698174 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-r6nps" event={"ID":"b69b59e3-a47e-4e15-9439-f8a072154016","Type":"ContainerDied","Data":"56d3a84810379104164590e0a16b68f8711fd668c2c5d82cec0555fd42dba684"}
Oct 09 16:07:08 crc kubenswrapper[4762]: I1009 16:07:08.698193 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-r6nps" event={"ID":"b69b59e3-a47e-4e15-9439-f8a072154016","Type":"ContainerStarted","Data":"be6e892516e2d8ce0016baf68cd9619a2edf5b70e07d8cd9998b3f2a82dd6209"}
Oct 09 16:07:09 crc kubenswrapper[4762]: I1009 16:07:09.713590 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qqpcp" event={"ID":"c40376a5-9d4d-4ad5-9e17-d008f4b70e23","Type":"ContainerStarted","Data":"8682d070a5d5a67e5a5e0894f6c274f275ab147c0bdcf1c8c37087feed75c247"}
Oct 09 16:07:10 crc kubenswrapper[4762]: I1009 16:07:10.725329 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-r6nps" event={"ID":"b69b59e3-a47e-4e15-9439-f8a072154016","Type":"ContainerStarted","Data":"2b85c8b78c44e2580507ad3d3cb52f912e04430962ff5cf890401d84d5083894"}
Oct 09 16:07:12 crc kubenswrapper[4762]: I1009 16:07:12.965520 4762 scope.go:117] "RemoveContainer" containerID="be54fd088343d7e0163b1cf4c29e43ee76092488ba51af7bc31481982c5d4a46"
Oct 09 16:07:12 crc kubenswrapper[4762]: E1009 16:07:12.966205 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14"
Oct 09 16:07:16 crc kubenswrapper[4762]: I1009 16:07:16.785914 4762 generic.go:334] "Generic (PLEG): container finished" podID="b69b59e3-a47e-4e15-9439-f8a072154016" containerID="2b85c8b78c44e2580507ad3d3cb52f912e04430962ff5cf890401d84d5083894" exitCode=0
Oct 09 16:07:16 crc kubenswrapper[4762]: I1009 16:07:16.785998 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-r6nps" event={"ID":"b69b59e3-a47e-4e15-9439-f8a072154016","Type":"ContainerDied","Data":"2b85c8b78c44e2580507ad3d3cb52f912e04430962ff5cf890401d84d5083894"}
Oct 09 16:07:21 crc kubenswrapper[4762]: I1009 16:07:21.837442 4762 generic.go:334] "Generic (PLEG): container finished" podID="c40376a5-9d4d-4ad5-9e17-d008f4b70e23" containerID="8682d070a5d5a67e5a5e0894f6c274f275ab147c0bdcf1c8c37087feed75c247" exitCode=0
Oct 09 16:07:21 crc kubenswrapper[4762]: I1009 16:07:21.837511 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qqpcp" event={"ID":"c40376a5-9d4d-4ad5-9e17-d008f4b70e23","Type":"ContainerDied","Data":"8682d070a5d5a67e5a5e0894f6c274f275ab147c0bdcf1c8c37087feed75c247"}
Oct 09 16:07:21 crc kubenswrapper[4762]: I1009 16:07:21.841568 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-r6nps" event={"ID":"b69b59e3-a47e-4e15-9439-f8a072154016","Type":"ContainerStarted","Data":"bd9b37ba35317b9b766c465de90640e0ec0f9ffbdd11b8ddc2374db5911da19e"}
Oct 09 16:07:21 crc kubenswrapper[4762]: I1009 16:07:21.886434 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-r6nps" podStartSLOduration=2.467315199 podStartE2EDuration="14.886409233s" podCreationTimestamp="2025-10-09 16:07:07 +0000 UTC" firstStartedPulling="2025-10-09 16:07:08.700345123 +0000 UTC m=+9704.474136152" lastFinishedPulling="2025-10-09 16:07:21.119439147 +0000 UTC m=+9716.893230186" observedRunningTime="2025-10-09 16:07:21.883657931 +0000 UTC m=+9717.657448990" watchObservedRunningTime="2025-10-09 16:07:21.886409233 +0000 UTC m=+9717.660200272"
Oct 09 16:07:22 crc kubenswrapper[4762]: I1009 16:07:22.853475 4762 generic.go:334] "Generic (PLEG): container finished" podID="461c7940-1521-4400-8973-25f23794ccc6" containerID="69703d170a7adf8cc4f1024382429941eab298b7420b13a7f16dea9cce064406" exitCode=0
Oct 09 16:07:22 crc kubenswrapper[4762]: I1009 16:07:22.853580 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellrrnnv" event={"ID":"461c7940-1521-4400-8973-25f23794ccc6","Type":"ContainerDied","Data":"69703d170a7adf8cc4f1024382429941eab298b7420b13a7f16dea9cce064406"}
Oct 09 16:07:24 crc kubenswrapper[4762]: I1009 16:07:24.399760 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellrrnnv"
Oct 09 16:07:24 crc kubenswrapper[4762]: I1009 16:07:24.527868 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/461c7940-1521-4400-8973-25f23794ccc6-nova-migration-ssh-key-0\") pod \"461c7940-1521-4400-8973-25f23794ccc6\" (UID: \"461c7940-1521-4400-8973-25f23794ccc6\") "
Oct 09 16:07:24 crc kubenswrapper[4762]: I1009 16:07:24.527997 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/461c7940-1521-4400-8973-25f23794ccc6-ceph\") pod \"461c7940-1521-4400-8973-25f23794ccc6\" (UID: \"461c7940-1521-4400-8973-25f23794ccc6\") "
Oct 09 16:07:24 crc kubenswrapper[4762]: I1009 16:07:24.528044 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/461c7940-1521-4400-8973-25f23794ccc6-nova-cell1-compute-config-0\") pod \"461c7940-1521-4400-8973-25f23794ccc6\" (UID: \"461c7940-1521-4400-8973-25f23794ccc6\") "
Oct 09 16:07:24 crc kubenswrapper[4762]: I1009 16:07:24.528110 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/461c7940-1521-4400-8973-25f23794ccc6-ssh-key\") pod \"461c7940-1521-4400-8973-25f23794ccc6\" (UID: \"461c7940-1521-4400-8973-25f23794ccc6\") "
Oct 09 16:07:24 crc kubenswrapper[4762]: I1009 16:07:24.528192 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/461c7940-1521-4400-8973-25f23794ccc6-nova-cells-global-config-1\") pod \"461c7940-1521-4400-8973-25f23794ccc6\" (UID: \"461c7940-1521-4400-8973-25f23794ccc6\") "
Oct 09 16:07:24 crc kubenswrapper[4762]: I1009 16:07:24.528224 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/461c7940-1521-4400-8973-25f23794ccc6-nova-cell1-compute-config-1\") pod \"461c7940-1521-4400-8973-25f23794ccc6\" (UID: \"461c7940-1521-4400-8973-25f23794ccc6\") "
Oct 09 16:07:24 crc kubenswrapper[4762]: I1009 16:07:24.528290 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/461c7940-1521-4400-8973-25f23794ccc6-nova-migration-ssh-key-1\") pod \"461c7940-1521-4400-8973-25f23794ccc6\" (UID: \"461c7940-1521-4400-8973-25f23794ccc6\") "
Oct 09 16:07:24 crc kubenswrapper[4762]: I1009 16:07:24.528311 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/461c7940-1521-4400-8973-25f23794ccc6-inventory\") pod \"461c7940-1521-4400-8973-25f23794ccc6\" (UID: \"461c7940-1521-4400-8973-25f23794ccc6\") "
Oct 09 16:07:24 crc kubenswrapper[4762]: I1009 16:07:24.528344 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/461c7940-1521-4400-8973-25f23794ccc6-nova-cell1-combined-ca-bundle\") pod \"461c7940-1521-4400-8973-25f23794ccc6\" (UID: \"461c7940-1521-4400-8973-25f23794ccc6\") "
Oct 09 16:07:24 crc kubenswrapper[4762]: I1009 16:07:24.528385 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-56kcn\" (UniqueName: \"kubernetes.io/projected/461c7940-1521-4400-8973-25f23794ccc6-kube-api-access-56kcn\") pod \"461c7940-1521-4400-8973-25f23794ccc6\" (UID: \"461c7940-1521-4400-8973-25f23794ccc6\") "
Oct 09 16:07:24 crc kubenswrapper[4762]: I1009 16:07:24.528412 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/461c7940-1521-4400-8973-25f23794ccc6-nova-cells-global-config-0\") pod \"461c7940-1521-4400-8973-25f23794ccc6\" (UID: \"461c7940-1521-4400-8973-25f23794ccc6\") "
Oct 09 16:07:24 crc kubenswrapper[4762]: I1009 16:07:24.535474 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/461c7940-1521-4400-8973-25f23794ccc6-kube-api-access-56kcn" (OuterVolumeSpecName: "kube-api-access-56kcn") pod "461c7940-1521-4400-8973-25f23794ccc6" (UID: "461c7940-1521-4400-8973-25f23794ccc6"). InnerVolumeSpecName "kube-api-access-56kcn". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 09 16:07:24 crc kubenswrapper[4762]: I1009 16:07:24.535582 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/461c7940-1521-4400-8973-25f23794ccc6-nova-cell1-combined-ca-bundle" (OuterVolumeSpecName: "nova-cell1-combined-ca-bundle") pod "461c7940-1521-4400-8973-25f23794ccc6" (UID: "461c7940-1521-4400-8973-25f23794ccc6"). InnerVolumeSpecName "nova-cell1-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 09 16:07:24 crc kubenswrapper[4762]: I1009 16:07:24.535803 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/461c7940-1521-4400-8973-25f23794ccc6-ceph" (OuterVolumeSpecName: "ceph") pod "461c7940-1521-4400-8973-25f23794ccc6" (UID: "461c7940-1521-4400-8973-25f23794ccc6"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 09 16:07:24 crc kubenswrapper[4762]: I1009 16:07:24.567206 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/461c7940-1521-4400-8973-25f23794ccc6-inventory" (OuterVolumeSpecName: "inventory") pod "461c7940-1521-4400-8973-25f23794ccc6" (UID: "461c7940-1521-4400-8973-25f23794ccc6"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 09 16:07:24 crc kubenswrapper[4762]: I1009 16:07:24.567627 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/461c7940-1521-4400-8973-25f23794ccc6-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "461c7940-1521-4400-8973-25f23794ccc6" (UID: "461c7940-1521-4400-8973-25f23794ccc6"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 09 16:07:24 crc kubenswrapper[4762]: I1009 16:07:24.569743 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/461c7940-1521-4400-8973-25f23794ccc6-nova-cell1-compute-config-1" (OuterVolumeSpecName: "nova-cell1-compute-config-1") pod "461c7940-1521-4400-8973-25f23794ccc6" (UID: "461c7940-1521-4400-8973-25f23794ccc6"). InnerVolumeSpecName "nova-cell1-compute-config-1". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 09 16:07:24 crc kubenswrapper[4762]: I1009 16:07:24.570774 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/461c7940-1521-4400-8973-25f23794ccc6-nova-cell1-compute-config-0" (OuterVolumeSpecName: "nova-cell1-compute-config-0") pod "461c7940-1521-4400-8973-25f23794ccc6" (UID: "461c7940-1521-4400-8973-25f23794ccc6"). InnerVolumeSpecName "nova-cell1-compute-config-0". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 09 16:07:24 crc kubenswrapper[4762]: I1009 16:07:24.577346 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/461c7940-1521-4400-8973-25f23794ccc6-nova-migration-ssh-key-1" (OuterVolumeSpecName: "nova-migration-ssh-key-1") pod "461c7940-1521-4400-8973-25f23794ccc6" (UID: "461c7940-1521-4400-8973-25f23794ccc6"). InnerVolumeSpecName "nova-migration-ssh-key-1". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 09 16:07:24 crc kubenswrapper[4762]: I1009 16:07:24.583561 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/461c7940-1521-4400-8973-25f23794ccc6-nova-cells-global-config-1" (OuterVolumeSpecName: "nova-cells-global-config-1") pod "461c7940-1521-4400-8973-25f23794ccc6" (UID: "461c7940-1521-4400-8973-25f23794ccc6"). InnerVolumeSpecName "nova-cells-global-config-1". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 09 16:07:24 crc kubenswrapper[4762]: I1009 16:07:24.592611 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/461c7940-1521-4400-8973-25f23794ccc6-nova-cells-global-config-0" (OuterVolumeSpecName: "nova-cells-global-config-0") pod "461c7940-1521-4400-8973-25f23794ccc6" (UID: "461c7940-1521-4400-8973-25f23794ccc6"). InnerVolumeSpecName "nova-cells-global-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 09 16:07:24 crc kubenswrapper[4762]: I1009 16:07:24.599158 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/461c7940-1521-4400-8973-25f23794ccc6-nova-migration-ssh-key-0" (OuterVolumeSpecName: "nova-migration-ssh-key-0") pod "461c7940-1521-4400-8973-25f23794ccc6" (UID: "461c7940-1521-4400-8973-25f23794ccc6"). InnerVolumeSpecName "nova-migration-ssh-key-0".
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 16:07:24 crc kubenswrapper[4762]: I1009 16:07:24.631495 4762 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/461c7940-1521-4400-8973-25f23794ccc6-nova-migration-ssh-key-0\") on node \"crc\" DevicePath \"\"" Oct 09 16:07:24 crc kubenswrapper[4762]: I1009 16:07:24.631531 4762 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/461c7940-1521-4400-8973-25f23794ccc6-ceph\") on node \"crc\" DevicePath \"\"" Oct 09 16:07:24 crc kubenswrapper[4762]: I1009 16:07:24.631542 4762 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/461c7940-1521-4400-8973-25f23794ccc6-nova-cell1-compute-config-0\") on node \"crc\" DevicePath \"\"" Oct 09 16:07:24 crc kubenswrapper[4762]: I1009 16:07:24.631553 4762 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/461c7940-1521-4400-8973-25f23794ccc6-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 09 16:07:24 crc kubenswrapper[4762]: I1009 16:07:24.631562 4762 reconciler_common.go:293] "Volume detached for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/461c7940-1521-4400-8973-25f23794ccc6-nova-cells-global-config-1\") on node \"crc\" DevicePath \"\"" Oct 09 16:07:24 crc kubenswrapper[4762]: I1009 16:07:24.631570 4762 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/461c7940-1521-4400-8973-25f23794ccc6-nova-cell1-compute-config-1\") on node \"crc\" DevicePath \"\"" Oct 09 16:07:24 crc kubenswrapper[4762]: I1009 16:07:24.631579 4762 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/461c7940-1521-4400-8973-25f23794ccc6-nova-migration-ssh-key-1\") on node \"crc\" DevicePath \"\"" Oct 09 16:07:24 crc kubenswrapper[4762]: I1009 16:07:24.631589 4762 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/461c7940-1521-4400-8973-25f23794ccc6-inventory\") on node \"crc\" DevicePath \"\"" Oct 09 16:07:24 crc kubenswrapper[4762]: I1009 16:07:24.631596 4762 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/461c7940-1521-4400-8973-25f23794ccc6-nova-cell1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 16:07:24 crc kubenswrapper[4762]: I1009 16:07:24.631605 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-56kcn\" (UniqueName: \"kubernetes.io/projected/461c7940-1521-4400-8973-25f23794ccc6-kube-api-access-56kcn\") on node \"crc\" DevicePath \"\"" Oct 09 16:07:24 crc kubenswrapper[4762]: I1009 16:07:24.631616 4762 reconciler_common.go:293] "Volume detached for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/461c7940-1521-4400-8973-25f23794ccc6-nova-cells-global-config-0\") on node \"crc\" DevicePath \"\"" Oct 09 16:07:24 crc kubenswrapper[4762]: I1009 16:07:24.876867 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellrrnnv" event={"ID":"461c7940-1521-4400-8973-25f23794ccc6","Type":"ContainerDied","Data":"14086c79c040293315e5580d3655bcf6d9dc25108c9d5f34bd8fb35b41e676c7"} Oct 09 16:07:24 crc kubenswrapper[4762]: I1009 16:07:24.877186 4762 
pod_container_deletor.go:80] "Container not found in pod's containers" containerID="14086c79c040293315e5580d3655bcf6d9dc25108c9d5f34bd8fb35b41e676c7" Oct 09 16:07:24 crc kubenswrapper[4762]: I1009 16:07:24.876900 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellrrnnv" Oct 09 16:07:24 crc kubenswrapper[4762]: I1009 16:07:24.879702 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qqpcp" event={"ID":"c40376a5-9d4d-4ad5-9e17-d008f4b70e23","Type":"ContainerStarted","Data":"12ba493a1654cf0aa22d0f7c60cb4a135afbb11d1f3787638a876893233c54b9"} Oct 09 16:07:24 crc kubenswrapper[4762]: I1009 16:07:24.916927 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-qqpcp" podStartSLOduration=2.389020081 podStartE2EDuration="17.9169108s" podCreationTimestamp="2025-10-09 16:07:07 +0000 UTC" firstStartedPulling="2025-10-09 16:07:08.698196116 +0000 UTC m=+9704.471987145" lastFinishedPulling="2025-10-09 16:07:24.226086825 +0000 UTC m=+9719.999877864" observedRunningTime="2025-10-09 16:07:24.896739963 +0000 UTC m=+9720.670531012" watchObservedRunningTime="2025-10-09 16:07:24.9169108 +0000 UTC m=+9720.690701839" Oct 09 16:07:24 crc kubenswrapper[4762]: I1009 16:07:24.973317 4762 scope.go:117] "RemoveContainer" containerID="be54fd088343d7e0163b1cf4c29e43ee76092488ba51af7bc31481982c5d4a46" Oct 09 16:07:24 crc kubenswrapper[4762]: E1009 16:07:24.973573 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 16:07:27 crc kubenswrapper[4762]: I1009 16:07:27.585849 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-r6nps" Oct 09 16:07:27 crc kubenswrapper[4762]: I1009 16:07:27.586314 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-r6nps" Oct 09 16:07:27 crc kubenswrapper[4762]: I1009 16:07:27.650714 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-r6nps" Oct 09 16:07:27 crc kubenswrapper[4762]: I1009 16:07:27.678747 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-qqpcp" Oct 09 16:07:27 crc kubenswrapper[4762]: I1009 16:07:27.678810 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-qqpcp" Oct 09 16:07:27 crc kubenswrapper[4762]: I1009 16:07:27.976312 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-r6nps" Oct 09 16:07:28 crc kubenswrapper[4762]: I1009 16:07:28.027596 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-r6nps"] Oct 09 16:07:28 crc kubenswrapper[4762]: I1009 16:07:28.728738 4762 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-qqpcp" podUID="c40376a5-9d4d-4ad5-9e17-d008f4b70e23" containerName="registry-server" 
probeResult="failure" output=< Oct 09 16:07:28 crc kubenswrapper[4762]: timeout: failed to connect service ":50051" within 1s Oct 09 16:07:28 crc kubenswrapper[4762]: > Oct 09 16:07:29 crc kubenswrapper[4762]: I1009 16:07:29.938577 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-r6nps" podUID="b69b59e3-a47e-4e15-9439-f8a072154016" containerName="registry-server" containerID="cri-o://bd9b37ba35317b9b766c465de90640e0ec0f9ffbdd11b8ddc2374db5911da19e" gracePeriod=2 Oct 09 16:07:30 crc kubenswrapper[4762]: I1009 16:07:30.442123 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-r6nps" Oct 09 16:07:30 crc kubenswrapper[4762]: I1009 16:07:30.561207 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b7r5w\" (UniqueName: \"kubernetes.io/projected/b69b59e3-a47e-4e15-9439-f8a072154016-kube-api-access-b7r5w\") pod \"b69b59e3-a47e-4e15-9439-f8a072154016\" (UID: \"b69b59e3-a47e-4e15-9439-f8a072154016\") " Oct 09 16:07:30 crc kubenswrapper[4762]: I1009 16:07:30.561371 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b69b59e3-a47e-4e15-9439-f8a072154016-catalog-content\") pod \"b69b59e3-a47e-4e15-9439-f8a072154016\" (UID: \"b69b59e3-a47e-4e15-9439-f8a072154016\") " Oct 09 16:07:30 crc kubenswrapper[4762]: I1009 16:07:30.561732 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b69b59e3-a47e-4e15-9439-f8a072154016-utilities\") pod \"b69b59e3-a47e-4e15-9439-f8a072154016\" (UID: \"b69b59e3-a47e-4e15-9439-f8a072154016\") " Oct 09 16:07:30 crc kubenswrapper[4762]: I1009 16:07:30.562680 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b69b59e3-a47e-4e15-9439-f8a072154016-utilities" (OuterVolumeSpecName: "utilities") pod "b69b59e3-a47e-4e15-9439-f8a072154016" (UID: "b69b59e3-a47e-4e15-9439-f8a072154016"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 16:07:30 crc kubenswrapper[4762]: I1009 16:07:30.573102 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b69b59e3-a47e-4e15-9439-f8a072154016-kube-api-access-b7r5w" (OuterVolumeSpecName: "kube-api-access-b7r5w") pod "b69b59e3-a47e-4e15-9439-f8a072154016" (UID: "b69b59e3-a47e-4e15-9439-f8a072154016"). InnerVolumeSpecName "kube-api-access-b7r5w". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 16:07:30 crc kubenswrapper[4762]: I1009 16:07:30.614356 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b69b59e3-a47e-4e15-9439-f8a072154016-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b69b59e3-a47e-4e15-9439-f8a072154016" (UID: "b69b59e3-a47e-4e15-9439-f8a072154016"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 16:07:30 crc kubenswrapper[4762]: I1009 16:07:30.664981 4762 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b69b59e3-a47e-4e15-9439-f8a072154016-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 16:07:30 crc kubenswrapper[4762]: I1009 16:07:30.665323 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b7r5w\" (UniqueName: \"kubernetes.io/projected/b69b59e3-a47e-4e15-9439-f8a072154016-kube-api-access-b7r5w\") on node \"crc\" DevicePath \"\"" Oct 09 16:07:30 crc kubenswrapper[4762]: I1009 16:07:30.665340 4762 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b69b59e3-a47e-4e15-9439-f8a072154016-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 16:07:30 crc kubenswrapper[4762]: I1009 16:07:30.951890 4762 generic.go:334] "Generic (PLEG): container finished" podID="b69b59e3-a47e-4e15-9439-f8a072154016" containerID="bd9b37ba35317b9b766c465de90640e0ec0f9ffbdd11b8ddc2374db5911da19e" exitCode=0 Oct 09 16:07:30 crc kubenswrapper[4762]: I1009 16:07:30.951948 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-r6nps" event={"ID":"b69b59e3-a47e-4e15-9439-f8a072154016","Type":"ContainerDied","Data":"bd9b37ba35317b9b766c465de90640e0ec0f9ffbdd11b8ddc2374db5911da19e"} Oct 09 16:07:30 crc kubenswrapper[4762]: I1009 16:07:30.951981 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-r6nps" event={"ID":"b69b59e3-a47e-4e15-9439-f8a072154016","Type":"ContainerDied","Data":"be6e892516e2d8ce0016baf68cd9619a2edf5b70e07d8cd9998b3f2a82dd6209"} Oct 09 16:07:30 crc kubenswrapper[4762]: I1009 16:07:30.951999 4762 scope.go:117] "RemoveContainer" containerID="bd9b37ba35317b9b766c465de90640e0ec0f9ffbdd11b8ddc2374db5911da19e" Oct 09 16:07:30 crc kubenswrapper[4762]: I1009 16:07:30.952161 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-r6nps" Oct 09 16:07:30 crc kubenswrapper[4762]: I1009 16:07:30.985912 4762 scope.go:117] "RemoveContainer" containerID="2b85c8b78c44e2580507ad3d3cb52f912e04430962ff5cf890401d84d5083894" Oct 09 16:07:31 crc kubenswrapper[4762]: I1009 16:07:31.000728 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-r6nps"] Oct 09 16:07:31 crc kubenswrapper[4762]: I1009 16:07:31.012181 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-r6nps"] Oct 09 16:07:31 crc kubenswrapper[4762]: I1009 16:07:31.014193 4762 scope.go:117] "RemoveContainer" containerID="56d3a84810379104164590e0a16b68f8711fd668c2c5d82cec0555fd42dba684" Oct 09 16:07:31 crc kubenswrapper[4762]: I1009 16:07:31.061834 4762 scope.go:117] "RemoveContainer" containerID="bd9b37ba35317b9b766c465de90640e0ec0f9ffbdd11b8ddc2374db5911da19e" Oct 09 16:07:31 crc kubenswrapper[4762]: E1009 16:07:31.062226 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bd9b37ba35317b9b766c465de90640e0ec0f9ffbdd11b8ddc2374db5911da19e\": container with ID starting with bd9b37ba35317b9b766c465de90640e0ec0f9ffbdd11b8ddc2374db5911da19e not found: ID does not exist" containerID="bd9b37ba35317b9b766c465de90640e0ec0f9ffbdd11b8ddc2374db5911da19e" Oct 09 16:07:31 crc kubenswrapper[4762]: I1009 16:07:31.062271 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bd9b37ba35317b9b766c465de90640e0ec0f9ffbdd11b8ddc2374db5911da19e"} err="failed to get container status \"bd9b37ba35317b9b766c465de90640e0ec0f9ffbdd11b8ddc2374db5911da19e\": rpc error: code = NotFound desc = could not find container \"bd9b37ba35317b9b766c465de90640e0ec0f9ffbdd11b8ddc2374db5911da19e\": container with ID starting with bd9b37ba35317b9b766c465de90640e0ec0f9ffbdd11b8ddc2374db5911da19e not found: ID does not exist" Oct 09 16:07:31 crc kubenswrapper[4762]: I1009 16:07:31.062295 4762 scope.go:117] "RemoveContainer" containerID="2b85c8b78c44e2580507ad3d3cb52f912e04430962ff5cf890401d84d5083894" Oct 09 16:07:31 crc kubenswrapper[4762]: E1009 16:07:31.062622 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2b85c8b78c44e2580507ad3d3cb52f912e04430962ff5cf890401d84d5083894\": container with ID starting with 2b85c8b78c44e2580507ad3d3cb52f912e04430962ff5cf890401d84d5083894 not found: ID does not exist" containerID="2b85c8b78c44e2580507ad3d3cb52f912e04430962ff5cf890401d84d5083894" Oct 09 16:07:31 crc kubenswrapper[4762]: I1009 16:07:31.062674 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2b85c8b78c44e2580507ad3d3cb52f912e04430962ff5cf890401d84d5083894"} err="failed to get container status \"2b85c8b78c44e2580507ad3d3cb52f912e04430962ff5cf890401d84d5083894\": rpc error: code = NotFound desc = could not find container \"2b85c8b78c44e2580507ad3d3cb52f912e04430962ff5cf890401d84d5083894\": container with ID starting with 2b85c8b78c44e2580507ad3d3cb52f912e04430962ff5cf890401d84d5083894 not found: ID does not exist" Oct 09 16:07:31 crc kubenswrapper[4762]: I1009 16:07:31.062701 4762 scope.go:117] "RemoveContainer" containerID="56d3a84810379104164590e0a16b68f8711fd668c2c5d82cec0555fd42dba684" Oct 09 16:07:31 crc kubenswrapper[4762]: E1009 16:07:31.063853 4762 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"56d3a84810379104164590e0a16b68f8711fd668c2c5d82cec0555fd42dba684\": container with ID starting with 56d3a84810379104164590e0a16b68f8711fd668c2c5d82cec0555fd42dba684 not found: ID does not exist" containerID="56d3a84810379104164590e0a16b68f8711fd668c2c5d82cec0555fd42dba684" Oct 09 16:07:31 crc kubenswrapper[4762]: I1009 16:07:31.063991 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"56d3a84810379104164590e0a16b68f8711fd668c2c5d82cec0555fd42dba684"} err="failed to get container status \"56d3a84810379104164590e0a16b68f8711fd668c2c5d82cec0555fd42dba684\": rpc error: code = NotFound desc = could not find container \"56d3a84810379104164590e0a16b68f8711fd668c2c5d82cec0555fd42dba684\": container with ID starting with 56d3a84810379104164590e0a16b68f8711fd668c2c5d82cec0555fd42dba684 not found: ID does not exist" Oct 09 16:07:32 crc kubenswrapper[4762]: I1009 16:07:32.977109 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b69b59e3-a47e-4e15-9439-f8a072154016" path="/var/lib/kubelet/pods/b69b59e3-a47e-4e15-9439-f8a072154016/volumes" Oct 09 16:07:37 crc kubenswrapper[4762]: I1009 16:07:37.729020 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-qqpcp" Oct 09 16:07:37 crc kubenswrapper[4762]: I1009 16:07:37.784986 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-qqpcp" Oct 09 16:07:38 crc kubenswrapper[4762]: I1009 16:07:38.254941 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-qqpcp"] Oct 09 16:07:38 crc kubenswrapper[4762]: I1009 16:07:38.981272 4762 scope.go:117] "RemoveContainer" containerID="be54fd088343d7e0163b1cf4c29e43ee76092488ba51af7bc31481982c5d4a46" Oct 09 16:07:38 crc kubenswrapper[4762]: E1009 16:07:38.987919 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 16:07:39 crc kubenswrapper[4762]: I1009 16:07:39.042490 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-qqpcp" podUID="c40376a5-9d4d-4ad5-9e17-d008f4b70e23" containerName="registry-server" containerID="cri-o://12ba493a1654cf0aa22d0f7c60cb4a135afbb11d1f3787638a876893233c54b9" gracePeriod=2 Oct 09 16:07:40 crc kubenswrapper[4762]: I1009 16:07:40.055344 4762 generic.go:334] "Generic (PLEG): container finished" podID="c40376a5-9d4d-4ad5-9e17-d008f4b70e23" containerID="12ba493a1654cf0aa22d0f7c60cb4a135afbb11d1f3787638a876893233c54b9" exitCode=0 Oct 09 16:07:40 crc kubenswrapper[4762]: I1009 16:07:40.055405 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qqpcp" event={"ID":"c40376a5-9d4d-4ad5-9e17-d008f4b70e23","Type":"ContainerDied","Data":"12ba493a1654cf0aa22d0f7c60cb4a135afbb11d1f3787638a876893233c54b9"} Oct 09 16:07:40 crc kubenswrapper[4762]: I1009 16:07:40.056500 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qqpcp" 
event={"ID":"c40376a5-9d4d-4ad5-9e17-d008f4b70e23","Type":"ContainerDied","Data":"d0a60631c3e63c7f60f3f8f74585d298609c6775cea4a1e9d248f11bba80cac7"} Oct 09 16:07:40 crc kubenswrapper[4762]: I1009 16:07:40.056585 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d0a60631c3e63c7f60f3f8f74585d298609c6775cea4a1e9d248f11bba80cac7" Oct 09 16:07:40 crc kubenswrapper[4762]: I1009 16:07:40.117841 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-qqpcp" Oct 09 16:07:40 crc kubenswrapper[4762]: I1009 16:07:40.274733 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c40376a5-9d4d-4ad5-9e17-d008f4b70e23-utilities\") pod \"c40376a5-9d4d-4ad5-9e17-d008f4b70e23\" (UID: \"c40376a5-9d4d-4ad5-9e17-d008f4b70e23\") " Oct 09 16:07:40 crc kubenswrapper[4762]: I1009 16:07:40.274999 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c40376a5-9d4d-4ad5-9e17-d008f4b70e23-catalog-content\") pod \"c40376a5-9d4d-4ad5-9e17-d008f4b70e23\" (UID: \"c40376a5-9d4d-4ad5-9e17-d008f4b70e23\") " Oct 09 16:07:40 crc kubenswrapper[4762]: I1009 16:07:40.275096 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4gtn6\" (UniqueName: \"kubernetes.io/projected/c40376a5-9d4d-4ad5-9e17-d008f4b70e23-kube-api-access-4gtn6\") pod \"c40376a5-9d4d-4ad5-9e17-d008f4b70e23\" (UID: \"c40376a5-9d4d-4ad5-9e17-d008f4b70e23\") " Oct 09 16:07:40 crc kubenswrapper[4762]: I1009 16:07:40.275742 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c40376a5-9d4d-4ad5-9e17-d008f4b70e23-utilities" (OuterVolumeSpecName: "utilities") pod "c40376a5-9d4d-4ad5-9e17-d008f4b70e23" (UID: "c40376a5-9d4d-4ad5-9e17-d008f4b70e23"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 16:07:40 crc kubenswrapper[4762]: I1009 16:07:40.281165 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c40376a5-9d4d-4ad5-9e17-d008f4b70e23-kube-api-access-4gtn6" (OuterVolumeSpecName: "kube-api-access-4gtn6") pod "c40376a5-9d4d-4ad5-9e17-d008f4b70e23" (UID: "c40376a5-9d4d-4ad5-9e17-d008f4b70e23"). InnerVolumeSpecName "kube-api-access-4gtn6". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 16:07:40 crc kubenswrapper[4762]: I1009 16:07:40.377157 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4gtn6\" (UniqueName: \"kubernetes.io/projected/c40376a5-9d4d-4ad5-9e17-d008f4b70e23-kube-api-access-4gtn6\") on node \"crc\" DevicePath \"\"" Oct 09 16:07:40 crc kubenswrapper[4762]: I1009 16:07:40.377463 4762 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c40376a5-9d4d-4ad5-9e17-d008f4b70e23-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 16:07:40 crc kubenswrapper[4762]: I1009 16:07:40.382085 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c40376a5-9d4d-4ad5-9e17-d008f4b70e23-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c40376a5-9d4d-4ad5-9e17-d008f4b70e23" (UID: "c40376a5-9d4d-4ad5-9e17-d008f4b70e23"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 16:07:40 crc kubenswrapper[4762]: I1009 16:07:40.479410 4762 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c40376a5-9d4d-4ad5-9e17-d008f4b70e23-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 16:07:41 crc kubenswrapper[4762]: I1009 16:07:41.067607 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-qqpcp" Oct 09 16:07:41 crc kubenswrapper[4762]: I1009 16:07:41.100885 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-qqpcp"] Oct 09 16:07:41 crc kubenswrapper[4762]: I1009 16:07:41.110434 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-qqpcp"] Oct 09 16:07:42 crc kubenswrapper[4762]: I1009 16:07:42.978448 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c40376a5-9d4d-4ad5-9e17-d008f4b70e23" path="/var/lib/kubelet/pods/c40376a5-9d4d-4ad5-9e17-d008f4b70e23/volumes" Oct 09 16:07:49 crc kubenswrapper[4762]: I1009 16:07:49.966015 4762 scope.go:117] "RemoveContainer" containerID="be54fd088343d7e0163b1cf4c29e43ee76092488ba51af7bc31481982c5d4a46" Oct 09 16:07:49 crc kubenswrapper[4762]: E1009 16:07:49.966877 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 16:08:00 crc kubenswrapper[4762]: I1009 16:08:00.975993 4762 scope.go:117] "RemoveContainer" containerID="be54fd088343d7e0163b1cf4c29e43ee76092488ba51af7bc31481982c5d4a46" Oct 09 16:08:00 crc kubenswrapper[4762]: E1009 16:08:00.977010 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 16:08:15 crc kubenswrapper[4762]: I1009 16:08:15.966196 4762 scope.go:117] "RemoveContainer" containerID="be54fd088343d7e0163b1cf4c29e43ee76092488ba51af7bc31481982c5d4a46" Oct 09 16:08:15 crc kubenswrapper[4762]: E1009 16:08:15.967036 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 16:08:30 crc kubenswrapper[4762]: I1009 16:08:30.973703 4762 scope.go:117] "RemoveContainer" containerID="be54fd088343d7e0163b1cf4c29e43ee76092488ba51af7bc31481982c5d4a46" Oct 09 16:08:30 crc kubenswrapper[4762]: E1009 16:08:30.975967 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 16:08:42 crc kubenswrapper[4762]: I1009 16:08:42.965369 4762 scope.go:117] "RemoveContainer" containerID="be54fd088343d7e0163b1cf4c29e43ee76092488ba51af7bc31481982c5d4a46" Oct 09 16:08:42 crc kubenswrapper[4762]: E1009 16:08:42.966155 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 16:08:53 crc kubenswrapper[4762]: I1009 16:08:53.965228 4762 scope.go:117] "RemoveContainer" containerID="be54fd088343d7e0163b1cf4c29e43ee76092488ba51af7bc31481982c5d4a46" Oct 09 16:08:53 crc kubenswrapper[4762]: E1009 16:08:53.965980 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 16:09:07 crc kubenswrapper[4762]: I1009 16:09:07.965710 4762 scope.go:117] "RemoveContainer" containerID="be54fd088343d7e0163b1cf4c29e43ee76092488ba51af7bc31481982c5d4a46" Oct 09 16:09:07 crc kubenswrapper[4762]: E1009 16:09:07.966652 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 16:09:21 crc kubenswrapper[4762]: I1009 16:09:21.965120 4762 scope.go:117] "RemoveContainer" containerID="be54fd088343d7e0163b1cf4c29e43ee76092488ba51af7bc31481982c5d4a46" Oct 09 16:09:21 crc kubenswrapper[4762]: E1009 16:09:21.967112 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 16:09:32 crc kubenswrapper[4762]: I1009 16:09:32.965159 4762 scope.go:117] "RemoveContainer" containerID="be54fd088343d7e0163b1cf4c29e43ee76092488ba51af7bc31481982c5d4a46" Oct 09 16:09:32 crc kubenswrapper[4762]: E1009 16:09:32.966015 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 16:09:47 crc kubenswrapper[4762]: I1009 16:09:47.966288 4762 scope.go:117] "RemoveContainer" containerID="be54fd088343d7e0163b1cf4c29e43ee76092488ba51af7bc31481982c5d4a46" Oct 09 16:09:47 crc kubenswrapper[4762]: E1009 16:09:47.967096 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 16:09:54 crc kubenswrapper[4762]: I1009 16:09:54.060300 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-copy-data"] Oct 09 16:09:54 crc kubenswrapper[4762]: I1009 16:09:54.061215 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/mariadb-copy-data" podUID="84d69cfe-07e8-45e6-95b4-05bf716a658c" containerName="adoption" containerID="cri-o://f4a43743f4f7a7348f4eafe0b64bb93d7aa3234ef74884c26b4ac82672aa6f74" gracePeriod=30 Oct 09 16:09:59 crc kubenswrapper[4762]: I1009 16:09:59.965671 4762 scope.go:117] "RemoveContainer" containerID="be54fd088343d7e0163b1cf4c29e43ee76092488ba51af7bc31481982c5d4a46" Oct 09 16:09:59 crc kubenswrapper[4762]: E1009 16:09:59.966654 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 16:10:14 crc kubenswrapper[4762]: I1009 16:10:14.977209 4762 scope.go:117] "RemoveContainer" containerID="be54fd088343d7e0163b1cf4c29e43ee76092488ba51af7bc31481982c5d4a46" Oct 09 16:10:14 crc kubenswrapper[4762]: E1009 16:10:14.989139 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 16:10:24 crc kubenswrapper[4762]: I1009 16:10:24.620431 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-copy-data" Oct 09 16:10:24 crc kubenswrapper[4762]: I1009 16:10:24.757678 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cwdqn\" (UniqueName: \"kubernetes.io/projected/84d69cfe-07e8-45e6-95b4-05bf716a658c-kube-api-access-cwdqn\") pod \"84d69cfe-07e8-45e6-95b4-05bf716a658c\" (UID: \"84d69cfe-07e8-45e6-95b4-05bf716a658c\") " Oct 09 16:10:24 crc kubenswrapper[4762]: I1009 16:10:24.759797 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mariadb-data\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-1858a751-5be1-4eb5-a226-0034f33f9342\") pod \"84d69cfe-07e8-45e6-95b4-05bf716a658c\" (UID: \"84d69cfe-07e8-45e6-95b4-05bf716a658c\") " Oct 09 16:10:24 crc kubenswrapper[4762]: I1009 16:10:24.764586 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/84d69cfe-07e8-45e6-95b4-05bf716a658c-kube-api-access-cwdqn" (OuterVolumeSpecName: "kube-api-access-cwdqn") pod "84d69cfe-07e8-45e6-95b4-05bf716a658c" (UID: "84d69cfe-07e8-45e6-95b4-05bf716a658c"). InnerVolumeSpecName "kube-api-access-cwdqn". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 16:10:24 crc kubenswrapper[4762]: I1009 16:10:24.783171 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-1858a751-5be1-4eb5-a226-0034f33f9342" (OuterVolumeSpecName: "mariadb-data") pod "84d69cfe-07e8-45e6-95b4-05bf716a658c" (UID: "84d69cfe-07e8-45e6-95b4-05bf716a658c"). InnerVolumeSpecName "pvc-1858a751-5be1-4eb5-a226-0034f33f9342". PluginName "kubernetes.io/csi", VolumeGidValue "" Oct 09 16:10:24 crc kubenswrapper[4762]: I1009 16:10:24.839563 4762 generic.go:334] "Generic (PLEG): container finished" podID="84d69cfe-07e8-45e6-95b4-05bf716a658c" containerID="f4a43743f4f7a7348f4eafe0b64bb93d7aa3234ef74884c26b4ac82672aa6f74" exitCode=137 Oct 09 16:10:24 crc kubenswrapper[4762]: I1009 16:10:24.839627 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-copy-data" event={"ID":"84d69cfe-07e8-45e6-95b4-05bf716a658c","Type":"ContainerDied","Data":"f4a43743f4f7a7348f4eafe0b64bb93d7aa3234ef74884c26b4ac82672aa6f74"} Oct 09 16:10:24 crc kubenswrapper[4762]: I1009 16:10:24.839691 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-copy-data" Oct 09 16:10:24 crc kubenswrapper[4762]: I1009 16:10:24.839729 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-copy-data" event={"ID":"84d69cfe-07e8-45e6-95b4-05bf716a658c","Type":"ContainerDied","Data":"244c31ca3dd8b35c1dd50d5526e5b265715da2563d1740bf3bbaa2d1b66349ea"} Oct 09 16:10:24 crc kubenswrapper[4762]: I1009 16:10:24.839753 4762 scope.go:117] "RemoveContainer" containerID="f4a43743f4f7a7348f4eafe0b64bb93d7aa3234ef74884c26b4ac82672aa6f74" Oct 09 16:10:24 crc kubenswrapper[4762]: I1009 16:10:24.863002 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cwdqn\" (UniqueName: \"kubernetes.io/projected/84d69cfe-07e8-45e6-95b4-05bf716a658c-kube-api-access-cwdqn\") on node \"crc\" DevicePath \"\"" Oct 09 16:10:24 crc kubenswrapper[4762]: I1009 16:10:24.863059 4762 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-1858a751-5be1-4eb5-a226-0034f33f9342\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-1858a751-5be1-4eb5-a226-0034f33f9342\") on node \"crc\" " Oct 09 16:10:24 crc kubenswrapper[4762]: I1009 16:10:24.889945 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-copy-data"] Oct 09 16:10:24 crc kubenswrapper[4762]: I1009 16:10:24.890855 4762 scope.go:117] "RemoveContainer" containerID="f4a43743f4f7a7348f4eafe0b64bb93d7aa3234ef74884c26b4ac82672aa6f74" Oct 09 16:10:24 crc kubenswrapper[4762]: E1009 16:10:24.891344 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f4a43743f4f7a7348f4eafe0b64bb93d7aa3234ef74884c26b4ac82672aa6f74\": container with ID starting with f4a43743f4f7a7348f4eafe0b64bb93d7aa3234ef74884c26b4ac82672aa6f74 not found: ID does not exist" containerID="f4a43743f4f7a7348f4eafe0b64bb93d7aa3234ef74884c26b4ac82672aa6f74" Oct 09 16:10:24 crc kubenswrapper[4762]: I1009 16:10:24.891377 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f4a43743f4f7a7348f4eafe0b64bb93d7aa3234ef74884c26b4ac82672aa6f74"} err="failed to get container status \"f4a43743f4f7a7348f4eafe0b64bb93d7aa3234ef74884c26b4ac82672aa6f74\": rpc error: code = NotFound desc = could not find container \"f4a43743f4f7a7348f4eafe0b64bb93d7aa3234ef74884c26b4ac82672aa6f74\": container with ID starting with f4a43743f4f7a7348f4eafe0b64bb93d7aa3234ef74884c26b4ac82672aa6f74 not found: ID does not exist" Oct 09 16:10:24 crc kubenswrapper[4762]: I1009 16:10:24.901221 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-copy-data"] Oct 09 16:10:24 crc kubenswrapper[4762]: I1009 16:10:24.911392 4762 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... 
Oct 09 16:10:24 crc kubenswrapper[4762]: I1009 16:10:24.911601 4762 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-1858a751-5be1-4eb5-a226-0034f33f9342" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-1858a751-5be1-4eb5-a226-0034f33f9342") on node "crc" Oct 09 16:10:24 crc kubenswrapper[4762]: I1009 16:10:24.964558 4762 reconciler_common.go:293] "Volume detached for volume \"pvc-1858a751-5be1-4eb5-a226-0034f33f9342\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-1858a751-5be1-4eb5-a226-0034f33f9342\") on node \"crc\" DevicePath \"\"" Oct 09 16:10:24 crc kubenswrapper[4762]: I1009 16:10:24.986135 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="84d69cfe-07e8-45e6-95b4-05bf716a658c" path="/var/lib/kubelet/pods/84d69cfe-07e8-45e6-95b4-05bf716a658c/volumes" Oct 09 16:10:25 crc kubenswrapper[4762]: I1009 16:10:25.496460 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-copy-data"] Oct 09 16:10:25 crc kubenswrapper[4762]: I1009 16:10:25.496762 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-copy-data" podUID="1df9a5a3-088f-428d-873a-30b7b82507e9" containerName="adoption" containerID="cri-o://79be7ffe98523fed7899eef46ecc9d0bbe47f0a54c1f2cce7eafe7eb8f6dd344" gracePeriod=30 Oct 09 16:10:25 crc kubenswrapper[4762]: I1009 16:10:25.965242 4762 scope.go:117] "RemoveContainer" containerID="be54fd088343d7e0163b1cf4c29e43ee76092488ba51af7bc31481982c5d4a46" Oct 09 16:10:25 crc kubenswrapper[4762]: E1009 16:10:25.965613 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 16:10:37 crc kubenswrapper[4762]: I1009 16:10:37.966065 4762 scope.go:117] "RemoveContainer" containerID="be54fd088343d7e0163b1cf4c29e43ee76092488ba51af7bc31481982c5d4a46" Oct 09 16:10:37 crc kubenswrapper[4762]: E1009 16:10:37.967071 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 16:10:48 crc kubenswrapper[4762]: I1009 16:10:48.965193 4762 scope.go:117] "RemoveContainer" containerID="be54fd088343d7e0163b1cf4c29e43ee76092488ba51af7bc31481982c5d4a46" Oct 09 16:10:50 crc kubenswrapper[4762]: I1009 16:10:50.109446 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" event={"ID":"366049a3-acf6-488c-9f93-4557528d6d14","Type":"ContainerStarted","Data":"23a95be0b431c4a958f1dd252c8aa60f23fd4d4c19a6acb05c42292d19be3d9d"} Oct 09 16:10:56 crc kubenswrapper[4762]: I1009 16:10:56.170114 4762 generic.go:334] "Generic (PLEG): container finished" podID="1df9a5a3-088f-428d-873a-30b7b82507e9" containerID="79be7ffe98523fed7899eef46ecc9d0bbe47f0a54c1f2cce7eafe7eb8f6dd344" exitCode=137 Oct 09 16:10:56 crc kubenswrapper[4762]: I1009 16:10:56.170206 4762 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack/ovn-copy-data" event={"ID":"1df9a5a3-088f-428d-873a-30b7b82507e9","Type":"ContainerDied","Data":"79be7ffe98523fed7899eef46ecc9d0bbe47f0a54c1f2cce7eafe7eb8f6dd344"} Oct 09 16:10:56 crc kubenswrapper[4762]: I1009 16:10:56.172808 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-copy-data" event={"ID":"1df9a5a3-088f-428d-873a-30b7b82507e9","Type":"ContainerDied","Data":"12599279f93c2bffdf711cfcaf3998c2ba61cabf1a4951dbef74458167500bd4"} Oct 09 16:10:56 crc kubenswrapper[4762]: I1009 16:10:56.172876 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="12599279f93c2bffdf711cfcaf3998c2ba61cabf1a4951dbef74458167500bd4" Oct 09 16:10:56 crc kubenswrapper[4762]: I1009 16:10:56.180078 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-copy-data" Oct 09 16:10:56 crc kubenswrapper[4762]: I1009 16:10:56.318788 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-data\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ec7d5c9e-6e84-4d3d-911b-2e8899e052cc\") pod \"1df9a5a3-088f-428d-873a-30b7b82507e9\" (UID: \"1df9a5a3-088f-428d-873a-30b7b82507e9\") " Oct 09 16:10:56 crc kubenswrapper[4762]: I1009 16:10:56.318933 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j2cw8\" (UniqueName: \"kubernetes.io/projected/1df9a5a3-088f-428d-873a-30b7b82507e9-kube-api-access-j2cw8\") pod \"1df9a5a3-088f-428d-873a-30b7b82507e9\" (UID: \"1df9a5a3-088f-428d-873a-30b7b82507e9\") " Oct 09 16:10:56 crc kubenswrapper[4762]: I1009 16:10:56.319197 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-data-cert\" (UniqueName: \"kubernetes.io/secret/1df9a5a3-088f-428d-873a-30b7b82507e9-ovn-data-cert\") pod \"1df9a5a3-088f-428d-873a-30b7b82507e9\" (UID: \"1df9a5a3-088f-428d-873a-30b7b82507e9\") " Oct 09 16:10:56 crc kubenswrapper[4762]: I1009 16:10:56.327793 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1df9a5a3-088f-428d-873a-30b7b82507e9-ovn-data-cert" (OuterVolumeSpecName: "ovn-data-cert") pod "1df9a5a3-088f-428d-873a-30b7b82507e9" (UID: "1df9a5a3-088f-428d-873a-30b7b82507e9"). InnerVolumeSpecName "ovn-data-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 16:10:56 crc kubenswrapper[4762]: I1009 16:10:56.327903 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1df9a5a3-088f-428d-873a-30b7b82507e9-kube-api-access-j2cw8" (OuterVolumeSpecName: "kube-api-access-j2cw8") pod "1df9a5a3-088f-428d-873a-30b7b82507e9" (UID: "1df9a5a3-088f-428d-873a-30b7b82507e9"). InnerVolumeSpecName "kube-api-access-j2cw8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 16:10:56 crc kubenswrapper[4762]: I1009 16:10:56.346214 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ec7d5c9e-6e84-4d3d-911b-2e8899e052cc" (OuterVolumeSpecName: "ovn-data") pod "1df9a5a3-088f-428d-873a-30b7b82507e9" (UID: "1df9a5a3-088f-428d-873a-30b7b82507e9"). InnerVolumeSpecName "pvc-ec7d5c9e-6e84-4d3d-911b-2e8899e052cc". 
PluginName "kubernetes.io/csi", VolumeGidValue "" Oct 09 16:10:56 crc kubenswrapper[4762]: I1009 16:10:56.422653 4762 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-ec7d5c9e-6e84-4d3d-911b-2e8899e052cc\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ec7d5c9e-6e84-4d3d-911b-2e8899e052cc\") on node \"crc\" " Oct 09 16:10:56 crc kubenswrapper[4762]: I1009 16:10:56.422718 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j2cw8\" (UniqueName: \"kubernetes.io/projected/1df9a5a3-088f-428d-873a-30b7b82507e9-kube-api-access-j2cw8\") on node \"crc\" DevicePath \"\"" Oct 09 16:10:56 crc kubenswrapper[4762]: I1009 16:10:56.422733 4762 reconciler_common.go:293] "Volume detached for volume \"ovn-data-cert\" (UniqueName: \"kubernetes.io/secret/1df9a5a3-088f-428d-873a-30b7b82507e9-ovn-data-cert\") on node \"crc\" DevicePath \"\"" Oct 09 16:10:56 crc kubenswrapper[4762]: I1009 16:10:56.454431 4762 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... Oct 09 16:10:56 crc kubenswrapper[4762]: I1009 16:10:56.454625 4762 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-ec7d5c9e-6e84-4d3d-911b-2e8899e052cc" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ec7d5c9e-6e84-4d3d-911b-2e8899e052cc") on node "crc" Oct 09 16:10:56 crc kubenswrapper[4762]: I1009 16:10:56.524806 4762 reconciler_common.go:293] "Volume detached for volume \"pvc-ec7d5c9e-6e84-4d3d-911b-2e8899e052cc\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ec7d5c9e-6e84-4d3d-911b-2e8899e052cc\") on node \"crc\" DevicePath \"\"" Oct 09 16:10:57 crc kubenswrapper[4762]: I1009 16:10:57.188915 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-copy-data" Oct 09 16:10:57 crc kubenswrapper[4762]: I1009 16:10:57.220750 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-copy-data"] Oct 09 16:10:57 crc kubenswrapper[4762]: I1009 16:10:57.238461 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-copy-data"] Oct 09 16:10:58 crc kubenswrapper[4762]: I1009 16:10:58.978866 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1df9a5a3-088f-428d-873a-30b7b82507e9" path="/var/lib/kubelet/pods/1df9a5a3-088f-428d-873a-30b7b82507e9/volumes" Oct 09 16:11:39 crc kubenswrapper[4762]: I1009 16:11:39.863179 4762 scope.go:117] "RemoveContainer" containerID="79be7ffe98523fed7899eef46ecc9d0bbe47f0a54c1f2cce7eafe7eb8f6dd344" Oct 09 16:12:02 crc kubenswrapper[4762]: I1009 16:12:02.804606 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-wqwdn/must-gather-rgxb2"] Oct 09 16:12:02 crc kubenswrapper[4762]: E1009 16:12:02.805985 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b69b59e3-a47e-4e15-9439-f8a072154016" containerName="extract-utilities" Oct 09 16:12:02 crc kubenswrapper[4762]: I1009 16:12:02.806006 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="b69b59e3-a47e-4e15-9439-f8a072154016" containerName="extract-utilities" Oct 09 16:12:02 crc kubenswrapper[4762]: E1009 16:12:02.806031 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c40376a5-9d4d-4ad5-9e17-d008f4b70e23" containerName="extract-utilities" Oct 09 16:12:02 crc kubenswrapper[4762]: I1009 16:12:02.806043 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="c40376a5-9d4d-4ad5-9e17-d008f4b70e23" containerName="extract-utilities" Oct 09 16:12:02 crc kubenswrapper[4762]: E1009 16:12:02.806090 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="461c7940-1521-4400-8973-25f23794ccc6" containerName="nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell1" Oct 09 16:12:02 crc kubenswrapper[4762]: I1009 16:12:02.806101 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="461c7940-1521-4400-8973-25f23794ccc6" containerName="nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell1" Oct 09 16:12:02 crc kubenswrapper[4762]: E1009 16:12:02.806117 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84d69cfe-07e8-45e6-95b4-05bf716a658c" containerName="adoption" Oct 09 16:12:02 crc kubenswrapper[4762]: I1009 16:12:02.806125 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="84d69cfe-07e8-45e6-95b4-05bf716a658c" containerName="adoption" Oct 09 16:12:02 crc kubenswrapper[4762]: E1009 16:12:02.806133 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1df9a5a3-088f-428d-873a-30b7b82507e9" containerName="adoption" Oct 09 16:12:02 crc kubenswrapper[4762]: I1009 16:12:02.806141 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="1df9a5a3-088f-428d-873a-30b7b82507e9" containerName="adoption" Oct 09 16:12:02 crc kubenswrapper[4762]: E1009 16:12:02.806163 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b69b59e3-a47e-4e15-9439-f8a072154016" containerName="extract-content" Oct 09 16:12:02 crc kubenswrapper[4762]: I1009 16:12:02.806171 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="b69b59e3-a47e-4e15-9439-f8a072154016" containerName="extract-content" Oct 09 16:12:02 crc kubenswrapper[4762]: E1009 16:12:02.806188 4762 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="c40376a5-9d4d-4ad5-9e17-d008f4b70e23" containerName="registry-server" Oct 09 16:12:02 crc kubenswrapper[4762]: I1009 16:12:02.806196 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="c40376a5-9d4d-4ad5-9e17-d008f4b70e23" containerName="registry-server" Oct 09 16:12:02 crc kubenswrapper[4762]: E1009 16:12:02.806215 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b69b59e3-a47e-4e15-9439-f8a072154016" containerName="registry-server" Oct 09 16:12:02 crc kubenswrapper[4762]: I1009 16:12:02.806223 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="b69b59e3-a47e-4e15-9439-f8a072154016" containerName="registry-server" Oct 09 16:12:02 crc kubenswrapper[4762]: E1009 16:12:02.806239 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c40376a5-9d4d-4ad5-9e17-d008f4b70e23" containerName="extract-content" Oct 09 16:12:02 crc kubenswrapper[4762]: I1009 16:12:02.806246 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="c40376a5-9d4d-4ad5-9e17-d008f4b70e23" containerName="extract-content" Oct 09 16:12:02 crc kubenswrapper[4762]: I1009 16:12:02.806520 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="461c7940-1521-4400-8973-25f23794ccc6" containerName="nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell1" Oct 09 16:12:02 crc kubenswrapper[4762]: I1009 16:12:02.806544 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="1df9a5a3-088f-428d-873a-30b7b82507e9" containerName="adoption" Oct 09 16:12:02 crc kubenswrapper[4762]: I1009 16:12:02.806561 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="84d69cfe-07e8-45e6-95b4-05bf716a658c" containerName="adoption" Oct 09 16:12:02 crc kubenswrapper[4762]: I1009 16:12:02.806575 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="c40376a5-9d4d-4ad5-9e17-d008f4b70e23" containerName="registry-server" Oct 09 16:12:02 crc kubenswrapper[4762]: I1009 16:12:02.806595 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="b69b59e3-a47e-4e15-9439-f8a072154016" containerName="registry-server" Oct 09 16:12:02 crc kubenswrapper[4762]: I1009 16:12:02.808246 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-wqwdn/must-gather-rgxb2" Oct 09 16:12:02 crc kubenswrapper[4762]: I1009 16:12:02.810413 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-wqwdn"/"openshift-service-ca.crt" Oct 09 16:12:02 crc kubenswrapper[4762]: I1009 16:12:02.810570 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-wqwdn"/"kube-root-ca.crt" Oct 09 16:12:02 crc kubenswrapper[4762]: I1009 16:12:02.810717 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-wqwdn"/"default-dockercfg-r4tjt" Oct 09 16:12:02 crc kubenswrapper[4762]: I1009 16:12:02.819150 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-wqwdn/must-gather-rgxb2"] Oct 09 16:12:02 crc kubenswrapper[4762]: I1009 16:12:02.894148 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/91ed9013-99d2-4cb1-a5d5-50782c7e1de6-must-gather-output\") pod \"must-gather-rgxb2\" (UID: \"91ed9013-99d2-4cb1-a5d5-50782c7e1de6\") " pod="openshift-must-gather-wqwdn/must-gather-rgxb2" Oct 09 16:12:02 crc kubenswrapper[4762]: I1009 16:12:02.894345 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5xcgd\" (UniqueName: \"kubernetes.io/projected/91ed9013-99d2-4cb1-a5d5-50782c7e1de6-kube-api-access-5xcgd\") pod \"must-gather-rgxb2\" (UID: \"91ed9013-99d2-4cb1-a5d5-50782c7e1de6\") " pod="openshift-must-gather-wqwdn/must-gather-rgxb2" Oct 09 16:12:02 crc kubenswrapper[4762]: I1009 16:12:02.995402 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/91ed9013-99d2-4cb1-a5d5-50782c7e1de6-must-gather-output\") pod \"must-gather-rgxb2\" (UID: \"91ed9013-99d2-4cb1-a5d5-50782c7e1de6\") " pod="openshift-must-gather-wqwdn/must-gather-rgxb2" Oct 09 16:12:02 crc kubenswrapper[4762]: I1009 16:12:02.996021 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/91ed9013-99d2-4cb1-a5d5-50782c7e1de6-must-gather-output\") pod \"must-gather-rgxb2\" (UID: \"91ed9013-99d2-4cb1-a5d5-50782c7e1de6\") " pod="openshift-must-gather-wqwdn/must-gather-rgxb2" Oct 09 16:12:02 crc kubenswrapper[4762]: I1009 16:12:02.997032 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5xcgd\" (UniqueName: \"kubernetes.io/projected/91ed9013-99d2-4cb1-a5d5-50782c7e1de6-kube-api-access-5xcgd\") pod \"must-gather-rgxb2\" (UID: \"91ed9013-99d2-4cb1-a5d5-50782c7e1de6\") " pod="openshift-must-gather-wqwdn/must-gather-rgxb2" Oct 09 16:12:03 crc kubenswrapper[4762]: I1009 16:12:03.019267 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5xcgd\" (UniqueName: \"kubernetes.io/projected/91ed9013-99d2-4cb1-a5d5-50782c7e1de6-kube-api-access-5xcgd\") pod \"must-gather-rgxb2\" (UID: \"91ed9013-99d2-4cb1-a5d5-50782c7e1de6\") " pod="openshift-must-gather-wqwdn/must-gather-rgxb2" Oct 09 16:12:03 crc kubenswrapper[4762]: I1009 16:12:03.137126 4762 util.go:30] "No sandbox for pod can be found. 
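
The three-step pattern above for each of must-gather-rgxb2's volumes (VerifyControllerAttachedVolume started, MountVolume started, MountVolume.SetUp succeeded) is the kubelet volume manager reconciling desired state (the volumes a scheduled pod declares) against actual state (what is currently mounted). A toy version of that loop, showing only the control-flow shape; the real logic lives in reconciler_common.go and operation_generator.go and also handles attach/detach and unmount:

    package main

    import "fmt"

    type volume struct{ name, plugin string }

    // reconcile mounts anything in the desired set that is not yet mounted.
    func reconcile(desired []volume, mounted map[string]bool) {
        for _, v := range desired {
            if mounted[v.name] {
                continue // already mounted; nothing to do this pass
            }
            fmt.Printf("VerifyControllerAttachedVolume started for volume %q (%s)\n", v.name, v.plugin)
            fmt.Printf("MountVolume.SetUp succeeded for volume %q\n", v.name)
            mounted[v.name] = true
        }
    }

    func main() {
        desired := []volume{
            {"must-gather-output", "kubernetes.io/empty-dir"},
            {"kube-api-access-5xcgd", "kubernetes.io/projected"},
        }
        reconcile(desired, map[string]bool{})
    }
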
Need to start a new one" pod="openshift-must-gather-wqwdn/must-gather-rgxb2"
Oct 09 16:12:03 crc kubenswrapper[4762]: I1009 16:12:03.672111 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-wqwdn/must-gather-rgxb2"]
Oct 09 16:12:03 crc kubenswrapper[4762]: I1009 16:12:03.678297 4762 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Oct 09 16:12:03 crc kubenswrapper[4762]: I1009 16:12:03.901137 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wqwdn/must-gather-rgxb2" event={"ID":"91ed9013-99d2-4cb1-a5d5-50782c7e1de6","Type":"ContainerStarted","Data":"89c4b400a8bce6355a6881d991991c4f1fc0779141072478932baf287c8ec774"}
Oct 09 16:12:08 crc kubenswrapper[4762]: I1009 16:12:08.952980 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wqwdn/must-gather-rgxb2" event={"ID":"91ed9013-99d2-4cb1-a5d5-50782c7e1de6","Type":"ContainerStarted","Data":"f669307b065a9cd7a1f74e7173ce08d354e5bca6cd1cd356cf3273ee8fa4892f"}
Oct 09 16:12:08 crc kubenswrapper[4762]: I1009 16:12:08.953396 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wqwdn/must-gather-rgxb2" event={"ID":"91ed9013-99d2-4cb1-a5d5-50782c7e1de6","Type":"ContainerStarted","Data":"9017956e70993ef91cc3d8c3525429b6552139170f028d01d2040e77fe5a9a24"}
Oct 09 16:12:08 crc kubenswrapper[4762]: I1009 16:12:08.978920 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-wqwdn/must-gather-rgxb2" podStartSLOduration=2.861522534 podStartE2EDuration="6.978895127s" podCreationTimestamp="2025-10-09 16:12:02 +0000 UTC" firstStartedPulling="2025-10-09 16:12:03.678007476 +0000 UTC m=+9999.451798525" lastFinishedPulling="2025-10-09 16:12:07.795380079 +0000 UTC m=+10003.569171118" observedRunningTime="2025-10-09 16:12:08.969185144 +0000 UTC m=+10004.742976183" watchObservedRunningTime="2025-10-09 16:12:08.978895127 +0000 UTC m=+10004.752686166"
Oct 09 16:12:12 crc kubenswrapper[4762]: I1009 16:12:12.493374 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-wqwdn/crc-debug-7d5dm"]
Oct 09 16:12:12 crc kubenswrapper[4762]: I1009 16:12:12.495793 4762 util.go:30] "No sandbox for pod can be found.
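
The pod_startup_latency_tracker entry above reports two numbers for must-gather-rgxb2: podStartE2EDuration (pod creation to observed running, 6.978895127s) and podStartSLOduration (2.861522534s). The figures are consistent with the SLO duration being the end-to-end duration minus the image-pull window, measured on the monotonic clock (the m=+... offsets). A quick check under that assumption:

    package main

    import "fmt"

    func main() {
        // Monotonic offsets (the "m=+..." values) from the log entry above.
        firstStartedPulling := 9999.451798525
        lastFinishedPulling := 10003.569171118
        e2e := 6.978895127 // podStartE2EDuration, seconds

        pull := lastFinishedPulling - firstStartedPulling // ~4.117372593s spent pulling images
        fmt.Printf("SLO duration ~= %.9f s\n", e2e-pull)  // ~2.861522534, matching podStartSLOduration
    }
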
Need to start a new one" pod="openshift-must-gather-wqwdn/crc-debug-7d5dm" Oct 09 16:12:12 crc kubenswrapper[4762]: I1009 16:12:12.520329 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/8005d37e-3ca6-4620-a465-111681023cbc-host\") pod \"crc-debug-7d5dm\" (UID: \"8005d37e-3ca6-4620-a465-111681023cbc\") " pod="openshift-must-gather-wqwdn/crc-debug-7d5dm" Oct 09 16:12:12 crc kubenswrapper[4762]: I1009 16:12:12.520454 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dgjv6\" (UniqueName: \"kubernetes.io/projected/8005d37e-3ca6-4620-a465-111681023cbc-kube-api-access-dgjv6\") pod \"crc-debug-7d5dm\" (UID: \"8005d37e-3ca6-4620-a465-111681023cbc\") " pod="openshift-must-gather-wqwdn/crc-debug-7d5dm" Oct 09 16:12:12 crc kubenswrapper[4762]: I1009 16:12:12.623052 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/8005d37e-3ca6-4620-a465-111681023cbc-host\") pod \"crc-debug-7d5dm\" (UID: \"8005d37e-3ca6-4620-a465-111681023cbc\") " pod="openshift-must-gather-wqwdn/crc-debug-7d5dm" Oct 09 16:12:12 crc kubenswrapper[4762]: I1009 16:12:12.623177 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/8005d37e-3ca6-4620-a465-111681023cbc-host\") pod \"crc-debug-7d5dm\" (UID: \"8005d37e-3ca6-4620-a465-111681023cbc\") " pod="openshift-must-gather-wqwdn/crc-debug-7d5dm" Oct 09 16:12:12 crc kubenswrapper[4762]: I1009 16:12:12.623196 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dgjv6\" (UniqueName: \"kubernetes.io/projected/8005d37e-3ca6-4620-a465-111681023cbc-kube-api-access-dgjv6\") pod \"crc-debug-7d5dm\" (UID: \"8005d37e-3ca6-4620-a465-111681023cbc\") " pod="openshift-must-gather-wqwdn/crc-debug-7d5dm" Oct 09 16:12:12 crc kubenswrapper[4762]: I1009 16:12:12.660015 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dgjv6\" (UniqueName: \"kubernetes.io/projected/8005d37e-3ca6-4620-a465-111681023cbc-kube-api-access-dgjv6\") pod \"crc-debug-7d5dm\" (UID: \"8005d37e-3ca6-4620-a465-111681023cbc\") " pod="openshift-must-gather-wqwdn/crc-debug-7d5dm" Oct 09 16:12:12 crc kubenswrapper[4762]: I1009 16:12:12.823298 4762 util.go:30] "No sandbox for pod can be found. 
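
The crc-debug pod above mounts only two volumes: a hostPath volume named "host" and the auto-injected kube-api-access-dgjv6 projected token volume. A sketch of the corresponding spec with the Kubernetes Go API; the image, command, and the hostPath path "/" are assumptions (the log confirms only the volume names and plugin types), and the container name comes from the later "container-00" entries:

    package main

    import (
        "fmt"

        corev1 "k8s.io/api/core/v1"
        metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
    )

    func main() {
        pod := corev1.Pod{
            ObjectMeta: metav1.ObjectMeta{
                Name:      "crc-debug-7d5dm",
                Namespace: "openshift-must-gather-wqwdn",
            },
            Spec: corev1.PodSpec{
                Volumes: []corev1.Volume{{
                    Name: "host",
                    VolumeSource: corev1.VolumeSource{
                        // Assumed: debug pods typically mount the node root filesystem.
                        HostPath: &corev1.HostPathVolumeSource{Path: "/"},
                    },
                }},
                Containers: []corev1.Container{{
                    Name:         "container-00",
                    Image:        "registry.example/tools:latest", // hypothetical image
                    Command:      []string{"chroot", "/host"},     // hypothetical command
                    VolumeMounts: []corev1.VolumeMount{{Name: "host", MountPath: "/host"}},
                }},
            },
        }
        fmt.Println(pod.Namespace + "/" + pod.Name)
    }
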
Need to start a new one" pod="openshift-must-gather-wqwdn/crc-debug-7d5dm" Oct 09 16:12:13 crc kubenswrapper[4762]: I1009 16:12:13.028342 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wqwdn/crc-debug-7d5dm" event={"ID":"8005d37e-3ca6-4620-a465-111681023cbc","Type":"ContainerStarted","Data":"0d241d64a17bb6c96d962bf58a6cbdc0371afc5be4b806b9a2e487e6c7604d5d"} Oct 09 16:12:28 crc kubenswrapper[4762]: I1009 16:12:28.228739 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wqwdn/crc-debug-7d5dm" event={"ID":"8005d37e-3ca6-4620-a465-111681023cbc","Type":"ContainerStarted","Data":"52622cc36b74d5f69545020d520e5f8084765d54d5ea732ec4b5ec29f10d232e"} Oct 09 16:12:28 crc kubenswrapper[4762]: I1009 16:12:28.251573 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-wqwdn/crc-debug-7d5dm" podStartSLOduration=1.418618623 podStartE2EDuration="16.251557634s" podCreationTimestamp="2025-10-09 16:12:12 +0000 UTC" firstStartedPulling="2025-10-09 16:12:12.890979166 +0000 UTC m=+10008.664770195" lastFinishedPulling="2025-10-09 16:12:27.723918177 +0000 UTC m=+10023.497709206" observedRunningTime="2025-10-09 16:12:28.241727477 +0000 UTC m=+10024.015518516" watchObservedRunningTime="2025-10-09 16:12:28.251557634 +0000 UTC m=+10024.025348673" Oct 09 16:12:46 crc kubenswrapper[4762]: I1009 16:12:46.443292 4762 generic.go:334] "Generic (PLEG): container finished" podID="8005d37e-3ca6-4620-a465-111681023cbc" containerID="52622cc36b74d5f69545020d520e5f8084765d54d5ea732ec4b5ec29f10d232e" exitCode=0 Oct 09 16:12:46 crc kubenswrapper[4762]: I1009 16:12:46.443381 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wqwdn/crc-debug-7d5dm" event={"ID":"8005d37e-3ca6-4620-a465-111681023cbc","Type":"ContainerDied","Data":"52622cc36b74d5f69545020d520e5f8084765d54d5ea732ec4b5ec29f10d232e"} Oct 09 16:12:47 crc kubenswrapper[4762]: I1009 16:12:47.601780 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-wqwdn/crc-debug-7d5dm" Oct 09 16:12:47 crc kubenswrapper[4762]: I1009 16:12:47.639376 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-wqwdn/crc-debug-7d5dm"] Oct 09 16:12:47 crc kubenswrapper[4762]: I1009 16:12:47.639423 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dgjv6\" (UniqueName: \"kubernetes.io/projected/8005d37e-3ca6-4620-a465-111681023cbc-kube-api-access-dgjv6\") pod \"8005d37e-3ca6-4620-a465-111681023cbc\" (UID: \"8005d37e-3ca6-4620-a465-111681023cbc\") " Oct 09 16:12:47 crc kubenswrapper[4762]: I1009 16:12:47.639492 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/8005d37e-3ca6-4620-a465-111681023cbc-host\") pod \"8005d37e-3ca6-4620-a465-111681023cbc\" (UID: \"8005d37e-3ca6-4620-a465-111681023cbc\") " Oct 09 16:12:47 crc kubenswrapper[4762]: I1009 16:12:47.639608 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8005d37e-3ca6-4620-a465-111681023cbc-host" (OuterVolumeSpecName: "host") pod "8005d37e-3ca6-4620-a465-111681023cbc" (UID: "8005d37e-3ca6-4620-a465-111681023cbc"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 16:12:47 crc kubenswrapper[4762]: I1009 16:12:47.640397 4762 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/8005d37e-3ca6-4620-a465-111681023cbc-host\") on node \"crc\" DevicePath \"\"" Oct 09 16:12:47 crc kubenswrapper[4762]: I1009 16:12:47.647800 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8005d37e-3ca6-4620-a465-111681023cbc-kube-api-access-dgjv6" (OuterVolumeSpecName: "kube-api-access-dgjv6") pod "8005d37e-3ca6-4620-a465-111681023cbc" (UID: "8005d37e-3ca6-4620-a465-111681023cbc"). InnerVolumeSpecName "kube-api-access-dgjv6". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 16:12:47 crc kubenswrapper[4762]: I1009 16:12:47.655350 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-wqwdn/crc-debug-7d5dm"] Oct 09 16:12:47 crc kubenswrapper[4762]: I1009 16:12:47.742762 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dgjv6\" (UniqueName: \"kubernetes.io/projected/8005d37e-3ca6-4620-a465-111681023cbc-kube-api-access-dgjv6\") on node \"crc\" DevicePath \"\"" Oct 09 16:12:48 crc kubenswrapper[4762]: I1009 16:12:48.467788 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0d241d64a17bb6c96d962bf58a6cbdc0371afc5be4b806b9a2e487e6c7604d5d" Oct 09 16:12:48 crc kubenswrapper[4762]: I1009 16:12:48.467844 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-wqwdn/crc-debug-7d5dm" Oct 09 16:12:48 crc kubenswrapper[4762]: I1009 16:12:48.808018 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-wqwdn/crc-debug-xm8cj"] Oct 09 16:12:48 crc kubenswrapper[4762]: E1009 16:12:48.808623 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8005d37e-3ca6-4620-a465-111681023cbc" containerName="container-00" Oct 09 16:12:48 crc kubenswrapper[4762]: I1009 16:12:48.808696 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="8005d37e-3ca6-4620-a465-111681023cbc" containerName="container-00" Oct 09 16:12:48 crc kubenswrapper[4762]: I1009 16:12:48.809022 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="8005d37e-3ca6-4620-a465-111681023cbc" containerName="container-00" Oct 09 16:12:48 crc kubenswrapper[4762]: I1009 16:12:48.810115 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-wqwdn/crc-debug-xm8cj" Oct 09 16:12:48 crc kubenswrapper[4762]: I1009 16:12:48.865457 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qgdxr\" (UniqueName: \"kubernetes.io/projected/01048d59-7eca-4326-bc71-1a2fa9038189-kube-api-access-qgdxr\") pod \"crc-debug-xm8cj\" (UID: \"01048d59-7eca-4326-bc71-1a2fa9038189\") " pod="openshift-must-gather-wqwdn/crc-debug-xm8cj" Oct 09 16:12:48 crc kubenswrapper[4762]: I1009 16:12:48.865721 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/01048d59-7eca-4326-bc71-1a2fa9038189-host\") pod \"crc-debug-xm8cj\" (UID: \"01048d59-7eca-4326-bc71-1a2fa9038189\") " pod="openshift-must-gather-wqwdn/crc-debug-xm8cj" Oct 09 16:12:48 crc kubenswrapper[4762]: I1009 16:12:48.968451 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/01048d59-7eca-4326-bc71-1a2fa9038189-host\") pod \"crc-debug-xm8cj\" (UID: \"01048d59-7eca-4326-bc71-1a2fa9038189\") " pod="openshift-must-gather-wqwdn/crc-debug-xm8cj" Oct 09 16:12:48 crc kubenswrapper[4762]: I1009 16:12:48.968626 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/01048d59-7eca-4326-bc71-1a2fa9038189-host\") pod \"crc-debug-xm8cj\" (UID: \"01048d59-7eca-4326-bc71-1a2fa9038189\") " pod="openshift-must-gather-wqwdn/crc-debug-xm8cj" Oct 09 16:12:48 crc kubenswrapper[4762]: I1009 16:12:48.968674 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qgdxr\" (UniqueName: \"kubernetes.io/projected/01048d59-7eca-4326-bc71-1a2fa9038189-kube-api-access-qgdxr\") pod \"crc-debug-xm8cj\" (UID: \"01048d59-7eca-4326-bc71-1a2fa9038189\") " pod="openshift-must-gather-wqwdn/crc-debug-xm8cj" Oct 09 16:12:48 crc kubenswrapper[4762]: I1009 16:12:48.978705 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8005d37e-3ca6-4620-a465-111681023cbc" path="/var/lib/kubelet/pods/8005d37e-3ca6-4620-a465-111681023cbc/volumes" Oct 09 16:12:49 crc kubenswrapper[4762]: I1009 16:12:49.204129 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qgdxr\" (UniqueName: \"kubernetes.io/projected/01048d59-7eca-4326-bc71-1a2fa9038189-kube-api-access-qgdxr\") pod \"crc-debug-xm8cj\" (UID: \"01048d59-7eca-4326-bc71-1a2fa9038189\") " pod="openshift-must-gather-wqwdn/crc-debug-xm8cj" Oct 09 16:12:49 crc kubenswrapper[4762]: I1009 16:12:49.429553 4762 util.go:30] "No sandbox for pod can be found. 
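
The kube-api-access-* volumes that keep appearing (kube-api-access-qgdxr here) are the projected service-account volumes injected into every pod: a bound token, the cluster CA bundle, and the namespace file. A sketch of that volume's shape in the Go API; the token expiration and item layout follow the upstream defaults and are assumptions, since the log records only the volume name and the "kubernetes.io/projected" plugin:

    package main

    import (
        "fmt"

        corev1 "k8s.io/api/core/v1"
    )

    func main() {
        exp := int64(3607) // assumed default bound-token lifetime
        vol := corev1.Volume{
            Name: "kube-api-access-qgdxr",
            VolumeSource: corev1.VolumeSource{
                Projected: &corev1.ProjectedVolumeSource{
                    Sources: []corev1.VolumeProjection{
                        {ServiceAccountToken: &corev1.ServiceAccountTokenProjection{
                            Path:              "token",
                            ExpirationSeconds: &exp,
                        }},
                        {ConfigMap: &corev1.ConfigMapProjection{
                            // The "kube-root-ca.crt" ConfigMap the reflector cached earlier.
                            LocalObjectReference: corev1.LocalObjectReference{Name: "kube-root-ca.crt"},
                            Items:                []corev1.KeyToPath{{Key: "ca.crt", Path: "ca.crt"}},
                        }},
                        {DownwardAPI: &corev1.DownwardAPIProjection{
                            Items: []corev1.DownwardAPIVolumeFile{{
                                Path:     "namespace",
                                FieldRef: &corev1.ObjectFieldSelector{FieldPath: "metadata.namespace"},
                            }},
                        }},
                    },
                },
            },
        }
        fmt.Println(vol.Name)
    }
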
Need to start a new one" pod="openshift-must-gather-wqwdn/crc-debug-xm8cj" Oct 09 16:12:49 crc kubenswrapper[4762]: W1009 16:12:49.463219 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod01048d59_7eca_4326_bc71_1a2fa9038189.slice/crio-c007f18973042bd11caa822a86118184d7402f2d3513818da41a165150b84d44 WatchSource:0}: Error finding container c007f18973042bd11caa822a86118184d7402f2d3513818da41a165150b84d44: Status 404 returned error can't find the container with id c007f18973042bd11caa822a86118184d7402f2d3513818da41a165150b84d44 Oct 09 16:12:49 crc kubenswrapper[4762]: I1009 16:12:49.500349 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wqwdn/crc-debug-xm8cj" event={"ID":"01048d59-7eca-4326-bc71-1a2fa9038189","Type":"ContainerStarted","Data":"c007f18973042bd11caa822a86118184d7402f2d3513818da41a165150b84d44"} Oct 09 16:12:50 crc kubenswrapper[4762]: I1009 16:12:50.512940 4762 generic.go:334] "Generic (PLEG): container finished" podID="01048d59-7eca-4326-bc71-1a2fa9038189" containerID="f8c06fe830ae7f93cd64f90241892c492f03b046eae891398c3bdd6931ad28c7" exitCode=1 Oct 09 16:12:50 crc kubenswrapper[4762]: I1009 16:12:50.513042 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wqwdn/crc-debug-xm8cj" event={"ID":"01048d59-7eca-4326-bc71-1a2fa9038189","Type":"ContainerDied","Data":"f8c06fe830ae7f93cd64f90241892c492f03b046eae891398c3bdd6931ad28c7"} Oct 09 16:12:50 crc kubenswrapper[4762]: I1009 16:12:50.558519 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-wqwdn/crc-debug-xm8cj"] Oct 09 16:12:50 crc kubenswrapper[4762]: I1009 16:12:50.569672 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-wqwdn/crc-debug-xm8cj"] Oct 09 16:12:51 crc kubenswrapper[4762]: I1009 16:12:51.648159 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-wqwdn/crc-debug-xm8cj" Oct 09 16:12:51 crc kubenswrapper[4762]: I1009 16:12:51.730731 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/01048d59-7eca-4326-bc71-1a2fa9038189-host\") pod \"01048d59-7eca-4326-bc71-1a2fa9038189\" (UID: \"01048d59-7eca-4326-bc71-1a2fa9038189\") " Oct 09 16:12:51 crc kubenswrapper[4762]: I1009 16:12:51.730830 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/01048d59-7eca-4326-bc71-1a2fa9038189-host" (OuterVolumeSpecName: "host") pod "01048d59-7eca-4326-bc71-1a2fa9038189" (UID: "01048d59-7eca-4326-bc71-1a2fa9038189"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 16:12:51 crc kubenswrapper[4762]: I1009 16:12:51.731069 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qgdxr\" (UniqueName: \"kubernetes.io/projected/01048d59-7eca-4326-bc71-1a2fa9038189-kube-api-access-qgdxr\") pod \"01048d59-7eca-4326-bc71-1a2fa9038189\" (UID: \"01048d59-7eca-4326-bc71-1a2fa9038189\") " Oct 09 16:12:51 crc kubenswrapper[4762]: I1009 16:12:51.731820 4762 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/01048d59-7eca-4326-bc71-1a2fa9038189-host\") on node \"crc\" DevicePath \"\"" Oct 09 16:12:51 crc kubenswrapper[4762]: I1009 16:12:51.743346 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01048d59-7eca-4326-bc71-1a2fa9038189-kube-api-access-qgdxr" (OuterVolumeSpecName: "kube-api-access-qgdxr") pod "01048d59-7eca-4326-bc71-1a2fa9038189" (UID: "01048d59-7eca-4326-bc71-1a2fa9038189"). InnerVolumeSpecName "kube-api-access-qgdxr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 16:12:51 crc kubenswrapper[4762]: I1009 16:12:51.833487 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qgdxr\" (UniqueName: \"kubernetes.io/projected/01048d59-7eca-4326-bc71-1a2fa9038189-kube-api-access-qgdxr\") on node \"crc\" DevicePath \"\"" Oct 09 16:12:52 crc kubenswrapper[4762]: I1009 16:12:52.536314 4762 scope.go:117] "RemoveContainer" containerID="f8c06fe830ae7f93cd64f90241892c492f03b046eae891398c3bdd6931ad28c7" Oct 09 16:12:52 crc kubenswrapper[4762]: I1009 16:12:52.536359 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-wqwdn/crc-debug-xm8cj" Oct 09 16:12:52 crc kubenswrapper[4762]: I1009 16:12:52.977086 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01048d59-7eca-4326-bc71-1a2fa9038189" path="/var/lib/kubelet/pods/01048d59-7eca-4326-bc71-1a2fa9038189/volumes" Oct 09 16:13:11 crc kubenswrapper[4762]: I1009 16:13:11.969606 4762 patch_prober.go:28] interesting pod/machine-config-daemon-5v6hv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 16:13:11 crc kubenswrapper[4762]: I1009 16:13:11.970242 4762 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 16:13:34 crc kubenswrapper[4762]: I1009 16:13:34.432680 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_87c8f7e7-8d9d-49aa-aa9f-9702e83e9331/init-config-reloader/0.log" Oct 09 16:13:34 crc kubenswrapper[4762]: I1009 16:13:34.665859 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_87c8f7e7-8d9d-49aa-aa9f-9702e83e9331/init-config-reloader/0.log" Oct 09 16:13:34 crc kubenswrapper[4762]: I1009 16:13:34.708126 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_87c8f7e7-8d9d-49aa-aa9f-9702e83e9331/alertmanager/0.log" Oct 09 16:13:34 crc kubenswrapper[4762]: I1009 16:13:34.763852 4762 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_87c8f7e7-8d9d-49aa-aa9f-9702e83e9331/config-reloader/0.log" Oct 09 16:13:34 crc kubenswrapper[4762]: I1009 16:13:34.914095 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_aodh-0_ca316e06-1d0d-4b25-88ee-ca4c74f0f48c/aodh-api/0.log" Oct 09 16:13:34 crc kubenswrapper[4762]: I1009 16:13:34.978992 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_aodh-0_ca316e06-1d0d-4b25-88ee-ca4c74f0f48c/aodh-evaluator/0.log" Oct 09 16:13:35 crc kubenswrapper[4762]: I1009 16:13:35.143790 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_aodh-0_ca316e06-1d0d-4b25-88ee-ca4c74f0f48c/aodh-notifier/0.log" Oct 09 16:13:35 crc kubenswrapper[4762]: I1009 16:13:35.175897 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_aodh-0_ca316e06-1d0d-4b25-88ee-ca4c74f0f48c/aodh-listener/0.log" Oct 09 16:13:35 crc kubenswrapper[4762]: I1009 16:13:35.374758 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-68978b84fd-g7kjs_1906dda0-58cf-428f-8c14-6d0a7385cc1f/barbican-api/0.log" Oct 09 16:13:35 crc kubenswrapper[4762]: I1009 16:13:35.414451 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-68978b84fd-g7kjs_1906dda0-58cf-428f-8c14-6d0a7385cc1f/barbican-api-log/0.log" Oct 09 16:13:35 crc kubenswrapper[4762]: I1009 16:13:35.583383 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-6dcc6c5b54-fpnl8_d7d04d08-8a53-47ee-9a23-17a13763b032/barbican-keystone-listener/0.log" Oct 09 16:13:35 crc kubenswrapper[4762]: I1009 16:13:35.645229 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-6dcc6c5b54-fpnl8_d7d04d08-8a53-47ee-9a23-17a13763b032/barbican-keystone-listener-log/0.log" Oct 09 16:13:35 crc kubenswrapper[4762]: I1009 16:13:35.777440 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-65686d9dff-wlj6h_6e87b828-195d-4441-9e8a-5075c9e29b05/barbican-worker/0.log" Oct 09 16:13:35 crc kubenswrapper[4762]: I1009 16:13:35.846894 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-65686d9dff-wlj6h_6e87b828-195d-4441-9e8a-5075c9e29b05/barbican-worker-log/0.log" Oct 09 16:13:36 crc kubenswrapper[4762]: I1009 16:13:36.020961 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-openstack-openstack-cell1-ft7tq_74f4464f-713c-4ace-9657-31e83a483ae7/bootstrap-openstack-openstack-cell1/0.log" Oct 09 16:13:36 crc kubenswrapper[4762]: I1009 16:13:36.245792 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_cb316210-6500-47c6-956c-58c488a90b28/ceilometer-central-agent/0.log" Oct 09 16:13:36 crc kubenswrapper[4762]: I1009 16:13:36.250025 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_cb316210-6500-47c6-956c-58c488a90b28/ceilometer-notification-agent/0.log" Oct 09 16:13:36 crc kubenswrapper[4762]: I1009 16:13:36.321758 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_cb316210-6500-47c6-956c-58c488a90b28/proxy-httpd/0.log" Oct 09 16:13:36 crc kubenswrapper[4762]: I1009 16:13:36.431553 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_cb316210-6500-47c6-956c-58c488a90b28/sg-core/0.log" Oct 09 16:13:36 crc kubenswrapper[4762]: I1009 16:13:36.523322 4762 
log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceph-client-openstack-openstack-cell1-gp9tb_9aea25b2-f748-46ba-9f78-821c7d1a5451/ceph-client-openstack-openstack-cell1/0.log" Oct 09 16:13:36 crc kubenswrapper[4762]: I1009 16:13:36.736411 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_967cd183-8741-455b-ba80-eda15fdc1681/cinder-api-log/0.log" Oct 09 16:13:36 crc kubenswrapper[4762]: I1009 16:13:36.815981 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_967cd183-8741-455b-ba80-eda15fdc1681/cinder-api/0.log" Oct 09 16:13:37 crc kubenswrapper[4762]: I1009 16:13:37.076208 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-backup-0_ce78c95b-85d3-453f-9922-2937d0e578f8/cinder-backup/0.log" Oct 09 16:13:37 crc kubenswrapper[4762]: I1009 16:13:37.078319 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-backup-0_ce78c95b-85d3-453f-9922-2937d0e578f8/probe/0.log" Oct 09 16:13:37 crc kubenswrapper[4762]: I1009 16:13:37.301185 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_a98fcbc6-94cf-4c55-8cf2-79d469088e45/cinder-scheduler/0.log" Oct 09 16:13:37 crc kubenswrapper[4762]: I1009 16:13:37.389949 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_a98fcbc6-94cf-4c55-8cf2-79d469088e45/probe/0.log" Oct 09 16:13:37 crc kubenswrapper[4762]: I1009 16:13:37.485404 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-volume-volume1-0_6a852936-bff5-48f8-9336-cffaeb34743f/cinder-volume/0.log" Oct 09 16:13:37 crc kubenswrapper[4762]: I1009 16:13:37.588274 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-volume-volume1-0_6a852936-bff5-48f8-9336-cffaeb34743f/probe/0.log" Oct 09 16:13:37 crc kubenswrapper[4762]: I1009 16:13:37.736200 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-openstack-openstack-cell1-4mhld_3e25d87c-a7f5-4c9d-b1e4-552517e15174/configure-network-openstack-openstack-cell1/0.log" Oct 09 16:13:37 crc kubenswrapper[4762]: I1009 16:13:37.818991 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-openstack-openstack-cell1-zfcmt_0b649b90-58af-4419-8433-f83b4e793b9f/configure-os-openstack-openstack-cell1/0.log" Oct 09 16:13:37 crc kubenswrapper[4762]: I1009 16:13:37.931213 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-74fc4ff4bc-5x75j_5c397658-db91-4151-b5de-2aac1e540cf1/init/0.log" Oct 09 16:13:38 crc kubenswrapper[4762]: I1009 16:13:38.117077 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-74fc4ff4bc-5x75j_5c397658-db91-4151-b5de-2aac1e540cf1/init/0.log" Oct 09 16:13:38 crc kubenswrapper[4762]: I1009 16:13:38.163934 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-74fc4ff4bc-5x75j_5c397658-db91-4151-b5de-2aac1e540cf1/dnsmasq-dns/0.log" Oct 09 16:13:38 crc kubenswrapper[4762]: I1009 16:13:38.257689 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-openstack-openstack-cell1-4dtq9_53168398-1447-4747-9b52-02f61f828508/download-cache-openstack-openstack-cell1/0.log" Oct 09 16:13:38 crc kubenswrapper[4762]: I1009 16:13:38.387198 4762 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_glance-default-external-api-0_9eab9237-9f33-4b78-8416-7b0aef0b1a18/glance-httpd/0.log" Oct 09 16:13:38 crc kubenswrapper[4762]: I1009 16:13:38.460676 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_9eab9237-9f33-4b78-8416-7b0aef0b1a18/glance-log/0.log" Oct 09 16:13:38 crc kubenswrapper[4762]: I1009 16:13:38.596342 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_66e2e5e7-f382-48a2-bea7-06768a36b7af/glance-httpd/0.log" Oct 09 16:13:38 crc kubenswrapper[4762]: I1009 16:13:38.661489 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_66e2e5e7-f382-48a2-bea7-06768a36b7af/glance-log/0.log" Oct 09 16:13:38 crc kubenswrapper[4762]: I1009 16:13:38.846716 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_heat-api-dfb6f84ff-tqggt_84222e59-350f-4965-9269-c7d697837f75/heat-api/0.log" Oct 09 16:13:39 crc kubenswrapper[4762]: I1009 16:13:39.018254 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_heat-cfnapi-575dbc5777-btq8r_81effce6-22e8-4a42-9087-decb53950618/heat-cfnapi/0.log" Oct 09 16:13:39 crc kubenswrapper[4762]: I1009 16:13:39.053851 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_heat-engine-64d559cb64-b8slk_7f131e3f-02b4-400e-82f3-2b7c22a93c6d/heat-engine/0.log" Oct 09 16:13:39 crc kubenswrapper[4762]: I1009 16:13:39.299760 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-6df886d647-szb9q_d418fd45-a3dc-46ca-8d2b-3e82ba33d483/horizon/0.log" Oct 09 16:13:39 crc kubenswrapper[4762]: I1009 16:13:39.375065 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-6df886d647-szb9q_d418fd45-a3dc-46ca-8d2b-3e82ba33d483/horizon-log/0.log" Oct 09 16:13:39 crc kubenswrapper[4762]: I1009 16:13:39.434548 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-openstack-openstack-cell1-sbxmm_d2d7c4de-054b-4396-984f-a0e55657c9d2/install-certs-openstack-openstack-cell1/0.log" Oct 09 16:13:39 crc kubenswrapper[4762]: I1009 16:13:39.595154 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-openstack-openstack-cell1-vxnnh_f4249fbb-12c1-4788-8fb6-5915e85139c1/install-os-openstack-openstack-cell1/0.log" Oct 09 16:13:39 crc kubenswrapper[4762]: I1009 16:13:39.838771 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29333701-x8pdl_7ceaf6c8-14e1-4b7a-b4be-7cc671d642bf/keystone-cron/0.log" Oct 09 16:13:39 crc kubenswrapper[4762]: I1009 16:13:39.864037 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-7f9d7865f9-j75xk_445db2dd-8394-428c-9859-34882f87cefa/keystone-api/0.log" Oct 09 16:13:39 crc kubenswrapper[4762]: I1009 16:13:39.951146 4762 scope.go:117] "RemoveContainer" containerID="12ba493a1654cf0aa22d0f7c60cb4a135afbb11d1f3787638a876893233c54b9" Oct 09 16:13:39 crc kubenswrapper[4762]: I1009 16:13:39.974020 4762 scope.go:117] "RemoveContainer" containerID="f166352d5ebdcd51be3836213c75a7d5a239535fc54581b3727c4e757a7d8363" Oct 09 16:13:40 crc kubenswrapper[4762]: I1009 16:13:40.006319 4762 scope.go:117] "RemoveContainer" containerID="8682d070a5d5a67e5a5e0894f6c274f275ab147c0bdcf1c8c37087feed75c247" Oct 09 16:13:40 crc kubenswrapper[4762]: I1009 16:13:40.026815 4762 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_keystone-cron-29333761-hmxz6_93a0a4a8-5710-4928-865e-ecb19bfdb7b0/keystone-cron/0.log" Oct 09 16:13:40 crc kubenswrapper[4762]: I1009 16:13:40.098391 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_4095c002-3425-4536-be08-4adb623d6b61/kube-state-metrics/0.log" Oct 09 16:13:40 crc kubenswrapper[4762]: I1009 16:13:40.243488 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-openstack-openstack-cell1-cczp4_6d4bed23-7613-448e-937a-c4d0467c3e57/libvirt-openstack-openstack-cell1/0.log" Oct 09 16:13:40 crc kubenswrapper[4762]: I1009 16:13:40.475525 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-api-0_4636ea01-4aef-4d23-b4fb-93426838eeac/manila-api-log/0.log" Oct 09 16:13:40 crc kubenswrapper[4762]: I1009 16:13:40.507753 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-api-0_4636ea01-4aef-4d23-b4fb-93426838eeac/manila-api/0.log" Oct 09 16:13:40 crc kubenswrapper[4762]: I1009 16:13:40.657362 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-scheduler-0_aaa6da22-9b2f-478f-9a49-c4fb27b61058/probe/0.log" Oct 09 16:13:40 crc kubenswrapper[4762]: I1009 16:13:40.666113 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-scheduler-0_aaa6da22-9b2f-478f-9a49-c4fb27b61058/manila-scheduler/0.log" Oct 09 16:13:40 crc kubenswrapper[4762]: I1009 16:13:40.724305 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-share-share1-0_681131ae-c6fd-457b-b4a2-5605d5e08f69/manila-share/0.log" Oct 09 16:13:40 crc kubenswrapper[4762]: I1009 16:13:40.855347 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-share-share1-0_681131ae-c6fd-457b-b4a2-5605d5e08f69/probe/0.log" Oct 09 16:13:41 crc kubenswrapper[4762]: I1009 16:13:41.218360 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-7c86b5d68f-bbzc9_28024491-18be-45b0-b38f-a5d3cc418127/neutron-httpd/0.log" Oct 09 16:13:41 crc kubenswrapper[4762]: I1009 16:13:41.237153 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-7c86b5d68f-bbzc9_28024491-18be-45b0-b38f-a5d3cc418127/neutron-api/0.log" Oct 09 16:13:41 crc kubenswrapper[4762]: I1009 16:13:41.500557 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-dhcp-openstack-openstack-cell1-v7kr8_c1c60f49-ddad-480e-b318-fbff83ab32a5/neutron-dhcp-openstack-openstack-cell1/0.log" Oct 09 16:13:41 crc kubenswrapper[4762]: I1009 16:13:41.625216 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-openstack-openstack-cell1-gn57g_4958a251-a361-4786-ad42-3474a924f291/neutron-metadata-openstack-openstack-cell1/0.log" Oct 09 16:13:41 crc kubenswrapper[4762]: I1009 16:13:41.853905 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-sriov-openstack-openstack-cell1-7knjf_55195197-ee16-4c06-aaf3-992e9fbba8c6/neutron-sriov-openstack-openstack-cell1/0.log" Oct 09 16:13:41 crc kubenswrapper[4762]: I1009 16:13:41.968980 4762 patch_prober.go:28] interesting pod/machine-config-daemon-5v6hv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 16:13:41 crc kubenswrapper[4762]: I1009 16:13:41.969031 4762 prober.go:107] "Probe failed" 
probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 16:13:42 crc kubenswrapper[4762]: I1009 16:13:42.179360 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_fe462185-8765-4389-b162-e73854d7eb61/nova-api-api/0.log" Oct 09 16:13:42 crc kubenswrapper[4762]: I1009 16:13:42.221206 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_fe462185-8765-4389-b162-e73854d7eb61/nova-api-log/0.log" Oct 09 16:13:42 crc kubenswrapper[4762]: I1009 16:13:42.444866 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_892e2b35-4076-4ae7-b81c-95beca001408/nova-cell0-conductor-conductor/0.log" Oct 09 16:13:43 crc kubenswrapper[4762]: I1009 16:13:43.254271 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_ad631956-f573-4fcc-bfda-bbfece4bae8c/nova-cell1-conductor-conductor/0.log" Oct 09 16:13:43 crc kubenswrapper[4762]: I1009 16:13:43.257954 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_17576139-60b8-4084-ab82-dddbc2736e43/nova-cell1-novncproxy-novncproxy/0.log" Oct 09 16:13:43 crc kubenswrapper[4762]: I1009 16:13:43.739886 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellrrnnv_461c7940-1521-4400-8973-25f23794ccc6/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell1/0.log" Oct 09 16:13:43 crc kubenswrapper[4762]: I1009 16:13:43.810511 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_1e89b8e6-2d52-403c-b7a9-b59ad3b199ba/memcached/0.log" Oct 09 16:13:44 crc kubenswrapper[4762]: I1009 16:13:44.050332 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-openstack-openstack-cell1-hxbfw_a2527d19-46b0-464c-9eab-69e73c1aceea/nova-cell1-openstack-openstack-cell1/0.log" Oct 09 16:13:44 crc kubenswrapper[4762]: I1009 16:13:44.087311 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_f29764a4-4682-4b9e-b089-c3e59c2a9489/nova-metadata-metadata/0.log" Oct 09 16:13:44 crc kubenswrapper[4762]: I1009 16:13:44.101205 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_f29764a4-4682-4b9e-b089-c3e59c2a9489/nova-metadata-log/0.log" Oct 09 16:13:44 crc kubenswrapper[4762]: I1009 16:13:44.301198 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-api-6fdffd4774-9qpx5_824b22e5-15dc-4560-be38-879a39e3175f/init/0.log" Oct 09 16:13:44 crc kubenswrapper[4762]: I1009 16:13:44.401860 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_eafc81b3-49af-41be-9689-964ee18e7fc7/nova-scheduler-scheduler/0.log" Oct 09 16:13:44 crc kubenswrapper[4762]: I1009 16:13:44.552536 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-api-6fdffd4774-9qpx5_824b22e5-15dc-4560-be38-879a39e3175f/init/0.log" Oct 09 16:13:44 crc kubenswrapper[4762]: I1009 16:13:44.574118 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-api-6fdffd4774-9qpx5_824b22e5-15dc-4560-be38-879a39e3175f/octavia-api-provider-agent/0.log" Oct 09 16:13:44 crc kubenswrapper[4762]: I1009 
16:13:44.722532 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-api-6fdffd4774-9qpx5_824b22e5-15dc-4560-be38-879a39e3175f/octavia-api/0.log" Oct 09 16:13:45 crc kubenswrapper[4762]: I1009 16:13:45.497211 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-healthmanager-ws564_4369b0bf-d12c-4398-b07f-554fef20c094/init/0.log" Oct 09 16:13:45 crc kubenswrapper[4762]: I1009 16:13:45.682883 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-healthmanager-ws564_4369b0bf-d12c-4398-b07f-554fef20c094/init/0.log" Oct 09 16:13:45 crc kubenswrapper[4762]: I1009 16:13:45.740835 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-healthmanager-ws564_4369b0bf-d12c-4398-b07f-554fef20c094/octavia-healthmanager/0.log" Oct 09 16:13:45 crc kubenswrapper[4762]: I1009 16:13:45.784620 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-housekeeping-czjdc_3fe23699-5018-4d7a-8f7f-102303da2fef/init/0.log" Oct 09 16:13:45 crc kubenswrapper[4762]: I1009 16:13:45.948894 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-housekeeping-czjdc_3fe23699-5018-4d7a-8f7f-102303da2fef/init/0.log" Oct 09 16:13:45 crc kubenswrapper[4762]: I1009 16:13:45.964739 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-housekeeping-czjdc_3fe23699-5018-4d7a-8f7f-102303da2fef/octavia-housekeeping/0.log" Oct 09 16:13:45 crc kubenswrapper[4762]: I1009 16:13:45.987023 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-rsyslog-j9l4k_417ca96a-987f-4daf-aacf-1115ce687ca3/init/0.log" Oct 09 16:13:46 crc kubenswrapper[4762]: I1009 16:13:46.143130 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-rsyslog-j9l4k_417ca96a-987f-4daf-aacf-1115ce687ca3/init/0.log" Oct 09 16:13:46 crc kubenswrapper[4762]: I1009 16:13:46.159555 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-rsyslog-j9l4k_417ca96a-987f-4daf-aacf-1115ce687ca3/octavia-rsyslog/0.log" Oct 09 16:13:46 crc kubenswrapper[4762]: I1009 16:13:46.239138 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-worker-rdxn6_2710d4c1-ab14-4206-b148-b5347d99f703/init/0.log" Oct 09 16:13:46 crc kubenswrapper[4762]: I1009 16:13:46.463719 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-worker-rdxn6_2710d4c1-ab14-4206-b148-b5347d99f703/init/0.log" Oct 09 16:13:46 crc kubenswrapper[4762]: I1009 16:13:46.545119 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_c87d7be6-a929-4a4a-bd38-184fb1405635/mysql-bootstrap/0.log" Oct 09 16:13:46 crc kubenswrapper[4762]: I1009 16:13:46.563525 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-worker-rdxn6_2710d4c1-ab14-4206-b148-b5347d99f703/octavia-worker/0.log" Oct 09 16:13:46 crc kubenswrapper[4762]: I1009 16:13:46.645815 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_c87d7be6-a929-4a4a-bd38-184fb1405635/mysql-bootstrap/0.log" Oct 09 16:13:46 crc kubenswrapper[4762]: I1009 16:13:46.729145 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_c87d7be6-a929-4a4a-bd38-184fb1405635/galera/0.log" Oct 09 16:13:46 crc kubenswrapper[4762]: I1009 16:13:46.809234 4762 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_openstack-galera-0_2c4b6426-b32c-4839-b63d-75b2995ddc8c/mysql-bootstrap/0.log" Oct 09 16:13:46 crc kubenswrapper[4762]: I1009 16:13:46.886055 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_2c4b6426-b32c-4839-b63d-75b2995ddc8c/mysql-bootstrap/0.log" Oct 09 16:13:46 crc kubenswrapper[4762]: I1009 16:13:46.949903 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_2c4b6426-b32c-4839-b63d-75b2995ddc8c/galera/0.log" Oct 09 16:13:47 crc kubenswrapper[4762]: I1009 16:13:47.047601 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_ab0657ec-dc49-40ca-b47f-5b17b550744e/openstackclient/0.log" Oct 09 16:13:47 crc kubenswrapper[4762]: I1009 16:13:47.167713 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-jphs8_b14659e8-2771-4473-8ea7-a0d598c1030a/ovn-controller/0.log" Oct 09 16:13:47 crc kubenswrapper[4762]: I1009 16:13:47.300684 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-tcrqk_347ebf92-b7d3-4407-b556-afe6c5121d88/openstack-network-exporter/0.log" Oct 09 16:13:47 crc kubenswrapper[4762]: I1009 16:13:47.404212 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-7pjmm_7f289c54-5049-4ded-b960-57885a9525dd/ovsdb-server-init/0.log" Oct 09 16:13:47 crc kubenswrapper[4762]: I1009 16:13:47.595259 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-7pjmm_7f289c54-5049-4ded-b960-57885a9525dd/ovsdb-server/0.log" Oct 09 16:13:47 crc kubenswrapper[4762]: I1009 16:13:47.615588 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-7pjmm_7f289c54-5049-4ded-b960-57885a9525dd/ovsdb-server-init/0.log" Oct 09 16:13:47 crc kubenswrapper[4762]: I1009 16:13:47.633345 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-7pjmm_7f289c54-5049-4ded-b960-57885a9525dd/ovs-vswitchd/0.log" Oct 09 16:13:47 crc kubenswrapper[4762]: I1009 16:13:47.785184 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_aa9c1c46-0da0-4f63-b219-9d716f3eb48f/openstack-network-exporter/0.log" Oct 09 16:13:47 crc kubenswrapper[4762]: I1009 16:13:47.830863 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_aa9c1c46-0da0-4f63-b219-9d716f3eb48f/ovn-northd/0.log" Oct 09 16:13:48 crc kubenswrapper[4762]: I1009 16:13:48.021111 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-openstack-openstack-cell1-c98nl_d18893bd-fa3b-45d1-a8ec-99d7c1daebe9/ovn-openstack-openstack-cell1/0.log" Oct 09 16:13:48 crc kubenswrapper[4762]: I1009 16:13:48.054644 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_3e89ac68-797b-44dc-9cae-772ebfc5eca6/openstack-network-exporter/0.log" Oct 09 16:13:48 crc kubenswrapper[4762]: I1009 16:13:48.143939 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_3e89ac68-797b-44dc-9cae-772ebfc5eca6/ovsdbserver-nb/0.log" Oct 09 16:13:48 crc kubenswrapper[4762]: I1009 16:13:48.281303 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-1_50735fb3-df1e-4342-ad26-07c3d0122688/ovsdbserver-nb/0.log" Oct 09 16:13:48 crc kubenswrapper[4762]: I1009 16:13:48.288271 4762 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_ovsdbserver-nb-1_50735fb3-df1e-4342-ad26-07c3d0122688/openstack-network-exporter/0.log" Oct 09 16:13:48 crc kubenswrapper[4762]: I1009 16:13:48.435590 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-2_50181ec3-4201-4f45-aa83-956765556089/openstack-network-exporter/0.log" Oct 09 16:13:48 crc kubenswrapper[4762]: I1009 16:13:48.480603 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-2_50181ec3-4201-4f45-aa83-956765556089/ovsdbserver-nb/0.log" Oct 09 16:13:48 crc kubenswrapper[4762]: I1009 16:13:48.569590 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_18f94f81-220f-4213-a416-5a1c2b4a5f3d/openstack-network-exporter/0.log" Oct 09 16:13:48 crc kubenswrapper[4762]: I1009 16:13:48.617939 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_18f94f81-220f-4213-a416-5a1c2b4a5f3d/ovsdbserver-sb/0.log" Oct 09 16:13:48 crc kubenswrapper[4762]: I1009 16:13:48.715985 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-1_d4d58842-51b7-4201-b882-ce8f78057a12/openstack-network-exporter/0.log" Oct 09 16:13:48 crc kubenswrapper[4762]: I1009 16:13:48.770115 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-1_d4d58842-51b7-4201-b882-ce8f78057a12/ovsdbserver-sb/0.log" Oct 09 16:13:48 crc kubenswrapper[4762]: I1009 16:13:48.888556 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-2_a94dcf0b-969d-4d7f-b3f4-ab97475e5bf2/openstack-network-exporter/0.log" Oct 09 16:13:48 crc kubenswrapper[4762]: I1009 16:13:48.961794 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-2_a94dcf0b-969d-4d7f-b3f4-ab97475e5bf2/ovsdbserver-sb/0.log" Oct 09 16:13:49 crc kubenswrapper[4762]: I1009 16:13:49.098440 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-6795fbd6fb-6m2sc_ea337cfa-2396-4905-8a6d-3fe0997bbda0/placement-api/0.log" Oct 09 16:13:49 crc kubenswrapper[4762]: I1009 16:13:49.136043 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-6795fbd6fb-6m2sc_ea337cfa-2396-4905-8a6d-3fe0997bbda0/placement-log/0.log" Oct 09 16:13:49 crc kubenswrapper[4762]: I1009 16:13:49.281465 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_pre-adoption-validation-openstack-pre-adoption-openstack-c2xxdf_8043119c-8c14-4c3f-b587-4daa7d8b1dde/pre-adoption-validation-openstack-pre-adoption-openstack-cell1/0.log" Oct 09 16:13:49 crc kubenswrapper[4762]: I1009 16:13:49.348009 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_d526887b-e7a2-4ad7-a1f1-1c4f376dac12/init-config-reloader/0.log" Oct 09 16:13:49 crc kubenswrapper[4762]: I1009 16:13:49.530481 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_d526887b-e7a2-4ad7-a1f1-1c4f376dac12/init-config-reloader/0.log" Oct 09 16:13:49 crc kubenswrapper[4762]: I1009 16:13:49.537523 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_d526887b-e7a2-4ad7-a1f1-1c4f376dac12/config-reloader/0.log" Oct 09 16:13:49 crc kubenswrapper[4762]: I1009 16:13:49.538536 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_d526887b-e7a2-4ad7-a1f1-1c4f376dac12/thanos-sidecar/0.log" Oct 09 16:13:49 crc 
kubenswrapper[4762]: I1009 16:13:49.560044 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_d526887b-e7a2-4ad7-a1f1-1c4f376dac12/prometheus/0.log" Oct 09 16:13:49 crc kubenswrapper[4762]: I1009 16:13:49.724075 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_96ee0476-093c-4e1e-ba2c-b2890600c1f3/setup-container/0.log" Oct 09 16:13:49 crc kubenswrapper[4762]: I1009 16:13:49.896844 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_96ee0476-093c-4e1e-ba2c-b2890600c1f3/setup-container/0.log" Oct 09 16:13:49 crc kubenswrapper[4762]: I1009 16:13:49.933420 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_96ee0476-093c-4e1e-ba2c-b2890600c1f3/rabbitmq/0.log" Oct 09 16:13:49 crc kubenswrapper[4762]: I1009 16:13:49.991610 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_c5f79dc6-e3fe-4d27-889e-5ea96d334d8a/setup-container/0.log" Oct 09 16:13:50 crc kubenswrapper[4762]: I1009 16:13:50.186724 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_c5f79dc6-e3fe-4d27-889e-5ea96d334d8a/setup-container/0.log" Oct 09 16:13:50 crc kubenswrapper[4762]: I1009 16:13:50.219394 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-openstack-openstack-cell1-djn8r_53f7fd79-a0c6-4da7-adce-674efdca56ec/reboot-os-openstack-openstack-cell1/0.log" Oct 09 16:13:50 crc kubenswrapper[4762]: I1009 16:13:50.478074 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-openstack-openstack-cell1-w2xrt_34250f79-9b18-46b1-9d68-07c18c78c268/run-os-openstack-openstack-cell1/0.log" Oct 09 16:13:50 crc kubenswrapper[4762]: I1009 16:13:50.746986 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-openstack-lmprt_5c072459-9381-4dc8-901d-1c530af82240/ssh-known-hosts-openstack/0.log" Oct 09 16:13:50 crc kubenswrapper[4762]: I1009 16:13:50.940244 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-openstack-openstack-cell1-rl6rc_846a86ca-f2c4-4aeb-b6f1-21a68ef1d6cb/telemetry-openstack-openstack-cell1/0.log" Oct 09 16:13:51 crc kubenswrapper[4762]: I1009 16:13:51.179784 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tripleo-cleanup-tripleo-cleanup-openstack-cell1-w7hq5_37673896-c4d5-42e0-934f-1a36d759b2e2/tripleo-cleanup-tripleo-cleanup-openstack-cell1/0.log" Oct 09 16:13:51 crc kubenswrapper[4762]: I1009 16:13:51.307848 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-openstack-openstack-cell1-882r6_11a15b07-738a-401b-a0c2-28c664777750/validate-network-openstack-openstack-cell1/0.log" Oct 09 16:13:52 crc kubenswrapper[4762]: I1009 16:13:52.146904 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_c5f79dc6-e3fe-4d27-889e-5ea96d334d8a/rabbitmq/0.log" Oct 09 16:14:11 crc kubenswrapper[4762]: I1009 16:14:11.969188 4762 patch_prober.go:28] interesting pod/machine-config-daemon-5v6hv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 16:14:11 crc kubenswrapper[4762]: I1009 16:14:11.970112 4762 prober.go:107] "Probe failed" probeType="Liveness" 
pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 16:14:11 crc kubenswrapper[4762]: I1009 16:14:11.970175 4762 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" Oct 09 16:14:11 crc kubenswrapper[4762]: I1009 16:14:11.970707 4762 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"23a95be0b431c4a958f1dd252c8aa60f23fd4d4c19a6acb05c42292d19be3d9d"} pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 09 16:14:11 crc kubenswrapper[4762]: I1009 16:14:11.970778 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" containerID="cri-o://23a95be0b431c4a958f1dd252c8aa60f23fd4d4c19a6acb05c42292d19be3d9d" gracePeriod=600 Oct 09 16:14:12 crc kubenswrapper[4762]: I1009 16:14:12.364021 4762 generic.go:334] "Generic (PLEG): container finished" podID="366049a3-acf6-488c-9f93-4557528d6d14" containerID="23a95be0b431c4a958f1dd252c8aa60f23fd4d4c19a6acb05c42292d19be3d9d" exitCode=0 Oct 09 16:14:12 crc kubenswrapper[4762]: I1009 16:14:12.364107 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" event={"ID":"366049a3-acf6-488c-9f93-4557528d6d14","Type":"ContainerDied","Data":"23a95be0b431c4a958f1dd252c8aa60f23fd4d4c19a6acb05c42292d19be3d9d"} Oct 09 16:14:12 crc kubenswrapper[4762]: I1009 16:14:12.364411 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" event={"ID":"366049a3-acf6-488c-9f93-4557528d6d14","Type":"ContainerStarted","Data":"f1ab3a3c53d169fc11bc6dc98d2919a641c9497c49abe2c7639dbd0f38a2f9f3"} Oct 09 16:14:12 crc kubenswrapper[4762]: I1009 16:14:12.364436 4762 scope.go:117] "RemoveContainer" containerID="be54fd088343d7e0163b1cf4c29e43ee76092488ba51af7bc31481982c5d4a46" Oct 09 16:15:00 crc kubenswrapper[4762]: I1009 16:15:00.173410 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29333775-mgkjf"] Oct 09 16:15:00 crc kubenswrapper[4762]: E1009 16:15:00.174569 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="01048d59-7eca-4326-bc71-1a2fa9038189" containerName="container-00" Oct 09 16:15:00 crc kubenswrapper[4762]: I1009 16:15:00.174587 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="01048d59-7eca-4326-bc71-1a2fa9038189" containerName="container-00" Oct 09 16:15:00 crc kubenswrapper[4762]: I1009 16:15:00.174929 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="01048d59-7eca-4326-bc71-1a2fa9038189" containerName="container-00" Oct 09 16:15:00 crc kubenswrapper[4762]: I1009 16:15:00.175999 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29333775-mgkjf" Oct 09 16:15:00 crc kubenswrapper[4762]: I1009 16:15:00.180234 4762 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 09 16:15:00 crc kubenswrapper[4762]: I1009 16:15:00.180455 4762 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 09 16:15:00 crc kubenswrapper[4762]: I1009 16:15:00.200962 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29333775-mgkjf"] Oct 09 16:15:00 crc kubenswrapper[4762]: I1009 16:15:00.243463 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/00ebfa0c-ce5d-4713-b2f8-646ddb28417a-secret-volume\") pod \"collect-profiles-29333775-mgkjf\" (UID: \"00ebfa0c-ce5d-4713-b2f8-646ddb28417a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333775-mgkjf" Oct 09 16:15:00 crc kubenswrapper[4762]: I1009 16:15:00.243610 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nhpj5\" (UniqueName: \"kubernetes.io/projected/00ebfa0c-ce5d-4713-b2f8-646ddb28417a-kube-api-access-nhpj5\") pod \"collect-profiles-29333775-mgkjf\" (UID: \"00ebfa0c-ce5d-4713-b2f8-646ddb28417a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333775-mgkjf" Oct 09 16:15:00 crc kubenswrapper[4762]: I1009 16:15:00.243744 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/00ebfa0c-ce5d-4713-b2f8-646ddb28417a-config-volume\") pod \"collect-profiles-29333775-mgkjf\" (UID: \"00ebfa0c-ce5d-4713-b2f8-646ddb28417a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333775-mgkjf" Oct 09 16:15:00 crc kubenswrapper[4762]: I1009 16:15:00.345289 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nhpj5\" (UniqueName: \"kubernetes.io/projected/00ebfa0c-ce5d-4713-b2f8-646ddb28417a-kube-api-access-nhpj5\") pod \"collect-profiles-29333775-mgkjf\" (UID: \"00ebfa0c-ce5d-4713-b2f8-646ddb28417a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333775-mgkjf" Oct 09 16:15:00 crc kubenswrapper[4762]: I1009 16:15:00.345421 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/00ebfa0c-ce5d-4713-b2f8-646ddb28417a-config-volume\") pod \"collect-profiles-29333775-mgkjf\" (UID: \"00ebfa0c-ce5d-4713-b2f8-646ddb28417a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333775-mgkjf" Oct 09 16:15:00 crc kubenswrapper[4762]: I1009 16:15:00.345561 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/00ebfa0c-ce5d-4713-b2f8-646ddb28417a-secret-volume\") pod \"collect-profiles-29333775-mgkjf\" (UID: \"00ebfa0c-ce5d-4713-b2f8-646ddb28417a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333775-mgkjf" Oct 09 16:15:00 crc kubenswrapper[4762]: I1009 16:15:00.346554 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/00ebfa0c-ce5d-4713-b2f8-646ddb28417a-config-volume\") pod 
\"collect-profiles-29333775-mgkjf\" (UID: \"00ebfa0c-ce5d-4713-b2f8-646ddb28417a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333775-mgkjf" Oct 09 16:15:00 crc kubenswrapper[4762]: I1009 16:15:00.804103 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/00ebfa0c-ce5d-4713-b2f8-646ddb28417a-secret-volume\") pod \"collect-profiles-29333775-mgkjf\" (UID: \"00ebfa0c-ce5d-4713-b2f8-646ddb28417a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333775-mgkjf" Oct 09 16:15:00 crc kubenswrapper[4762]: I1009 16:15:00.805194 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nhpj5\" (UniqueName: \"kubernetes.io/projected/00ebfa0c-ce5d-4713-b2f8-646ddb28417a-kube-api-access-nhpj5\") pod \"collect-profiles-29333775-mgkjf\" (UID: \"00ebfa0c-ce5d-4713-b2f8-646ddb28417a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333775-mgkjf" Oct 09 16:15:01 crc kubenswrapper[4762]: I1009 16:15:01.104798 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29333775-mgkjf" Oct 09 16:15:01 crc kubenswrapper[4762]: I1009 16:15:01.591001 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29333775-mgkjf"] Oct 09 16:15:01 crc kubenswrapper[4762]: I1009 16:15:01.860791 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29333775-mgkjf" event={"ID":"00ebfa0c-ce5d-4713-b2f8-646ddb28417a","Type":"ContainerStarted","Data":"d20746291d998c89d6f7451e6bbd9178f856ee0853fa13fb508c3a6854b373ba"} Oct 09 16:15:01 crc kubenswrapper[4762]: I1009 16:15:01.861114 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29333775-mgkjf" event={"ID":"00ebfa0c-ce5d-4713-b2f8-646ddb28417a","Type":"ContainerStarted","Data":"544c51acbaae01f65681386d8b90f8d8a90cfaac2636c289d7e5d92c7a0f9cf6"} Oct 09 16:15:01 crc kubenswrapper[4762]: I1009 16:15:01.881510 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29333775-mgkjf" podStartSLOduration=1.881492652 podStartE2EDuration="1.881492652s" podCreationTimestamp="2025-10-09 16:15:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 16:15:01.880412223 +0000 UTC m=+10177.654203262" watchObservedRunningTime="2025-10-09 16:15:01.881492652 +0000 UTC m=+10177.655283691" Oct 09 16:15:02 crc kubenswrapper[4762]: I1009 16:15:02.873998 4762 generic.go:334] "Generic (PLEG): container finished" podID="00ebfa0c-ce5d-4713-b2f8-646ddb28417a" containerID="d20746291d998c89d6f7451e6bbd9178f856ee0853fa13fb508c3a6854b373ba" exitCode=0 Oct 09 16:15:02 crc kubenswrapper[4762]: I1009 16:15:02.874123 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29333775-mgkjf" event={"ID":"00ebfa0c-ce5d-4713-b2f8-646ddb28417a","Type":"ContainerDied","Data":"d20746291d998c89d6f7451e6bbd9178f856ee0853fa13fb508c3a6854b373ba"} Oct 09 16:15:04 crc kubenswrapper[4762]: I1009 16:15:04.215577 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29333775-mgkjf" Oct 09 16:15:04 crc kubenswrapper[4762]: I1009 16:15:04.333577 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nhpj5\" (UniqueName: \"kubernetes.io/projected/00ebfa0c-ce5d-4713-b2f8-646ddb28417a-kube-api-access-nhpj5\") pod \"00ebfa0c-ce5d-4713-b2f8-646ddb28417a\" (UID: \"00ebfa0c-ce5d-4713-b2f8-646ddb28417a\") " Oct 09 16:15:04 crc kubenswrapper[4762]: I1009 16:15:04.334070 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/00ebfa0c-ce5d-4713-b2f8-646ddb28417a-config-volume\") pod \"00ebfa0c-ce5d-4713-b2f8-646ddb28417a\" (UID: \"00ebfa0c-ce5d-4713-b2f8-646ddb28417a\") " Oct 09 16:15:04 crc kubenswrapper[4762]: I1009 16:15:04.334109 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/00ebfa0c-ce5d-4713-b2f8-646ddb28417a-secret-volume\") pod \"00ebfa0c-ce5d-4713-b2f8-646ddb28417a\" (UID: \"00ebfa0c-ce5d-4713-b2f8-646ddb28417a\") " Oct 09 16:15:04 crc kubenswrapper[4762]: I1009 16:15:04.334568 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/00ebfa0c-ce5d-4713-b2f8-646ddb28417a-config-volume" (OuterVolumeSpecName: "config-volume") pod "00ebfa0c-ce5d-4713-b2f8-646ddb28417a" (UID: "00ebfa0c-ce5d-4713-b2f8-646ddb28417a"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 16:15:04 crc kubenswrapper[4762]: I1009 16:15:04.334727 4762 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/00ebfa0c-ce5d-4713-b2f8-646ddb28417a-config-volume\") on node \"crc\" DevicePath \"\"" Oct 09 16:15:04 crc kubenswrapper[4762]: I1009 16:15:04.339843 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/00ebfa0c-ce5d-4713-b2f8-646ddb28417a-kube-api-access-nhpj5" (OuterVolumeSpecName: "kube-api-access-nhpj5") pod "00ebfa0c-ce5d-4713-b2f8-646ddb28417a" (UID: "00ebfa0c-ce5d-4713-b2f8-646ddb28417a"). InnerVolumeSpecName "kube-api-access-nhpj5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 16:15:04 crc kubenswrapper[4762]: I1009 16:15:04.340570 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/00ebfa0c-ce5d-4713-b2f8-646ddb28417a-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "00ebfa0c-ce5d-4713-b2f8-646ddb28417a" (UID: "00ebfa0c-ce5d-4713-b2f8-646ddb28417a"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 16:15:04 crc kubenswrapper[4762]: I1009 16:15:04.436615 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nhpj5\" (UniqueName: \"kubernetes.io/projected/00ebfa0c-ce5d-4713-b2f8-646ddb28417a-kube-api-access-nhpj5\") on node \"crc\" DevicePath \"\"" Oct 09 16:15:04 crc kubenswrapper[4762]: I1009 16:15:04.436896 4762 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/00ebfa0c-ce5d-4713-b2f8-646ddb28417a-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 09 16:15:04 crc kubenswrapper[4762]: I1009 16:15:04.657337 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29333730-cdgz7"] Oct 09 16:15:04 crc kubenswrapper[4762]: I1009 16:15:04.668112 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29333730-cdgz7"] Oct 09 16:15:04 crc kubenswrapper[4762]: I1009 16:15:04.912522 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29333775-mgkjf" Oct 09 16:15:04 crc kubenswrapper[4762]: I1009 16:15:04.912515 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29333775-mgkjf" event={"ID":"00ebfa0c-ce5d-4713-b2f8-646ddb28417a","Type":"ContainerDied","Data":"544c51acbaae01f65681386d8b90f8d8a90cfaac2636c289d7e5d92c7a0f9cf6"} Oct 09 16:15:04 crc kubenswrapper[4762]: I1009 16:15:04.912880 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="544c51acbaae01f65681386d8b90f8d8a90cfaac2636c289d7e5d92c7a0f9cf6" Oct 09 16:15:04 crc kubenswrapper[4762]: I1009 16:15:04.979416 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="74c6b48b-c330-41fa-b422-0f80defc94d2" path="/var/lib/kubelet/pods/74c6b48b-c330-41fa-b422-0f80defc94d2/volumes" Oct 09 16:15:40 crc kubenswrapper[4762]: I1009 16:15:40.137361 4762 scope.go:117] "RemoveContainer" containerID="d23d2de335d69e0846d8b82920880fde6142785994d5d77ed400bb5c2210122d" Oct 09 16:15:43 crc kubenswrapper[4762]: I1009 16:15:43.776516 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_3efa7a3f52dfd25f555693463b15423e20f27c428253ed7107fc4edca96z7mg_13e464a6-1da6-4201-bd1f-0a2af8539e90/util/0.log" Oct 09 16:15:44 crc kubenswrapper[4762]: I1009 16:15:44.013711 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_3efa7a3f52dfd25f555693463b15423e20f27c428253ed7107fc4edca96z7mg_13e464a6-1da6-4201-bd1f-0a2af8539e90/pull/0.log" Oct 09 16:15:44 crc kubenswrapper[4762]: I1009 16:15:44.030197 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_3efa7a3f52dfd25f555693463b15423e20f27c428253ed7107fc4edca96z7mg_13e464a6-1da6-4201-bd1f-0a2af8539e90/pull/0.log" Oct 09 16:15:44 crc kubenswrapper[4762]: I1009 16:15:44.049493 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_3efa7a3f52dfd25f555693463b15423e20f27c428253ed7107fc4edca96z7mg_13e464a6-1da6-4201-bd1f-0a2af8539e90/util/0.log" Oct 09 16:15:44 crc kubenswrapper[4762]: I1009 16:15:44.239174 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_3efa7a3f52dfd25f555693463b15423e20f27c428253ed7107fc4edca96z7mg_13e464a6-1da6-4201-bd1f-0a2af8539e90/util/0.log" Oct 09 16:15:44 crc kubenswrapper[4762]: I1009 
16:15:44.239954 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_3efa7a3f52dfd25f555693463b15423e20f27c428253ed7107fc4edca96z7mg_13e464a6-1da6-4201-bd1f-0a2af8539e90/extract/0.log" Oct 09 16:15:44 crc kubenswrapper[4762]: I1009 16:15:44.266192 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_3efa7a3f52dfd25f555693463b15423e20f27c428253ed7107fc4edca96z7mg_13e464a6-1da6-4201-bd1f-0a2af8539e90/pull/0.log" Oct 09 16:15:44 crc kubenswrapper[4762]: I1009 16:15:44.451619 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-64f84fcdbb-8qmk9_f19642d0-b3f1-4de9-811a-8bd523f204c1/kube-rbac-proxy/0.log" Oct 09 16:15:44 crc kubenswrapper[4762]: I1009 16:15:44.594225 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-59cdc64769-z8s2p_b22d7fd7-7386-4c10-9133-7703f8f2e0b4/kube-rbac-proxy/0.log" Oct 09 16:15:44 crc kubenswrapper[4762]: I1009 16:15:44.595470 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-64f84fcdbb-8qmk9_f19642d0-b3f1-4de9-811a-8bd523f204c1/manager/0.log" Oct 09 16:15:44 crc kubenswrapper[4762]: I1009 16:15:44.785439 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-59cdc64769-z8s2p_b22d7fd7-7386-4c10-9133-7703f8f2e0b4/manager/0.log" Oct 09 16:15:44 crc kubenswrapper[4762]: I1009 16:15:44.818995 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-687df44cdb-sn56k_0302ab2b-3fbe-4d08-8364-872d1c1be2b7/manager/0.log" Oct 09 16:15:44 crc kubenswrapper[4762]: I1009 16:15:44.851581 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-687df44cdb-sn56k_0302ab2b-3fbe-4d08-8364-872d1c1be2b7/kube-rbac-proxy/0.log" Oct 09 16:15:45 crc kubenswrapper[4762]: I1009 16:15:45.033540 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-7bb46cd7d-z4xhb_6484b20c-9ee2-4134-bbc6-5c57c175f1db/kube-rbac-proxy/0.log" Oct 09 16:15:45 crc kubenswrapper[4762]: I1009 16:15:45.184903 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-7bb46cd7d-z4xhb_6484b20c-9ee2-4134-bbc6-5c57c175f1db/manager/0.log" Oct 09 16:15:45 crc kubenswrapper[4762]: I1009 16:15:45.297392 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-6d9967f8dd-r6dpz_a226d4b7-ceef-4cfd-aeb8-727fb0c8786d/kube-rbac-proxy/0.log" Oct 09 16:15:45 crc kubenswrapper[4762]: I1009 16:15:45.366646 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-6d9967f8dd-r6dpz_a226d4b7-ceef-4cfd-aeb8-727fb0c8786d/manager/0.log" Oct 09 16:15:45 crc kubenswrapper[4762]: I1009 16:15:45.422171 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-6d74794d9b-4n6tj_1b9fae99-ccfb-4f2e-9225-7eb67624ee5a/kube-rbac-proxy/0.log" Oct 09 16:15:45 crc kubenswrapper[4762]: I1009 16:15:45.518432 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-6d74794d9b-4n6tj_1b9fae99-ccfb-4f2e-9225-7eb67624ee5a/manager/0.log" Oct 09 
16:15:45 crc kubenswrapper[4762]: I1009 16:15:45.628876 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-585fc5b659-qv9zg_a2c39eb4-d8e3-4bd5-9e66-7f7a5bce2eb9/kube-rbac-proxy/0.log" Oct 09 16:15:45 crc kubenswrapper[4762]: I1009 16:15:45.936067 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-585fc5b659-qv9zg_a2c39eb4-d8e3-4bd5-9e66-7f7a5bce2eb9/manager/0.log" Oct 09 16:15:46 crc kubenswrapper[4762]: I1009 16:15:46.058362 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-74cb5cbc49-7g4mb_d94fa52d-aee2-46eb-a4bb-1dd5ee5fa19c/kube-rbac-proxy/0.log" Oct 09 16:15:46 crc kubenswrapper[4762]: I1009 16:15:46.130868 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-74cb5cbc49-7g4mb_d94fa52d-aee2-46eb-a4bb-1dd5ee5fa19c/manager/0.log" Oct 09 16:15:46 crc kubenswrapper[4762]: I1009 16:15:46.284080 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-ddb98f99b-pnlww_117e3f88-b1fd-4738-bd66-8c8e0e25a488/kube-rbac-proxy/0.log" Oct 09 16:15:46 crc kubenswrapper[4762]: I1009 16:15:46.377506 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-ddb98f99b-pnlww_117e3f88-b1fd-4738-bd66-8c8e0e25a488/manager/0.log" Oct 09 16:15:46 crc kubenswrapper[4762]: I1009 16:15:46.522315 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-59578bc799-tdnrr_0024a375-268b-4c89-ad32-2b3876e271af/kube-rbac-proxy/0.log" Oct 09 16:15:46 crc kubenswrapper[4762]: I1009 16:15:46.532178 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-59578bc799-tdnrr_0024a375-268b-4c89-ad32-2b3876e271af/manager/0.log" Oct 09 16:15:46 crc kubenswrapper[4762]: I1009 16:15:46.621317 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-5777b4f897-6s6tx_4141c889-724a-4a6f-886c-d1b6fa852d0f/kube-rbac-proxy/0.log" Oct 09 16:15:46 crc kubenswrapper[4762]: I1009 16:15:46.844109 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-5777b4f897-6s6tx_4141c889-724a-4a6f-886c-d1b6fa852d0f/manager/0.log" Oct 09 16:15:46 crc kubenswrapper[4762]: I1009 16:15:46.846071 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-797d478b46-b2j7z_3a72da9f-8fa2-4aa5-aab7-2175f034ede8/kube-rbac-proxy/0.log" Oct 09 16:15:46 crc kubenswrapper[4762]: I1009 16:15:46.921983 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-797d478b46-b2j7z_3a72da9f-8fa2-4aa5-aab7-2175f034ede8/manager/0.log" Oct 09 16:15:47 crc kubenswrapper[4762]: I1009 16:15:47.071468 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-57bb74c7bf-qz69s_32fef3bc-b4cb-460e-8d36-0ba75c16d394/kube-rbac-proxy/0.log" Oct 09 16:15:47 crc kubenswrapper[4762]: I1009 16:15:47.275270 4762 log.go:25] "Finished parsing log file" 
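The steady stream of "Finished parsing log file" entries comes from the kubelet reading per-container files under /var/log/pods. Each line of such a file is in the CRI logging format, roughly "<RFC3339Nano timestamp> <stdout|stderr> <P|F> <message>". A minimal parser sketch; the struct and function names are ours, and error handling is trimmed:

package main

import (
	"fmt"
	"strings"
	"time"
)

// criLine models one line of a /var/log/pods/.../<container>/0.log file.
type criLine struct {
	When    time.Time
	Stream  string // "stdout" or "stderr"
	Partial bool   // "P" = partial line, "F" = full line
	Message string
}

func parseCRILine(s string) (criLine, error) {
	parts := strings.SplitN(s, " ", 4)
	if len(parts) != 4 {
		return criLine{}, fmt.Errorf("short line: %q", s)
	}
	ts, err := time.Parse(time.RFC3339Nano, parts[0])
	if err != nil {
		return criLine{}, err
	}
	return criLine{When: ts, Stream: parts[1], Partial: parts[2] == "P", Message: parts[3]}, nil
}

func main() {
	l, err := parseCRILine("2025-10-09T16:15:44.239954Z stdout F hello from a container")
	fmt.Println(l, err)
}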
path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-6d7c7ddf95-t8zhf_812a090b-267a-4899-a41c-e51592e6ca5b/kube-rbac-proxy/0.log" Oct 09 16:15:47 crc kubenswrapper[4762]: I1009 16:15:47.316556 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-57bb74c7bf-qz69s_32fef3bc-b4cb-460e-8d36-0ba75c16d394/manager/0.log" Oct 09 16:15:47 crc kubenswrapper[4762]: I1009 16:15:47.369817 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-6d7c7ddf95-t8zhf_812a090b-267a-4899-a41c-e51592e6ca5b/manager/0.log" Oct 09 16:15:47 crc kubenswrapper[4762]: I1009 16:15:47.545963 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-5458df59d8mmjs7_98aca1bd-63ee-4285-a903-64cd82c6226c/kube-rbac-proxy/0.log" Oct 09 16:15:47 crc kubenswrapper[4762]: I1009 16:15:47.593145 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-5458df59d8mmjs7_98aca1bd-63ee-4285-a903-64cd82c6226c/manager/0.log" Oct 09 16:15:47 crc kubenswrapper[4762]: I1009 16:15:47.660920 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-694c8c648f-5xrql_cdc9b29d-6c7e-4e07-82a7-3b2fe39d45ec/kube-rbac-proxy/0.log" Oct 09 16:15:47 crc kubenswrapper[4762]: I1009 16:15:47.885318 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-6747c5c7c8-jjnz6_d3668269-2baf-43a1-9444-dddfb7a169b7/kube-rbac-proxy/0.log" Oct 09 16:15:48 crc kubenswrapper[4762]: I1009 16:15:48.067687 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-6747c5c7c8-jjnz6_d3668269-2baf-43a1-9444-dddfb7a169b7/operator/0.log" Oct 09 16:15:48 crc kubenswrapper[4762]: I1009 16:15:48.143079 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-g9lz8_de824a72-d0e7-4aff-802e-c181778233b2/registry-server/0.log" Oct 09 16:15:48 crc kubenswrapper[4762]: I1009 16:15:48.234794 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-869cc7797f-pj45b_8b396a39-6575-426e-b333-da637fbe5616/kube-rbac-proxy/0.log" Oct 09 16:15:48 crc kubenswrapper[4762]: I1009 16:15:48.471842 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-664664cb68-mk2fh_cf87e411-c213-4287-bd23-381ea5be1a1b/kube-rbac-proxy/0.log" Oct 09 16:15:48 crc kubenswrapper[4762]: I1009 16:15:48.486115 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-869cc7797f-pj45b_8b396a39-6575-426e-b333-da637fbe5616/manager/0.log" Oct 09 16:15:48 crc kubenswrapper[4762]: I1009 16:15:48.518909 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-664664cb68-mk2fh_cf87e411-c213-4287-bd23-381ea5be1a1b/manager/0.log" Oct 09 16:15:48 crc kubenswrapper[4762]: I1009 16:15:48.747814 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f4d5dfdc6-lwj44_3660bde9-a2d9-43ee-8052-823fdc1f5db9/kube-rbac-proxy/0.log" Oct 09 16:15:48 crc kubenswrapper[4762]: I1009 16:15:48.761773 4762 
log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-5f97d8c699-l647r_18635a37-db18-44f8-94a2-1245456d943a/operator/0.log" Oct 09 16:15:48 crc kubenswrapper[4762]: I1009 16:15:48.969626 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f4d5dfdc6-lwj44_3660bde9-a2d9-43ee-8052-823fdc1f5db9/manager/0.log" Oct 09 16:15:49 crc kubenswrapper[4762]: I1009 16:15:49.021610 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-85fd6d6f45-h6cz8_2bc7bc27-a390-4830-88cc-2a94e1326a09/kube-rbac-proxy/0.log" Oct 09 16:15:49 crc kubenswrapper[4762]: I1009 16:15:49.219800 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-ffcdd6c94-xgnqp_6ea12cd7-1cd9-4cbd-a881-0bbb334b23e2/kube-rbac-proxy/0.log" Oct 09 16:15:49 crc kubenswrapper[4762]: I1009 16:15:49.286524 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-ffcdd6c94-xgnqp_6ea12cd7-1cd9-4cbd-a881-0bbb334b23e2/manager/0.log" Oct 09 16:15:49 crc kubenswrapper[4762]: I1009 16:15:49.408026 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-85fd6d6f45-h6cz8_2bc7bc27-a390-4830-88cc-2a94e1326a09/manager/0.log" Oct 09 16:15:49 crc kubenswrapper[4762]: I1009 16:15:49.432051 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-646675d848-rx4k9_2acaeb4c-8968-4dc3-9d61-0ffe8389067d/kube-rbac-proxy/0.log" Oct 09 16:15:49 crc kubenswrapper[4762]: I1009 16:15:49.569060 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-646675d848-rx4k9_2acaeb4c-8968-4dc3-9d61-0ffe8389067d/manager/0.log" Oct 09 16:15:50 crc kubenswrapper[4762]: I1009 16:15:50.321078 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-694c8c648f-5xrql_cdc9b29d-6c7e-4e07-82a7-3b2fe39d45ec/manager/0.log" Oct 09 16:16:08 crc kubenswrapper[4762]: I1009 16:16:08.221907 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-c599r_76be61d0-03ad-4822-8097-2ef2bde86bf1/control-plane-machine-set-operator/0.log" Oct 09 16:16:08 crc kubenswrapper[4762]: I1009 16:16:08.418584 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-pmdsg_bf4cd740-a799-47d8-9ce6-88bce9afa952/kube-rbac-proxy/0.log" Oct 09 16:16:08 crc kubenswrapper[4762]: I1009 16:16:08.498872 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-pmdsg_bf4cd740-a799-47d8-9ce6-88bce9afa952/machine-api-operator/0.log" Oct 09 16:16:20 crc kubenswrapper[4762]: I1009 16:16:20.769585 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-7d4cc89fcb-lsnbv_3d68cf04-0159-4fe6-9346-750541b4acb2/cert-manager-controller/0.log" Oct 09 16:16:20 crc kubenswrapper[4762]: I1009 16:16:20.973083 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7d9f95dbf-xphxw_f5e1b445-a576-4876-816a-2ee258514618/cert-manager-cainjector/0.log" Oct 09 16:16:20 crc kubenswrapper[4762]: I1009 16:16:20.993268 4762 
log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-d969966f-6zmq2_ecb933a6-29c1-4e1e-b774-7a3e28eedcfd/cert-manager-webhook/0.log" Oct 09 16:16:33 crc kubenswrapper[4762]: I1009 16:16:33.388565 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-6b874cbd85-ntz88_54b5d227-e033-49ec-b96f-74ec617c74cd/nmstate-console-plugin/0.log" Oct 09 16:16:33 crc kubenswrapper[4762]: I1009 16:16:33.585378 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-hvkc5_cfd67a5a-8008-4191-a683-5f5c19ccc8c9/nmstate-handler/0.log" Oct 09 16:16:33 crc kubenswrapper[4762]: I1009 16:16:33.699540 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-fdff9cb8d-wgxbz_334a2699-c1e4-42e0-b2ce-a2f699f1347c/kube-rbac-proxy/0.log" Oct 09 16:16:33 crc kubenswrapper[4762]: I1009 16:16:33.729176 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-fdff9cb8d-wgxbz_334a2699-c1e4-42e0-b2ce-a2f699f1347c/nmstate-metrics/0.log" Oct 09 16:16:33 crc kubenswrapper[4762]: I1009 16:16:33.856324 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-858ddd8f98-bskzv_6abae913-1b37-4721-941f-5aa3b6803c77/nmstate-operator/0.log" Oct 09 16:16:33 crc kubenswrapper[4762]: I1009 16:16:33.959624 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-6cdbc54649-cpm9k_2e9b466f-eddb-464a-b245-1008e19793b0/nmstate-webhook/0.log" Oct 09 16:16:41 crc kubenswrapper[4762]: I1009 16:16:41.970763 4762 patch_prober.go:28] interesting pod/machine-config-daemon-5v6hv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 16:16:41 crc kubenswrapper[4762]: I1009 16:16:41.971398 4762 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 16:16:47 crc kubenswrapper[4762]: I1009 16:16:47.147228 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-68d546b9d8-5jcj5_7ba03cf9-3a22-4979-8c03-3f533b7e556a/kube-rbac-proxy/0.log" Oct 09 16:16:47 crc kubenswrapper[4762]: I1009 16:16:47.437645 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-72zw7_0831f7e0-0729-47bc-b78d-cd6594ac3102/cp-frr-files/0.log" Oct 09 16:16:47 crc kubenswrapper[4762]: I1009 16:16:47.607424 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-72zw7_0831f7e0-0729-47bc-b78d-cd6594ac3102/cp-frr-files/0.log" Oct 09 16:16:47 crc kubenswrapper[4762]: I1009 16:16:47.693705 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-68d546b9d8-5jcj5_7ba03cf9-3a22-4979-8c03-3f533b7e556a/controller/0.log" Oct 09 16:16:47 crc kubenswrapper[4762]: I1009 16:16:47.701242 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-72zw7_0831f7e0-0729-47bc-b78d-cd6594ac3102/cp-reloader/0.log" Oct 09 16:16:47 crc kubenswrapper[4762]: I1009 16:16:47.719497 4762 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_frr-k8s-72zw7_0831f7e0-0729-47bc-b78d-cd6594ac3102/cp-metrics/0.log" Oct 09 16:16:47 crc kubenswrapper[4762]: I1009 16:16:47.833346 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-72zw7_0831f7e0-0729-47bc-b78d-cd6594ac3102/cp-reloader/0.log" Oct 09 16:16:48 crc kubenswrapper[4762]: I1009 16:16:48.015849 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-72zw7_0831f7e0-0729-47bc-b78d-cd6594ac3102/cp-frr-files/0.log" Oct 09 16:16:48 crc kubenswrapper[4762]: I1009 16:16:48.016483 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-72zw7_0831f7e0-0729-47bc-b78d-cd6594ac3102/cp-metrics/0.log" Oct 09 16:16:48 crc kubenswrapper[4762]: I1009 16:16:48.048372 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-72zw7_0831f7e0-0729-47bc-b78d-cd6594ac3102/cp-reloader/0.log" Oct 09 16:16:48 crc kubenswrapper[4762]: I1009 16:16:48.048445 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-72zw7_0831f7e0-0729-47bc-b78d-cd6594ac3102/cp-metrics/0.log" Oct 09 16:16:48 crc kubenswrapper[4762]: I1009 16:16:48.212281 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-72zw7_0831f7e0-0729-47bc-b78d-cd6594ac3102/controller/0.log" Oct 09 16:16:48 crc kubenswrapper[4762]: I1009 16:16:48.215961 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-72zw7_0831f7e0-0729-47bc-b78d-cd6594ac3102/cp-frr-files/0.log" Oct 09 16:16:48 crc kubenswrapper[4762]: I1009 16:16:48.242825 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-72zw7_0831f7e0-0729-47bc-b78d-cd6594ac3102/cp-reloader/0.log" Oct 09 16:16:48 crc kubenswrapper[4762]: I1009 16:16:48.273310 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-72zw7_0831f7e0-0729-47bc-b78d-cd6594ac3102/cp-metrics/0.log" Oct 09 16:16:48 crc kubenswrapper[4762]: I1009 16:16:48.409484 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-72zw7_0831f7e0-0729-47bc-b78d-cd6594ac3102/frr-metrics/0.log" Oct 09 16:16:48 crc kubenswrapper[4762]: I1009 16:16:48.429008 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-72zw7_0831f7e0-0729-47bc-b78d-cd6594ac3102/kube-rbac-proxy/0.log" Oct 09 16:16:48 crc kubenswrapper[4762]: I1009 16:16:48.500258 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-72zw7_0831f7e0-0729-47bc-b78d-cd6594ac3102/kube-rbac-proxy-frr/0.log" Oct 09 16:16:48 crc kubenswrapper[4762]: I1009 16:16:48.641800 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-72zw7_0831f7e0-0729-47bc-b78d-cd6594ac3102/reloader/0.log" Oct 09 16:16:48 crc kubenswrapper[4762]: I1009 16:16:48.735675 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-64bf5d555-kt8dm_04ba99a6-67e1-4aba-a037-2c47a60a992e/frr-k8s-webhook-server/0.log" Oct 09 16:16:48 crc kubenswrapper[4762]: I1009 16:16:48.928900 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-664b8674b4-xnl9g_22ec464f-bf5d-4242-afc0-16f41e2c4fca/manager/0.log" Oct 09 16:16:49 crc kubenswrapper[4762]: I1009 16:16:49.124152 4762 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_metallb-operator-webhook-server-5ff9fd58c6-tgcw5_72251ab1-5239-4ca0-83c4-d5897e76631d/webhook-server/0.log" Oct 09 16:16:49 crc kubenswrapper[4762]: I1009 16:16:49.206502 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-p7rwn_c19ba7bc-b0a1-4e8b-98e5-56bd395aacc8/kube-rbac-proxy/0.log" Oct 09 16:16:50 crc kubenswrapper[4762]: I1009 16:16:50.517616 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-p7rwn_c19ba7bc-b0a1-4e8b-98e5-56bd395aacc8/speaker/0.log" Oct 09 16:16:51 crc kubenswrapper[4762]: I1009 16:16:51.997056 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-72zw7_0831f7e0-0729-47bc-b78d-cd6594ac3102/frr/0.log" Oct 09 16:17:02 crc kubenswrapper[4762]: I1009 16:17:02.351280 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb692jxgx_24e5b893-0118-47dc-a409-8809d8bc7eb2/util/0.log" Oct 09 16:17:02 crc kubenswrapper[4762]: I1009 16:17:02.458732 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb692jxgx_24e5b893-0118-47dc-a409-8809d8bc7eb2/util/0.log" Oct 09 16:17:02 crc kubenswrapper[4762]: I1009 16:17:02.534252 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb692jxgx_24e5b893-0118-47dc-a409-8809d8bc7eb2/pull/0.log" Oct 09 16:17:02 crc kubenswrapper[4762]: I1009 16:17:02.541684 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb692jxgx_24e5b893-0118-47dc-a409-8809d8bc7eb2/pull/0.log" Oct 09 16:17:02 crc kubenswrapper[4762]: I1009 16:17:02.736162 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb692jxgx_24e5b893-0118-47dc-a409-8809d8bc7eb2/util/0.log" Oct 09 16:17:02 crc kubenswrapper[4762]: I1009 16:17:02.742605 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb692jxgx_24e5b893-0118-47dc-a409-8809d8bc7eb2/extract/0.log" Oct 09 16:17:02 crc kubenswrapper[4762]: I1009 16:17:02.763020 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb692jxgx_24e5b893-0118-47dc-a409-8809d8bc7eb2/pull/0.log" Oct 09 16:17:02 crc kubenswrapper[4762]: I1009 16:17:02.898073 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d27jg4b_4b618f60-c8f7-4334-85cc-165b9c972adf/util/0.log" Oct 09 16:17:03 crc kubenswrapper[4762]: I1009 16:17:03.088393 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d27jg4b_4b618f60-c8f7-4334-85cc-165b9c972adf/pull/0.log" Oct 09 16:17:03 crc kubenswrapper[4762]: I1009 16:17:03.135599 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d27jg4b_4b618f60-c8f7-4334-85cc-165b9c972adf/pull/0.log" Oct 09 16:17:03 crc kubenswrapper[4762]: I1009 16:17:03.146313 4762 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d27jg4b_4b618f60-c8f7-4334-85cc-165b9c972adf/util/0.log" Oct 09 16:17:03 crc kubenswrapper[4762]: I1009 16:17:03.339389 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d27jg4b_4b618f60-c8f7-4334-85cc-165b9c972adf/util/0.log" Oct 09 16:17:03 crc kubenswrapper[4762]: I1009 16:17:03.343932 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d27jg4b_4b618f60-c8f7-4334-85cc-165b9c972adf/pull/0.log" Oct 09 16:17:03 crc kubenswrapper[4762]: I1009 16:17:03.361552 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d27jg4b_4b618f60-c8f7-4334-85cc-165b9c972adf/extract/0.log" Oct 09 16:17:03 crc kubenswrapper[4762]: I1009 16:17:03.512470 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dccnsj_611c58bf-4757-4af3-adfb-6e13935f07ae/util/0.log" Oct 09 16:17:03 crc kubenswrapper[4762]: I1009 16:17:03.681382 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dccnsj_611c58bf-4757-4af3-adfb-6e13935f07ae/pull/0.log" Oct 09 16:17:03 crc kubenswrapper[4762]: I1009 16:17:03.686063 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dccnsj_611c58bf-4757-4af3-adfb-6e13935f07ae/util/0.log" Oct 09 16:17:03 crc kubenswrapper[4762]: I1009 16:17:03.712149 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dccnsj_611c58bf-4757-4af3-adfb-6e13935f07ae/pull/0.log" Oct 09 16:17:03 crc kubenswrapper[4762]: I1009 16:17:03.859027 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dccnsj_611c58bf-4757-4af3-adfb-6e13935f07ae/util/0.log" Oct 09 16:17:03 crc kubenswrapper[4762]: I1009 16:17:03.887992 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dccnsj_611c58bf-4757-4af3-adfb-6e13935f07ae/pull/0.log" Oct 09 16:17:03 crc kubenswrapper[4762]: I1009 16:17:03.888679 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dccnsj_611c58bf-4757-4af3-adfb-6e13935f07ae/extract/0.log" Oct 09 16:17:04 crc kubenswrapper[4762]: I1009 16:17:04.071583 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-22x2q_80ca7107-df0d-4c7a-8bfb-9d4b0dc4f47a/extract-utilities/0.log" Oct 09 16:17:04 crc kubenswrapper[4762]: I1009 16:17:04.246251 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-22x2q_80ca7107-df0d-4c7a-8bfb-9d4b0dc4f47a/extract-content/0.log" Oct 09 16:17:04 crc kubenswrapper[4762]: I1009 16:17:04.259445 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-22x2q_80ca7107-df0d-4c7a-8bfb-9d4b0dc4f47a/extract-content/0.log" Oct 09 16:17:04 crc kubenswrapper[4762]: I1009 16:17:04.275112 
4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-22x2q_80ca7107-df0d-4c7a-8bfb-9d4b0dc4f47a/extract-utilities/0.log" Oct 09 16:17:04 crc kubenswrapper[4762]: I1009 16:17:04.416717 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-22x2q_80ca7107-df0d-4c7a-8bfb-9d4b0dc4f47a/extract-utilities/0.log" Oct 09 16:17:04 crc kubenswrapper[4762]: I1009 16:17:04.451789 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-22x2q_80ca7107-df0d-4c7a-8bfb-9d4b0dc4f47a/extract-content/0.log" Oct 09 16:17:04 crc kubenswrapper[4762]: I1009 16:17:04.704224 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-7ldxj_31e5de4c-951e-4d10-b219-b7ba26c5e991/extract-utilities/0.log" Oct 09 16:17:04 crc kubenswrapper[4762]: I1009 16:17:04.860864 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-7ldxj_31e5de4c-951e-4d10-b219-b7ba26c5e991/extract-utilities/0.log" Oct 09 16:17:04 crc kubenswrapper[4762]: I1009 16:17:04.880859 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-7ldxj_31e5de4c-951e-4d10-b219-b7ba26c5e991/extract-content/0.log" Oct 09 16:17:04 crc kubenswrapper[4762]: I1009 16:17:04.999077 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-7ldxj_31e5de4c-951e-4d10-b219-b7ba26c5e991/extract-content/0.log" Oct 09 16:17:05 crc kubenswrapper[4762]: I1009 16:17:05.186807 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-7ldxj_31e5de4c-951e-4d10-b219-b7ba26c5e991/extract-content/0.log" Oct 09 16:17:05 crc kubenswrapper[4762]: I1009 16:17:05.266858 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-7ldxj_31e5de4c-951e-4d10-b219-b7ba26c5e991/extract-utilities/0.log" Oct 09 16:17:05 crc kubenswrapper[4762]: I1009 16:17:05.543592 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cfrq6x_b49f0a42-424a-4b34-8adf-904ac4164e8f/util/0.log" Oct 09 16:17:05 crc kubenswrapper[4762]: I1009 16:17:05.776182 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-22x2q_80ca7107-df0d-4c7a-8bfb-9d4b0dc4f47a/registry-server/0.log" Oct 09 16:17:05 crc kubenswrapper[4762]: I1009 16:17:05.830239 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cfrq6x_b49f0a42-424a-4b34-8adf-904ac4164e8f/pull/0.log" Oct 09 16:17:05 crc kubenswrapper[4762]: I1009 16:17:05.867484 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cfrq6x_b49f0a42-424a-4b34-8adf-904ac4164e8f/util/0.log" Oct 09 16:17:05 crc kubenswrapper[4762]: I1009 16:17:05.991448 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cfrq6x_b49f0a42-424a-4b34-8adf-904ac4164e8f/pull/0.log" Oct 09 16:17:06 crc kubenswrapper[4762]: I1009 16:17:06.231003 4762 log.go:25] "Finished parsing log file" 
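Every path in these entries follows the same layout: /var/log/pods/<namespace>_<pod-name>_<pod-uid>/<container>/<restart-count>.log, with the trailing integer tracking how many times the container has restarted (all 0 here). A helper that rebuilds such a path from its parts; this is our own convenience function mirroring the observed shape, not a kubelet API:

package main

import (
	"fmt"
	"path/filepath"
)

// podLogPath reconstructs the on-disk log path for one container of one pod,
// matching the layout seen throughout this section.
func podLogPath(namespace, pod, uid, container string, restart int) string {
	return filepath.Join("/var/log/pods",
		fmt.Sprintf("%s_%s_%s", namespace, pod, uid),
		container,
		fmt.Sprintf("%d.log", restart))
}

func main() {
	fmt.Println(podLogPath("openshift-marketplace", "community-operators-7ldxj",
		"31e5de4c-951e-4d10-b219-b7ba26c5e991", "registry-server", 0))
}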
path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cfrq6x_b49f0a42-424a-4b34-8adf-904ac4164e8f/pull/0.log" Oct 09 16:17:06 crc kubenswrapper[4762]: I1009 16:17:06.260432 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cfrq6x_b49f0a42-424a-4b34-8adf-904ac4164e8f/util/0.log" Oct 09 16:17:06 crc kubenswrapper[4762]: I1009 16:17:06.261076 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cfrq6x_b49f0a42-424a-4b34-8adf-904ac4164e8f/extract/0.log" Oct 09 16:17:06 crc kubenswrapper[4762]: I1009 16:17:06.431937 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-bkd7x_7f6d1478-3db5-4052-ba0f-6ede9c7e06d7/marketplace-operator/0.log" Oct 09 16:17:06 crc kubenswrapper[4762]: I1009 16:17:06.544738 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-72l8h_5621da56-0dc3-4cb7-867f-192959d65ddf/extract-utilities/0.log" Oct 09 16:17:06 crc kubenswrapper[4762]: I1009 16:17:06.785676 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-72l8h_5621da56-0dc3-4cb7-867f-192959d65ddf/extract-content/0.log" Oct 09 16:17:06 crc kubenswrapper[4762]: I1009 16:17:06.816614 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-72l8h_5621da56-0dc3-4cb7-867f-192959d65ddf/extract-content/0.log" Oct 09 16:17:06 crc kubenswrapper[4762]: I1009 16:17:06.831883 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-72l8h_5621da56-0dc3-4cb7-867f-192959d65ddf/extract-utilities/0.log" Oct 09 16:17:06 crc kubenswrapper[4762]: I1009 16:17:06.836610 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-7ldxj_31e5de4c-951e-4d10-b219-b7ba26c5e991/registry-server/0.log" Oct 09 16:17:07 crc kubenswrapper[4762]: I1009 16:17:07.008221 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-72l8h_5621da56-0dc3-4cb7-867f-192959d65ddf/extract-content/0.log" Oct 09 16:17:07 crc kubenswrapper[4762]: I1009 16:17:07.076245 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-72l8h_5621da56-0dc3-4cb7-867f-192959d65ddf/extract-utilities/0.log" Oct 09 16:17:07 crc kubenswrapper[4762]: I1009 16:17:07.123901 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-2v5hs_c2063e06-c068-4526-b785-8ff55d071770/extract-utilities/0.log" Oct 09 16:17:07 crc kubenswrapper[4762]: I1009 16:17:07.311891 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-72l8h_5621da56-0dc3-4cb7-867f-192959d65ddf/registry-server/0.log" Oct 09 16:17:07 crc kubenswrapper[4762]: I1009 16:17:07.360937 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-2v5hs_c2063e06-c068-4526-b785-8ff55d071770/extract-utilities/0.log" Oct 09 16:17:07 crc kubenswrapper[4762]: I1009 16:17:07.415277 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-2v5hs_c2063e06-c068-4526-b785-8ff55d071770/extract-content/0.log" Oct 09 16:17:07 crc kubenswrapper[4762]: 
I1009 16:17:07.439724 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-2v5hs_c2063e06-c068-4526-b785-8ff55d071770/extract-content/0.log" Oct 09 16:17:07 crc kubenswrapper[4762]: I1009 16:17:07.551476 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-2v5hs_c2063e06-c068-4526-b785-8ff55d071770/extract-utilities/0.log" Oct 09 16:17:07 crc kubenswrapper[4762]: I1009 16:17:07.605890 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-2v5hs_c2063e06-c068-4526-b785-8ff55d071770/extract-content/0.log" Oct 09 16:17:08 crc kubenswrapper[4762]: I1009 16:17:08.672095 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-2v5hs_c2063e06-c068-4526-b785-8ff55d071770/registry-server/0.log" Oct 09 16:17:11 crc kubenswrapper[4762]: I1009 16:17:11.969262 4762 patch_prober.go:28] interesting pod/machine-config-daemon-5v6hv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 16:17:11 crc kubenswrapper[4762]: I1009 16:17:11.969833 4762 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 16:17:19 crc kubenswrapper[4762]: I1009 16:17:19.191307 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-7c8cf85677-5nq4g_b162ca85-dc74-4946-b748-31ff9dec85ba/prometheus-operator/0.log" Oct 09 16:17:19 crc kubenswrapper[4762]: I1009 16:17:19.314208 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-5d9645b546-5qnkj_70db938f-6f48-4cb0-b6dd-6d890a018e09/prometheus-operator-admission-webhook/0.log" Oct 09 16:17:19 crc kubenswrapper[4762]: I1009 16:17:19.445723 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-5d9645b546-kwvq7_5fe38dbb-430f-436e-a456-13a213f99aba/prometheus-operator-admission-webhook/0.log" Oct 09 16:17:19 crc kubenswrapper[4762]: I1009 16:17:19.562180 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_observability-operator-cc5f78dfc-9fms8_7b19a695-02c5-421b-bc49-695416a04bfe/operator/0.log" Oct 09 16:17:19 crc kubenswrapper[4762]: I1009 16:17:19.645532 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_perses-operator-54bc95c9fb-nvcqk_b118f530-9c1b-4e99-b633-6759db10e9cb/perses-operator/0.log" Oct 09 16:17:35 crc kubenswrapper[4762]: I1009 16:17:35.020306 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-kc8kp"] Oct 09 16:17:35 crc kubenswrapper[4762]: E1009 16:17:35.022443 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="00ebfa0c-ce5d-4713-b2f8-646ddb28417a" containerName="collect-profiles" Oct 09 16:17:35 crc kubenswrapper[4762]: I1009 16:17:35.022575 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="00ebfa0c-ce5d-4713-b2f8-646ddb28417a" containerName="collect-profiles" Oct 09 16:17:35 crc kubenswrapper[4762]: 
I1009 16:17:35.024041 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="00ebfa0c-ce5d-4713-b2f8-646ddb28417a" containerName="collect-profiles" Oct 09 16:17:35 crc kubenswrapper[4762]: I1009 16:17:35.026660 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-kc8kp" Oct 09 16:17:35 crc kubenswrapper[4762]: I1009 16:17:35.034919 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-kc8kp"] Oct 09 16:17:35 crc kubenswrapper[4762]: I1009 16:17:35.134819 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sdxwt\" (UniqueName: \"kubernetes.io/projected/ee7ae2a5-8acf-4136-9eab-25cc8fa2bdfc-kube-api-access-sdxwt\") pod \"community-operators-kc8kp\" (UID: \"ee7ae2a5-8acf-4136-9eab-25cc8fa2bdfc\") " pod="openshift-marketplace/community-operators-kc8kp" Oct 09 16:17:35 crc kubenswrapper[4762]: I1009 16:17:35.135025 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ee7ae2a5-8acf-4136-9eab-25cc8fa2bdfc-utilities\") pod \"community-operators-kc8kp\" (UID: \"ee7ae2a5-8acf-4136-9eab-25cc8fa2bdfc\") " pod="openshift-marketplace/community-operators-kc8kp" Oct 09 16:17:35 crc kubenswrapper[4762]: I1009 16:17:35.135074 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ee7ae2a5-8acf-4136-9eab-25cc8fa2bdfc-catalog-content\") pod \"community-operators-kc8kp\" (UID: \"ee7ae2a5-8acf-4136-9eab-25cc8fa2bdfc\") " pod="openshift-marketplace/community-operators-kc8kp" Oct 09 16:17:35 crc kubenswrapper[4762]: I1009 16:17:35.236849 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ee7ae2a5-8acf-4136-9eab-25cc8fa2bdfc-catalog-content\") pod \"community-operators-kc8kp\" (UID: \"ee7ae2a5-8acf-4136-9eab-25cc8fa2bdfc\") " pod="openshift-marketplace/community-operators-kc8kp" Oct 09 16:17:35 crc kubenswrapper[4762]: I1009 16:17:35.237187 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sdxwt\" (UniqueName: \"kubernetes.io/projected/ee7ae2a5-8acf-4136-9eab-25cc8fa2bdfc-kube-api-access-sdxwt\") pod \"community-operators-kc8kp\" (UID: \"ee7ae2a5-8acf-4136-9eab-25cc8fa2bdfc\") " pod="openshift-marketplace/community-operators-kc8kp" Oct 09 16:17:35 crc kubenswrapper[4762]: I1009 16:17:35.237268 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ee7ae2a5-8acf-4136-9eab-25cc8fa2bdfc-utilities\") pod \"community-operators-kc8kp\" (UID: \"ee7ae2a5-8acf-4136-9eab-25cc8fa2bdfc\") " pod="openshift-marketplace/community-operators-kc8kp" Oct 09 16:17:35 crc kubenswrapper[4762]: I1009 16:17:35.237603 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ee7ae2a5-8acf-4136-9eab-25cc8fa2bdfc-catalog-content\") pod \"community-operators-kc8kp\" (UID: \"ee7ae2a5-8acf-4136-9eab-25cc8fa2bdfc\") " pod="openshift-marketplace/community-operators-kc8kp" Oct 09 16:17:35 crc kubenswrapper[4762]: I1009 16:17:35.237668 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/ee7ae2a5-8acf-4136-9eab-25cc8fa2bdfc-utilities\") pod \"community-operators-kc8kp\" (UID: \"ee7ae2a5-8acf-4136-9eab-25cc8fa2bdfc\") " pod="openshift-marketplace/community-operators-kc8kp" Oct 09 16:17:35 crc kubenswrapper[4762]: I1009 16:17:35.309669 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sdxwt\" (UniqueName: \"kubernetes.io/projected/ee7ae2a5-8acf-4136-9eab-25cc8fa2bdfc-kube-api-access-sdxwt\") pod \"community-operators-kc8kp\" (UID: \"ee7ae2a5-8acf-4136-9eab-25cc8fa2bdfc\") " pod="openshift-marketplace/community-operators-kc8kp" Oct 09 16:17:35 crc kubenswrapper[4762]: I1009 16:17:35.353495 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-kc8kp" Oct 09 16:17:36 crc kubenswrapper[4762]: I1009 16:17:36.117415 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-kc8kp"] Oct 09 16:17:36 crc kubenswrapper[4762]: I1009 16:17:36.436190 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kc8kp" event={"ID":"ee7ae2a5-8acf-4136-9eab-25cc8fa2bdfc","Type":"ContainerStarted","Data":"a9faf4fc4a1faddbe33ddf648c5aefe7c91b77af49af34fc518eae181ec01805"} Oct 09 16:17:37 crc kubenswrapper[4762]: I1009 16:17:37.461595 4762 generic.go:334] "Generic (PLEG): container finished" podID="ee7ae2a5-8acf-4136-9eab-25cc8fa2bdfc" containerID="ae090d76d87b03074a000cdf14c949ddedcb235a5896375c3e3cf27d7c91967a" exitCode=0 Oct 09 16:17:37 crc kubenswrapper[4762]: I1009 16:17:37.461698 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kc8kp" event={"ID":"ee7ae2a5-8acf-4136-9eab-25cc8fa2bdfc","Type":"ContainerDied","Data":"ae090d76d87b03074a000cdf14c949ddedcb235a5896375c3e3cf27d7c91967a"} Oct 09 16:17:37 crc kubenswrapper[4762]: I1009 16:17:37.468227 4762 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 09 16:17:39 crc kubenswrapper[4762]: I1009 16:17:39.395815 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-f77n2"] Oct 09 16:17:39 crc kubenswrapper[4762]: I1009 16:17:39.398465 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-f77n2" Oct 09 16:17:39 crc kubenswrapper[4762]: I1009 16:17:39.430933 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-f77n2"] Oct 09 16:17:39 crc kubenswrapper[4762]: I1009 16:17:39.549596 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4a3b8aab-4f2b-4e36-84be-b6f00a9ec0e7-catalog-content\") pod \"redhat-operators-f77n2\" (UID: \"4a3b8aab-4f2b-4e36-84be-b6f00a9ec0e7\") " pod="openshift-marketplace/redhat-operators-f77n2" Oct 09 16:17:39 crc kubenswrapper[4762]: I1009 16:17:39.549930 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q75jh\" (UniqueName: \"kubernetes.io/projected/4a3b8aab-4f2b-4e36-84be-b6f00a9ec0e7-kube-api-access-q75jh\") pod \"redhat-operators-f77n2\" (UID: \"4a3b8aab-4f2b-4e36-84be-b6f00a9ec0e7\") " pod="openshift-marketplace/redhat-operators-f77n2" Oct 09 16:17:39 crc kubenswrapper[4762]: I1009 16:17:39.550076 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4a3b8aab-4f2b-4e36-84be-b6f00a9ec0e7-utilities\") pod \"redhat-operators-f77n2\" (UID: \"4a3b8aab-4f2b-4e36-84be-b6f00a9ec0e7\") " pod="openshift-marketplace/redhat-operators-f77n2" Oct 09 16:17:39 crc kubenswrapper[4762]: I1009 16:17:39.651996 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4a3b8aab-4f2b-4e36-84be-b6f00a9ec0e7-catalog-content\") pod \"redhat-operators-f77n2\" (UID: \"4a3b8aab-4f2b-4e36-84be-b6f00a9ec0e7\") " pod="openshift-marketplace/redhat-operators-f77n2" Oct 09 16:17:39 crc kubenswrapper[4762]: I1009 16:17:39.652199 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q75jh\" (UniqueName: \"kubernetes.io/projected/4a3b8aab-4f2b-4e36-84be-b6f00a9ec0e7-kube-api-access-q75jh\") pod \"redhat-operators-f77n2\" (UID: \"4a3b8aab-4f2b-4e36-84be-b6f00a9ec0e7\") " pod="openshift-marketplace/redhat-operators-f77n2" Oct 09 16:17:39 crc kubenswrapper[4762]: I1009 16:17:39.652271 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4a3b8aab-4f2b-4e36-84be-b6f00a9ec0e7-utilities\") pod \"redhat-operators-f77n2\" (UID: \"4a3b8aab-4f2b-4e36-84be-b6f00a9ec0e7\") " pod="openshift-marketplace/redhat-operators-f77n2" Oct 09 16:17:39 crc kubenswrapper[4762]: I1009 16:17:39.652481 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4a3b8aab-4f2b-4e36-84be-b6f00a9ec0e7-catalog-content\") pod \"redhat-operators-f77n2\" (UID: \"4a3b8aab-4f2b-4e36-84be-b6f00a9ec0e7\") " pod="openshift-marketplace/redhat-operators-f77n2" Oct 09 16:17:39 crc kubenswrapper[4762]: I1009 16:17:39.652613 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4a3b8aab-4f2b-4e36-84be-b6f00a9ec0e7-utilities\") pod \"redhat-operators-f77n2\" (UID: \"4a3b8aab-4f2b-4e36-84be-b6f00a9ec0e7\") " pod="openshift-marketplace/redhat-operators-f77n2" Oct 09 16:17:39 crc kubenswrapper[4762]: I1009 16:17:39.672391 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-q75jh\" (UniqueName: \"kubernetes.io/projected/4a3b8aab-4f2b-4e36-84be-b6f00a9ec0e7-kube-api-access-q75jh\") pod \"redhat-operators-f77n2\" (UID: \"4a3b8aab-4f2b-4e36-84be-b6f00a9ec0e7\") " pod="openshift-marketplace/redhat-operators-f77n2" Oct 09 16:17:39 crc kubenswrapper[4762]: I1009 16:17:39.753508 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-f77n2" Oct 09 16:17:40 crc kubenswrapper[4762]: I1009 16:17:40.342840 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-f77n2"] Oct 09 16:17:40 crc kubenswrapper[4762]: I1009 16:17:40.495253 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-f77n2" event={"ID":"4a3b8aab-4f2b-4e36-84be-b6f00a9ec0e7","Type":"ContainerStarted","Data":"593f26b508433bb18aadbdf13a4d1d9cd51b987f2c6e42c979854085423670bb"} Oct 09 16:17:40 crc kubenswrapper[4762]: I1009 16:17:40.498358 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kc8kp" event={"ID":"ee7ae2a5-8acf-4136-9eab-25cc8fa2bdfc","Type":"ContainerStarted","Data":"04f024e7870b342393eb1c91c793dbca9498bd9e0568872b874f024bf2cb48a5"} Oct 09 16:17:41 crc kubenswrapper[4762]: I1009 16:17:41.508823 4762 generic.go:334] "Generic (PLEG): container finished" podID="4a3b8aab-4f2b-4e36-84be-b6f00a9ec0e7" containerID="4be60770992609cb083f84d7125c9bacd4986ec8b9aaf7ab60323d47d95a2bf6" exitCode=0 Oct 09 16:17:41 crc kubenswrapper[4762]: I1009 16:17:41.508932 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-f77n2" event={"ID":"4a3b8aab-4f2b-4e36-84be-b6f00a9ec0e7","Type":"ContainerDied","Data":"4be60770992609cb083f84d7125c9bacd4986ec8b9aaf7ab60323d47d95a2bf6"} Oct 09 16:17:41 crc kubenswrapper[4762]: I1009 16:17:41.969696 4762 patch_prober.go:28] interesting pod/machine-config-daemon-5v6hv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 16:17:41 crc kubenswrapper[4762]: I1009 16:17:41.970374 4762 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 16:17:41 crc kubenswrapper[4762]: I1009 16:17:41.970523 4762 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" Oct 09 16:17:41 crc kubenswrapper[4762]: I1009 16:17:41.971413 4762 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"f1ab3a3c53d169fc11bc6dc98d2919a641c9497c49abe2c7639dbd0f38a2f9f3"} pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 09 16:17:41 crc kubenswrapper[4762]: I1009 16:17:41.971561 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" containerName="machine-config-daemon" 
containerID="cri-o://f1ab3a3c53d169fc11bc6dc98d2919a641c9497c49abe2c7639dbd0f38a2f9f3" gracePeriod=600 Oct 09 16:17:42 crc kubenswrapper[4762]: E1009 16:17:42.137433 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 16:17:42 crc kubenswrapper[4762]: I1009 16:17:42.523847 4762 generic.go:334] "Generic (PLEG): container finished" podID="366049a3-acf6-488c-9f93-4557528d6d14" containerID="f1ab3a3c53d169fc11bc6dc98d2919a641c9497c49abe2c7639dbd0f38a2f9f3" exitCode=0 Oct 09 16:17:42 crc kubenswrapper[4762]: I1009 16:17:42.523939 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" event={"ID":"366049a3-acf6-488c-9f93-4557528d6d14","Type":"ContainerDied","Data":"f1ab3a3c53d169fc11bc6dc98d2919a641c9497c49abe2c7639dbd0f38a2f9f3"} Oct 09 16:17:42 crc kubenswrapper[4762]: I1009 16:17:42.523976 4762 scope.go:117] "RemoveContainer" containerID="23a95be0b431c4a958f1dd252c8aa60f23fd4d4c19a6acb05c42292d19be3d9d" Oct 09 16:17:42 crc kubenswrapper[4762]: I1009 16:17:42.524585 4762 scope.go:117] "RemoveContainer" containerID="f1ab3a3c53d169fc11bc6dc98d2919a641c9497c49abe2c7639dbd0f38a2f9f3" Oct 09 16:17:42 crc kubenswrapper[4762]: E1009 16:17:42.525008 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 16:17:42 crc kubenswrapper[4762]: I1009 16:17:42.530360 4762 generic.go:334] "Generic (PLEG): container finished" podID="ee7ae2a5-8acf-4136-9eab-25cc8fa2bdfc" containerID="04f024e7870b342393eb1c91c793dbca9498bd9e0568872b874f024bf2cb48a5" exitCode=0 Oct 09 16:17:42 crc kubenswrapper[4762]: I1009 16:17:42.530414 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kc8kp" event={"ID":"ee7ae2a5-8acf-4136-9eab-25cc8fa2bdfc","Type":"ContainerDied","Data":"04f024e7870b342393eb1c91c793dbca9498bd9e0568872b874f024bf2cb48a5"} Oct 09 16:17:42 crc kubenswrapper[4762]: E1009 16:17:42.607015 4762 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podee7ae2a5_8acf_4136_9eab_25cc8fa2bdfc.slice/crio-conmon-04f024e7870b342393eb1c91c793dbca9498bd9e0568872b874f024bf2cb48a5.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podee7ae2a5_8acf_4136_9eab_25cc8fa2bdfc.slice/crio-04f024e7870b342393eb1c91c793dbca9498bd9e0568872b874f024bf2cb48a5.scope\": RecentStats: unable to find data in memory cache]" Oct 09 16:17:43 crc kubenswrapper[4762]: I1009 16:17:43.544876 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-f77n2" 
event={"ID":"4a3b8aab-4f2b-4e36-84be-b6f00a9ec0e7","Type":"ContainerStarted","Data":"cfa15ab83564704b405aff6c32aa8eb554fc7079010350d75ccad47c8c07f814"} Oct 09 16:17:44 crc kubenswrapper[4762]: I1009 16:17:44.558833 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kc8kp" event={"ID":"ee7ae2a5-8acf-4136-9eab-25cc8fa2bdfc","Type":"ContainerStarted","Data":"281a0b61ab590c1907b8c670fba97576792b55052e31b4d7416c5e6e3e090ca1"} Oct 09 16:17:44 crc kubenswrapper[4762]: I1009 16:17:44.580319 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-kc8kp" podStartSLOduration=4.514596698 podStartE2EDuration="10.580302175s" podCreationTimestamp="2025-10-09 16:17:34 +0000 UTC" firstStartedPulling="2025-10-09 16:17:37.467971418 +0000 UTC m=+10333.241762457" lastFinishedPulling="2025-10-09 16:17:43.533676895 +0000 UTC m=+10339.307467934" observedRunningTime="2025-10-09 16:17:44.574666917 +0000 UTC m=+10340.348457976" watchObservedRunningTime="2025-10-09 16:17:44.580302175 +0000 UTC m=+10340.354093214" Oct 09 16:17:45 crc kubenswrapper[4762]: I1009 16:17:45.354242 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-kc8kp" Oct 09 16:17:45 crc kubenswrapper[4762]: I1009 16:17:45.354317 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-kc8kp" Oct 09 16:17:46 crc kubenswrapper[4762]: I1009 16:17:46.420304 4762 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-kc8kp" podUID="ee7ae2a5-8acf-4136-9eab-25cc8fa2bdfc" containerName="registry-server" probeResult="failure" output=< Oct 09 16:17:46 crc kubenswrapper[4762]: timeout: failed to connect service ":50051" within 1s Oct 09 16:17:46 crc kubenswrapper[4762]: > Oct 09 16:17:52 crc kubenswrapper[4762]: E1009 16:17:52.901070 4762 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4a3b8aab_4f2b_4e36_84be_b6f00a9ec0e7.slice/crio-cfa15ab83564704b405aff6c32aa8eb554fc7079010350d75ccad47c8c07f814.scope\": RecentStats: unable to find data in memory cache]" Oct 09 16:17:53 crc kubenswrapper[4762]: I1009 16:17:53.653790 4762 generic.go:334] "Generic (PLEG): container finished" podID="4a3b8aab-4f2b-4e36-84be-b6f00a9ec0e7" containerID="cfa15ab83564704b405aff6c32aa8eb554fc7079010350d75ccad47c8c07f814" exitCode=0 Oct 09 16:17:53 crc kubenswrapper[4762]: I1009 16:17:53.653853 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-f77n2" event={"ID":"4a3b8aab-4f2b-4e36-84be-b6f00a9ec0e7","Type":"ContainerDied","Data":"cfa15ab83564704b405aff6c32aa8eb554fc7079010350d75ccad47c8c07f814"} Oct 09 16:17:54 crc kubenswrapper[4762]: I1009 16:17:54.665590 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-f77n2" event={"ID":"4a3b8aab-4f2b-4e36-84be-b6f00a9ec0e7","Type":"ContainerStarted","Data":"b0c33e9c4f4beb487df502ff3272ce308c931437955ab57966e37e3a12bc39a7"} Oct 09 16:17:54 crc kubenswrapper[4762]: I1009 16:17:54.697532 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-f77n2" podStartSLOduration=2.9695958019999997 podStartE2EDuration="15.697509424s" podCreationTimestamp="2025-10-09 16:17:39 +0000 UTC" 
firstStartedPulling="2025-10-09 16:17:41.512515388 +0000 UTC m=+10337.286306427" lastFinishedPulling="2025-10-09 16:17:54.24042901 +0000 UTC m=+10350.014220049" observedRunningTime="2025-10-09 16:17:54.684399042 +0000 UTC m=+10350.458190101" watchObservedRunningTime="2025-10-09 16:17:54.697509424 +0000 UTC m=+10350.471300463" Oct 09 16:17:55 crc kubenswrapper[4762]: I1009 16:17:55.411534 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-kc8kp" Oct 09 16:17:55 crc kubenswrapper[4762]: I1009 16:17:55.460439 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-kc8kp" Oct 09 16:17:55 crc kubenswrapper[4762]: I1009 16:17:55.905027 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-kc8kp"] Oct 09 16:17:55 crc kubenswrapper[4762]: I1009 16:17:55.965149 4762 scope.go:117] "RemoveContainer" containerID="f1ab3a3c53d169fc11bc6dc98d2919a641c9497c49abe2c7639dbd0f38a2f9f3" Oct 09 16:17:55 crc kubenswrapper[4762]: E1009 16:17:55.965449 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 16:17:56 crc kubenswrapper[4762]: I1009 16:17:56.683337 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-kc8kp" podUID="ee7ae2a5-8acf-4136-9eab-25cc8fa2bdfc" containerName="registry-server" containerID="cri-o://281a0b61ab590c1907b8c670fba97576792b55052e31b4d7416c5e6e3e090ca1" gracePeriod=2 Oct 09 16:17:57 crc kubenswrapper[4762]: I1009 16:17:57.236911 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-kc8kp" Oct 09 16:17:57 crc kubenswrapper[4762]: I1009 16:17:57.364307 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ee7ae2a5-8acf-4136-9eab-25cc8fa2bdfc-catalog-content\") pod \"ee7ae2a5-8acf-4136-9eab-25cc8fa2bdfc\" (UID: \"ee7ae2a5-8acf-4136-9eab-25cc8fa2bdfc\") " Oct 09 16:17:57 crc kubenswrapper[4762]: I1009 16:17:57.364698 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sdxwt\" (UniqueName: \"kubernetes.io/projected/ee7ae2a5-8acf-4136-9eab-25cc8fa2bdfc-kube-api-access-sdxwt\") pod \"ee7ae2a5-8acf-4136-9eab-25cc8fa2bdfc\" (UID: \"ee7ae2a5-8acf-4136-9eab-25cc8fa2bdfc\") " Oct 09 16:17:57 crc kubenswrapper[4762]: I1009 16:17:57.364842 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ee7ae2a5-8acf-4136-9eab-25cc8fa2bdfc-utilities\") pod \"ee7ae2a5-8acf-4136-9eab-25cc8fa2bdfc\" (UID: \"ee7ae2a5-8acf-4136-9eab-25cc8fa2bdfc\") " Oct 09 16:17:57 crc kubenswrapper[4762]: I1009 16:17:57.366837 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ee7ae2a5-8acf-4136-9eab-25cc8fa2bdfc-utilities" (OuterVolumeSpecName: "utilities") pod "ee7ae2a5-8acf-4136-9eab-25cc8fa2bdfc" (UID: "ee7ae2a5-8acf-4136-9eab-25cc8fa2bdfc"). 
InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 16:17:57 crc kubenswrapper[4762]: I1009 16:17:57.385952 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ee7ae2a5-8acf-4136-9eab-25cc8fa2bdfc-kube-api-access-sdxwt" (OuterVolumeSpecName: "kube-api-access-sdxwt") pod "ee7ae2a5-8acf-4136-9eab-25cc8fa2bdfc" (UID: "ee7ae2a5-8acf-4136-9eab-25cc8fa2bdfc"). InnerVolumeSpecName "kube-api-access-sdxwt". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 16:17:57 crc kubenswrapper[4762]: I1009 16:17:57.423611 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ee7ae2a5-8acf-4136-9eab-25cc8fa2bdfc-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ee7ae2a5-8acf-4136-9eab-25cc8fa2bdfc" (UID: "ee7ae2a5-8acf-4136-9eab-25cc8fa2bdfc"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 16:17:57 crc kubenswrapper[4762]: I1009 16:17:57.467963 4762 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ee7ae2a5-8acf-4136-9eab-25cc8fa2bdfc-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 16:17:57 crc kubenswrapper[4762]: I1009 16:17:57.467996 4762 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ee7ae2a5-8acf-4136-9eab-25cc8fa2bdfc-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 16:17:57 crc kubenswrapper[4762]: I1009 16:17:57.468008 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sdxwt\" (UniqueName: \"kubernetes.io/projected/ee7ae2a5-8acf-4136-9eab-25cc8fa2bdfc-kube-api-access-sdxwt\") on node \"crc\" DevicePath \"\"" Oct 09 16:17:57 crc kubenswrapper[4762]: I1009 16:17:57.698587 4762 generic.go:334] "Generic (PLEG): container finished" podID="ee7ae2a5-8acf-4136-9eab-25cc8fa2bdfc" containerID="281a0b61ab590c1907b8c670fba97576792b55052e31b4d7416c5e6e3e090ca1" exitCode=0 Oct 09 16:17:57 crc kubenswrapper[4762]: I1009 16:17:57.698710 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-kc8kp" Oct 09 16:17:57 crc kubenswrapper[4762]: I1009 16:17:57.698742 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kc8kp" event={"ID":"ee7ae2a5-8acf-4136-9eab-25cc8fa2bdfc","Type":"ContainerDied","Data":"281a0b61ab590c1907b8c670fba97576792b55052e31b4d7416c5e6e3e090ca1"} Oct 09 16:17:57 crc kubenswrapper[4762]: I1009 16:17:57.698776 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kc8kp" event={"ID":"ee7ae2a5-8acf-4136-9eab-25cc8fa2bdfc","Type":"ContainerDied","Data":"a9faf4fc4a1faddbe33ddf648c5aefe7c91b77af49af34fc518eae181ec01805"} Oct 09 16:17:57 crc kubenswrapper[4762]: I1009 16:17:57.698798 4762 scope.go:117] "RemoveContainer" containerID="281a0b61ab590c1907b8c670fba97576792b55052e31b4d7416c5e6e3e090ca1" Oct 09 16:17:57 crc kubenswrapper[4762]: I1009 16:17:57.730605 4762 scope.go:117] "RemoveContainer" containerID="04f024e7870b342393eb1c91c793dbca9498bd9e0568872b874f024bf2cb48a5" Oct 09 16:17:57 crc kubenswrapper[4762]: I1009 16:17:57.740194 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-kc8kp"] Oct 09 16:17:57 crc kubenswrapper[4762]: I1009 16:17:57.754962 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-kc8kp"] Oct 09 16:17:57 crc kubenswrapper[4762]: I1009 16:17:57.758112 4762 scope.go:117] "RemoveContainer" containerID="ae090d76d87b03074a000cdf14c949ddedcb235a5896375c3e3cf27d7c91967a" Oct 09 16:17:57 crc kubenswrapper[4762]: I1009 16:17:57.816413 4762 scope.go:117] "RemoveContainer" containerID="281a0b61ab590c1907b8c670fba97576792b55052e31b4d7416c5e6e3e090ca1" Oct 09 16:17:57 crc kubenswrapper[4762]: E1009 16:17:57.816870 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"281a0b61ab590c1907b8c670fba97576792b55052e31b4d7416c5e6e3e090ca1\": container with ID starting with 281a0b61ab590c1907b8c670fba97576792b55052e31b4d7416c5e6e3e090ca1 not found: ID does not exist" containerID="281a0b61ab590c1907b8c670fba97576792b55052e31b4d7416c5e6e3e090ca1" Oct 09 16:17:57 crc kubenswrapper[4762]: I1009 16:17:57.816922 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"281a0b61ab590c1907b8c670fba97576792b55052e31b4d7416c5e6e3e090ca1"} err="failed to get container status \"281a0b61ab590c1907b8c670fba97576792b55052e31b4d7416c5e6e3e090ca1\": rpc error: code = NotFound desc = could not find container \"281a0b61ab590c1907b8c670fba97576792b55052e31b4d7416c5e6e3e090ca1\": container with ID starting with 281a0b61ab590c1907b8c670fba97576792b55052e31b4d7416c5e6e3e090ca1 not found: ID does not exist" Oct 09 16:17:57 crc kubenswrapper[4762]: I1009 16:17:57.816957 4762 scope.go:117] "RemoveContainer" containerID="04f024e7870b342393eb1c91c793dbca9498bd9e0568872b874f024bf2cb48a5" Oct 09 16:17:57 crc kubenswrapper[4762]: E1009 16:17:57.821830 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"04f024e7870b342393eb1c91c793dbca9498bd9e0568872b874f024bf2cb48a5\": container with ID starting with 04f024e7870b342393eb1c91c793dbca9498bd9e0568872b874f024bf2cb48a5 not found: ID does not exist" containerID="04f024e7870b342393eb1c91c793dbca9498bd9e0568872b874f024bf2cb48a5" Oct 09 16:17:57 crc kubenswrapper[4762]: I1009 16:17:57.821875 4762 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"04f024e7870b342393eb1c91c793dbca9498bd9e0568872b874f024bf2cb48a5"} err="failed to get container status \"04f024e7870b342393eb1c91c793dbca9498bd9e0568872b874f024bf2cb48a5\": rpc error: code = NotFound desc = could not find container \"04f024e7870b342393eb1c91c793dbca9498bd9e0568872b874f024bf2cb48a5\": container with ID starting with 04f024e7870b342393eb1c91c793dbca9498bd9e0568872b874f024bf2cb48a5 not found: ID does not exist" Oct 09 16:17:57 crc kubenswrapper[4762]: I1009 16:17:57.821924 4762 scope.go:117] "RemoveContainer" containerID="ae090d76d87b03074a000cdf14c949ddedcb235a5896375c3e3cf27d7c91967a" Oct 09 16:17:57 crc kubenswrapper[4762]: E1009 16:17:57.822197 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ae090d76d87b03074a000cdf14c949ddedcb235a5896375c3e3cf27d7c91967a\": container with ID starting with ae090d76d87b03074a000cdf14c949ddedcb235a5896375c3e3cf27d7c91967a not found: ID does not exist" containerID="ae090d76d87b03074a000cdf14c949ddedcb235a5896375c3e3cf27d7c91967a" Oct 09 16:17:57 crc kubenswrapper[4762]: I1009 16:17:57.822228 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ae090d76d87b03074a000cdf14c949ddedcb235a5896375c3e3cf27d7c91967a"} err="failed to get container status \"ae090d76d87b03074a000cdf14c949ddedcb235a5896375c3e3cf27d7c91967a\": rpc error: code = NotFound desc = could not find container \"ae090d76d87b03074a000cdf14c949ddedcb235a5896375c3e3cf27d7c91967a\": container with ID starting with ae090d76d87b03074a000cdf14c949ddedcb235a5896375c3e3cf27d7c91967a not found: ID does not exist" Oct 09 16:17:58 crc kubenswrapper[4762]: I1009 16:17:58.978782 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ee7ae2a5-8acf-4136-9eab-25cc8fa2bdfc" path="/var/lib/kubelet/pods/ee7ae2a5-8acf-4136-9eab-25cc8fa2bdfc/volumes" Oct 09 16:17:59 crc kubenswrapper[4762]: I1009 16:17:59.754157 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-f77n2" Oct 09 16:17:59 crc kubenswrapper[4762]: I1009 16:17:59.754497 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-f77n2" Oct 09 16:17:59 crc kubenswrapper[4762]: I1009 16:17:59.813132 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-f77n2" Oct 09 16:18:00 crc kubenswrapper[4762]: I1009 16:18:00.789805 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-f77n2" Oct 09 16:18:01 crc kubenswrapper[4762]: I1009 16:18:01.315260 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-f77n2"] Oct 09 16:18:02 crc kubenswrapper[4762]: I1009 16:18:02.770265 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-f77n2" podUID="4a3b8aab-4f2b-4e36-84be-b6f00a9ec0e7" containerName="registry-server" containerID="cri-o://b0c33e9c4f4beb487df502ff3272ce308c931437955ab57966e37e3a12bc39a7" gracePeriod=2 Oct 09 16:18:03 crc kubenswrapper[4762]: I1009 16:18:03.302285 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-f77n2" Oct 09 16:18:03 crc kubenswrapper[4762]: I1009 16:18:03.420429 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4a3b8aab-4f2b-4e36-84be-b6f00a9ec0e7-catalog-content\") pod \"4a3b8aab-4f2b-4e36-84be-b6f00a9ec0e7\" (UID: \"4a3b8aab-4f2b-4e36-84be-b6f00a9ec0e7\") " Oct 09 16:18:03 crc kubenswrapper[4762]: I1009 16:18:03.420500 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4a3b8aab-4f2b-4e36-84be-b6f00a9ec0e7-utilities\") pod \"4a3b8aab-4f2b-4e36-84be-b6f00a9ec0e7\" (UID: \"4a3b8aab-4f2b-4e36-84be-b6f00a9ec0e7\") " Oct 09 16:18:03 crc kubenswrapper[4762]: I1009 16:18:03.420528 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q75jh\" (UniqueName: \"kubernetes.io/projected/4a3b8aab-4f2b-4e36-84be-b6f00a9ec0e7-kube-api-access-q75jh\") pod \"4a3b8aab-4f2b-4e36-84be-b6f00a9ec0e7\" (UID: \"4a3b8aab-4f2b-4e36-84be-b6f00a9ec0e7\") " Oct 09 16:18:03 crc kubenswrapper[4762]: I1009 16:18:03.422040 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4a3b8aab-4f2b-4e36-84be-b6f00a9ec0e7-utilities" (OuterVolumeSpecName: "utilities") pod "4a3b8aab-4f2b-4e36-84be-b6f00a9ec0e7" (UID: "4a3b8aab-4f2b-4e36-84be-b6f00a9ec0e7"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 16:18:03 crc kubenswrapper[4762]: I1009 16:18:03.431972 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4a3b8aab-4f2b-4e36-84be-b6f00a9ec0e7-kube-api-access-q75jh" (OuterVolumeSpecName: "kube-api-access-q75jh") pod "4a3b8aab-4f2b-4e36-84be-b6f00a9ec0e7" (UID: "4a3b8aab-4f2b-4e36-84be-b6f00a9ec0e7"). InnerVolumeSpecName "kube-api-access-q75jh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 16:18:03 crc kubenswrapper[4762]: I1009 16:18:03.522963 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4a3b8aab-4f2b-4e36-84be-b6f00a9ec0e7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4a3b8aab-4f2b-4e36-84be-b6f00a9ec0e7" (UID: "4a3b8aab-4f2b-4e36-84be-b6f00a9ec0e7"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 16:18:03 crc kubenswrapper[4762]: I1009 16:18:03.523525 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4a3b8aab-4f2b-4e36-84be-b6f00a9ec0e7-catalog-content\") pod \"4a3b8aab-4f2b-4e36-84be-b6f00a9ec0e7\" (UID: \"4a3b8aab-4f2b-4e36-84be-b6f00a9ec0e7\") " Oct 09 16:18:03 crc kubenswrapper[4762]: I1009 16:18:03.524225 4762 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4a3b8aab-4f2b-4e36-84be-b6f00a9ec0e7-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 16:18:03 crc kubenswrapper[4762]: I1009 16:18:03.524252 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q75jh\" (UniqueName: \"kubernetes.io/projected/4a3b8aab-4f2b-4e36-84be-b6f00a9ec0e7-kube-api-access-q75jh\") on node \"crc\" DevicePath \"\"" Oct 09 16:18:03 crc kubenswrapper[4762]: W1009 16:18:03.523728 4762 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/4a3b8aab-4f2b-4e36-84be-b6f00a9ec0e7/volumes/kubernetes.io~empty-dir/catalog-content Oct 09 16:18:03 crc kubenswrapper[4762]: I1009 16:18:03.524803 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4a3b8aab-4f2b-4e36-84be-b6f00a9ec0e7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4a3b8aab-4f2b-4e36-84be-b6f00a9ec0e7" (UID: "4a3b8aab-4f2b-4e36-84be-b6f00a9ec0e7"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 16:18:03 crc kubenswrapper[4762]: I1009 16:18:03.626597 4762 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4a3b8aab-4f2b-4e36-84be-b6f00a9ec0e7-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 16:18:03 crc kubenswrapper[4762]: I1009 16:18:03.781052 4762 generic.go:334] "Generic (PLEG): container finished" podID="4a3b8aab-4f2b-4e36-84be-b6f00a9ec0e7" containerID="b0c33e9c4f4beb487df502ff3272ce308c931437955ab57966e37e3a12bc39a7" exitCode=0 Oct 09 16:18:03 crc kubenswrapper[4762]: I1009 16:18:03.781106 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-f77n2" event={"ID":"4a3b8aab-4f2b-4e36-84be-b6f00a9ec0e7","Type":"ContainerDied","Data":"b0c33e9c4f4beb487df502ff3272ce308c931437955ab57966e37e3a12bc39a7"} Oct 09 16:18:03 crc kubenswrapper[4762]: I1009 16:18:03.781135 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-f77n2" event={"ID":"4a3b8aab-4f2b-4e36-84be-b6f00a9ec0e7","Type":"ContainerDied","Data":"593f26b508433bb18aadbdf13a4d1d9cd51b987f2c6e42c979854085423670bb"} Oct 09 16:18:03 crc kubenswrapper[4762]: I1009 16:18:03.781156 4762 scope.go:117] "RemoveContainer" containerID="b0c33e9c4f4beb487df502ff3272ce308c931437955ab57966e37e3a12bc39a7" Oct 09 16:18:03 crc kubenswrapper[4762]: I1009 16:18:03.781230 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-f77n2" Oct 09 16:18:03 crc kubenswrapper[4762]: I1009 16:18:03.809313 4762 scope.go:117] "RemoveContainer" containerID="cfa15ab83564704b405aff6c32aa8eb554fc7079010350d75ccad47c8c07f814" Oct 09 16:18:03 crc kubenswrapper[4762]: I1009 16:18:03.824002 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-f77n2"] Oct 09 16:18:03 crc kubenswrapper[4762]: I1009 16:18:03.829894 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-f77n2"] Oct 09 16:18:03 crc kubenswrapper[4762]: I1009 16:18:03.839675 4762 scope.go:117] "RemoveContainer" containerID="4be60770992609cb083f84d7125c9bacd4986ec8b9aaf7ab60323d47d95a2bf6" Oct 09 16:18:03 crc kubenswrapper[4762]: I1009 16:18:03.894219 4762 scope.go:117] "RemoveContainer" containerID="b0c33e9c4f4beb487df502ff3272ce308c931437955ab57966e37e3a12bc39a7" Oct 09 16:18:03 crc kubenswrapper[4762]: E1009 16:18:03.894683 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b0c33e9c4f4beb487df502ff3272ce308c931437955ab57966e37e3a12bc39a7\": container with ID starting with b0c33e9c4f4beb487df502ff3272ce308c931437955ab57966e37e3a12bc39a7 not found: ID does not exist" containerID="b0c33e9c4f4beb487df502ff3272ce308c931437955ab57966e37e3a12bc39a7" Oct 09 16:18:03 crc kubenswrapper[4762]: I1009 16:18:03.894716 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b0c33e9c4f4beb487df502ff3272ce308c931437955ab57966e37e3a12bc39a7"} err="failed to get container status \"b0c33e9c4f4beb487df502ff3272ce308c931437955ab57966e37e3a12bc39a7\": rpc error: code = NotFound desc = could not find container \"b0c33e9c4f4beb487df502ff3272ce308c931437955ab57966e37e3a12bc39a7\": container with ID starting with b0c33e9c4f4beb487df502ff3272ce308c931437955ab57966e37e3a12bc39a7 not found: ID does not exist" Oct 09 16:18:03 crc kubenswrapper[4762]: I1009 16:18:03.894738 4762 scope.go:117] "RemoveContainer" containerID="cfa15ab83564704b405aff6c32aa8eb554fc7079010350d75ccad47c8c07f814" Oct 09 16:18:03 crc kubenswrapper[4762]: E1009 16:18:03.895022 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cfa15ab83564704b405aff6c32aa8eb554fc7079010350d75ccad47c8c07f814\": container with ID starting with cfa15ab83564704b405aff6c32aa8eb554fc7079010350d75ccad47c8c07f814 not found: ID does not exist" containerID="cfa15ab83564704b405aff6c32aa8eb554fc7079010350d75ccad47c8c07f814" Oct 09 16:18:03 crc kubenswrapper[4762]: I1009 16:18:03.895048 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cfa15ab83564704b405aff6c32aa8eb554fc7079010350d75ccad47c8c07f814"} err="failed to get container status \"cfa15ab83564704b405aff6c32aa8eb554fc7079010350d75ccad47c8c07f814\": rpc error: code = NotFound desc = could not find container \"cfa15ab83564704b405aff6c32aa8eb554fc7079010350d75ccad47c8c07f814\": container with ID starting with cfa15ab83564704b405aff6c32aa8eb554fc7079010350d75ccad47c8c07f814 not found: ID does not exist" Oct 09 16:18:03 crc kubenswrapper[4762]: I1009 16:18:03.895061 4762 scope.go:117] "RemoveContainer" containerID="4be60770992609cb083f84d7125c9bacd4986ec8b9aaf7ab60323d47d95a2bf6" Oct 09 16:18:03 crc kubenswrapper[4762]: E1009 16:18:03.895901 4762 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"4be60770992609cb083f84d7125c9bacd4986ec8b9aaf7ab60323d47d95a2bf6\": container with ID starting with 4be60770992609cb083f84d7125c9bacd4986ec8b9aaf7ab60323d47d95a2bf6 not found: ID does not exist" containerID="4be60770992609cb083f84d7125c9bacd4986ec8b9aaf7ab60323d47d95a2bf6" Oct 09 16:18:03 crc kubenswrapper[4762]: I1009 16:18:03.895951 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4be60770992609cb083f84d7125c9bacd4986ec8b9aaf7ab60323d47d95a2bf6"} err="failed to get container status \"4be60770992609cb083f84d7125c9bacd4986ec8b9aaf7ab60323d47d95a2bf6\": rpc error: code = NotFound desc = could not find container \"4be60770992609cb083f84d7125c9bacd4986ec8b9aaf7ab60323d47d95a2bf6\": container with ID starting with 4be60770992609cb083f84d7125c9bacd4986ec8b9aaf7ab60323d47d95a2bf6 not found: ID does not exist" Oct 09 16:18:04 crc kubenswrapper[4762]: I1009 16:18:04.976302 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4a3b8aab-4f2b-4e36-84be-b6f00a9ec0e7" path="/var/lib/kubelet/pods/4a3b8aab-4f2b-4e36-84be-b6f00a9ec0e7/volumes" Oct 09 16:18:07 crc kubenswrapper[4762]: I1009 16:18:07.965474 4762 scope.go:117] "RemoveContainer" containerID="f1ab3a3c53d169fc11bc6dc98d2919a641c9497c49abe2c7639dbd0f38a2f9f3" Oct 09 16:18:07 crc kubenswrapper[4762]: E1009 16:18:07.966109 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 16:18:20 crc kubenswrapper[4762]: I1009 16:18:20.965706 4762 scope.go:117] "RemoveContainer" containerID="f1ab3a3c53d169fc11bc6dc98d2919a641c9497c49abe2c7639dbd0f38a2f9f3" Oct 09 16:18:20 crc kubenswrapper[4762]: E1009 16:18:20.966544 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 16:18:34 crc kubenswrapper[4762]: I1009 16:18:34.973770 4762 scope.go:117] "RemoveContainer" containerID="f1ab3a3c53d169fc11bc6dc98d2919a641c9497c49abe2c7639dbd0f38a2f9f3" Oct 09 16:18:34 crc kubenswrapper[4762]: E1009 16:18:34.974616 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 16:18:40 crc kubenswrapper[4762]: I1009 16:18:40.462239 4762 scope.go:117] "RemoveContainer" containerID="52622cc36b74d5f69545020d520e5f8084765d54d5ea732ec4b5ec29f10d232e" Oct 09 16:18:47 crc kubenswrapper[4762]: I1009 16:18:47.965926 4762 scope.go:117] "RemoveContainer" 
containerID="f1ab3a3c53d169fc11bc6dc98d2919a641c9497c49abe2c7639dbd0f38a2f9f3" Oct 09 16:18:47 crc kubenswrapper[4762]: E1009 16:18:47.966752 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 16:18:59 crc kubenswrapper[4762]: I1009 16:18:59.965505 4762 scope.go:117] "RemoveContainer" containerID="f1ab3a3c53d169fc11bc6dc98d2919a641c9497c49abe2c7639dbd0f38a2f9f3" Oct 09 16:18:59 crc kubenswrapper[4762]: E1009 16:18:59.966362 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 16:19:12 crc kubenswrapper[4762]: I1009 16:19:12.965371 4762 scope.go:117] "RemoveContainer" containerID="f1ab3a3c53d169fc11bc6dc98d2919a641c9497c49abe2c7639dbd0f38a2f9f3" Oct 09 16:19:12 crc kubenswrapper[4762]: E1009 16:19:12.966184 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 16:19:26 crc kubenswrapper[4762]: I1009 16:19:26.966046 4762 scope.go:117] "RemoveContainer" containerID="f1ab3a3c53d169fc11bc6dc98d2919a641c9497c49abe2c7639dbd0f38a2f9f3" Oct 09 16:19:26 crc kubenswrapper[4762]: E1009 16:19:26.966844 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 16:19:38 crc kubenswrapper[4762]: I1009 16:19:38.751336 4762 generic.go:334] "Generic (PLEG): container finished" podID="91ed9013-99d2-4cb1-a5d5-50782c7e1de6" containerID="9017956e70993ef91cc3d8c3525429b6552139170f028d01d2040e77fe5a9a24" exitCode=0 Oct 09 16:19:38 crc kubenswrapper[4762]: I1009 16:19:38.751424 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wqwdn/must-gather-rgxb2" event={"ID":"91ed9013-99d2-4cb1-a5d5-50782c7e1de6","Type":"ContainerDied","Data":"9017956e70993ef91cc3d8c3525429b6552139170f028d01d2040e77fe5a9a24"} Oct 09 16:19:38 crc kubenswrapper[4762]: I1009 16:19:38.753718 4762 scope.go:117] "RemoveContainer" containerID="9017956e70993ef91cc3d8c3525429b6552139170f028d01d2040e77fe5a9a24" Oct 09 16:19:38 crc kubenswrapper[4762]: I1009 16:19:38.824502 4762 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-must-gather-wqwdn_must-gather-rgxb2_91ed9013-99d2-4cb1-a5d5-50782c7e1de6/gather/0.log" Oct 09 16:19:41 crc kubenswrapper[4762]: I1009 16:19:41.966198 4762 scope.go:117] "RemoveContainer" containerID="f1ab3a3c53d169fc11bc6dc98d2919a641c9497c49abe2c7639dbd0f38a2f9f3" Oct 09 16:19:41 crc kubenswrapper[4762]: E1009 16:19:41.967332 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 16:19:47 crc kubenswrapper[4762]: I1009 16:19:47.365772 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-wqwdn/must-gather-rgxb2"] Oct 09 16:19:47 crc kubenswrapper[4762]: I1009 16:19:47.366702 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-wqwdn/must-gather-rgxb2" podUID="91ed9013-99d2-4cb1-a5d5-50782c7e1de6" containerName="copy" containerID="cri-o://f669307b065a9cd7a1f74e7173ce08d354e5bca6cd1cd356cf3273ee8fa4892f" gracePeriod=2 Oct 09 16:19:47 crc kubenswrapper[4762]: I1009 16:19:47.380906 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-wqwdn/must-gather-rgxb2"] Oct 09 16:19:47 crc kubenswrapper[4762]: I1009 16:19:47.853673 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-wqwdn_must-gather-rgxb2_91ed9013-99d2-4cb1-a5d5-50782c7e1de6/copy/0.log" Oct 09 16:19:47 crc kubenswrapper[4762]: I1009 16:19:47.853713 4762 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-wqwdn_must-gather-rgxb2_91ed9013-99d2-4cb1-a5d5-50782c7e1de6/copy/0.log" Oct 09 16:19:47 crc kubenswrapper[4762]: I1009 16:19:47.854340 4762 generic.go:334] "Generic (PLEG): container finished" podID="91ed9013-99d2-4cb1-a5d5-50782c7e1de6" containerID="f669307b065a9cd7a1f74e7173ce08d354e5bca6cd1cd356cf3273ee8fa4892f" exitCode=143 Oct 09 16:19:47 crc kubenswrapper[4762]: I1009 16:19:47.854390 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="89c4b400a8bce6355a6881d991991c4f1fc0779141072478932baf287c8ec774" Oct 09 16:19:47 crc kubenswrapper[4762]: I1009 16:19:47.854729 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-wqwdn/must-gather-rgxb2" Oct 09 16:19:47 crc kubenswrapper[4762]: I1009 16:19:47.917555 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/91ed9013-99d2-4cb1-a5d5-50782c7e1de6-must-gather-output\") pod \"91ed9013-99d2-4cb1-a5d5-50782c7e1de6\" (UID: \"91ed9013-99d2-4cb1-a5d5-50782c7e1de6\") " Oct 09 16:19:47 crc kubenswrapper[4762]: I1009 16:19:47.917774 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5xcgd\" (UniqueName: \"kubernetes.io/projected/91ed9013-99d2-4cb1-a5d5-50782c7e1de6-kube-api-access-5xcgd\") pod \"91ed9013-99d2-4cb1-a5d5-50782c7e1de6\" (UID: \"91ed9013-99d2-4cb1-a5d5-50782c7e1de6\") " Oct 09 16:19:47 crc kubenswrapper[4762]: I1009 16:19:47.922861 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/91ed9013-99d2-4cb1-a5d5-50782c7e1de6-kube-api-access-5xcgd" (OuterVolumeSpecName: "kube-api-access-5xcgd") pod "91ed9013-99d2-4cb1-a5d5-50782c7e1de6" (UID: "91ed9013-99d2-4cb1-a5d5-50782c7e1de6"). InnerVolumeSpecName "kube-api-access-5xcgd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 16:19:48 crc kubenswrapper[4762]: I1009 16:19:48.020961 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5xcgd\" (UniqueName: \"kubernetes.io/projected/91ed9013-99d2-4cb1-a5d5-50782c7e1de6-kube-api-access-5xcgd\") on node \"crc\" DevicePath \"\"" Oct 09 16:19:48 crc kubenswrapper[4762]: I1009 16:19:48.096961 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/91ed9013-99d2-4cb1-a5d5-50782c7e1de6-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "91ed9013-99d2-4cb1-a5d5-50782c7e1de6" (UID: "91ed9013-99d2-4cb1-a5d5-50782c7e1de6"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 16:19:48 crc kubenswrapper[4762]: I1009 16:19:48.123345 4762 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/91ed9013-99d2-4cb1-a5d5-50782c7e1de6-must-gather-output\") on node \"crc\" DevicePath \"\"" Oct 09 16:19:48 crc kubenswrapper[4762]: I1009 16:19:48.862820 4762 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-wqwdn/must-gather-rgxb2" Oct 09 16:19:48 crc kubenswrapper[4762]: I1009 16:19:48.981102 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="91ed9013-99d2-4cb1-a5d5-50782c7e1de6" path="/var/lib/kubelet/pods/91ed9013-99d2-4cb1-a5d5-50782c7e1de6/volumes" Oct 09 16:19:53 crc kubenswrapper[4762]: I1009 16:19:53.966330 4762 scope.go:117] "RemoveContainer" containerID="f1ab3a3c53d169fc11bc6dc98d2919a641c9497c49abe2c7639dbd0f38a2f9f3" Oct 09 16:19:53 crc kubenswrapper[4762]: E1009 16:19:53.967786 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 16:20:07 crc kubenswrapper[4762]: I1009 16:20:07.965321 4762 scope.go:117] "RemoveContainer" containerID="f1ab3a3c53d169fc11bc6dc98d2919a641c9497c49abe2c7639dbd0f38a2f9f3" Oct 09 16:20:07 crc kubenswrapper[4762]: E1009 16:20:07.966406 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 16:20:22 crc kubenswrapper[4762]: I1009 16:20:22.966410 4762 scope.go:117] "RemoveContainer" containerID="f1ab3a3c53d169fc11bc6dc98d2919a641c9497c49abe2c7639dbd0f38a2f9f3" Oct 09 16:20:22 crc kubenswrapper[4762]: E1009 16:20:22.968521 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14" Oct 09 16:20:24 crc kubenswrapper[4762]: I1009 16:20:24.597357 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-mm5tq"] Oct 09 16:20:24 crc kubenswrapper[4762]: E1009 16:20:24.598782 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a3b8aab-4f2b-4e36-84be-b6f00a9ec0e7" containerName="extract-content" Oct 09 16:20:24 crc kubenswrapper[4762]: I1009 16:20:24.598809 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a3b8aab-4f2b-4e36-84be-b6f00a9ec0e7" containerName="extract-content" Oct 09 16:20:24 crc kubenswrapper[4762]: E1009 16:20:24.598844 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a3b8aab-4f2b-4e36-84be-b6f00a9ec0e7" containerName="extract-utilities" Oct 09 16:20:24 crc kubenswrapper[4762]: I1009 16:20:24.598860 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a3b8aab-4f2b-4e36-84be-b6f00a9ec0e7" containerName="extract-utilities" Oct 09 16:20:24 crc kubenswrapper[4762]: E1009 16:20:24.598890 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee7ae2a5-8acf-4136-9eab-25cc8fa2bdfc" containerName="extract-content" Oct 09 16:20:24 crc 
kubenswrapper[4762]: I1009 16:20:24.598900 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee7ae2a5-8acf-4136-9eab-25cc8fa2bdfc" containerName="extract-content" Oct 09 16:20:24 crc kubenswrapper[4762]: E1009 16:20:24.598932 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="91ed9013-99d2-4cb1-a5d5-50782c7e1de6" containerName="copy" Oct 09 16:20:24 crc kubenswrapper[4762]: I1009 16:20:24.598942 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="91ed9013-99d2-4cb1-a5d5-50782c7e1de6" containerName="copy" Oct 09 16:20:24 crc kubenswrapper[4762]: E1009 16:20:24.598981 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee7ae2a5-8acf-4136-9eab-25cc8fa2bdfc" containerName="registry-server" Oct 09 16:20:24 crc kubenswrapper[4762]: I1009 16:20:24.598992 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee7ae2a5-8acf-4136-9eab-25cc8fa2bdfc" containerName="registry-server" Oct 09 16:20:24 crc kubenswrapper[4762]: E1009 16:20:24.599036 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee7ae2a5-8acf-4136-9eab-25cc8fa2bdfc" containerName="extract-utilities" Oct 09 16:20:24 crc kubenswrapper[4762]: I1009 16:20:24.599049 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee7ae2a5-8acf-4136-9eab-25cc8fa2bdfc" containerName="extract-utilities" Oct 09 16:20:24 crc kubenswrapper[4762]: E1009 16:20:24.599064 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="91ed9013-99d2-4cb1-a5d5-50782c7e1de6" containerName="gather" Oct 09 16:20:24 crc kubenswrapper[4762]: I1009 16:20:24.599075 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="91ed9013-99d2-4cb1-a5d5-50782c7e1de6" containerName="gather" Oct 09 16:20:24 crc kubenswrapper[4762]: E1009 16:20:24.599093 4762 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a3b8aab-4f2b-4e36-84be-b6f00a9ec0e7" containerName="registry-server" Oct 09 16:20:24 crc kubenswrapper[4762]: I1009 16:20:24.599106 4762 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a3b8aab-4f2b-4e36-84be-b6f00a9ec0e7" containerName="registry-server" Oct 09 16:20:24 crc kubenswrapper[4762]: I1009 16:20:24.599684 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="91ed9013-99d2-4cb1-a5d5-50782c7e1de6" containerName="copy" Oct 09 16:20:24 crc kubenswrapper[4762]: I1009 16:20:24.599715 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="91ed9013-99d2-4cb1-a5d5-50782c7e1de6" containerName="gather" Oct 09 16:20:24 crc kubenswrapper[4762]: I1009 16:20:24.599749 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="ee7ae2a5-8acf-4136-9eab-25cc8fa2bdfc" containerName="registry-server" Oct 09 16:20:24 crc kubenswrapper[4762]: I1009 16:20:24.599770 4762 memory_manager.go:354] "RemoveStaleState removing state" podUID="4a3b8aab-4f2b-4e36-84be-b6f00a9ec0e7" containerName="registry-server" Oct 09 16:20:24 crc kubenswrapper[4762]: I1009 16:20:24.603101 4762 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-mm5tq"
Oct 09 16:20:24 crc kubenswrapper[4762]: I1009 16:20:24.620481 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-mm5tq"]
Oct 09 16:20:24 crc kubenswrapper[4762]: I1009 16:20:24.722106 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f6q6f\" (UniqueName: \"kubernetes.io/projected/af9ab0fb-290a-4dfb-be28-b223387e3b84-kube-api-access-f6q6f\") pod \"certified-operators-mm5tq\" (UID: \"af9ab0fb-290a-4dfb-be28-b223387e3b84\") " pod="openshift-marketplace/certified-operators-mm5tq"
Oct 09 16:20:24 crc kubenswrapper[4762]: I1009 16:20:24.722215 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/af9ab0fb-290a-4dfb-be28-b223387e3b84-catalog-content\") pod \"certified-operators-mm5tq\" (UID: \"af9ab0fb-290a-4dfb-be28-b223387e3b84\") " pod="openshift-marketplace/certified-operators-mm5tq"
Oct 09 16:20:24 crc kubenswrapper[4762]: I1009 16:20:24.722331 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/af9ab0fb-290a-4dfb-be28-b223387e3b84-utilities\") pod \"certified-operators-mm5tq\" (UID: \"af9ab0fb-290a-4dfb-be28-b223387e3b84\") " pod="openshift-marketplace/certified-operators-mm5tq"
Oct 09 16:20:24 crc kubenswrapper[4762]: I1009 16:20:24.759869 4762 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-q5hdb"]
Oct 09 16:20:24 crc kubenswrapper[4762]: I1009 16:20:24.763668 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-q5hdb"
Oct 09 16:20:24 crc kubenswrapper[4762]: I1009 16:20:24.773617 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-q5hdb"]
Oct 09 16:20:24 crc kubenswrapper[4762]: I1009 16:20:24.824093 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f6q6f\" (UniqueName: \"kubernetes.io/projected/af9ab0fb-290a-4dfb-be28-b223387e3b84-kube-api-access-f6q6f\") pod \"certified-operators-mm5tq\" (UID: \"af9ab0fb-290a-4dfb-be28-b223387e3b84\") " pod="openshift-marketplace/certified-operators-mm5tq"
Oct 09 16:20:24 crc kubenswrapper[4762]: I1009 16:20:24.824156 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/af9ab0fb-290a-4dfb-be28-b223387e3b84-catalog-content\") pod \"certified-operators-mm5tq\" (UID: \"af9ab0fb-290a-4dfb-be28-b223387e3b84\") " pod="openshift-marketplace/certified-operators-mm5tq"
Oct 09 16:20:24 crc kubenswrapper[4762]: I1009 16:20:24.824195 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1c4029bf-95d2-47e0-8d0b-8bad76b0fffb-catalog-content\") pod \"redhat-marketplace-q5hdb\" (UID: \"1c4029bf-95d2-47e0-8d0b-8bad76b0fffb\") " pod="openshift-marketplace/redhat-marketplace-q5hdb"
Oct 09 16:20:24 crc kubenswrapper[4762]: I1009 16:20:24.824277 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/af9ab0fb-290a-4dfb-be28-b223387e3b84-utilities\") pod \"certified-operators-mm5tq\" (UID: \"af9ab0fb-290a-4dfb-be28-b223387e3b84\") " pod="openshift-marketplace/certified-operators-mm5tq"
Oct 09 16:20:24 crc kubenswrapper[4762]: I1009 16:20:24.824318 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-82jzm\" (UniqueName: \"kubernetes.io/projected/1c4029bf-95d2-47e0-8d0b-8bad76b0fffb-kube-api-access-82jzm\") pod \"redhat-marketplace-q5hdb\" (UID: \"1c4029bf-95d2-47e0-8d0b-8bad76b0fffb\") " pod="openshift-marketplace/redhat-marketplace-q5hdb"
Oct 09 16:20:24 crc kubenswrapper[4762]: I1009 16:20:24.824431 4762 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1c4029bf-95d2-47e0-8d0b-8bad76b0fffb-utilities\") pod \"redhat-marketplace-q5hdb\" (UID: \"1c4029bf-95d2-47e0-8d0b-8bad76b0fffb\") " pod="openshift-marketplace/redhat-marketplace-q5hdb"
Oct 09 16:20:24 crc kubenswrapper[4762]: I1009 16:20:24.825145 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/af9ab0fb-290a-4dfb-be28-b223387e3b84-utilities\") pod \"certified-operators-mm5tq\" (UID: \"af9ab0fb-290a-4dfb-be28-b223387e3b84\") " pod="openshift-marketplace/certified-operators-mm5tq"
Oct 09 16:20:24 crc kubenswrapper[4762]: I1009 16:20:24.825145 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/af9ab0fb-290a-4dfb-be28-b223387e3b84-catalog-content\") pod \"certified-operators-mm5tq\" (UID: \"af9ab0fb-290a-4dfb-be28-b223387e3b84\") " pod="openshift-marketplace/certified-operators-mm5tq"
Oct 09 16:20:24 crc kubenswrapper[4762]: I1009 16:20:24.847953 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f6q6f\" (UniqueName: \"kubernetes.io/projected/af9ab0fb-290a-4dfb-be28-b223387e3b84-kube-api-access-f6q6f\") pod \"certified-operators-mm5tq\" (UID: \"af9ab0fb-290a-4dfb-be28-b223387e3b84\") " pod="openshift-marketplace/certified-operators-mm5tq"
Oct 09 16:20:24 crc kubenswrapper[4762]: I1009 16:20:24.926294 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1c4029bf-95d2-47e0-8d0b-8bad76b0fffb-catalog-content\") pod \"redhat-marketplace-q5hdb\" (UID: \"1c4029bf-95d2-47e0-8d0b-8bad76b0fffb\") " pod="openshift-marketplace/redhat-marketplace-q5hdb"
Oct 09 16:20:24 crc kubenswrapper[4762]: I1009 16:20:24.926427 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-82jzm\" (UniqueName: \"kubernetes.io/projected/1c4029bf-95d2-47e0-8d0b-8bad76b0fffb-kube-api-access-82jzm\") pod \"redhat-marketplace-q5hdb\" (UID: \"1c4029bf-95d2-47e0-8d0b-8bad76b0fffb\") " pod="openshift-marketplace/redhat-marketplace-q5hdb"
Oct 09 16:20:24 crc kubenswrapper[4762]: I1009 16:20:24.926493 4762 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1c4029bf-95d2-47e0-8d0b-8bad76b0fffb-utilities\") pod \"redhat-marketplace-q5hdb\" (UID: \"1c4029bf-95d2-47e0-8d0b-8bad76b0fffb\") " pod="openshift-marketplace/redhat-marketplace-q5hdb"
Oct 09 16:20:24 crc kubenswrapper[4762]: I1009 16:20:24.926863 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1c4029bf-95d2-47e0-8d0b-8bad76b0fffb-catalog-content\") pod \"redhat-marketplace-q5hdb\" (UID: \"1c4029bf-95d2-47e0-8d0b-8bad76b0fffb\") " pod="openshift-marketplace/redhat-marketplace-q5hdb"
Oct 09 16:20:24 crc kubenswrapper[4762]: I1009 16:20:24.926954 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1c4029bf-95d2-47e0-8d0b-8bad76b0fffb-utilities\") pod \"redhat-marketplace-q5hdb\" (UID: \"1c4029bf-95d2-47e0-8d0b-8bad76b0fffb\") " pod="openshift-marketplace/redhat-marketplace-q5hdb"
Oct 09 16:20:24 crc kubenswrapper[4762]: I1009 16:20:24.933940 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mm5tq"
Oct 09 16:20:24 crc kubenswrapper[4762]: I1009 16:20:24.949459 4762 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-82jzm\" (UniqueName: \"kubernetes.io/projected/1c4029bf-95d2-47e0-8d0b-8bad76b0fffb-kube-api-access-82jzm\") pod \"redhat-marketplace-q5hdb\" (UID: \"1c4029bf-95d2-47e0-8d0b-8bad76b0fffb\") " pod="openshift-marketplace/redhat-marketplace-q5hdb"
Oct 09 16:20:25 crc kubenswrapper[4762]: I1009 16:20:25.089170 4762 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-q5hdb"
Oct 09 16:20:25 crc kubenswrapper[4762]: I1009 16:20:25.660584 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-mm5tq"]
Oct 09 16:20:25 crc kubenswrapper[4762]: W1009 16:20:25.668933 4762 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podaf9ab0fb_290a_4dfb_be28_b223387e3b84.slice/crio-31d007c75fe7fe8e4db91e9412135cccd90c4d0837ef348d17de421d42bcd62d WatchSource:0}: Error finding container 31d007c75fe7fe8e4db91e9412135cccd90c4d0837ef348d17de421d42bcd62d: Status 404 returned error can't find the container with id 31d007c75fe7fe8e4db91e9412135cccd90c4d0837ef348d17de421d42bcd62d
Oct 09 16:20:25 crc kubenswrapper[4762]: I1009 16:20:25.672844 4762 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-q5hdb"]
Oct 09 16:20:26 crc kubenswrapper[4762]: I1009 16:20:26.278401 4762 generic.go:334] "Generic (PLEG): container finished" podID="1c4029bf-95d2-47e0-8d0b-8bad76b0fffb" containerID="1ef1f0860f20c4bb11d580c38e7015f716ee90c526226d8f60a3dede68f0d5e7" exitCode=0
Oct 09 16:20:26 crc kubenswrapper[4762]: I1009 16:20:26.278449 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-q5hdb" event={"ID":"1c4029bf-95d2-47e0-8d0b-8bad76b0fffb","Type":"ContainerDied","Data":"1ef1f0860f20c4bb11d580c38e7015f716ee90c526226d8f60a3dede68f0d5e7"}
Oct 09 16:20:26 crc kubenswrapper[4762]: I1009 16:20:26.278696 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-q5hdb" event={"ID":"1c4029bf-95d2-47e0-8d0b-8bad76b0fffb","Type":"ContainerStarted","Data":"ebfdc48a582f911581afa6b9d18d757684f0771f3475cc78e44c01e6d97ff151"}
Oct 09 16:20:26 crc kubenswrapper[4762]: I1009 16:20:26.280790 4762 generic.go:334] "Generic (PLEG): container finished" podID="af9ab0fb-290a-4dfb-be28-b223387e3b84" containerID="2ceeb2a81c3df839642d94b057eb839b1a8c10dfcebf458ee9b2faf3fad2265c" exitCode=0
Oct 09 16:20:26 crc kubenswrapper[4762]: I1009 16:20:26.280842 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mm5tq" event={"ID":"af9ab0fb-290a-4dfb-be28-b223387e3b84","Type":"ContainerDied","Data":"2ceeb2a81c3df839642d94b057eb839b1a8c10dfcebf458ee9b2faf3fad2265c"}
Oct 09 16:20:26 crc kubenswrapper[4762]: I1009 16:20:26.280895 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mm5tq" event={"ID":"af9ab0fb-290a-4dfb-be28-b223387e3b84","Type":"ContainerStarted","Data":"31d007c75fe7fe8e4db91e9412135cccd90c4d0837ef348d17de421d42bcd62d"}
Oct 09 16:20:28 crc kubenswrapper[4762]: I1009 16:20:28.303462 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-q5hdb" event={"ID":"1c4029bf-95d2-47e0-8d0b-8bad76b0fffb","Type":"ContainerStarted","Data":"379da6ddd94925cbdb84e5a687276768ccf2caf6b2b20070eef23b8b82cb88c4"}
Oct 09 16:20:28 crc kubenswrapper[4762]: I1009 16:20:28.305739 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mm5tq" event={"ID":"af9ab0fb-290a-4dfb-be28-b223387e3b84","Type":"ContainerStarted","Data":"09c114b131d8d8ee876b6db87ed67e8df82de7467452e1ed4f06e8669f1a8608"}
Oct 09 16:20:29 crc kubenswrapper[4762]: I1009 16:20:29.316529 4762 generic.go:334] "Generic (PLEG): container finished" podID="1c4029bf-95d2-47e0-8d0b-8bad76b0fffb" containerID="379da6ddd94925cbdb84e5a687276768ccf2caf6b2b20070eef23b8b82cb88c4" exitCode=0
Oct 09 16:20:29 crc kubenswrapper[4762]: I1009 16:20:29.316620 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-q5hdb" event={"ID":"1c4029bf-95d2-47e0-8d0b-8bad76b0fffb","Type":"ContainerDied","Data":"379da6ddd94925cbdb84e5a687276768ccf2caf6b2b20070eef23b8b82cb88c4"}
Oct 09 16:20:29 crc kubenswrapper[4762]: I1009 16:20:29.318880 4762 generic.go:334] "Generic (PLEG): container finished" podID="af9ab0fb-290a-4dfb-be28-b223387e3b84" containerID="09c114b131d8d8ee876b6db87ed67e8df82de7467452e1ed4f06e8669f1a8608" exitCode=0
Oct 09 16:20:29 crc kubenswrapper[4762]: I1009 16:20:29.318913 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mm5tq" event={"ID":"af9ab0fb-290a-4dfb-be28-b223387e3b84","Type":"ContainerDied","Data":"09c114b131d8d8ee876b6db87ed67e8df82de7467452e1ed4f06e8669f1a8608"}
Oct 09 16:20:31 crc kubenswrapper[4762]: I1009 16:20:31.343867 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-q5hdb" event={"ID":"1c4029bf-95d2-47e0-8d0b-8bad76b0fffb","Type":"ContainerStarted","Data":"4a61cf8acbbb4deb2af7ef4d76479a727d42b16947ee6868f818cce44a4ea46d"}
Oct 09 16:20:31 crc kubenswrapper[4762]: I1009 16:20:31.348047 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mm5tq" event={"ID":"af9ab0fb-290a-4dfb-be28-b223387e3b84","Type":"ContainerStarted","Data":"5aaa9f957cfb6e495bb4f28264e47e7e3ae3f5bb056752cfc45448b3ff08c844"}
Oct 09 16:20:31 crc kubenswrapper[4762]: I1009 16:20:31.368513 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-q5hdb" podStartSLOduration=3.513196091 podStartE2EDuration="7.368487265s" podCreationTimestamp="2025-10-09 16:20:24 +0000 UTC" firstStartedPulling="2025-10-09 16:20:26.280288042 +0000 UTC m=+10502.054079091" lastFinishedPulling="2025-10-09 16:20:30.135579216 +0000 UTC m=+10505.909370265" observedRunningTime="2025-10-09 16:20:31.365394633 +0000 UTC m=+10507.139185672" watchObservedRunningTime="2025-10-09 16:20:31.368487265 +0000 UTC m=+10507.142278304"
Oct 09 16:20:31 crc kubenswrapper[4762]: I1009 16:20:31.392654 4762 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-mm5tq" podStartSLOduration=3.528418667 podStartE2EDuration="7.392618415s" podCreationTimestamp="2025-10-09 16:20:24 +0000 UTC" firstStartedPulling="2025-10-09 16:20:26.282577871 +0000 UTC m=+10502.056368910" lastFinishedPulling="2025-10-09 16:20:30.146777619 +0000 UTC m=+10505.920568658" observedRunningTime="2025-10-09 16:20:31.38937018 +0000 UTC m=+10507.163161219" watchObservedRunningTime="2025-10-09 16:20:31.392618415 +0000 UTC m=+10507.166409454"
Oct 09 16:20:34 crc kubenswrapper[4762]: I1009 16:20:34.934889 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-mm5tq"
Oct 09 16:20:34 crc kubenswrapper[4762]: I1009 16:20:34.935538 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-mm5tq"
Oct 09 16:20:34 crc kubenswrapper[4762]: I1009 16:20:34.992438 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-mm5tq"
Oct 09 16:20:35 crc kubenswrapper[4762]: I1009 16:20:35.090662 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-q5hdb"
Oct 09 16:20:35 crc kubenswrapper[4762]: I1009 16:20:35.090731 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-q5hdb"
Oct 09 16:20:35 crc kubenswrapper[4762]: I1009 16:20:35.154715 4762 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-q5hdb"
Oct 09 16:20:35 crc kubenswrapper[4762]: I1009 16:20:35.436344 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-mm5tq"
Oct 09 16:20:35 crc kubenswrapper[4762]: I1009 16:20:35.447433 4762 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-q5hdb"
Oct 09 16:20:35 crc kubenswrapper[4762]: I1009 16:20:35.968445 4762 scope.go:117] "RemoveContainer" containerID="f1ab3a3c53d169fc11bc6dc98d2919a641c9497c49abe2c7639dbd0f38a2f9f3"
Oct 09 16:20:35 crc kubenswrapper[4762]: E1009 16:20:35.969165 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14"
Oct 09 16:20:37 crc kubenswrapper[4762]: I1009 16:20:37.151969 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-q5hdb"]
Oct 09 16:20:37 crc kubenswrapper[4762]: I1009 16:20:37.406621 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-q5hdb" podUID="1c4029bf-95d2-47e0-8d0b-8bad76b0fffb" containerName="registry-server" containerID="cri-o://4a61cf8acbbb4deb2af7ef4d76479a727d42b16947ee6868f818cce44a4ea46d" gracePeriod=2
Oct 09 16:20:37 crc kubenswrapper[4762]: I1009 16:20:37.761880 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-mm5tq"]
Oct 09 16:20:37 crc kubenswrapper[4762]: I1009 16:20:37.762305 4762 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-mm5tq" podUID="af9ab0fb-290a-4dfb-be28-b223387e3b84" containerName="registry-server" containerID="cri-o://5aaa9f957cfb6e495bb4f28264e47e7e3ae3f5bb056752cfc45448b3ff08c844" gracePeriod=2
Oct 09 16:20:37 crc kubenswrapper[4762]: I1009 16:20:37.983875 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-q5hdb"
Oct 09 16:20:38 crc kubenswrapper[4762]: I1009 16:20:38.139574 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-82jzm\" (UniqueName: \"kubernetes.io/projected/1c4029bf-95d2-47e0-8d0b-8bad76b0fffb-kube-api-access-82jzm\") pod \"1c4029bf-95d2-47e0-8d0b-8bad76b0fffb\" (UID: \"1c4029bf-95d2-47e0-8d0b-8bad76b0fffb\") "
Oct 09 16:20:38 crc kubenswrapper[4762]: I1009 16:20:38.139682 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1c4029bf-95d2-47e0-8d0b-8bad76b0fffb-catalog-content\") pod \"1c4029bf-95d2-47e0-8d0b-8bad76b0fffb\" (UID: \"1c4029bf-95d2-47e0-8d0b-8bad76b0fffb\") "
Oct 09 16:20:38 crc kubenswrapper[4762]: I1009 16:20:38.139982 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1c4029bf-95d2-47e0-8d0b-8bad76b0fffb-utilities\") pod \"1c4029bf-95d2-47e0-8d0b-8bad76b0fffb\" (UID: \"1c4029bf-95d2-47e0-8d0b-8bad76b0fffb\") "
Oct 09 16:20:38 crc kubenswrapper[4762]: I1009 16:20:38.142281 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1c4029bf-95d2-47e0-8d0b-8bad76b0fffb-utilities" (OuterVolumeSpecName: "utilities") pod "1c4029bf-95d2-47e0-8d0b-8bad76b0fffb" (UID: "1c4029bf-95d2-47e0-8d0b-8bad76b0fffb"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 09 16:20:38 crc kubenswrapper[4762]: I1009 16:20:38.148678 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1c4029bf-95d2-47e0-8d0b-8bad76b0fffb-kube-api-access-82jzm" (OuterVolumeSpecName: "kube-api-access-82jzm") pod "1c4029bf-95d2-47e0-8d0b-8bad76b0fffb" (UID: "1c4029bf-95d2-47e0-8d0b-8bad76b0fffb"). InnerVolumeSpecName "kube-api-access-82jzm". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 09 16:20:38 crc kubenswrapper[4762]: I1009 16:20:38.155555 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1c4029bf-95d2-47e0-8d0b-8bad76b0fffb-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1c4029bf-95d2-47e0-8d0b-8bad76b0fffb" (UID: "1c4029bf-95d2-47e0-8d0b-8bad76b0fffb"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 09 16:20:38 crc kubenswrapper[4762]: I1009 16:20:38.244705 4762 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1c4029bf-95d2-47e0-8d0b-8bad76b0fffb-utilities\") on node \"crc\" DevicePath \"\""
Oct 09 16:20:38 crc kubenswrapper[4762]: I1009 16:20:38.244782 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-82jzm\" (UniqueName: \"kubernetes.io/projected/1c4029bf-95d2-47e0-8d0b-8bad76b0fffb-kube-api-access-82jzm\") on node \"crc\" DevicePath \"\""
Oct 09 16:20:38 crc kubenswrapper[4762]: I1009 16:20:38.244816 4762 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1c4029bf-95d2-47e0-8d0b-8bad76b0fffb-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 09 16:20:38 crc kubenswrapper[4762]: I1009 16:20:38.420864 4762 generic.go:334] "Generic (PLEG): container finished" podID="1c4029bf-95d2-47e0-8d0b-8bad76b0fffb" containerID="4a61cf8acbbb4deb2af7ef4d76479a727d42b16947ee6868f818cce44a4ea46d" exitCode=0
Oct 09 16:20:38 crc kubenswrapper[4762]: I1009 16:20:38.420972 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-q5hdb" event={"ID":"1c4029bf-95d2-47e0-8d0b-8bad76b0fffb","Type":"ContainerDied","Data":"4a61cf8acbbb4deb2af7ef4d76479a727d42b16947ee6868f818cce44a4ea46d"}
Oct 09 16:20:38 crc kubenswrapper[4762]: I1009 16:20:38.421062 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-q5hdb" event={"ID":"1c4029bf-95d2-47e0-8d0b-8bad76b0fffb","Type":"ContainerDied","Data":"ebfdc48a582f911581afa6b9d18d757684f0771f3475cc78e44c01e6d97ff151"}
Oct 09 16:20:38 crc kubenswrapper[4762]: I1009 16:20:38.421092 4762 scope.go:117] "RemoveContainer" containerID="4a61cf8acbbb4deb2af7ef4d76479a727d42b16947ee6868f818cce44a4ea46d"
Oct 09 16:20:38 crc kubenswrapper[4762]: I1009 16:20:38.421251 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-q5hdb"
Oct 09 16:20:38 crc kubenswrapper[4762]: I1009 16:20:38.425049 4762 generic.go:334] "Generic (PLEG): container finished" podID="af9ab0fb-290a-4dfb-be28-b223387e3b84" containerID="5aaa9f957cfb6e495bb4f28264e47e7e3ae3f5bb056752cfc45448b3ff08c844" exitCode=0
Oct 09 16:20:38 crc kubenswrapper[4762]: I1009 16:20:38.425117 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mm5tq" event={"ID":"af9ab0fb-290a-4dfb-be28-b223387e3b84","Type":"ContainerDied","Data":"5aaa9f957cfb6e495bb4f28264e47e7e3ae3f5bb056752cfc45448b3ff08c844"}
Oct 09 16:20:38 crc kubenswrapper[4762]: I1009 16:20:38.425160 4762 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mm5tq" event={"ID":"af9ab0fb-290a-4dfb-be28-b223387e3b84","Type":"ContainerDied","Data":"31d007c75fe7fe8e4db91e9412135cccd90c4d0837ef348d17de421d42bcd62d"}
Oct 09 16:20:38 crc kubenswrapper[4762]: I1009 16:20:38.425174 4762 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="31d007c75fe7fe8e4db91e9412135cccd90c4d0837ef348d17de421d42bcd62d"
Oct 09 16:20:38 crc kubenswrapper[4762]: I1009 16:20:38.443155 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mm5tq"
Oct 09 16:20:38 crc kubenswrapper[4762]: I1009 16:20:38.452438 4762 scope.go:117] "RemoveContainer" containerID="379da6ddd94925cbdb84e5a687276768ccf2caf6b2b20070eef23b8b82cb88c4"
Oct 09 16:20:38 crc kubenswrapper[4762]: I1009 16:20:38.532366 4762 scope.go:117] "RemoveContainer" containerID="1ef1f0860f20c4bb11d580c38e7015f716ee90c526226d8f60a3dede68f0d5e7"
Oct 09 16:20:38 crc kubenswrapper[4762]: I1009 16:20:38.550376 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/af9ab0fb-290a-4dfb-be28-b223387e3b84-catalog-content\") pod \"af9ab0fb-290a-4dfb-be28-b223387e3b84\" (UID: \"af9ab0fb-290a-4dfb-be28-b223387e3b84\") "
Oct 09 16:20:38 crc kubenswrapper[4762]: I1009 16:20:38.550430 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/af9ab0fb-290a-4dfb-be28-b223387e3b84-utilities\") pod \"af9ab0fb-290a-4dfb-be28-b223387e3b84\" (UID: \"af9ab0fb-290a-4dfb-be28-b223387e3b84\") "
Oct 09 16:20:38 crc kubenswrapper[4762]: I1009 16:20:38.550683 4762 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f6q6f\" (UniqueName: \"kubernetes.io/projected/af9ab0fb-290a-4dfb-be28-b223387e3b84-kube-api-access-f6q6f\") pod \"af9ab0fb-290a-4dfb-be28-b223387e3b84\" (UID: \"af9ab0fb-290a-4dfb-be28-b223387e3b84\") "
Oct 09 16:20:38 crc kubenswrapper[4762]: I1009 16:20:38.557444 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/af9ab0fb-290a-4dfb-be28-b223387e3b84-utilities" (OuterVolumeSpecName: "utilities") pod "af9ab0fb-290a-4dfb-be28-b223387e3b84" (UID: "af9ab0fb-290a-4dfb-be28-b223387e3b84"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 09 16:20:38 crc kubenswrapper[4762]: I1009 16:20:38.575577 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-q5hdb"]
Oct 09 16:20:38 crc kubenswrapper[4762]: I1009 16:20:38.596973 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/af9ab0fb-290a-4dfb-be28-b223387e3b84-kube-api-access-f6q6f" (OuterVolumeSpecName: "kube-api-access-f6q6f") pod "af9ab0fb-290a-4dfb-be28-b223387e3b84" (UID: "af9ab0fb-290a-4dfb-be28-b223387e3b84"). InnerVolumeSpecName "kube-api-access-f6q6f". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 09 16:20:38 crc kubenswrapper[4762]: I1009 16:20:38.641915 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-q5hdb"]
Oct 09 16:20:38 crc kubenswrapper[4762]: I1009 16:20:38.647844 4762 scope.go:117] "RemoveContainer" containerID="4a61cf8acbbb4deb2af7ef4d76479a727d42b16947ee6868f818cce44a4ea46d"
Oct 09 16:20:38 crc kubenswrapper[4762]: E1009 16:20:38.657820 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4a61cf8acbbb4deb2af7ef4d76479a727d42b16947ee6868f818cce44a4ea46d\": container with ID starting with 4a61cf8acbbb4deb2af7ef4d76479a727d42b16947ee6868f818cce44a4ea46d not found: ID does not exist" containerID="4a61cf8acbbb4deb2af7ef4d76479a727d42b16947ee6868f818cce44a4ea46d"
Oct 09 16:20:38 crc kubenswrapper[4762]: I1009 16:20:38.657877 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4a61cf8acbbb4deb2af7ef4d76479a727d42b16947ee6868f818cce44a4ea46d"} err="failed to get container status \"4a61cf8acbbb4deb2af7ef4d76479a727d42b16947ee6868f818cce44a4ea46d\": rpc error: code = NotFound desc = could not find container \"4a61cf8acbbb4deb2af7ef4d76479a727d42b16947ee6868f818cce44a4ea46d\": container with ID starting with 4a61cf8acbbb4deb2af7ef4d76479a727d42b16947ee6868f818cce44a4ea46d not found: ID does not exist"
Oct 09 16:20:38 crc kubenswrapper[4762]: I1009 16:20:38.657909 4762 scope.go:117] "RemoveContainer" containerID="379da6ddd94925cbdb84e5a687276768ccf2caf6b2b20070eef23b8b82cb88c4"
Oct 09 16:20:38 crc kubenswrapper[4762]: I1009 16:20:38.658503 4762 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f6q6f\" (UniqueName: \"kubernetes.io/projected/af9ab0fb-290a-4dfb-be28-b223387e3b84-kube-api-access-f6q6f\") on node \"crc\" DevicePath \"\""
Oct 09 16:20:38 crc kubenswrapper[4762]: I1009 16:20:38.658528 4762 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/af9ab0fb-290a-4dfb-be28-b223387e3b84-utilities\") on node \"crc\" DevicePath \"\""
Oct 09 16:20:38 crc kubenswrapper[4762]: E1009 16:20:38.661555 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"379da6ddd94925cbdb84e5a687276768ccf2caf6b2b20070eef23b8b82cb88c4\": container with ID starting with 379da6ddd94925cbdb84e5a687276768ccf2caf6b2b20070eef23b8b82cb88c4 not found: ID does not exist" containerID="379da6ddd94925cbdb84e5a687276768ccf2caf6b2b20070eef23b8b82cb88c4"
Oct 09 16:20:38 crc kubenswrapper[4762]: I1009 16:20:38.661607 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"379da6ddd94925cbdb84e5a687276768ccf2caf6b2b20070eef23b8b82cb88c4"} err="failed to get container status \"379da6ddd94925cbdb84e5a687276768ccf2caf6b2b20070eef23b8b82cb88c4\": rpc error: code = NotFound desc = could not find container \"379da6ddd94925cbdb84e5a687276768ccf2caf6b2b20070eef23b8b82cb88c4\": container with ID starting with 379da6ddd94925cbdb84e5a687276768ccf2caf6b2b20070eef23b8b82cb88c4 not found: ID does not exist"
Oct 09 16:20:38 crc kubenswrapper[4762]: I1009 16:20:38.661658 4762 scope.go:117] "RemoveContainer" containerID="1ef1f0860f20c4bb11d580c38e7015f716ee90c526226d8f60a3dede68f0d5e7"
Oct 09 16:20:38 crc kubenswrapper[4762]: E1009 16:20:38.667315 4762 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1ef1f0860f20c4bb11d580c38e7015f716ee90c526226d8f60a3dede68f0d5e7\": container with ID starting with 1ef1f0860f20c4bb11d580c38e7015f716ee90c526226d8f60a3dede68f0d5e7 not found: ID does not exist" containerID="1ef1f0860f20c4bb11d580c38e7015f716ee90c526226d8f60a3dede68f0d5e7"
Oct 09 16:20:38 crc kubenswrapper[4762]: I1009 16:20:38.667721 4762 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1ef1f0860f20c4bb11d580c38e7015f716ee90c526226d8f60a3dede68f0d5e7"} err="failed to get container status \"1ef1f0860f20c4bb11d580c38e7015f716ee90c526226d8f60a3dede68f0d5e7\": rpc error: code = NotFound desc = could not find container \"1ef1f0860f20c4bb11d580c38e7015f716ee90c526226d8f60a3dede68f0d5e7\": container with ID starting with 1ef1f0860f20c4bb11d580c38e7015f716ee90c526226d8f60a3dede68f0d5e7 not found: ID does not exist"
Oct 09 16:20:38 crc kubenswrapper[4762]: I1009 16:20:38.681743 4762 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/af9ab0fb-290a-4dfb-be28-b223387e3b84-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "af9ab0fb-290a-4dfb-be28-b223387e3b84" (UID: "af9ab0fb-290a-4dfb-be28-b223387e3b84"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 09 16:20:38 crc kubenswrapper[4762]: I1009 16:20:38.764696 4762 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/af9ab0fb-290a-4dfb-be28-b223387e3b84-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 09 16:20:38 crc kubenswrapper[4762]: I1009 16:20:38.977151 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1c4029bf-95d2-47e0-8d0b-8bad76b0fffb" path="/var/lib/kubelet/pods/1c4029bf-95d2-47e0-8d0b-8bad76b0fffb/volumes"
Oct 09 16:20:39 crc kubenswrapper[4762]: I1009 16:20:39.435593 4762 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mm5tq"
Oct 09 16:20:39 crc kubenswrapper[4762]: I1009 16:20:39.466542 4762 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-mm5tq"]
Oct 09 16:20:39 crc kubenswrapper[4762]: I1009 16:20:39.480275 4762 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-mm5tq"]
Oct 09 16:20:40 crc kubenswrapper[4762]: I1009 16:20:40.596126 4762 scope.go:117] "RemoveContainer" containerID="f669307b065a9cd7a1f74e7173ce08d354e5bca6cd1cd356cf3273ee8fa4892f"
Oct 09 16:20:40 crc kubenswrapper[4762]: I1009 16:20:40.690778 4762 scope.go:117] "RemoveContainer" containerID="9017956e70993ef91cc3d8c3525429b6552139170f028d01d2040e77fe5a9a24"
Oct 09 16:20:40 crc kubenswrapper[4762]: I1009 16:20:40.982917 4762 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="af9ab0fb-290a-4dfb-be28-b223387e3b84" path="/var/lib/kubelet/pods/af9ab0fb-290a-4dfb-be28-b223387e3b84/volumes"
Oct 09 16:20:47 crc kubenswrapper[4762]: I1009 16:20:47.965742 4762 scope.go:117] "RemoveContainer" containerID="f1ab3a3c53d169fc11bc6dc98d2919a641c9497c49abe2c7639dbd0f38a2f9f3"
Oct 09 16:20:47 crc kubenswrapper[4762]: E1009 16:20:47.966594 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14"
Oct 09 16:20:59 crc kubenswrapper[4762]: I1009 16:20:59.965935 4762 scope.go:117] "RemoveContainer" containerID="f1ab3a3c53d169fc11bc6dc98d2919a641c9497c49abe2c7639dbd0f38a2f9f3"
Oct 09 16:20:59 crc kubenswrapper[4762]: E1009 16:20:59.966600 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14"
Oct 09 16:21:11 crc kubenswrapper[4762]: I1009 16:21:11.966940 4762 scope.go:117] "RemoveContainer" containerID="f1ab3a3c53d169fc11bc6dc98d2919a641c9497c49abe2c7639dbd0f38a2f9f3"
Oct 09 16:21:11 crc kubenswrapper[4762]: E1009 16:21:11.967790 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14"
Oct 09 16:21:25 crc kubenswrapper[4762]: I1009 16:21:25.965994 4762 scope.go:117] "RemoveContainer" containerID="f1ab3a3c53d169fc11bc6dc98d2919a641c9497c49abe2c7639dbd0f38a2f9f3"
Oct 09 16:21:25 crc kubenswrapper[4762]: E1009 16:21:25.966914 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14"
Oct 09 16:21:38 crc kubenswrapper[4762]: I1009 16:21:38.965347 4762 scope.go:117] "RemoveContainer" containerID="f1ab3a3c53d169fc11bc6dc98d2919a641c9497c49abe2c7639dbd0f38a2f9f3"
Oct 09 16:21:38 crc kubenswrapper[4762]: E1009 16:21:38.967425 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14"
Oct 09 16:21:53 crc kubenswrapper[4762]: I1009 16:21:53.965505 4762 scope.go:117] "RemoveContainer" containerID="f1ab3a3c53d169fc11bc6dc98d2919a641c9497c49abe2c7639dbd0f38a2f9f3"
Oct 09 16:21:53 crc kubenswrapper[4762]: E1009 16:21:53.966448 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14"
Oct 09 16:22:04 crc kubenswrapper[4762]: I1009 16:22:04.972169 4762 scope.go:117] "RemoveContainer" containerID="f1ab3a3c53d169fc11bc6dc98d2919a641c9497c49abe2c7639dbd0f38a2f9f3"
Oct 09 16:22:04 crc kubenswrapper[4762]: E1009 16:22:04.973152 4762 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5v6hv_openshift-machine-config-operator(366049a3-acf6-488c-9f93-4557528d6d14)\"" pod="openshift-machine-config-operator/machine-config-daemon-5v6hv" podUID="366049a3-acf6-488c-9f93-4557528d6d14"